From 1ddf6af2a5d69f7aa6ee857bcbfffb519c55ad3c Mon Sep 17 00:00:00 2001 From: Sergey Yedrikov Date: Mon, 12 Jun 2023 14:26:02 -0400 Subject: [PATCH 01/24] feat: Add syslog codec Original commit from syedriko --- lib/codecs/src/encoding/format/mod.rs | 2 + lib/codecs/src/encoding/format/syslog.rs | 446 +++++++++++++++++++++ lib/codecs/src/encoding/mod.rs | 56 ++- src/codecs/encoding/config.rs | 3 +- src/codecs/encoding/encoder.rs | 3 +- src/components/validation/resources/mod.rs | 1 + src/sinks/websocket/sink.rs | 4 +- 7 files changed, 499 insertions(+), 16 deletions(-) create mode 100644 lib/codecs/src/encoding/format/syslog.rs diff --git a/lib/codecs/src/encoding/format/mod.rs b/lib/codecs/src/encoding/format/mod.rs index e61f7cae0bb96..2cb326d25229c 100644 --- a/lib/codecs/src/encoding/format/mod.rs +++ b/lib/codecs/src/encoding/format/mod.rs @@ -14,6 +14,7 @@ mod native_json; mod protobuf; mod raw_message; mod text; +mod syslog; use std::fmt::Debug; @@ -28,6 +29,7 @@ pub use native_json::{NativeJsonSerializer, NativeJsonSerializerConfig}; pub use protobuf::{ProtobufSerializer, ProtobufSerializerConfig, ProtobufSerializerOptions}; pub use raw_message::{RawMessageSerializer, RawMessageSerializerConfig}; pub use text::{TextSerializer, TextSerializerConfig}; +pub use syslog::{SyslogSerializer, SyslogSerializerConfig}; use vector_core::event::Event; /// Serialize a structured event into a byte frame. diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs new file mode 100644 index 0000000000000..017e054dcc850 --- /dev/null +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -0,0 +1,446 @@ +use bytes::{BufMut, BytesMut}; +use tokio_util::codec::Encoder; +use vector_core::{config::DataType, event::{Event, LogEvent}, schema}; +use chrono::{DateTime, SecondsFormat, Local}; +use vrl::value::Value; +use serde::{de, Deserialize}; +use vector_config::configurable_component; + +const NIL_VALUE: &'static str = "-"; + +/// Syslog RFC +#[configurable_component] +#[derive(Clone, Debug, Eq, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum SyslogRFC { + /// RFC 3164 + Rfc3164, + + /// RFC 5424 + Rfc5424 +} + +impl Default for SyslogRFC { + fn default() -> Self { + SyslogRFC::Rfc5424 + } +} + +/// Syslog facility +#[configurable_component] +#[derive(Clone, Debug, Eq, PartialEq)] +enum Facility { + /// Syslog facility ordinal number + Fixed(u8), + + /// Syslog facility name + Field(String) +} + +impl Default for Facility { + fn default() -> Self { + Facility::Fixed(1) + } +} + +/// Syslog severity +#[configurable_component] +#[derive(Clone, Debug, Eq, PartialEq)] +enum Severity { + /// Syslog severity ordinal number + Fixed(u8), + + /// Syslog severity name + Field(String) +} + +impl Default for Severity { + fn default() -> Self { + Severity::Fixed(6) + } +} + +/// Config used to build a `SyslogSerializer`. 
+#[configurable_component]
+#[derive(Debug, Clone, Default)]
+pub struct SyslogSerializerConfig {
+    /// RFC
+    #[serde(default)]
+    rfc: SyslogRFC,
+
+    /// Facility
+    #[serde(default)]
+    #[serde(deserialize_with = "deserialize_facility")]
+    facility: Facility,
+
+    /// Severity
+    #[serde(default)]
+    #[serde(deserialize_with = "deserialize_severity")]
+    severity: Severity,
+
+    /// Tag
+    #[serde(default)]
+    tag: String,
+
+    /// Trim prefix
+    trim_prefix: Option<String>,
+
+    /// Payload key
+    #[serde(default)]
+    payload_key: String,
+
+    /// Add log source
+    #[serde(default)]
+    add_log_source: bool,
+
+    /// App Name, RFC 5424 only
+    #[serde(default = "default_app_name")]
+    app_name: String,
+
+    /// Proc ID, RFC 5424 only
+    #[serde(default = "default_nil_value")]
+    proc_id: String,
+
+    /// Msg ID, RFC 5424 only
+    #[serde(default = "default_nil_value")]
+    msg_id: String
+}
+
+impl SyslogSerializerConfig {
+    /// Build the `SyslogSerializer` from this configuration.
+    pub fn build(&self) -> SyslogSerializer {
+        SyslogSerializer::new(&self)
+    }
+
+    /// The data type of events that are accepted by `SyslogSerializer`.
+    pub fn input_type(&self) -> DataType {
+        DataType::Log
+    }
+
+    /// The schema required by the serializer.
+    pub fn schema_requirement(&self) -> schema::Requirement {
+        schema::Requirement::empty()
+    }
+}
+
+/// Serializer that converts an `Event` to bytes using the Syslog format.
+#[derive(Debug, Clone)]
+pub struct SyslogSerializer {
+    config: SyslogSerializerConfig
+}
+
+impl SyslogSerializer {
+    /// Creates a new `SyslogSerializer`.
+    pub fn new(conf: &SyslogSerializerConfig) -> Self {
+        Self { config: conf.clone() }
+    }
+}
+
+impl Encoder<Event> for SyslogSerializer {
+    type Error = vector_common::Error;
+
+    fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> {
+        match event {
+            Event::Log(log) => {
+                let mut buf = String::from("<");
+                let pri = get_num_facility(&self.config.facility, &log) * 8 + get_num_severity(&self.config.severity, &log);
+                buf.push_str(&pri.to_string());
+                buf.push_str(">");
+                match self.config.rfc {
+                    SyslogRFC::Rfc3164 => {
+                        let timestamp = get_timestamp(&log);
+                        let formatted_timestamp = format!(" {} ", timestamp.format("%b %e %H:%M:%S"));
+                        buf.push_str(&formatted_timestamp);
+                        buf.push_str(&get_field("hostname", &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&self.config.tag, &log));
+                        buf.push_str(": ");
+                        if self.config.add_log_source {
+                            add_log_source(&log, &mut buf);
+                        }
+                    },
+                    SyslogRFC::Rfc5424 => {
+                        buf.push_str("1 ");
+                        let timestamp = get_timestamp(&log);
+                        buf.push_str(&timestamp.to_rfc3339_opts(SecondsFormat::Millis, true));
+                        buf.push(' ');
+                        buf.push_str(&get_field("hostname", &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&&self.config.app_name, &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&&self.config.proc_id, &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&&self.config.msg_id, &log));
+                        buf.push_str(" - "); // no structured data
+                        if self.config.add_log_source {
+                            add_log_source(&log, &mut buf);
+                        }
+                    }
+                }
+                let mut payload = if self.config.payload_key.is_empty() {
+                    serde_json::to_vec(&log).unwrap_or_default()
+                } else {
+                    get_field(&&self.config.payload_key, &log).as_bytes().to_vec()
+                };
+                let mut vec = buf.as_bytes().to_vec();
+                vec.append(&mut payload);
+                buffer.put_slice(&vec);
+            },
+            _ => {}
+        }
+        Ok(())
+    }
+}
+
+fn deserialize_facility<'de, D>(d: D) -> Result<Facility, D::Error>
+    where D: de::Deserializer<'de>
+{
+    let value: String = String::deserialize(d)?;
+    let num_value = value.parse::<u8>();
+    match num_value {
+        Ok(num) => {
+            if num > 23 {
+                return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"facility number too large"));
+            } else {
+                return Ok(Facility::Fixed(num));
+            }
+        }
+        Err(_) => {
+            if let Some(field_name) = value.strip_prefix("$.message.") {
+                return Ok(Facility::Field(field_name.to_string()));
+            } else {
+                let num = match value.to_uppercase().as_str() {
+                    "KERN" => 0,
+                    "USER" => 1,
+                    "MAIL" => 2,
+                    "DAEMON" => 3,
+                    "AUTH" => 4,
+                    "SYSLOG" => 5,
+                    "LPR" => 6,
+                    "NEWS" => 7,
+                    "UUCP" => 8,
+                    "CRON" => 9,
+                    "AUTHPRIV" => 10,
+                    "FTP" => 11,
+                    "NTP" => 12,
+                    "SECURITY" => 13,
+                    "CONSOLE" => 14,
+                    "SOLARIS-CRON" => 15,
+                    "LOCAL0" => 16,
+                    "LOCAL1" => 17,
+                    "LOCAL2" => 18,
+                    "LOCAL3" => 19,
+                    "LOCAL4" => 20,
+                    "LOCAL5" => 21,
+                    "LOCAL6" => 22,
+                    "LOCAL7" => 23,
+                    _ => 24,
+                };
+                if num > 23 {
+                    return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown facility"));
+                } else {
+                    return Ok(Facility::Fixed(num))
+                }
+            }
+        }
+    }
+}
+
+fn deserialize_severity<'de, D>(d: D) -> Result<Severity, D::Error>
+    where D: de::Deserializer<'de>
+{
+    let value: String = String::deserialize(d)?;
+    let num_value = value.parse::<u8>();
+    match num_value {
+        Ok(num) => {
+            if num > 7 {
+                return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"severity number too large"))
+            } else {
+                return Ok(Severity::Fixed(num))
+            }
+        }
+        Err(_) => {
+            if let Some(field_name) = value.strip_prefix("$.message.") {
+                return Ok(Severity::Field(field_name.to_string()));
+            } else {
+                let num = match value.to_uppercase().as_str() {
+                    "EMERGENCY" => 0,
+                    "ALERT" => 1,
+                    "CRITICAL" => 2,
+                    "ERROR" => 3,
+                    "WARNING" => 4,
+                    "NOTICE" => 5,
+                    "INFORMATIONAL" => 6,
+                    "DEBUG" => 7,
+                    _ => 8,
+                };
+                if num > 7 {
+                    return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown severity"))
+                } else {
+                    return Ok(Severity::Fixed(num))
+                }
+            }
+        }
+    }
+}
+
+fn default_app_name() -> String {
+    String::from("vector")
+}
+
+fn default_nil_value() -> String {
+    String::from(NIL_VALUE)
+}
+
+fn add_log_source(log: &LogEvent, buf: &mut String) {
+    buf.push_str("namespace_name=");
+    buf.push_str(&String::from_utf8(
+        log
+            .get("kubernetes.namespace_name")
+            .map(|h| h.coerce_to_bytes())
+            .unwrap_or_default().to_vec()
+    ).unwrap());
+    buf.push_str(", container_name=");
+    buf.push_str(&String::from_utf8(
+        log
+            .get("kubernetes.container_name")
+            .map(|h| h.coerce_to_bytes())
+            .unwrap_or_default().to_vec()
+    ).unwrap());
+    buf.push_str(", pod_name=");
+    buf.push_str(&String::from_utf8(
+        log
+            .get("kubernetes.pod_name")
+            .map(|h| h.coerce_to_bytes())
+            .unwrap_or_default().to_vec()
+    ).unwrap());
+    buf.push_str(", message=");
+}
+
+fn get_num_facility(config_facility: &Facility, log: &LogEvent) -> u8 {
+    match config_facility {
+        Facility::Fixed(num) => return *num,
+        Facility::Field(field_name) => {
+            if let Some(field_value) = log.get(field_name.as_str()) {
+                let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
+                let num_value = field_value_string.parse::<u8>();
+                match num_value {
+                    Ok(num) => {
+                        if num > 23 {
+                            return 1 // USER
+                        } else {
+                            return num
+                        }
+                    }
+                    Err(_) => {
+                        let num = match field_value_string.to_uppercase().as_str() {
+                            "KERN" => 0,
+                            "USER" => 1,
+                            "MAIL" => 2,
+                            "DAEMON" => 3,
+                            "AUTH" => 4,
+                            "SYSLOG" => 5,
+                            "LPR" => 6,
+                            "NEWS" => 7,
+                            "UUCP" => 8,
+                            "CRON" => 9,
+                            "AUTHPRIV" => 10,
+                            "FTP" => 11,
+                            "NTP" => 12,
+                            "SECURITY" => 13,
+                            "CONSOLE" => 14,
+                            "SOLARIS-CRON" => 15,
+                            "LOCAL0" => 16,
+                            "LOCAL1" => 17,
+                            "LOCAL2" => 18,
+                            "LOCAL3" => 19,
+                            "LOCAL4" => 20,
+                            "LOCAL5" => 21,
+                            "LOCAL6" => 22,
+                            "LOCAL7" => 23,
+                            _ => 24,
+                        };
+                        if num > 23 {
+                            return 1 // USER
+                        } else {
+                            return num
+                        }
+                    }
+                }
+            } else {
+                return 1 // USER
+            }
+        }
+    }
+}
+
+fn get_num_severity(config_severity: &Severity, log: &LogEvent) -> u8 {
+    match config_severity {
+        Severity::Fixed(num) => return *num,
+        Severity::Field(field_name) => {
+            if let Some(field_value) = log.get(field_name.as_str()) {
+                let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
+                let num_value = field_value_string.parse::<u8>();
+                match num_value {
+                    Ok(num) => {
+                        if num > 7 {
+                            return 6 // INFORMATIONAL
+                        } else {
+                            return num
+                        }
+                    }
+                    Err(_) => {
+                        let num = match field_value_string.to_uppercase().as_str() {
+                            "EMERGENCY" => 0,
+                            "ALERT" => 1,
+                            "CRITICAL" => 2,
+                            "ERROR" => 3,
+                            "WARNING" => 4,
+                            "NOTICE" => 5,
+                            "INFORMATIONAL" => 6,
+                            "DEBUG" => 7,
+                            _ => 8,
+                        };
+                        if num > 7 {
+                            return 6 // INFORMATIONAL
+                        } else {
+                            return num
+                        }
+                    }
+                }
+            } else {
+                return 6 // INFORMATIONAL
+            }
+        }
+    }
+}
+
+fn get_field_or_config(config_name: &String, log: &LogEvent) -> String {
+    if let Some(field_name) = config_name.strip_prefix("$.message.") {
+        return get_field(field_name, log)
+    } else {
+        return config_name.clone()
+    }
+}
+
+fn get_field(field_name: &str, log: &LogEvent) -> String {
+    if let Some(field_value) = log.get(field_name) {
+        return String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
+    } else {
+        return NIL_VALUE.to_string()
+    }
+}
+
+fn get_timestamp(log: &LogEvent) -> DateTime<Local> {
+    match log.get("@timestamp") {
+        Some(value) => {
+            if let Value::Timestamp(timestamp) = value {
+                DateTime::<Local>::from(*timestamp)
+            } else {
+                Local::now()
+            }
+        },
+        _ => Local::now()
+    }
+}
+
diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs
index 2f28c27ec7bf8..7e969ae7b015c 100644
--- a/lib/codecs/src/encoding/mod.rs
+++ b/lib/codecs/src/encoding/mod.rs
@@ -8,17 +8,24 @@ use std::fmt::Debug;
 use bytes::BytesMut;
 
 pub use format::{
-    AvroSerializer, AvroSerializerConfig, AvroSerializerOptions, CsvSerializer,
-    CsvSerializerConfig, GelfSerializer, GelfSerializerConfig, JsonSerializer,
-    JsonSerializerConfig, LogfmtSerializer, LogfmtSerializerConfig, NativeJsonSerializer,
-    NativeJsonSerializerConfig, NativeSerializer, NativeSerializerConfig, ProtobufSerializer,
-    ProtobufSerializerConfig, ProtobufSerializerOptions, RawMessageSerializer,
-    RawMessageSerializerConfig, TextSerializer, TextSerializerConfig,
+    AvroSerializer, AvroSerializerConfig, AvroSerializerOptions,
+    CsvSerializer, CsvSerializerConfig,
+    GelfSerializer, GelfSerializerConfig,
+    JsonSerializer, JsonSerializerConfig,
+    LogfmtSerializer, LogfmtSerializerConfig,
+    NativeJsonSerializer, NativeJsonSerializerConfig,
+    NativeSerializer, NativeSerializerConfig,
+    ProtobufSerializer, ProtobufSerializerConfig, ProtobufSerializerOptions,
+    RawMessageSerializer, RawMessageSerializerConfig,
+    TextSerializer, TextSerializerConfig,
+    SyslogSerializer, SyslogSerializerConfig,
 };
 pub use framing::{
-    BoxedFramer, BoxedFramingError, BytesEncoder, BytesEncoderConfig, CharacterDelimitedEncoder,
-    CharacterDelimitedEncoderConfig, CharacterDelimitedEncoderOptions, LengthDelimitedEncoder,
-    LengthDelimitedEncoderConfig, NewlineDelimitedEncoder, NewlineDelimitedEncoderConfig,
+    BoxedFramer, BoxedFramingError,
+    BytesEncoder, BytesEncoderConfig,
+    CharacterDelimitedEncoder, CharacterDelimitedEncoderConfig, CharacterDelimitedEncoderOptions,
+    LengthDelimitedEncoder, LengthDelimitedEncoderConfig,
+    NewlineDelimitedEncoder, NewlineDelimitedEncoderConfig,
 };
 use vector_config::configurable_component;
 use vector_core::{config::DataType, event::Event, schema};
@@ -259,6 +266,10 @@ pub enum SerializerConfig {
     /// transform) and removing the message field while doing additional parsing on it, as this
     /// could lead to the encoding emitting empty strings for the given event.
     Text(TextSerializerConfig),
+
+    /// Syslog encoding
+    /// RFC 3164 and 5424 are supported
+    Syslog(SyslogSerializerConfig),
 }
 
 impl From<TextSerializerConfig> for SerializerConfig {
@@ -321,6 +332,12 @@ impl From<TextSerializerConfig> for SerializerConfig {
     }
 }
 
+impl From<SyslogSerializerConfig> for SerializerConfig {
+    fn from(config: SyslogSerializerConfig) -> Self {
+        Self::Syslog(config)
+    }
+}
+
 impl SerializerConfig {
     /// Build the `Serializer` from this configuration.
     pub fn build(&self) -> Result<Serializer, BuildError> {
@@ -341,6 +358,7 @@ impl SerializerConfig {
             Ok(Serializer::RawMessage(RawMessageSerializerConfig.build()))
         }
         SerializerConfig::Text(config) => Ok(Serializer::Text(config.build())),
+        SerializerConfig::Syslog(config) => Ok(Serializer::Syslog(config.build())),
     }
 }
 
@@ -367,7 +385,8 @@ impl SerializerConfig {
             | SerializerConfig::Logfmt
             | SerializerConfig::NativeJson
             | SerializerConfig::RawMessage
-            | SerializerConfig::Text(_) => FramingConfig::NewlineDelimited,
+            | SerializerConfig::Text(_)
+            | SerializerConfig::Syslog(_) => FramingConfig::NewlineDelimited,
         }
     }
 
@@ -386,6 +405,7 @@ impl SerializerConfig {
         SerializerConfig::Protobuf(config) => config.input_type(),
         SerializerConfig::RawMessage => RawMessageSerializerConfig.input_type(),
         SerializerConfig::Text(config) => config.input_type(),
+        SerializerConfig::Syslog(config) => config.input_type(),
     }
 }
 
@@ -404,6 +424,7 @@ impl SerializerConfig {
         SerializerConfig::Protobuf(config) => config.schema_requirement(),
         SerializerConfig::RawMessage => RawMessageSerializerConfig.schema_requirement(),
         SerializerConfig::Text(config) => config.schema_requirement(),
+        SerializerConfig::Syslog(config) => config.schema_requirement(),
     }
 }
@@ -431,6 +452,8 @@ pub enum Serializer {
     RawMessage(RawMessageSerializer),
     /// Uses a `TextSerializer` for serialization.
     Text(TextSerializer),
+    /// Uses a `SyslogSerializer` for serialization.
+    Syslog(SyslogSerializer),
 }
 
 impl Serializer {
@@ -444,7 +467,8 @@ impl Serializer {
             | Serializer::Text(_)
             | Serializer::Native(_)
             | Serializer::Protobuf(_)
-            | Serializer::RawMessage(_) => false,
+            | Serializer::RawMessage(_)
+            | Serializer::Syslog(_) => false,
         }
     }
 
@@ -465,7 +489,8 @@ impl Serializer {
             | Serializer::Text(_)
             | Serializer::Native(_)
             | Serializer::Protobuf(_)
-            | Serializer::RawMessage(_) => {
+            | Serializer::RawMessage(_)
+            | Serializer::Syslog(_) => {
                 panic!("Serializer does not support JSON")
             }
         }
@@ -532,6 +557,12 @@ impl From<TextSerializer> for Serializer {
     }
 }
 
+impl From<SyslogSerializer> for Serializer {
+    fn from(serializer: SyslogSerializer) -> Self {
+        Self::Syslog(serializer)
+    }
+}
+
 impl tokio_util::codec::Encoder<Event> for Serializer {
     type Error = vector_common::Error;
 
@@ -547,6 +578,7 @@ impl tokio_util::codec::Encoder<Event> for Serializer {
             Serializer::Protobuf(serializer) => serializer.encode(event, buffer),
             Serializer::RawMessage(serializer) => serializer.encode(event, buffer),
             Serializer::Text(serializer) => serializer.encode(event, buffer),
+            Serializer::Syslog(serializer) => serializer.encode(event, buffer),
         }
     }
 }
diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs
index d16ec78b627e4..c742f14f84f41 100644
--- a/src/codecs/encoding/config.rs
+++ b/src/codecs/encoding/config.rs
@@ -123,7 +123,8 @@ impl EncodingConfigWithFraming {
                 | Serializer::Logfmt(_)
                 | Serializer::NativeJson(_)
                 | Serializer::RawMessage(_)
-                | Serializer::Text(_),
+                | Serializer::Text(_)
+                | Serializer::Syslog(_),
             ) => NewlineDelimitedEncoder::new().into(),
         };
 
diff --git a/src/codecs/encoding/encoder.rs b/src/codecs/encoding/encoder.rs
index d12f2ab85cb78..5bc74dcb258a2 100644
--- a/src/codecs/encoding/encoder.rs
+++ b/src/codecs/encoding/encoder.rs
@@ -122,7 +122,8 @@
                 | Serializer::Logfmt(_)
                 | Serializer::NativeJson(_)
                 | Serializer::RawMessage(_)
-                | Serializer::Text(_),
+                | Serializer::Text(_)
+                | Serializer::Syslog(_),
                 _,
             ) => "text/plain",
         }
diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs
index a9b39a560988c..4b5dc66cad0eb 100644
--- a/src/components/validation/resources/mod.rs
+++ b/src/components/validation/resources/mod.rs
@@ -212,6 +212,7 @@ fn serializer_config_to_deserializer(
             })
         }
         SerializerConfig::RawMessage | SerializerConfig::Text(_) => DeserializerConfig::Bytes,
+        SerializerConfig::Syslog(_) => todo!(),
     };
 
     deserializer_config.build()
diff --git a/src/sinks/websocket/sink.rs b/src/sinks/websocket/sink.rs
index 7c59253c28c6f..a87dfe296e8c3 100644
--- a/src/sinks/websocket/sink.rs
+++ b/src/sinks/websocket/sink.rs
@@ -236,12 +236,12 @@ impl WebSocketSink {
 
     const fn should_encode_as_binary(&self) -> bool {
         use vector_lib::codecs::encoding::Serializer::{
-            Avro, Csv, Gelf, Json, Logfmt, Native, NativeJson, Protobuf, RawMessage, Text,
+            Avro, Csv, Gelf, Json, Logfmt, Native, NativeJson, Protobuf, RawMessage, Text, Syslog,
         };
 
         match self.encoder.serializer() {
             RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) => true,
-            Csv(_) | Logfmt(_) | Gelf(_) | Json(_) | Text(_) | NativeJson(_) => false,
+            Csv(_) | Logfmt(_) | Gelf(_) | Json(_) | Text(_) | NativeJson(_) | Syslog(_) => false,
         }
     }

From 7407f7bc316344096a357aecb762338d252a4968 Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Fri, 15 Mar 2024 17:09:52 +1300
Subject: [PATCH 02/24] chore: Split syslog encoder into separate files

This is only a temporary change to make the diffs for future commits
easier to follow.
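For reference, a sink configuration exercising the codec introduced in the
previous commit might look like the following. This is illustrative only:
the sink type and values are hypothetical, the fields map onto
`SyslogSerializerConfig`, and `codec: syslog` assumes the usual
internally-tagged `SerializerConfig` registration shown above.

    sinks:
      syslog_out:
        type: socket          # hypothetical sink choice
        inputs: ["my_source"]
        address: "127.0.0.1:514"
        mode: tcp
        encoding:
          codec: syslog
          rfc: rfc3164
          facility: local0
          severity: informational
          tag: "$.message.app"   # proxied: resolved as a field lookup per event
          payload_key: message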
--- lib/codecs/src/encoding/format/syslog.rs | 439 ------------------ .../format/syslog/facility_severity.rs | 223 +++++++++ .../src/encoding/format/syslog/serializer.rs | 122 +++++ .../format/syslog/serializer_config.rs | 90 ++++ 4 files changed, 435 insertions(+), 439 deletions(-) create mode 100644 lib/codecs/src/encoding/format/syslog/facility_severity.rs create mode 100644 lib/codecs/src/encoding/format/syslog/serializer.rs create mode 100644 lib/codecs/src/encoding/format/syslog/serializer_config.rs diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index 017e054dcc850..9077c806bd698 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -5,442 +5,3 @@ use chrono::{DateTime, SecondsFormat, Local}; use vrl::value::Value; use serde::{de, Deserialize}; use vector_config::configurable_component; - -const NIL_VALUE: &'static str = "-"; - -/// Syslog RFC -#[configurable_component] -#[derive(Clone, Debug, Eq, PartialEq)] -#[serde(rename_all = "snake_case")] -pub enum SyslogRFC { - /// RFC 3164 - Rfc3164, - - /// RFC 5424 - Rfc5424 -} - -impl Default for SyslogRFC { - fn default() -> Self { - SyslogRFC::Rfc5424 - } -} - -/// Syslog facility -#[configurable_component] -#[derive(Clone, Debug, Eq, PartialEq)] -enum Facility { - /// Syslog facility ordinal number - Fixed(u8), - - /// Syslog facility name - Field(String) -} - -impl Default for Facility { - fn default() -> Self { - Facility::Fixed(1) - } -} - -/// Syslog severity -#[configurable_component] -#[derive(Clone, Debug, Eq, PartialEq)] -enum Severity { - /// Syslog severity ordinal number - Fixed(u8), - - /// Syslog severity name - Field(String) -} - -impl Default for Severity { - fn default() -> Self { - Severity::Fixed(6) - } -} - -/// Config used to build a `SyslogSerializer`. -#[configurable_component] -#[derive(Debug, Clone, Default)] -pub struct SyslogSerializerConfig { - /// RFC - #[serde(default)] - rfc: SyslogRFC, - - /// Facility - #[serde(default)] - #[serde(deserialize_with = "deserialize_facility")] - facility: Facility, - - /// Severity - #[serde(default)] - #[serde(deserialize_with = "deserialize_severity")] - severity: Severity, - - /// Tag - #[serde(default)] - tag: String, - - /// Trim prefix - trim_prefix: Option, - - /// Payload key - #[serde(default)] - payload_key: String, - - /// Add log source - #[serde(default)] - add_log_source: bool, - - /// App Name, RFC 5424 only - #[serde(default = "default_app_name")] - app_name: String, - - /// Proc ID, RFC 5424 only - #[serde(default = "default_nil_value")] - proc_id: String, - - /// Msg ID, RFC 5424 only - #[serde(default = "default_nil_value")] - msg_id: String -} - -impl SyslogSerializerConfig { - /// Build the `SyslogSerializer` from this configuration. - pub fn build(&self) -> SyslogSerializer { - SyslogSerializer::new(&self) - } - - /// The data type of events that are accepted by `SyslogSerializer`. - pub fn input_type(&self) -> DataType { - DataType::Log - } - - /// The schema required by the serializer. - pub fn schema_requirement(&self) -> schema::Requirement { - schema::Requirement::empty() - } -} - -/// Serializer that converts an `Event` to bytes using the Syslog format. -#[derive(Debug, Clone)] -pub struct SyslogSerializer { - config: SyslogSerializerConfig -} - -impl SyslogSerializer { - /// Creates a new `SyslogSerializer`. 
- pub fn new(conf: &SyslogSerializerConfig) -> Self { - Self { config: conf.clone() } - } -} - -impl Encoder for SyslogSerializer { - type Error = vector_common::Error; - - fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { - match event { - Event::Log(log) => { - let mut buf = String::from("<"); - let pri = get_num_facility(&self.config.facility, &log) * 8 + get_num_severity(&self.config.severity, &log); - buf.push_str(&pri.to_string()); - buf.push_str(">"); - match self.config.rfc { - SyslogRFC::Rfc3164 => { - let timestamp = get_timestamp(&log); - let formatted_timestamp = format!(" {} ", timestamp.format("%b %e %H:%M:%S")); - buf.push_str(&formatted_timestamp); - buf.push_str(&get_field("hostname", &log)); - buf.push(' '); - buf.push_str(&get_field_or_config(&self.config.tag, &log)); - buf.push_str(": "); - if self.config.add_log_source { - add_log_source(&log, &mut buf); - } - }, - SyslogRFC::Rfc5424 => { - buf.push_str("1 "); - let timestamp = get_timestamp(&log); - buf.push_str(×tamp.to_rfc3339_opts(SecondsFormat::Millis, true)); - buf.push(' '); - buf.push_str(&get_field("hostname", &log)); - buf.push(' '); - buf.push_str(&get_field_or_config(&&self.config.app_name, &log)); - buf.push(' '); - buf.push_str(&get_field_or_config(&&self.config.proc_id, &log)); - buf.push(' '); - buf.push_str(&get_field_or_config(&&self.config.msg_id, &log)); - buf.push_str(" - "); // no structured data - if self.config.add_log_source { - add_log_source(&log, &mut buf); - } - } - } - let mut payload = if self.config.payload_key.is_empty() { - serde_json::to_vec(&log).unwrap_or_default() - } else { - get_field(&&self.config.payload_key, &log).as_bytes().to_vec() - }; - let mut vec = buf.as_bytes().to_vec(); - vec.append(&mut payload); - buffer.put_slice(&vec); - }, - _ => {} - } - Ok(()) - } -} - -fn deserialize_facility<'de, D>(d: D) -> Result - where D: de::Deserializer<'de> -{ - let value: String = String::deserialize(d)?; - let num_value = value.parse::(); - match num_value { - Ok(num) => { - if num > 23 { - return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"facility number too large")); - } else { - return Ok(Facility::Fixed(num)); - } - } - Err(_) => { - if let Some(field_name) = value.strip_prefix("$.message.") { - return Ok(Facility::Field(field_name.to_string())); - } else { - let num = match value.to_uppercase().as_str() { - "KERN" => 0, - "USER" => 1, - "MAIL" => 2, - "DAEMON" => 3, - "AUTH" => 4, - "SYSLOG" => 5, - "LPR" => 6, - "NEWS" => 7, - "UUCP" => 8, - "CRON" => 9, - "AUTHPRIV" => 10, - "FTP" => 11, - "NTP" => 12, - "SECURITY" => 13, - "CONSOLE" => 14, - "SOLARIS-CRON" => 15, - "LOCAL0" => 16, - "LOCAL1" => 17, - "LOCAL2" => 18, - "LOCAL3" => 19, - "LOCAL4" => 20, - "LOCAL5" => 21, - "LOCAL6" => 22, - "LOCAL7" => 23, - _ => 24, - }; - if num > 23 { - return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown facility")); - } else { - return Ok(Facility::Fixed(num)) - } - } - } - } -} - -fn deserialize_severity<'de, D>(d: D) -> Result - where D: de::Deserializer<'de> -{ - let value: String = String::deserialize(d)?; - let num_value = value.parse::(); - match num_value { - Ok(num) => { - if num > 7 { - return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"severity number too large")) - } else { - return Ok(Severity::Fixed(num)) - } - } - Err(_) => { - if let Some(field_name) = value.strip_prefix("$.message.") { - return Ok(Severity::Field(field_name.to_string())); - } else { - let 
num = match value.to_uppercase().as_str() { - "EMERGENCY" => 0, - "ALERT" => 1, - "CRITICAL" => 2, - "ERROR" => 3, - "WARNING" => 4, - "NOTICE" => 5, - "INFORMATIONAL" => 6, - "DEBUG" => 7, - _ => 8, - }; - if num > 7 { - return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown severity")) - } else { - return Ok(Severity::Fixed(num)) - } - } - } - } -} - -fn default_app_name() -> String { - String::from("vector") -} - -fn default_nil_value() -> String { - String::from(NIL_VALUE) -} - -fn add_log_source(log: &LogEvent, buf: &mut String) { - buf.push_str("namespace_name="); - buf.push_str(&String::from_utf8( - log - .get("kubernetes.namespace_name") - .map(|h| h.coerce_to_bytes()) - .unwrap_or_default().to_vec() - ).unwrap()); - buf.push_str(", container_name="); - buf.push_str(&String::from_utf8( - log - .get("kubernetes.container_name") - .map(|h| h.coerce_to_bytes()) - .unwrap_or_default().to_vec() - ).unwrap()); - buf.push_str(", pod_name="); - buf.push_str(&String::from_utf8( - log - .get("kubernetes.pod_name") - .map(|h| h.coerce_to_bytes()) - .unwrap_or_default().to_vec() - ).unwrap()); - buf.push_str(", message="); -} - -fn get_num_facility(config_facility: &Facility, log: &LogEvent) -> u8 { - match config_facility { - Facility::Fixed(num) => return *num, - Facility::Field(field_name) => { - if let Some(field_value) = log.get(field_name.as_str()) { - let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default(); - let num_value = field_value_string.parse::(); - match num_value { - Ok(num) => { - if num > 23 { - return 1 // USER - } else { - return num - } - } - Err(_) => { - let num = match field_value_string.to_uppercase().as_str() { - "KERN" => 0, - "USER" => 1, - "MAIL" => 2, - "DAEMON" => 3, - "AUTH" => 4, - "SYSLOG" => 5, - "LPR" => 6, - "NEWS" => 7, - "UUCP" => 8, - "CRON" => 9, - "AUTHPRIV" => 10, - "FTP" => 11, - "NTP" => 12, - "SECURITY" => 13, - "CONSOLE" => 14, - "SOLARIS-CRON" => 15, - "LOCAL0" => 16, - "LOCAL1" => 17, - "LOCAL2" => 18, - "LOCAL3" => 19, - "LOCAL4" => 20, - "LOCAL5" => 21, - "LOCAL6" => 22, - "LOCAL7" => 23, - _ => 24, - }; - if num > 23 { - return 1 // USER - } else { - return num - } - } - } - } else { - return 1 // USER - } - } - } -} - -fn get_num_severity(config_severity: &Severity, log: &LogEvent) -> u8 { - match config_severity { - Severity::Fixed(num) => return *num, - Severity::Field(field_name) => { - if let Some(field_value) = log.get(field_name.as_str()) { - let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default(); - let num_value = field_value_string.parse::(); - match num_value { - Ok(num) => { - if num > 7 { - return 6 // INFORMATIONAL - } else { - return num - } - } - Err(_) => { - let num = match field_value_string.to_uppercase().as_str() { - "EMERGENCY" => 0, - "ALERT" => 1, - "CRITICAL" => 2, - "ERROR" => 3, - "WARNING" => 4, - "NOTICE" => 5, - "INFORMATIONAL" => 6, - "DEBUG" => 7, - _ => 8, - }; - if num > 7 { - return 6 // INFORMATIONAL - } else { - return num - } - } - } - } else { - return 6 // INFORMATIONAL - } - } - } -} - -fn get_field_or_config(config_name: &String, log: &LogEvent) -> String { - if let Some(field_name) = config_name.strip_prefix("$.message.") { - return get_field(field_name, log) - } else { - return config_name.clone() - } -} - -fn get_field(field_name: &str, log: &LogEvent) -> String { - if let Some(field_value) = log.get(field_name) { - return 
String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
-    } else {
-        return NIL_VALUE.to_string()
-    }
-}
-
-fn get_timestamp(log: &LogEvent) -> DateTime<Local> {
-    match log.get("@timestamp") {
-        Some(value) => {
-            if let Value::Timestamp(timestamp) = value {
-                DateTime::<Local>::from(*timestamp)
-            } else {
-                Local::now()
-            }
-        },
-        _ => Local::now()
-    }
-}
-
diff --git a/lib/codecs/src/encoding/format/syslog/facility_severity.rs b/lib/codecs/src/encoding/format/syslog/facility_severity.rs
new file mode 100644
index 0000000000000..8117d62709484
--- /dev/null
+++ b/lib/codecs/src/encoding/format/syslog/facility_severity.rs
@@ -0,0 +1,223 @@
+/// Syslog facility
+#[configurable_component]
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum Facility {
+    /// Syslog facility ordinal number
+    Fixed(u8),
+
+    /// Syslog facility name
+    Field(String)
+}
+
+impl Default for Facility {
+    fn default() -> Self {
+        Facility::Fixed(1)
+    }
+}
+
+/// Syslog severity
+#[configurable_component]
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum Severity {
+    /// Syslog severity ordinal number
+    Fixed(u8),
+
+    /// Syslog severity name
+    Field(String)
+}
+
+impl Default for Severity {
+    fn default() -> Self {
+        Severity::Fixed(6)
+    }
+}
+
+fn deserialize_facility<'de, D>(d: D) -> Result<Facility, D::Error>
+    where D: de::Deserializer<'de>
+{
+    let value: String = String::deserialize(d)?;
+    let num_value = value.parse::<u8>();
+    match num_value {
+        Ok(num) => {
+            if num > 23 {
+                return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"facility number too large"));
+            } else {
+                return Ok(Facility::Fixed(num));
+            }
+        }
+        Err(_) => {
+            if let Some(field_name) = value.strip_prefix("$.message.") {
+                return Ok(Facility::Field(field_name.to_string()));
+            } else {
+                let num = match value.to_uppercase().as_str() {
+                    "KERN" => 0,
+                    "USER" => 1,
+                    "MAIL" => 2,
+                    "DAEMON" => 3,
+                    "AUTH" => 4,
+                    "SYSLOG" => 5,
+                    "LPR" => 6,
+                    "NEWS" => 7,
+                    "UUCP" => 8,
+                    "CRON" => 9,
+                    "AUTHPRIV" => 10,
+                    "FTP" => 11,
+                    "NTP" => 12,
+                    "SECURITY" => 13,
+                    "CONSOLE" => 14,
+                    "SOLARIS-CRON" => 15,
+                    "LOCAL0" => 16,
+                    "LOCAL1" => 17,
+                    "LOCAL2" => 18,
+                    "LOCAL3" => 19,
+                    "LOCAL4" => 20,
+                    "LOCAL5" => 21,
+                    "LOCAL6" => 22,
+                    "LOCAL7" => 23,
+                    _ => 24,
+                };
+                if num > 23 {
+                    return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown facility"));
+                } else {
+                    return Ok(Facility::Fixed(num))
+                }
+            }
+        }
+    }
+}
+
+fn deserialize_severity<'de, D>(d: D) -> Result<Severity, D::Error>
+    where D: de::Deserializer<'de>
+{
+    let value: String = String::deserialize(d)?;
+    let num_value = value.parse::<u8>();
+    match num_value {
+        Ok(num) => {
+            if num > 7 {
+                return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"severity number too large"))
+            } else {
+                return Ok(Severity::Fixed(num))
+            }
+        }
+        Err(_) => {
+            if let Some(field_name) = value.strip_prefix("$.message.") {
+                return Ok(Severity::Field(field_name.to_string()));
+            } else {
+                let num = match value.to_uppercase().as_str() {
+                    "EMERGENCY" => 0,
+                    "ALERT" => 1,
+                    "CRITICAL" => 2,
+                    "ERROR" => 3,
+                    "WARNING" => 4,
+                    "NOTICE" => 5,
+                    "INFORMATIONAL" => 6,
+                    "DEBUG" => 7,
+                    _ => 8,
+                };
+                if num > 7 {
+                    return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown severity"))
+                } else {
+                    return Ok(Severity::Fixed(num))
+                }
+            }
+        }
+    }
+}
+
+fn get_num_facility(config_facility: &Facility, log: &LogEvent) -> u8 {
+    match config_facility {
+        Facility::Fixed(num) => return *num,
+        Facility::Field(field_name) => {
+            if let Some(field_value) = log.get(field_name.as_str()) {
+                let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
+                let num_value = field_value_string.parse::<u8>();
+                match num_value {
+                    Ok(num) => {
+                        if num > 23 {
+                            return 1 // USER
+                        } else {
+                            return num
+                        }
+                    }
+                    Err(_) => {
+                        let num = match field_value_string.to_uppercase().as_str() {
+                            "KERN" => 0,
+                            "USER" => 1,
+                            "MAIL" => 2,
+                            "DAEMON" => 3,
+                            "AUTH" => 4,
+                            "SYSLOG" => 5,
+                            "LPR" => 6,
+                            "NEWS" => 7,
+                            "UUCP" => 8,
+                            "CRON" => 9,
+                            "AUTHPRIV" => 10,
+                            "FTP" => 11,
+                            "NTP" => 12,
+                            "SECURITY" => 13,
+                            "CONSOLE" => 14,
+                            "SOLARIS-CRON" => 15,
+                            "LOCAL0" => 16,
+                            "LOCAL1" => 17,
+                            "LOCAL2" => 18,
+                            "LOCAL3" => 19,
+                            "LOCAL4" => 20,
+                            "LOCAL5" => 21,
+                            "LOCAL6" => 22,
+                            "LOCAL7" => 23,
+                            _ => 24,
+                        };
+                        if num > 23 {
+                            return 1 // USER
+                        } else {
+                            return num
+                        }
+                    }
+                }
+            } else {
+                return 1 // USER
+            }
+        }
+    }
+}
+
+fn get_num_severity(config_severity: &Severity, log: &LogEvent) -> u8 {
+    match config_severity {
+        Severity::Fixed(num) => return *num,
+        Severity::Field(field_name) => {
+            if let Some(field_value) = log.get(field_name.as_str()) {
+                let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
+                let num_value = field_value_string.parse::<u8>();
+                match num_value {
+                    Ok(num) => {
+                        if num > 7 {
+                            return 6 // INFORMATIONAL
+                        } else {
+                            return num
+                        }
+                    }
+                    Err(_) => {
+                        let num = match field_value_string.to_uppercase().as_str() {
+                            "EMERGENCY" => 0,
+                            "ALERT" => 1,
+                            "CRITICAL" => 2,
+                            "ERROR" => 3,
+                            "WARNING" => 4,
+                            "NOTICE" => 5,
+                            "INFORMATIONAL" => 6,
+                            "DEBUG" => 7,
+                            _ => 8,
+                        };
+                        if num > 7 {
+                            return 6 // INFORMATIONAL
+                        } else {
+                            return num
+                        }
+                    }
+                }
+            } else {
+                return 6 // INFORMATIONAL
+            }
+        }
+    }
+}
diff --git a/lib/codecs/src/encoding/format/syslog/serializer.rs b/lib/codecs/src/encoding/format/syslog/serializer.rs
new file mode 100644
index 0000000000000..6921e6fe0c3db
--- /dev/null
+++ b/lib/codecs/src/encoding/format/syslog/serializer.rs
@@ -0,0 +1,122 @@
+/// Serializer that converts an `Event` to bytes using the Syslog format.
+#[derive(Debug, Clone)]
+pub struct SyslogSerializer {
+    config: SyslogSerializerConfig
+}
+
+impl SyslogSerializer {
+    /// Creates a new `SyslogSerializer`.
+    pub fn new(conf: &SyslogSerializerConfig) -> Self {
+        Self { config: conf.clone() }
+    }
+}
+
+impl Encoder<Event> for SyslogSerializer {
+    type Error = vector_common::Error;
+
+    fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> {
+        match event {
+            Event::Log(log) => {
+                let mut buf = String::from("<");
+                let pri = get_num_facility(&self.config.facility, &log) * 8 + get_num_severity(&self.config.severity, &log);
+                buf.push_str(&pri.to_string());
+                buf.push_str(">");
+                match self.config.rfc {
+                    SyslogRFC::Rfc3164 => {
+                        let timestamp = get_timestamp(&log);
+                        let formatted_timestamp = format!(" {} ", timestamp.format("%b %e %H:%M:%S"));
+                        buf.push_str(&formatted_timestamp);
+                        buf.push_str(&get_field("hostname", &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&self.config.tag, &log));
+                        buf.push_str(": ");
+                        if self.config.add_log_source {
+                            add_log_source(&log, &mut buf);
+                        }
+                    },
+                    SyslogRFC::Rfc5424 => {
+                        buf.push_str("1 ");
+                        let timestamp = get_timestamp(&log);
+                        buf.push_str(&timestamp.to_rfc3339_opts(SecondsFormat::Millis, true));
+                        buf.push(' ');
+                        buf.push_str(&get_field("hostname", &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&&self.config.app_name, &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&&self.config.proc_id, &log));
+                        buf.push(' ');
+                        buf.push_str(&get_field_or_config(&&self.config.msg_id, &log));
+                        buf.push_str(" - "); // no structured data
+                        if self.config.add_log_source {
+                            add_log_source(&log, &mut buf);
+                        }
+                    }
+                }
+                let mut payload = if self.config.payload_key.is_empty() {
+                    serde_json::to_vec(&log).unwrap_or_default()
+                } else {
+                    get_field(&&self.config.payload_key, &log).as_bytes().to_vec()
+                };
+                let mut vec = buf.as_bytes().to_vec();
+                vec.append(&mut payload);
+                buffer.put_slice(&vec);
+            },
+            _ => {}
+        }
+        Ok(())
+    }
+}
+
+fn get_field_or_config(config_name: &String, log: &LogEvent) -> String {
+    if let Some(field_name) = config_name.strip_prefix("$.message.") {
+        return get_field(field_name, log)
+    } else {
+        return config_name.clone()
+    }
+}
+
+fn get_field(field_name: &str, log: &LogEvent) -> String {
+    if let Some(field_value) = log.get(field_name) {
+        return String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
+    } else {
+        return NIL_VALUE.to_string()
+    }
+}
+
+fn get_timestamp(log: &LogEvent) -> DateTime<Local> {
+    match log.get("@timestamp") {
+        Some(value) => {
+            if let Value::Timestamp(timestamp) = value {
+                DateTime::<Local>::from(*timestamp)
+            } else {
+                Local::now()
+            }
+        },
+        _ => Local::now()
+    }
+}
+
+fn add_log_source(log: &LogEvent, buf: &mut String) {
+    buf.push_str("namespace_name=");
+    buf.push_str(&String::from_utf8(
+        log
+            .get("kubernetes.namespace_name")
+            .map(|h| h.coerce_to_bytes())
+            .unwrap_or_default().to_vec()
+    ).unwrap());
+    buf.push_str(", container_name=");
+    buf.push_str(&String::from_utf8(
+        log
+            .get("kubernetes.container_name")
+            .map(|h| h.coerce_to_bytes())
+            .unwrap_or_default().to_vec()
+    ).unwrap());
+    buf.push_str(", pod_name=");
+    buf.push_str(&String::from_utf8(
+        log
+            .get("kubernetes.pod_name")
+            .map(|h| h.coerce_to_bytes())
+            .unwrap_or_default().to_vec()
+    ).unwrap());
+    buf.push_str(", message=");
+}
diff --git a/lib/codecs/src/encoding/format/syslog/serializer_config.rs b/lib/codecs/src/encoding/format/syslog/serializer_config.rs
new file mode 100644
index 0000000000000..f0aae986c5c1d
--- /dev/null
+++ b/lib/codecs/src/encoding/format/syslog/serializer_config.rs
@@ -0,0 +1,90 @@
+const NIL_VALUE: &'static str = "-";
+
+/// Syslog RFC
+#[configurable_component]
+#[derive(Clone, Debug, Eq, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum SyslogRFC {
+    /// RFC 3164
+    Rfc3164,
+
+    /// RFC 5424
+    Rfc5424
+}
+
+impl Default for SyslogRFC {
+    fn default() -> Self {
+        SyslogRFC::Rfc5424
+    }
+}
+
+/// Config used to build a `SyslogSerializer`.
+#[configurable_component]
+#[derive(Debug, Clone, Default)]
+pub struct SyslogSerializerConfig {
+    /// RFC
+    #[serde(default)]
+    rfc: SyslogRFC,
+
+    /// Facility
+    #[serde(default)]
+    #[serde(deserialize_with = "deserialize_facility")]
+    facility: Facility,
+
+    /// Severity
+    #[serde(default)]
+    #[serde(deserialize_with = "deserialize_severity")]
+    severity: Severity,
+
+    /// Tag
+    #[serde(default)]
+    tag: String,
+
+    /// Trim prefix
+    trim_prefix: Option<String>,
+
+    /// Payload key
+    #[serde(default)]
+    payload_key: String,
+
+    /// Add log source
+    #[serde(default)]
+    add_log_source: bool,
+
+    /// App Name, RFC 5424 only
+    #[serde(default = "default_app_name")]
+    app_name: String,
+
+    /// Proc ID, RFC 5424 only
+    #[serde(default = "default_nil_value")]
+    proc_id: String,
+
+    /// Msg ID, RFC 5424 only
+    #[serde(default = "default_nil_value")]
+    msg_id: String
+}
+
+impl SyslogSerializerConfig {
+    /// Build the `SyslogSerializer` from this configuration.
+    pub fn build(&self) -> SyslogSerializer {
+        SyslogSerializer::new(&self)
+    }
+
+    /// The data type of events that are accepted by `SyslogSerializer`.
+    pub fn input_type(&self) -> DataType {
+        DataType::Log
+    }
+
+    /// The schema required by the serializer.
+    pub fn schema_requirement(&self) -> schema::Requirement {
+        schema::Requirement::empty()
+    }
+}
+
+fn default_app_name() -> String {
+    String::from("vector")
+}
+
+fn default_nil_value() -> String {
+    String::from(NIL_VALUE)
+}

From c9aacd9fe9320844385ccba521454d53d32432ad Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Fri, 15 Mar 2024 17:10:00 +1300
Subject: [PATCH 03/24] refactor: Syslog facility and severity

- Introduce a `Pri` struct with fields for severity and facility as enum
  values.
- `Pri` uses the `strum` crate to parse string values into their appropriate
  enum variant. It handles the responsibility of encoding the two enums'
  ordinal values into the `PRIVAL` value for the encoder.
- As the `Facility` and `Severity` enums better represent their ordinal
  mapping directly, the `Fixed` + `Field` subtyping with a custom
  deserializer isn't necessary. Parsing a string that represents the enum by
  name or by its ordinal representation is much simpler.
- Likewise, this removes the need for the get methods, as the enum can
  provide either the `String` or `u8` representation as needed.
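The name-or-ordinal fallback described above can be exercised in isolation.
A minimal, self-contained sketch follows (the enum is trimmed to three
variants for brevity, and it assumes the `strum` 0.26 derives added to
`Cargo.toml` in the diff below):

    use std::str::FromStr;
    use strum::{EnumString, FromRepr};

    #[derive(Debug, Default, EnumString, FromRepr, Copy, Clone, PartialEq)]
    #[strum(serialize_all = "kebab-case")]
    enum Facility {
        Kern = 0,
        #[default]
        User = 1,
        SolarisCron = 15,
    }

    // Accepts either the ordinal form ("15") or the kebab-case name ("solaris-cron").
    fn into_variant(input: &str) -> Option<Facility> {
        let s = input.to_ascii_lowercase();
        s.parse::<usize>()
            .map_or_else(|_| Facility::from_str(&s).ok(), Facility::from_repr)
    }

    fn main() {
        assert_eq!(into_variant("1"), Some(Facility::User));
        assert_eq!(into_variant("solaris-cron"), Some(Facility::SolarisCron));
        assert_eq!(into_variant("99"), None); // out-of-range ordinals are rejected
    }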
---
 lib/codecs/Cargo.toml                          |   3 +-
 .../format/syslog/facility_severity.rs         | 293 ++++++-------------
 2 files changed, 90 insertions(+), 206 deletions(-)

diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml
index f47ce0fc2d60d..d4d12e966994b 100644
--- a/lib/codecs/Cargo.toml
+++ b/lib/codecs/Cargo.toml
@@ -27,6 +27,7 @@ serde.workspace = true
 serde_json.workspace = true
 smallvec = { version = "1", default-features = false, features = ["union"] }
 snafu = { version = "0.7.5", default-features = false, features = ["futures"] }
+strum = { version = "0.26", features = ["derive"], optional = true }
 syslog_loose = { version = "0.21", default-features = false, optional = true }
 tokio-util = { version = "0.7", default-features = false, features = ["codec"] }
 tracing = { version = "0.1", default-features = false }
@@ -48,4 +49,4 @@ rstest = "0.18.2"
 vrl.workspace = true
 
 [features]
-syslog = ["dep:syslog_loose"]
+syslog = ["dep:syslog_loose", "dep:strum"]
diff --git a/lib/codecs/src/encoding/format/syslog/facility_severity.rs b/lib/codecs/src/encoding/format/syslog/facility_severity.rs
index 8117d62709484..786fe61cd0ead 100644
--- a/lib/codecs/src/encoding/format/syslog/facility_severity.rs
+++ b/lib/codecs/src/encoding/format/syslog/facility_severity.rs
@@ -1,223 +1,106 @@
-/// Syslog facility
-#[configurable_component]
-#[derive(Clone, Debug, Eq, PartialEq)]
-enum Facility {
-    /// Syslog facility ordinal number
-    Fixed(u8),
+use std::str::FromStr;
+use strum::{FromRepr, EnumString};
 
-    /// Syslog facility name
-    Field(String)
+#[derive(Default, Debug)]
+struct Pri {
+    facility: Facility,
+    severity: Severity,
 }
 
-impl Default for Facility {
-    fn default() -> Self {
-        Facility::Fixed(1)
+impl Pri {
+    fn from_str_variants(facility_variant: &str, severity_variant: &str) -> Self {
+        // The original PR's `deserialize_*()` methods parsed a value to a `u8` or stored a field key as a `String`.
+        // Later, the equivalent `get_num_*()` method would retrieve the `u8` value or look up the field key for the actual value,
+        // otherwise it'd fall back to the default Facility/Severity value.
+        // This approach instead parses a string of the name or ordinal representation;
+        // any reference via field key lookup should have already happened by this point.
+        let facility = Facility::into_variant(&facility_variant).unwrap_or(Facility::User);
+        let severity = Severity::into_variant(&severity_variant).unwrap_or(Severity::Informational);
+
+        Self {
+            facility,
+            severity,
+        }
+    }
+
+    // The last paragraph describes how to compose the enums into `PRIVAL`:
+    // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.1
+    fn encode(&self) -> String {
+        let prival = (self.facility as u8 * 8) + self.severity as u8;
+        ["<", &prival.to_string(), ">"].concat()
     }
 }
 
-/// Syslog severity
-#[configurable_component]
-#[derive(Clone, Debug, Eq, PartialEq)]
-enum Severity {
-    /// Syslog severity ordinal number
-    Fixed(u8),
-
-    /// Syslog severity name
-    Field(String)
-}
-
-impl Default for Severity {
-    fn default() -> Self {
-        Severity::Fixed(6)
-    }
-}
-
-fn deserialize_facility<'de, D>(d: D) -> Result<Facility, D::Error>
-    where D: de::Deserializer<'de>
-{
-    let value: String = String::deserialize(d)?;
-    let num_value = value.parse::<u8>();
-    match num_value {
-        Ok(num) => {
-            if num > 23 {
-                return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"facility number too large"));
-            } else {
-                return Ok(Facility::Fixed(num));
-            }
-        }
-        Err(_) => {
-            if let Some(field_name) = value.strip_prefix("$.message.") {
-                return Ok(Facility::Field(field_name.to_string()));
-            } else {
-                let num = match value.to_uppercase().as_str() {
-                    "KERN" => 0,
-                    "USER" => 1,
-                    "MAIL" => 2,
-                    "DAEMON" => 3,
-                    "AUTH" => 4,
-                    "SYSLOG" => 5,
-                    "LPR" => 6,
-                    "NEWS" => 7,
-                    "UUCP" => 8,
-                    "CRON" => 9,
-                    "AUTHPRIV" => 10,
-                    "FTP" => 11,
-                    "NTP" => 12,
-                    "SECURITY" => 13,
-                    "CONSOLE" => 14,
-                    "SOLARIS-CRON" => 15,
-                    "LOCAL0" => 16,
-                    "LOCAL1" => 17,
-                    "LOCAL2" => 18,
-                    "LOCAL3" => 19,
-                    "LOCAL4" => 20,
-                    "LOCAL5" => 21,
-                    "LOCAL6" => 22,
-                    "LOCAL7" => 23,
-                    _ => 24,
-                };
-                if num > 23 {
-                    return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown facility"));
-                } else {
-                    return Ok(Facility::Fixed(num))
-                }
-            }
-        }
-    }
-}
-
-fn deserialize_severity<'de, D>(d: D) -> Result<Severity, D::Error>
-    where D: de::Deserializer<'de>
-{
-    let value: String = String::deserialize(d)?;
-    let num_value = value.parse::<u8>();
-    match num_value {
-        Ok(num) => {
-            if num > 7 {
-                return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"severity number too large"))
-            } else {
-                return Ok(Severity::Fixed(num))
-            }
-        }
-        Err(_) => {
-            if let Some(field_name) = value.strip_prefix("$.message.") {
-                return Ok(Severity::Field(field_name.to_string()));
-            } else {
-                let num = match value.to_uppercase().as_str() {
-                    "EMERGENCY" => 0,
-                    "ALERT" => 1,
-                    "CRITICAL" => 2,
-                    "ERROR" => 3,
-                    "WARNING" => 4,
-                    "NOTICE" => 5,
-                    "INFORMATIONAL" => 6,
-                    "DEBUG" => 7,
-                    _ => 8,
-                };
-                if num > 7 {
-                    return Err(de::Error::invalid_value(de::Unexpected::Unsigned(num as u64), &"unknown severity"))
-                } else {
-                    return Ok(Severity::Fixed(num))
-                }
-            }
-        }
-    }
-}
-
-fn get_num_facility(config_facility: &Facility, log: &LogEvent) -> u8 {
-    match config_facility {
-        Facility::Fixed(num) => return *num,
-        Facility::Field(field_name) => {
-            if let Some(field_value) = log.get(field_name.as_str()) {
-                let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
-                let num_value = field_value_string.parse::<u8>();
-                match num_value {
-                    Ok(num) => {
-                        if num > 23 {
-                            return 1 // USER
-                        } else {
-                            return num
-                        }
-                    }
-                    Err(_) => {
-                        let num = match field_value_string.to_uppercase().as_str() {
-                            "KERN" => 0,
-                            "USER" => 1,
-                            "MAIL" => 2,
-                            "DAEMON" => 3,
-                            "AUTH" => 4,
-                            "SYSLOG" => 5,
-                            "LPR" => 6,
-                            "NEWS" => 7,
-                            "UUCP" => 8,
-                            "CRON" => 9,
-                            "AUTHPRIV" => 10,
-                            "FTP" => 11,
-                            "NTP" => 12,
-                            "SECURITY" => 13,
-                            "CONSOLE" => 14,
-                            "SOLARIS-CRON" => 15,
-                            "LOCAL0" => 16,
-                            "LOCAL1" => 17,
-                            "LOCAL2" => 18,
-                            "LOCAL3" => 19,
-                            "LOCAL4" => 20,
-                            "LOCAL5" => 21,
-                            "LOCAL6" => 22,
-                            "LOCAL7" => 23,
-                            _ => 24,
-                        };
-                        if num > 23 {
-                            return 1 // USER
-                        } else {
-                            return num
-                        }
-                    }
-                }
-            } else {
-                return 1 // USER
-            }
-        }
-    }
-}
-
-fn get_num_severity(config_severity: &Severity, log: &LogEvent) -> u8 {
-    match config_severity {
-        Severity::Fixed(num) => return *num,
-        Severity::Field(field_name) => {
-            if let Some(field_value) = log.get(field_name.as_str()) {
-                let field_value_string = String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
-                let num_value = field_value_string.parse::<u8>();
-                match num_value {
-                    Ok(num) => {
-                        if num > 7 {
-                            return 6 // INFORMATIONAL
-                        } else {
-                            return num
-                        }
-                    }
-                    Err(_) => {
-                        let num = match field_value_string.to_uppercase().as_str() {
-                            "EMERGENCY" => 0,
-                            "ALERT" => 1,
-                            "CRITICAL" => 2,
-                            "ERROR" => 3,
-                            "WARNING" => 4,
-                            "NOTICE" => 5,
-                            "INFORMATIONAL" => 6,
-                            "DEBUG" => 7,
-                            _ => 8,
-                        };
-                        if num > 7 {
-                            return 6 // INFORMATIONAL
-                        } else {
-                            return num
-                        }
-                    }
-                }
-            } else {
-                return 6 // INFORMATIONAL
-            }
-        }
-    }
-}
+// Facility + Severity mapping from Name => Ordinal number:
+
+/// Syslog facility
+#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)]
+#[strum(serialize_all = "kebab-case")]
+enum Facility {
+    Kern = 0,
+    #[default]
+    User = 1,
+    Mail = 2,
+    Daemon = 3,
+    Auth = 4,
+    Syslog = 5,
+    LPR = 6,
+    News = 7,
+    UUCP = 8,
+    Cron = 9,
+    AuthPriv = 10,
+    FTP = 11,
+    NTP = 12,
+    Security = 13,
+    Console = 14,
+    SolarisCron = 15,
+    Local0 = 16,
+    Local1 = 17,
+    Local2 = 18,
+    Local3 = 19,
+    Local4 = 20,
+    Local5 = 21,
+    Local6 = 22,
+    Local7 = 23,
+}
+
+/// Syslog severity
+#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)]
+#[strum(serialize_all = "kebab-case")]
+enum Severity {
+    Emergency = 0,
+    Alert = 1,
+    Critical = 2,
+    Error = 3,
+    Warning = 4,
+    Notice = 5,
+    #[default]
+    Informational = 6,
+    Debug = 7,
+}
+
+// Additionally support variants from string-based integers:
+// Parse a string name, with fallback for parsing a string ordinal number.
+impl Facility {
+    fn into_variant(variant_name: &str) -> Option<Self> {
+        let s = variant_name.to_ascii_lowercase();
+
+        s.parse::<usize>().map_or_else(
+            |_| Self::from_str(&s).ok(),
+            |num| Self::from_repr(num),
+        )
+    }
+}
+
+// NOTE: The `strum` crate does not provide traits,
+// requiring copy/paste of the prior impl instead.
+impl Severity {
+    fn into_variant(variant_name: &str) -> Option<Self> {
+        let s = variant_name.to_ascii_lowercase();
+
+        s.parse::<usize>().map_or_else(
+            |_| Self::from_str(&s).ok(),
+            |num| Self::from_repr(num),
+        )
+    }
+}

From d03c87f6d6dd714d6421be3597440ba8220e1fad Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Fri, 15 Mar 2024 13:02:38 +1300
Subject: [PATCH 04/24] refactor: `SyslogSerializer`

`SyslogSerializer::encode()` has been simplified.
- Only matching `Event::Log` is relevant; an `if let` binding instead of
  `match` helps remove a redundant level of nesting.
- This method only focuses on boilerplate now, delegating the rest to
  `ConfigDecanter` (_adapt `LogEvent` + encoder config_) and `SyslogMessage`
  (_encode into a syslog message string_).
- This removes some complexity from the actual encoding logic, which should
  only be concerned with directly encoding from one representation to
  another, not complementary features related to Vector config or its type
  system.

The new `ConfigDecanter` is where many of the original helper methods that
were used by `SyslogSerializer::encode()` now reside. This change better
communicates the scope of their usage.
- Any interaction with `LogEvent` is now contained within the methods of this
  new struct. Likewise for the consumption of the encoder configuration
  (instead of queries to config throughout encoding).
- The `decant_config()` method better illustrates an overview of the data
  we're encoding and where that's being sourced from via the new
  `SyslogMessage` struct, which splits off the actual encoding responsibility
  (see next commit).
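The `$.message.` proxy convention that `decant_config()` relies on can be
illustrated standalone. This is a simplified sketch with a plain `HashMap`
standing in for `LogEvent` (not the Vector API):

    use std::collections::HashMap;

    // Config values prefixed with `$.message.` are treated as field lookups
    // into the event; anything else is used verbatim.
    fn replace_if_proxied(value: &str, event: &HashMap<&str, &str>) -> Option<String> {
        value.strip_prefix("$.message.").map_or(
            Some(value.to_owned()),                         // literal config value
            |key| event.get(key).map(|v| (*v).to_string()), // proxied field lookup
        )
    }

    fn main() {
        let event = HashMap::from([("app", "nginx")]);
        // A literal value passes through unchanged.
        assert_eq!(replace_if_proxied("vector", &event), Some("vector".into()));
        // A proxied value resolves against the event; `None` if the field is missing.
        assert_eq!(replace_if_proxied("$.message.app", &event), Some("nginx".into()));
        assert_eq!(replace_if_proxied("$.message.missing", &event), None);
    }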
---
 .../src/encoding/format/syslog/serializer.rs  | 219 ++++++++++--------
 1 file changed, 126 insertions(+), 93 deletions(-)

diff --git a/lib/codecs/src/encoding/format/syslog/serializer.rs b/lib/codecs/src/encoding/format/syslog/serializer.rs
index 6921e6fe0c3db..064e4bc96577e 100644
--- a/lib/codecs/src/encoding/format/syslog/serializer.rs
+++ b/lib/codecs/src/encoding/format/syslog/serializer.rs
@@ -15,108 +15,141 @@ impl Encoder<Event> for SyslogSerializer {
     type Error = vector_common::Error;
 
     fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> {
-        match event {
-            Event::Log(log) => {
-                let mut buf = String::from("<");
-                let pri = get_num_facility(&self.config.facility, &log) * 8 + get_num_severity(&self.config.severity, &log);
-                buf.push_str(&pri.to_string());
-                buf.push_str(">");
-                match self.config.rfc {
-                    SyslogRFC::Rfc3164 => {
-                        let timestamp = get_timestamp(&log);
-                        let formatted_timestamp = format!(" {} ", timestamp.format("%b %e %H:%M:%S"));
-                        buf.push_str(&formatted_timestamp);
-                        buf.push_str(&get_field("hostname", &log));
-                        buf.push(' ');
-                        buf.push_str(&get_field_or_config(&self.config.tag, &log));
-                        buf.push_str(": ");
-                        if self.config.add_log_source {
-                            add_log_source(&log, &mut buf);
-                        }
-                    },
-                    SyslogRFC::Rfc5424 => {
-                        buf.push_str("1 ");
-                        let timestamp = get_timestamp(&log);
-                        buf.push_str(&timestamp.to_rfc3339_opts(SecondsFormat::Millis, true));
-                        buf.push(' ');
-                        buf.push_str(&get_field("hostname", &log));
-                        buf.push(' ');
-                        buf.push_str(&get_field_or_config(&&self.config.app_name, &log));
-                        buf.push(' ');
-                        buf.push_str(&get_field_or_config(&&self.config.proc_id, &log));
-                        buf.push(' ');
-                        buf.push_str(&get_field_or_config(&&self.config.msg_id, &log));
-                        buf.push_str(" - "); // no structured data
-                        if self.config.add_log_source {
-                            add_log_source(&log, &mut buf);
-                        }
-                    }
-                }
-                let mut payload = if self.config.payload_key.is_empty() {
-                    serde_json::to_vec(&log).unwrap_or_default()
-                } else {
-                    get_field(&&self.config.payload_key, &log).as_bytes().to_vec()
-                };
-                let mut vec = buf.as_bytes().to_vec();
-                vec.append(&mut payload);
-                buffer.put_slice(&vec);
-            },
-            _ => {}
+        if let Event::Log(log_event) = event {
+            let syslog_message = ConfigDecanter::new(log_event).decant_config(&self.config);
+
+            let vec = syslog_message
+                .encode(&self.config.rfc)
+                .as_bytes()
+                .to_vec();
+            buffer.put_slice(&vec);
         }
+
         Ok(())
     }
 }
 
+// Adapts a `LogEvent` into a `SyslogMessage` based on config from `SyslogSerializerConfig`:
+// - Splits off the responsibility of encoding logic to `SyslogMessage` (which is not dependent upon Vector types).
+// - Majority of methods are only needed to support the `decant_config()` operation.
+struct ConfigDecanter {
+    log: LogEvent,
+}
 
-fn get_field_or_config(config_name: &String, log: &LogEvent) -> String {
-    if let Some(field_name) = config_name.strip_prefix("$.message.") {
-        return get_field(field_name, log)
-    } else {
-        return config_name.clone()
+impl ConfigDecanter {
+    fn new(log: LogEvent) -> Self {
+        Self {
+            log,
+        }
     }
-}
 
-fn get_field(field_name: &str, log: &LogEvent) -> String {
-    if let Some(field_value) = log.get(field_name) {
-        return String::from_utf8(field_value.coerce_to_bytes().to_vec()).unwrap_or_default();
-    } else {
-        return NIL_VALUE.to_string()
+    fn decant_config(&self, config: &SyslogSerializerConfig) -> SyslogMessage {
+        let x = |v| self.replace_if_proxied(v).unwrap_or_default();
+        let facility = x(&config.facility);
+        let severity = x(&config.severity);
+
+        let y = |v| self.replace_if_proxied_opt(v);
+        let app_name = y(&config.app_name).unwrap_or("vector".to_owned());
+        let proc_id = y(&config.proc_id);
+        let msg_id = y(&config.msg_id);
+
+        SyslogMessage {
+            pri: Pri::from_str_variants(&facility, &severity),
+            timestamp: self.get_timestamp(),
+            hostname: self.value_by_key("hostname"),
+            tag: Tag {
+                app_name,
+                proc_id,
+                msg_id,
+            },
+            structured_data: None,
+            message: self.get_message(&config),
+        }
     }
-}
 
-fn get_timestamp(log: &LogEvent) -> DateTime<Local> {
-    match log.get("@timestamp") {
-        Some(value) => {
-            if let Value::Timestamp(timestamp) = value {
-                DateTime::<Local>::from(*timestamp)
-            } else {
-                Local::now()
-            }
-        },
-        _ => Local::now()
+    fn replace_if_proxied_opt(&self, value: &Option<String>) -> Option<String> {
+        value.as_ref().and_then(|v| self.replace_if_proxied(v))
+    }
+
+    // When the value has the expected prefix, perform a lookup for a field key without that prefix part.
+    // A failed lookup returns `None`, while a value without the prefix uses the config value as-is.
+    //
+    // Q: Why `$.message.` as the prefix? (Appears to be JSONPath syntax?)
+    // NOTE: Originally named in PR as: `get_field_or_config()`
+    fn replace_if_proxied(&self, value: &str) -> Option<String> {
+        value
+            .strip_prefix("$.message.")
+            .map_or(
+                Some(value.to_owned()),
+                |field_key| self.value_by_key(field_key),
+            )
+    }
+
+    // NOTE: Originally named in PR as: `get_field()`
+    // Now returns a `None` directly instead of converting to either `"-"` or `""`
+    fn value_by_key(&self, field_key: &str) -> Option<String> {
+        self.log.get(field_key).and_then(|field_value| {
+            let bytes = field_value.coerce_to_bytes();
+            String::from_utf8(bytes.to_vec()).ok()
+        })
+    }
+
+    fn get_timestamp(&self) -> DateTime<Local> {
+        // Q: Was this Timestamp key hard-coded to the needs of the original PR author?
+        //
+        // Key `@timestamp` depends on input:
+        // https://vector.dev/guides/level-up/managing-schemas/#example-custom-timestamp-field
+        // https://vector.dev/docs/about/under-the-hood/architecture/data-model/log/#timestamps
+        // NOTE: Log schema key renaming is unavailable when Log namespacing is enabled:
+        // https://vector.dev/docs/reference/configuration/global-options/#log_schema
+        //
+        // NOTE: Log namespacing has metadata `%vector.ingest_timestamp` from a source (file/demo_logs) instead of `timestamp`.
+        // As a `payload_key` it will not respect config `encoding.timestamp_format`, but does when
+        // using the parent object (`%vector`). Inputs without namespacing respect that config setting.
+        if let Some(Value::Timestamp(timestamp)) = self.log.get("@timestamp") {
+            // Q: Utc type returned is changed to Local?
+            // - Could otherwise return `*timestamp` as-is? Why is Local conversion necessary?
+            DateTime::<Local>::from(*timestamp)
+        } else {
+            // NOTE: Local time is encouraged by RFC 5424 when creating a fallback timestamp for RFC 3164
+            Local::now()
+        }
+    }
+
+    fn get_message(&self, config: &SyslogSerializerConfig) -> String {
+        let mut message = String::new();
+
+        if config.add_log_source {
+            message.push_str(self.add_log_source().as_str());
+        }
+
+        // `payload_key` configures where to source the value for the syslog `message`:
+        // - Field key (Valid) => Get value by lookup (value_by_key)
+        // - Field key (Invalid) => Empty string (unwrap_or_default)
+        // - Not configured => JSON encoded `LogEvent` (fallback?)
+        //
+        // Q: Was the JSON fallback intended by the original PR author only for debugging?
+        // Roughly equivalent to using `payload_key: .` (in YAML config)?
+        let payload = if config.payload_key.is_empty() {
+            serde_json::to_string(&self.log).ok()
+        } else {
+            self.value_by_key(&config.payload_key)
+        };
+
+        message.push_str(&payload.unwrap_or_default());
+        message
     }
-}
 
-fn add_log_source(log: &LogEvent, buf: &mut String) {
-    buf.push_str("namespace_name=");
-    buf.push_str(&String::from_utf8(
-        log
-            .get("kubernetes.namespace_name")
-            .map(|h| h.coerce_to_bytes())
-            .unwrap_or_default().to_vec()
-    ).unwrap());
-    buf.push_str(", container_name=");
-    buf.push_str(&String::from_utf8(
-        log
-            .get("kubernetes.container_name")
-            .map(|h| h.coerce_to_bytes())
-            .unwrap_or_default().to_vec()
-    ).unwrap());
-    buf.push_str(", pod_name=");
-    buf.push_str(&String::from_utf8(
-        log
-            .get("kubernetes.pod_name")
-            .map(|h| h.coerce_to_bytes())
-            .unwrap_or_default().to_vec()
-    ).unwrap());
-    buf.push_str(", message=");
+    // NOTE: This is a third-party addition from the original PR author (it is not relevant to the syslog spec):
+    // TODO: Remove, as this type of additional data is better supported via VRL remap + `StructuredData`?
+    fn add_log_source(&self) -> String {
+        let get_value = |s| self.value_by_key(s).unwrap_or_default();
+
+        [
+            "namespace_name=", get_value("kubernetes.namespace_name").as_str(),
+            ", container_name=", get_value("kubernetes.container_name").as_str(),
+            ", pod_name=", get_value("kubernetes.pod_name").as_str(),
+            ", message="
+        ].concat()
+    }
 }

From 0288a80add0022448a1fc2e5016dcefca0b7be5f Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Thu, 14 Mar 2024 17:51:43 +1300
Subject: [PATCH 05/24] refactor: `SyslogSerializerConfig`

`SyslogSerializerConfig` has been simplified.
- Facility / Severity deserializer methods aren't needed, as per their prior
  refactor with `strum`.
- The `app_name` default is set via `decant_config()` when not configured
  explicitly.
- The other two fields calling a `default_nil_value()` method instead use an
  option value which encodes `None` into the expected `-` value.
- Everything else does not need a serde attribute to apply a default; the
  `Default` trait on the struct is sufficient.
- `trim_prefix` was removed as it didn't seem relevant. `tag` was also
  removed, as it's represented by several subfields in RFC 5424 which
  RFC 3164 can also use.

`SyslogMessage::encode()` refactors the original PR encoding logic:
- It focuses on the syslog header fields; the PRI and final message value
  have already been prepared prior. They are only referenced at the end of
  `encode()` to combine into the final string output.
- While less efficient than `push_str()`, each match variant has a clear
  structure returned via the array `join(" ")`, which minimizes the noise of
  `SP` from the original PR. Value preparation prior to this is clear and
  better documented.
- `Tag` is a child struct to keep the main logic easy to grok.
  `StructuredData` is a similar case.
---
 .../format/syslog/serializer_config.rs        | 173 +++++++++++++-----
 1 file changed, 132 insertions(+), 41 deletions(-)

diff --git a/lib/codecs/src/encoding/format/syslog/serializer_config.rs b/lib/codecs/src/encoding/format/syslog/serializer_config.rs
index f0aae986c5c1d..57bf85aca6995 100644
--- a/lib/codecs/src/encoding/format/syslog/serializer_config.rs
+++ b/lib/codecs/src/encoding/format/syslog/serializer_config.rs
@@ -1,67 +1,49 @@
 const NIL_VALUE: &'static str = "-";
+const SYSLOG_V1: &'static str = "1";
 
 /// Syslog RFC
 #[configurable_component]
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Default)]
 #[serde(rename_all = "snake_case")]
 pub enum SyslogRFC {
     /// RFC 3164
     Rfc3164,
 
+    #[default]
     /// RFC 5424
     Rfc5424
 }
 
-impl Default for SyslogRFC {
-    fn default() -> Self {
-        SyslogRFC::Rfc5424
-    }
-}
-
 /// Config used to build a `SyslogSerializer`.
 #[configurable_component]
-#[derive(Debug, Clone, Default)]
+// Serde default makes all config keys optional.
+// Each field assigns either a fixed value, or field name (lookup field key to retrieve dynamic value per `LogEvent`).
+#[serde(default)]
+#[derive(Clone, Debug, Default)]
 pub struct SyslogSerializerConfig {
     /// RFC
-    #[serde(default)]
     rfc: SyslogRFC,
 
     /// Facility
-    #[serde(default)]
-    #[serde(deserialize_with = "deserialize_facility")]
-    facility: Facility,
-
+    facility: String,
     /// Severity
-    #[serde(default)]
-    #[serde(deserialize_with = "deserialize_severity")]
-    severity: Severity,
-
-    /// Tag
-    #[serde(default)]
-    tag: String,
+    severity: String,
 
-    /// Trim prefix
-    trim_prefix: Option<String>,
+    /// App Name
+    app_name: Option<String>,
+    /// Proc ID
+    proc_id: Option<String>,
+    /// Msg ID
+    msg_id: Option<String>,
 
     /// Payload key
-    #[serde(default)]
     payload_key: String,
-
     /// Add log source
-    #[serde(default)]
     add_log_source: bool,
 
-    /// App Name, RFC 5424 only
-    #[serde(default = "default_app_name")]
-    app_name: String,
-
-    /// Proc ID, RFC 5424 only
-    #[serde(default = "default_nil_value")]
-    proc_id: String,
-
-    /// Msg ID, RFC 5424 only
-    #[serde(default = "default_nil_value")]
-    msg_id: String
+    // NOTE: The `tag` field was removed; it is better represented by the equivalents in RFC 5424.
+    // Q: The majority of the fields above pragmatically only make sense as config for keys to query?
+    // Q: What was `trim_prefix` for? It is not used in file, nor in Vector source tree.
+    // Q: `add_log_source` doesn't belong here? Better handled by the `remap` transform with structured data?
 }
 
 impl SyslogSerializerConfig {
     /// Build the `SyslogSerializer` from this configuration.
     pub fn build(&self) -> SyslogSerializer {
         SyslogSerializer::new(&self)
     }
 
     /// The data type of events that are accepted by `SyslogSerializer`.
     pub fn input_type(&self) -> DataType {
         DataType::Log
     }
 
     /// The schema required by the serializer.
     pub fn schema_requirement(&self) -> schema::Requirement {
         schema::Requirement::empty()
     }
 }
 
-fn default_app_name() -> String {
-    String::from("vector")
-}
-
-fn default_nil_value() -> String {
-    String::from(NIL_VALUE)
-}
+// ABNF definition:
+// https://datatracker.ietf.org/doc/html/rfc5424#section-6
+// https://datatracker.ietf.org/doc/html/rfc5424#section-6.2
+#[derive(Default, Debug)]
+struct SyslogMessage {
+    pri: Pri,
+    timestamp: DateTime<Local>,
+    hostname: Option<String>,
+    tag: Tag,
+    structured_data: Option<StructuredData>,
+    message: String,
+}
+
+impl SyslogMessage {
+    fn encode(&self, rfc: &SyslogRFC) -> String {
+        // Q: NIL_VALUE is unlikely?
Technically invalid for RFC 3164: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.4 + // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 + let hostname = self.hostname.as_deref().unwrap_or(NIL_VALUE); + let structured_data = self.structured_data.as_ref().map(|sd| sd.encode()); + + let fields_encoded = match rfc { + SyslogRFC::Rfc3164 => { + // TIMESTAMP field format: + // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 + let timestamp = self.timestamp.format("%b %e %H:%M:%S").to_string(); + // MSG part begins with TAG field + optional context: + // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.3 + let mut msg_start = self.tag.encode_rfc_3164(); + // When RFC 5424 "Structured Data" is available, it can be compatible with RFC 3164 + // by including it in the RFC 3164 `CONTENT` field (part of MSG): + // https://datatracker.ietf.org/doc/html/rfc5424#appendix-A.1 + if let Some(sd) = structured_data.as_deref() { + msg_start = msg_start + " " + sd + } + + [ + timestamp.as_str(), + hostname, + &msg_start, + ].join(" ") + }, + SyslogRFC::Rfc5424 => { + // HEADER part fields: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2 + let version = SYSLOG_V1; + let timestamp = self.timestamp.to_rfc3339_opts(SecondsFormat::Millis, true); + let tag = self.tag.encode_rfc_5424(); + // Structured Data: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.3 + let sd = structured_data.as_deref().unwrap_or(NIL_VALUE); + + [ + version, + timestamp.as_str(), + hostname, + &tag, + sd + ].join(" ") + } + }; + + [ + &self.pri.encode(), + &fields_encoded, + " ", + &self.message, + ].concat() + + // Q: RFC 5424 MSG part should technically ensure UTF-8 message begins with BOM? + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.4 + } +} + +#[derive(Default, Debug)] +struct Tag { + app_name: String, + proc_id: Option, + msg_id: Option +} + +// NOTE: `.as_deref()` usage below avoids requiring `self.clone()` +impl Tag { + // Roughly equivalent - RFC 5424 fields can compose the start of + // an RFC 3164 MSG part (TAG + CONTENT fields): + // https://datatracker.ietf.org/doc/html/rfc5424#appendix-A.1 + fn encode_rfc_3164(&self) -> String { + let Self { app_name, proc_id, msg_id } = self; + + match proc_id.as_deref().or(msg_id.as_deref()) { + Some(context) => [&app_name, "[", &context, "]:"].concat(), + None => [&app_name, ":"].concat() + } + } + + // TAG was split into separate fields: APP-NAME, PROCID, MSGID + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.5 + fn encode_rfc_5424(&self) -> String { + let Self { app_name, proc_id, msg_id } = self; + + [ + &app_name, + proc_id.as_deref().unwrap_or(NIL_VALUE), + msg_id.as_deref().unwrap_or(NIL_VALUE), + ].join(" ") + } } -fn default_nil_value() -> String { - String::from(NIL_VALUE) +#[derive(Debug)] +struct StructuredData {} + +impl StructuredData { + fn encode(&self) -> String { + todo!() + } } From 1049ebdc8481ae6bb9465aba581f5618644643ef Mon Sep 17 00:00:00 2001 From: polarathene <5098581+polarathene@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:05:32 +1300 Subject: [PATCH 06/24] chore: Merge back into `syslog.rs` No changes beyond relocating the code into a single file. 
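Since the whole encoder now lives in one file, for orientation: a standalone sketch of the wire format `SyslogMessage::encode()` assembles for each RFC. The hostname, app-name, timestamp, and message values below are made up for illustration; PRI `<14>` is the default facility `user` (1) * 8 + default severity `informational` (6).

fn main() {
    // RFC 5424: PRI VERSION SP TIMESTAMP SP HOSTNAME SP APP-NAME SP PROCID SP MSGID SP STRUCTURED-DATA SP MSG
    let header = ["1", "2024-03-15T04:05:32.123Z", "host-01", "vector", "-", "-", "-"].join(" ");
    assert_eq!(
        ["<14>", header.as_str(), " ", "hello world"].concat(),
        "<14>1 2024-03-15T04:05:32.123Z host-01 vector - - - hello world"
    );

    // RFC 3164: PRI TIMESTAMP SP HOSTNAME SP TAG: MSG
    // (`%b %e %H:%M:%S` pads single-digit days with a space, e.g. `Mar  5`)
    let header = ["Mar 15 04:05:32", "host-01", "vector:"].join(" ");
    assert_eq!(
        ["<14>", header.as_str(), " ", "hello world"].concat(),
        "<14>Mar 15 04:05:32 host-01 vector: hello world"
    );
}

Note that PRI is glued directly to the first HEADER field with no separating `SP`, matching the final `concat()` in `encode()`.
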
--- lib/codecs/src/encoding/format/syslog.rs | 454 +++++++++++++++++- .../format/syslog/facility_severity.rs | 106 ---- .../src/encoding/format/syslog/serializer.rs | 155 ------ .../format/syslog/serializer_config.rs | 181 ------- 4 files changed, 453 insertions(+), 443 deletions(-) delete mode 100644 lib/codecs/src/encoding/format/syslog/facility_severity.rs delete mode 100644 lib/codecs/src/encoding/format/syslog/serializer.rs delete mode 100644 lib/codecs/src/encoding/format/syslog/serializer_config.rs diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index 9077c806bd698..01be0ad01d8c7 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -3,5 +3,457 @@ use tokio_util::codec::Encoder; use vector_core::{config::DataType, event::{Event, LogEvent}, schema}; use chrono::{DateTime, SecondsFormat, Local}; use vrl::value::Value; -use serde::{de, Deserialize}; use vector_config::configurable_component; + +use std::str::FromStr; +use strum::{FromRepr, EnumString}; + +/// Config used to build a `SyslogSerializer`. +#[configurable_component] +// Serde default makes all config keys optional. +// Each field assigns either a fixed value, or field name (lookup field key to retrieve dynamic value per `LogEvent`). +#[serde(default)] +#[derive(Clone, Debug, Default)] +pub struct SyslogSerializerConfig { + /// RFC + rfc: SyslogRFC, + /// Facility + facility: String, + /// Severity + severity: String, + + /// App Name + app_name: Option, + /// Proc ID + proc_id: Option, + /// Msg ID + msg_id: Option, + + /// Payload key + payload_key: String, + /// Add log source + add_log_source: bool, + + // NOTE: The `tag` field was removed, it is better represented by the equivalents in RFC 5424. + // Q: The majority of the fields above pragmatically only make sense as config for keys to query? + // Q: What was `trim_prefix` for? It is not used in file, nor in Vector source tree. + // Q: `add_log_source` doesn't belong here? Better handled by the `remap` transform with structured data? +} + +impl SyslogSerializerConfig { + /// Build the `SyslogSerializer` from this configuration. + pub fn build(&self) -> SyslogSerializer { + SyslogSerializer::new(&self) + } + + /// The data type of events that are accepted by `SyslogSerializer`. + pub fn input_type(&self) -> DataType { + DataType::Log + } + + /// The schema required by the serializer. + pub fn schema_requirement(&self) -> schema::Requirement { + schema::Requirement::empty() + } +} + +/// Serializer that converts an `Event` to bytes using the Syslog format. +#[derive(Debug, Clone)] +pub struct SyslogSerializer { + config: SyslogSerializerConfig +} + +impl SyslogSerializer { + /// Creates a new `SyslogSerializer`. + pub fn new(conf: &SyslogSerializerConfig) -> Self { + Self { config: conf.clone() } + } +} + +impl Encoder for SyslogSerializer { + type Error = vector_common::Error; + + fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { + if let Event::Log(log_event) = event { + let syslog_message = ConfigDecanter::new(log_event).decant_config(&self.config); + + let vec = syslog_message + .encode(&self.config.rfc) + .as_bytes() + .to_vec(); + buffer.put_slice(&vec); + } + + Ok(()) + } +} + +// Adapts a `LogEvent` into a `SyslogMessage` based on config from `SyslogSerializerConfig`: +// - Splits off the responsibility of encoding logic to `SyslogMessage` (which is not dependent upon Vector types). 
+// - Majority of methods are only needed to support the `decant_config()` operation. +struct ConfigDecanter { + log: LogEvent, +} + +impl ConfigDecanter { + fn new(log: LogEvent) -> Self { + Self { + log, + } + } + + fn decant_config(&self, config: &SyslogSerializerConfig) -> SyslogMessage { + let x = |v| self.replace_if_proxied(v).unwrap_or_default(); + let facility = x(&config.facility); + let severity = x(&config.severity); + + let y = |v| self.replace_if_proxied_opt(v); + let app_name = y(&config.app_name).unwrap_or("vector".to_owned()); + let proc_id = y(&config.proc_id); + let msg_id = y(&config.msg_id); + + SyslogMessage { + pri: Pri::from_str_variants(&facility, &severity), + timestamp: self.get_timestamp(), + hostname: self.value_by_key("hostname"), + tag: Tag { + app_name, + proc_id, + msg_id, + }, + structured_data: None, + message: self.get_message(&config), + } + } + + fn replace_if_proxied_opt(&self, value: &Option) -> Option { + value.as_ref().and_then(|v| self.replace_if_proxied(v)) + } + + // When the value has the expected prefix, perform a lookup for a field key without that prefix part. + // A failed lookup returns `None`, while a value without the prefix uses the config value as-is. + // + // Q: Why `$.message.` as the prefix? (Appears to be JSONPath syntax?) + // NOTE: Originally named in PR as: `get_field_or_config()` + fn replace_if_proxied(&self, value: &str) -> Option { + value + .strip_prefix("$.message.") + .map_or( + Some(value.to_owned()), + |field_key| self.value_by_key(field_key), + ) + } + + // NOTE: Originally named in PR as: `get_field()` + // Now returns a `None` directly instead of converting to either `"-"` or `""` + fn value_by_key(&self, field_key: &str) -> Option { + self.log.get(field_key).and_then(|field_value| { + let bytes = field_value.coerce_to_bytes(); + String::from_utf8(bytes.to_vec()).ok() + }) + } + + fn get_timestamp(&self) -> DateTime:: { + // Q: Was this Timestamp key hard-coded to the needs of the original PR author? + // + // Key `@timestamp` depends on input: + // https://vector.dev/guides/level-up/managing-schemas/#example-custom-timestamp-field + // https://vector.dev/docs/about/under-the-hood/architecture/data-model/log/#timestamps + // NOTE: Log schema key renaming is unavailable when Log namespacing is enabled: + // https://vector.dev/docs/reference/configuration/global-options/#log_schema + // + // NOTE: Log namespacing has metadata `%vector.ingest_timestamp` from a source (file/demo_logs) instead of `timestamp`. + // As a `payload_key` it will not respect config `encoding.timestamp_format`, but does when + // using the parent object (`%vector`). Inputs without namespacing respect that config setting. + if let Some(Value::Timestamp(timestamp)) = self.log.get("@timestamp") { + // Q: Utc type returned is changed to Local? + // - Could otherwise return `*timestamp` as-is? Why is Local conversion necessary? + DateTime::::from(*timestamp) + } else { + // NOTE: Local time is encouraged by RFC 5424 when creating a fallback timestamp for RFC 3164 + Local::now() + } + } + + fn get_message(&self, config: &SyslogSerializerConfig) -> String { + let mut message = String::new(); + + if config.add_log_source { + message.push_str(self.add_log_source().as_str()); + } + + // `payload_key` configures where to source the value for the syslog `message`: + // - Field key (Valid) => Get value by lookup (value_by_key) + // - Field key (Invalid) => Empty string (unwrap_or_default) + // - Not configured => JSON encoded `LogEvent` (fallback?) 
+ // + // Q: Was the JSON fallback intended by the original PR author only for debugging? + // Roughly equivalent to using `payload_key: .` (in YAML config)? + let payload = if config.payload_key.is_empty() { + serde_json::to_string(&self.log).ok() + } else { + self.value_by_key(&config.payload_key) + }; + + message.push_str(&payload.unwrap_or_default()); + message + } + + // NOTE: This is a third-party addition from the original PR author (it is not relevant to the syslog spec): + // TODO: Remove, as this type of additional data is better supported via VRL remap + `StructuredData`? + fn add_log_source(&self) -> String { + let get_value = |s| self.value_by_key(s).unwrap_or_default(); + + [ + "namespace_name=", get_value("kubernetes.namespace_name").as_str(), + ", container_name=", get_value("kubernetes.container_name").as_str(), + ", pod_name=", get_value("kubernetes.pod_name").as_str(), + ", message=" + ].concat() + } +} + +// +// SyslogMessage support +// + +const NIL_VALUE: &'static str = "-"; +const SYSLOG_V1: &'static str = "1"; + +/// Syslog RFC +#[configurable_component] +#[derive(Clone, Debug, Default)] +#[serde(rename_all = "snake_case")] +pub enum SyslogRFC { + /// RFC 3164 + Rfc3164, + + #[default] + /// RFC 5424 + Rfc5424 +} + +// ABNF definition: +// https://datatracker.ietf.org/doc/html/rfc5424#section-6 +// https://datatracker.ietf.org/doc/html/rfc5424#section-6.2 +#[derive(Default, Debug)] +struct SyslogMessage { + pri: Pri, + timestamp: DateTime::, + hostname: Option, + tag: Tag, + structured_data: Option, + message: String, +} + +impl SyslogMessage { + fn encode(&self, rfc: &SyslogRFC) -> String { + // Q: NIL_VALUE is unlikely? Technically invalid for RFC 3164: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.4 + // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 + let hostname = self.hostname.as_deref().unwrap_or(NIL_VALUE); + let structured_data = self.structured_data.as_ref().map(|sd| sd.encode()); + + let fields_encoded = match rfc { + SyslogRFC::Rfc3164 => { + // TIMESTAMP field format: + // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 + let timestamp = self.timestamp.format("%b %e %H:%M:%S").to_string(); + // MSG part begins with TAG field + optional context: + // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.3 + let mut msg_start = self.tag.encode_rfc_3164(); + // When RFC 5424 "Structured Data" is available, it can be compatible with RFC 3164 + // by including it in the RFC 3164 `CONTENT` field (part of MSG): + // https://datatracker.ietf.org/doc/html/rfc5424#appendix-A.1 + if let Some(sd) = structured_data.as_deref() { + msg_start = msg_start + " " + sd + } + + [ + timestamp.as_str(), + hostname, + &msg_start, + ].join(" ") + }, + SyslogRFC::Rfc5424 => { + // HEADER part fields: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2 + let version = SYSLOG_V1; + let timestamp = self.timestamp.to_rfc3339_opts(SecondsFormat::Millis, true); + let tag = self.tag.encode_rfc_5424(); + // Structured Data: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.3 + let sd = structured_data.as_deref().unwrap_or(NIL_VALUE); + + [ + version, + timestamp.as_str(), + hostname, + &tag, + sd + ].join(" ") + } + }; + + [ + &self.pri.encode(), + &fields_encoded, + " ", + &self.message, + ].concat() + + // Q: RFC 5424 MSG part should technically ensure UTF-8 message begins with BOM? 
+ // https://datatracker.ietf.org/doc/html/rfc5424#section-6.4 + } +} + +#[derive(Default, Debug)] +struct Tag { + app_name: String, + proc_id: Option, + msg_id: Option +} + +// NOTE: `.as_deref()` usage below avoids requiring `self.clone()` +impl Tag { + // Roughly equivalent - RFC 5424 fields can compose the start of + // an RFC 3164 MSG part (TAG + CONTENT fields): + // https://datatracker.ietf.org/doc/html/rfc5424#appendix-A.1 + fn encode_rfc_3164(&self) -> String { + let Self { app_name, proc_id, msg_id } = self; + + match proc_id.as_deref().or(msg_id.as_deref()) { + Some(context) => [&app_name, "[", &context, "]:"].concat(), + None => [&app_name, ":"].concat() + } + } + + // TAG was split into separate fields: APP-NAME, PROCID, MSGID + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.5 + fn encode_rfc_5424(&self) -> String { + let Self { app_name, proc_id, msg_id } = self; + + [ + &app_name, + proc_id.as_deref().unwrap_or(NIL_VALUE), + msg_id.as_deref().unwrap_or(NIL_VALUE), + ].join(" ") + } +} + +#[derive(Debug)] +struct StructuredData {} + +impl StructuredData { + fn encode(&self) -> String { + todo!() + } +} + +// +// Facility + Severity support +// + +#[derive(Default, Debug)] +struct Pri { + facility: Facility, + severity: Severity, +} + +impl Pri { + fn from_str_variants(facility_variant: &str, severity_variant: &str) -> Self { + // The original PR had `deserialize_*()` methods parsed a value to a `u8` or stored a field key as a `String` + // Later the equivalent `get_num_*()` method would retrieve the `u8` value or lookup the field key for the actual value, + // otherwise it'd fallback to the default Facility/Severity value. + // This approach instead parses a string of the name or ordinal representation, + // any reference via field key lookup should have already happened by this point. + let facility = Facility::into_variant(&facility_variant).unwrap_or(Facility::User); + let severity = Severity::into_variant(&severity_variant).unwrap_or(Severity::Informational); + + Self { + facility, + severity, + } + } + + // The last paragraph describes how to compose the enums into `PRIVAL`: + // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.1 + fn encode(&self) -> String { + let prival = (self.facility as u8 * 8) + self.severity as u8; + ["<", &prival.to_string(), ">"].concat() + } +} + +// Facility + Severity mapping from Name => Ordinal number: + +/// Syslog facility +#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)] +#[strum(serialize_all = "kebab-case")] +enum Facility { + Kern = 0, + #[default] + User = 1, + Mail = 2, + Daemon = 3, + Auth = 4, + Syslog = 5, + LPR = 6, + News = 7, + UUCP = 8, + Cron = 9, + AuthPriv = 10, + FTP = 11, + NTP = 12, + Security = 13, + Console = 14, + SolarisCron = 15, + Local0 = 16, + Local1 = 17, + Local2 = 18, + Local3 = 19, + Local4 = 20, + Local5 = 21, + Local6 = 22, + Local7 = 23, +} + +/// Syslog severity +#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)] +#[strum(serialize_all = "kebab-case")] +enum Severity { + Emergency = 0, + Alert = 1, + Critical = 2, + Error = 3, + Warning = 4, + Notice = 5, + #[default] + Informational = 6, + Debug = 7, +} + +// Additionally support variants from string-based integers: +// Parse a string name, with fallback for parsing a string ordinal number. 
+impl Facility { + fn into_variant(variant_name: &str) -> Option { + let s = variant_name.to_ascii_lowercase(); + + s.parse::().map_or_else( + |_| Self::from_str(&s).ok(), + |num| Self::from_repr(num), + ) + } +} + +// NOTE: The `strum` crate does not provide traits, +// requiring copy/paste of the prior impl instead. +impl Severity { + fn into_variant(variant_name: &str) -> Option { + let s = variant_name.to_ascii_lowercase(); + + s.parse::().map_or_else( + |_| Self::from_str(&s).ok(), + |num| Self::from_repr(num), + ) + } +} diff --git a/lib/codecs/src/encoding/format/syslog/facility_severity.rs b/lib/codecs/src/encoding/format/syslog/facility_severity.rs deleted file mode 100644 index 786fe61cd0ead..0000000000000 --- a/lib/codecs/src/encoding/format/syslog/facility_severity.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::str::FromStr; -use strum::{FromRepr, EnumString}; - -#[derive(Default, Debug)] -struct Pri { - facility: Facility, - severity: Severity, -} - -impl Pri { - fn from_str_variants(facility_variant: &str, severity_variant: &str) -> Self { - // The original PR had `deserialize_*()` methods parsed a value to a `u8` or stored a field key as a `String` - // Later the equivalent `get_num_*()` method would retrieve the `u8` value or lookup the field key for the actual value, - // otherwise it'd fallback to the default Facility/Severity value. - // This approach instead parses a string of the name or ordinal representation, - // any reference via field key lookup should have already happened by this point. - let facility = Facility::into_variant(&facility_variant).unwrap_or(Facility::User); - let severity = Severity::into_variant(&severity_variant).unwrap_or(Severity::Informational); - - Self { - facility, - severity, - } - } - - // The last paragraph describes how to compose the enums into `PRIVAL`: - // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.1 - fn encode(&self) -> String { - let prival = (self.facility as u8 * 8) + self.severity as u8; - ["<", &prival.to_string(), ">"].concat() - } -} - -// Facility + Severity mapping from Name => Ordinal number: - -/// Syslog facility -#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)] -#[strum(serialize_all = "kebab-case")] -enum Facility { - Kern = 0, - #[default] - User = 1, - Mail = 2, - Daemon = 3, - Auth = 4, - Syslog = 5, - LPR = 6, - News = 7, - UUCP = 8, - Cron = 9, - AuthPriv = 10, - FTP = 11, - NTP = 12, - Security = 13, - Console = 14, - SolarisCron = 15, - Local0 = 16, - Local1 = 17, - Local2 = 18, - Local3 = 19, - Local4 = 20, - Local5 = 21, - Local6 = 22, - Local7 = 23, -} - -/// Syslog severity -#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)] -#[strum(serialize_all = "kebab-case")] -enum Severity { - Emergency = 0, - Alert = 1, - Critical = 2, - Error = 3, - Warning = 4, - Notice = 5, - #[default] - Informational = 6, - Debug = 7, -} - -// Additionally support variants from string-based integers: -// Parse a string name, with fallback for parsing a string ordinal number. -impl Facility { - fn into_variant(variant_name: &str) -> Option { - let s = variant_name.to_ascii_lowercase(); - - s.parse::().map_or_else( - |_| Self::from_str(&s).ok(), - |num| Self::from_repr(num), - ) - } -} - -// NOTE: The `strum` crate does not provide traits, -// requiring copy/paste of the prior impl instead. 
-impl Severity { - fn into_variant(variant_name: &str) -> Option { - let s = variant_name.to_ascii_lowercase(); - - s.parse::().map_or_else( - |_| Self::from_str(&s).ok(), - |num| Self::from_repr(num), - ) - } -} diff --git a/lib/codecs/src/encoding/format/syslog/serializer.rs b/lib/codecs/src/encoding/format/syslog/serializer.rs deleted file mode 100644 index 064e4bc96577e..0000000000000 --- a/lib/codecs/src/encoding/format/syslog/serializer.rs +++ /dev/null @@ -1,155 +0,0 @@ -/// Serializer that converts an `Event` to bytes using the Syslog format. -#[derive(Debug, Clone)] -pub struct SyslogSerializer { - config: SyslogSerializerConfig -} - -impl SyslogSerializer { - /// Creates a new `SyslogSerializer`. - pub fn new(conf: &SyslogSerializerConfig) -> Self { - Self { config: conf.clone() } - } -} - -impl Encoder for SyslogSerializer { - type Error = vector_common::Error; - - fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { - if let Event::Log(log_event) = event { - let syslog_message = ConfigDecanter::new(log_event).decant_config(&self.config); - - let vec = syslog_message - .encode(&self.config.rfc) - .as_bytes() - .to_vec(); - buffer.put_slice(&vec); - } - - Ok(()) - } -} - -// Adapts a `LogEvent` into a `SyslogMessage` based on config from `SyslogSerializerConfig`: -// - Splits off the responsibility of encoding logic to `SyslogMessage` (which is not dependent upon Vector types). -// - Majority of methods are only needed to support the `decant_config()` operation. -struct ConfigDecanter { - log: LogEvent, -} - -impl ConfigDecanter { - fn new(log: LogEvent) -> Self { - Self { - log, - } - } - - fn decant_config(&self, config: &SyslogSerializerConfig) -> SyslogMessage { - let x = |v| self.replace_if_proxied(v).unwrap_or_default(); - let facility = x(&config.facility); - let severity = x(&config.severity); - - let y = |v| self.replace_if_proxied_opt(v); - let app_name = y(&config.app_name).unwrap_or("vector".to_owned()); - let proc_id = y(&config.proc_id); - let msg_id = y(&config.msg_id); - - SyslogMessage { - pri: Pri::from_str_variants(&facility, &severity), - timestamp: self.get_timestamp(), - hostname: self.value_by_key("hostname"), - tag: Tag { - app_name, - proc_id, - msg_id, - }, - structured_data: None, - message: self.get_message(&config), - } - } - - fn replace_if_proxied_opt(&self, value: &Option) -> Option { - value.as_ref().and_then(|v| self.replace_if_proxied(v)) - } - - // When the value has the expected prefix, perform a lookup for a field key without that prefix part. - // A failed lookup returns `None`, while a value without the prefix uses the config value as-is. - // - // Q: Why `$.message.` as the prefix? (Appears to be JSONPath syntax?) - // NOTE: Originally named in PR as: `get_field_or_config()` - fn replace_if_proxied(&self, value: &str) -> Option { - value - .strip_prefix("$.message.") - .map_or( - Some(value.to_owned()), - |field_key| self.value_by_key(field_key), - ) - } - - // NOTE: Originally named in PR as: `get_field()` - // Now returns a `None` directly instead of converting to either `"-"` or `""` - fn value_by_key(&self, field_key: &str) -> Option { - self.log.get(field_key).and_then(|field_value| { - let bytes = field_value.coerce_to_bytes(); - String::from_utf8(bytes.to_vec()).ok() - }) - } - - fn get_timestamp(&self) -> DateTime:: { - // Q: Was this Timestamp key hard-coded to the needs of the original PR author? 
- // - // Key `@timestamp` depends on input: - // https://vector.dev/guides/level-up/managing-schemas/#example-custom-timestamp-field - // https://vector.dev/docs/about/under-the-hood/architecture/data-model/log/#timestamps - // NOTE: Log schema key renaming is unavailable when Log namespacing is enabled: - // https://vector.dev/docs/reference/configuration/global-options/#log_schema - // - // NOTE: Log namespacing has metadata `%vector.ingest_timestamp` from a source (file/demo_logs) instead of `timestamp`. - // As a `payload_key` it will not respect config `encoding.timestamp_format`, but does when - // using the parent object (`%vector`). Inputs without namespacing respect that config setting. - if let Some(Value::Timestamp(timestamp)) = self.log.get("@timestamp") { - // Q: Utc type returned is changed to Local? - // - Could otherwise return `*timestamp` as-is? Why is Local conversion necessary? - DateTime::::from(*timestamp) - } else { - // NOTE: Local time is encouraged by RFC 5424 when creating a fallback timestamp for RFC 3164 - Local::now() - } - } - - fn get_message(&self, config: &SyslogSerializerConfig) -> String { - let mut message = String::new(); - - if config.add_log_source { - message.push_str(self.add_log_source().as_str()); - } - - // `payload_key` configures where to source the value for the syslog `message`: - // - Field key (Valid) => Get value by lookup (value_by_key) - // - Field key (Invalid) => Empty string (unwrap_or_default) - // - Not configured => JSON encoded `LogEvent` (fallback?) - // - // Q: Was the JSON fallback intended by the original PR author only for debugging? - // Roughly equivalent to using `payload_key: .` (in YAML config)? - let payload = if config.payload_key.is_empty() { - serde_json::to_string(&self.log).ok() - } else { - self.value_by_key(&config.payload_key) - }; - - message.push_str(&payload.unwrap_or_default()); - message - } - - // NOTE: This is a third-party addition from the original PR author (it is not relevant to the syslog spec): - // TODO: Remove, as this type of additional data is better supported via VRL remap + `StructuredData`? - fn add_log_source(&self) -> String { - let get_value = |s| self.value_by_key(s).unwrap_or_default(); - - [ - "namespace_name=", get_value("kubernetes.namespace_name").as_str(), - ", container_name=", get_value("kubernetes.container_name").as_str(), - ", pod_name=", get_value("kubernetes.pod_name").as_str(), - ", message=" - ].concat() - } -} diff --git a/lib/codecs/src/encoding/format/syslog/serializer_config.rs b/lib/codecs/src/encoding/format/syslog/serializer_config.rs deleted file mode 100644 index 57bf85aca6995..0000000000000 --- a/lib/codecs/src/encoding/format/syslog/serializer_config.rs +++ /dev/null @@ -1,181 +0,0 @@ -const NIL_VALUE: &'static str = "-"; -const SYSLOG_V1: &'static str = "1"; - -/// Syslog RFC -#[configurable_component] -#[derive(Clone, Debug, Default)] -#[serde(rename_all = "snake_case")] -pub enum SyslogRFC { - /// RFC 3164 - Rfc3164, - - #[default] - /// RFC 5424 - Rfc5424 -} - -/// Config used to build a `SyslogSerializer`. -#[configurable_component] -// Serde default makes all config keys optional. -// Each field assigns either a fixed value, or field name (lookup field key to retrieve dynamic value per `LogEvent`). 
-#[serde(default)] -#[derive(Clone, Debug, Default)] -pub struct SyslogSerializerConfig { - /// RFC - rfc: SyslogRFC, - /// Facility - facility: String, - /// Severity - severity: String, - - /// App Name - app_name: Option, - /// Proc ID - proc_id: Option, - /// Msg ID - msg_id: Option, - - /// Payload key - payload_key: String, - /// Add log source - add_log_source: bool, - - // NOTE: The `tag` field was removed, it is better represented by the equivalents in RFC 5424. - // Q: The majority of the fields above pragmatically only make sense as config for keys to query? - // Q: What was `trim_prefix` for? It is not used in file, nor in Vector source tree. - // Q: `add_log_source` doesn't belong here? Better handled by the `remap` transform with structured data? -} - -impl SyslogSerializerConfig { - /// Build the `SyslogSerializer` from this configuration. - pub fn build(&self) -> SyslogSerializer { - SyslogSerializer::new(&self) - } - - /// The data type of events that are accepted by `SyslogSerializer`. - pub fn input_type(&self) -> DataType { - DataType::Log - } - - /// The schema required by the serializer. - pub fn schema_requirement(&self) -> schema::Requirement { - schema::Requirement::empty() - } -} - -// ABNF definition: -// https://datatracker.ietf.org/doc/html/rfc5424#section-6 -// https://datatracker.ietf.org/doc/html/rfc5424#section-6.2 -#[derive(Default, Debug)] -struct SyslogMessage { - pri: Pri, - timestamp: DateTime::, - hostname: Option, - tag: Tag, - structured_data: Option, - message: String, -} - -impl SyslogMessage { - fn encode(&self, rfc: &SyslogRFC) -> String { - // Q: NIL_VALUE is unlikely? Technically invalid for RFC 3164: - // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.4 - // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 - let hostname = self.hostname.as_deref().unwrap_or(NIL_VALUE); - let structured_data = self.structured_data.as_ref().map(|sd| sd.encode()); - - let fields_encoded = match rfc { - SyslogRFC::Rfc3164 => { - // TIMESTAMP field format: - // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 - let timestamp = self.timestamp.format("%b %e %H:%M:%S").to_string(); - // MSG part begins with TAG field + optional context: - // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.3 - let mut msg_start = self.tag.encode_rfc_3164(); - // When RFC 5424 "Structured Data" is available, it can be compatible with RFC 3164 - // by including it in the RFC 3164 `CONTENT` field (part of MSG): - // https://datatracker.ietf.org/doc/html/rfc5424#appendix-A.1 - if let Some(sd) = structured_data.as_deref() { - msg_start = msg_start + " " + sd - } - - [ - timestamp.as_str(), - hostname, - &msg_start, - ].join(" ") - }, - SyslogRFC::Rfc5424 => { - // HEADER part fields: - // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2 - let version = SYSLOG_V1; - let timestamp = self.timestamp.to_rfc3339_opts(SecondsFormat::Millis, true); - let tag = self.tag.encode_rfc_5424(); - // Structured Data: - // https://datatracker.ietf.org/doc/html/rfc5424#section-6.3 - let sd = structured_data.as_deref().unwrap_or(NIL_VALUE); - - [ - version, - timestamp.as_str(), - hostname, - &tag, - sd - ].join(" ") - } - }; - - [ - &self.pri.encode(), - &fields_encoded, - " ", - &self.message, - ].concat() - - // Q: RFC 5424 MSG part should technically ensure UTF-8 message begins with BOM? 
- // https://datatracker.ietf.org/doc/html/rfc5424#section-6.4 - } -} - -#[derive(Default, Debug)] -struct Tag { - app_name: String, - proc_id: Option, - msg_id: Option -} - -// NOTE: `.as_deref()` usage below avoids requiring `self.clone()` -impl Tag { - // Roughly equivalent - RFC 5424 fields can compose the start of - // an RFC 3164 MSG part (TAG + CONTENT fields): - // https://datatracker.ietf.org/doc/html/rfc5424#appendix-A.1 - fn encode_rfc_3164(&self) -> String { - let Self { app_name, proc_id, msg_id } = self; - - match proc_id.as_deref().or(msg_id.as_deref()) { - Some(context) => [&app_name, "[", &context, "]:"].concat(), - None => [&app_name, ":"].concat() - } - } - - // TAG was split into separate fields: APP-NAME, PROCID, MSGID - // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.5 - fn encode_rfc_5424(&self) -> String { - let Self { app_name, proc_id, msg_id } = self; - - [ - &app_name, - proc_id.as_deref().unwrap_or(NIL_VALUE), - msg_id.as_deref().unwrap_or(NIL_VALUE), - ].join(" ") - } -} - -#[derive(Debug)] -struct StructuredData {} - -impl StructuredData { - fn encode(&self) -> String { - todo!() - } -} From 3cdc1b46c4493cb607e0f3acb56741fd01c0bfaa Mon Sep 17 00:00:00 2001 From: polarathene <5098581+polarathene@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:06:05 +1300 Subject: [PATCH 07/24] feat: Add StructuredData support to Syslog encoder --- lib/codecs/src/encoding/format/syslog.rs | 86 ++++++++++++++++++++++-- 1 file changed, 79 insertions(+), 7 deletions(-) diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index 01be0ad01d8c7..606a62dab55bc 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -2,9 +2,10 @@ use bytes::{BufMut, BytesMut}; use tokio_util::codec::Encoder; use vector_core::{config::DataType, event::{Event, LogEvent}, schema}; use chrono::{DateTime, SecondsFormat, Local}; -use vrl::value::Value; +use vrl::value::{ObjectMap, Value}; use vector_config::configurable_component; +use std::collections::HashMap; use std::str::FromStr; use strum::{FromRepr, EnumString}; @@ -121,7 +122,7 @@ impl ConfigDecanter { proc_id, msg_id, }, - structured_data: None, + structured_data: self.get_structured_data(), message: self.get_message(&config), } } @@ -153,6 +154,12 @@ impl ConfigDecanter { }) } + fn get_structured_data(&self) -> Option { + self.log.get("structured_data") + .and_then(|v| v.clone().into_object()) + .map(StructuredData::from) + } + fn get_timestamp(&self) -> DateTime:: { // Q: Was this Timestamp key hard-coded to the needs of the original PR author? 
// @@ -281,8 +288,6 @@ impl SyslogMessage { let version = SYSLOG_V1; let timestamp = self.timestamp.to_rfc3339_opts(SecondsFormat::Millis, true); let tag = self.tag.encode_rfc_5424(); - // Structured Data: - // https://datatracker.ietf.org/doc/html/rfc5424#section-6.3 let sd = structured_data.as_deref().unwrap_or(NIL_VALUE); [ @@ -341,12 +346,79 @@ impl Tag { } } -#[derive(Debug)] -struct StructuredData {} +// Structured Data: +// https://datatracker.ietf.org/doc/html/rfc5424#section-6.3 +// An SD-ELEMENT consists of a name (SD-ID) + parameter key-value pairs (SD-PARAM) +type StructuredDataMap = HashMap>; +#[derive(Debug, Default)] +struct StructuredData { + elements: StructuredDataMap +} + +// Used by `SyslogMessage::encode()` +/* + Adapted `format_structured_data_rfc5424` method from: + https://github.com/vectordotdev/vector/blob/fafe8c50a4721fa3ddbea34e0641d3c145f14388/src/sources/syslog.rs#L1548-L1563 + No notable change in logic, uses `NIL_VALUE` constant, and adapts method to struct instead of free-standing. +*/ impl StructuredData { fn encode(&self) -> String { - todo!() + if self.elements.is_empty() { + NIL_VALUE.to_string() + } else { + let mut s = String::new(); + + for (sd_id, sd_params) in &self.elements { + s = s + "[" + sd_id; + for (key, value) in sd_params { + s = s + " " + key + "=\"" + value + "\""; + } + s += "]"; + } + + s + } + } +} + +// Used by `ConfigDecanter::decant_config()` +/* + Adapted `structured_data_from_fields()` method from: + https://github.com/vectordotdev/vector/blob/fafe8c50a4721fa3ddbea34e0641d3c145f14388/src/sources/syslog.rs#L1439-L1454 + + Refactored to `impl From` that uses `flat_map()` instead to collect K/V tuples into a `HashMap`. +*/ +impl From for StructuredData { + fn from(fields: ObjectMap) -> Self { + let elements = fields.into_iter().flat_map(|(sd_id, value)| { + let sd_params = value + .into_object()? + .into_iter() + .map(|(k, v)| (k.into(), value_to_string(v))) + .collect(); + + Some((sd_id.into(), sd_params)) + }).collect::(); + + Self { elements } + } +} + +// Only used as helper to support `StructuredData::from()` +/* + Adapted `value_to_string()` method from: + https://github.com/vectordotdev/vector/blob/fafe8c50a4721fa3ddbea34e0641d3c145f14388/src/sources/syslog.rs#L1569-L1579 + https://github.com/vectordotdev/vrl/blob/main/src/value/value/convert.rs + https://github.com/vectordotdev/vrl/blob/main/src/value/value/display.rs + + Simplified via `match` expression which seems better suited for this logic. +*/ +fn value_to_string(v: Value) -> String { + match v { + Value::Bytes(bytes) => String::from_utf8_lossy(&bytes).to_string(), + Value::Timestamp(timestamp) => timestamp.to_rfc3339_opts(SecondsFormat::AutoSi, true), + _ => v.to_string() } } From 3001b679089027f3e22d3edc2c7cc0567c50a4e6 Mon Sep 17 00:00:00 2001 From: polarathene <5098581+polarathene@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:41:09 +1300 Subject: [PATCH 08/24] chore: Housekeeping - Drop notes referring to original PR differences + StructuredData adaption references. None of it should be relevant going forward. - Revise some other notes. - Drop `add_log_source` method (introduced from the original PR author) in favor of using `StructuredData` support instead. 
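To make the new lookup concrete, a standalone sketch of what `StructuredData::encode()` renders. Plain `HashMap`s stand in for `ObjectMap`/`StructuredDataMap` here, and the SD-ID is borrowed from the RFC 5424 examples:

use std::collections::HashMap;

const NIL_VALUE: &str = "-";

// Mirrors the SD-ELEMENT rendering: "[" SD-ID *(SP SD-PARAM) "]"
fn encode_sd(elements: &HashMap<String, HashMap<String, String>>) -> String {
    if elements.is_empty() {
        return NIL_VALUE.to_string();
    }
    let mut s = String::new();
    for (sd_id, sd_params) in elements {
        s = s + "[" + sd_id;
        for (key, value) in sd_params {
            s = s + " " + key + "=\"" + value + "\"";
        }
        s += "]";
    }
    s
}

fn main() {
    let params = HashMap::from([("eventSource".to_string(), "Application".to_string())]);
    let elements = HashMap::from([("exampleSDID@32473".to_string(), params)]);
    assert_eq!(encode_sd(&elements), r#"[exampleSDID@32473 eventSource="Application"]"#);
}

Q: PARAM-VALUE escaping of `"`, `\` and `]` (RFC 5424 section 6.3.3) is not handled yet — follow-up needed?
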
--- lib/codecs/src/encoding/format/syslog.rs | 53 ++---------------------- 1 file changed, 3 insertions(+), 50 deletions(-) diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index 606a62dab55bc..cc8a7dbb72a33 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -32,13 +32,8 @@ pub struct SyslogSerializerConfig { /// Payload key payload_key: String, - /// Add log source - add_log_source: bool, - // NOTE: The `tag` field was removed, it is better represented by the equivalents in RFC 5424. // Q: The majority of the fields above pragmatically only make sense as config for keys to query? - // Q: What was `trim_prefix` for? It is not used in file, nor in Vector source tree. - // Q: `add_log_source` doesn't belong here? Better handled by the `remap` transform with structured data? } impl SyslogSerializerConfig { @@ -145,8 +140,6 @@ impl ConfigDecanter { ) } - // NOTE: Originally named in PR as: `get_field()` - // Now returns a `None` directly instead of converting to either `"-"` or `""` fn value_by_key(&self, field_key: &str) -> Option { self.log.get(field_key).and_then(|field_value| { let bytes = field_value.coerce_to_bytes(); @@ -185,10 +178,6 @@ impl ConfigDecanter { fn get_message(&self, config: &SyslogSerializerConfig) -> String { let mut message = String::new(); - if config.add_log_source { - message.push_str(self.add_log_source().as_str()); - } - // `payload_key` configures where to source the value for the syslog `message`: // - Field key (Valid) => Get value by lookup (value_by_key) // - Field key (Invalid) => Empty string (unwrap_or_default) @@ -205,19 +194,6 @@ impl ConfigDecanter { message.push_str(&payload.unwrap_or_default()); message } - - // NOTE: This is a third-party addition from the original PR author (it is not relevant to the syslog spec): - // TODO: Remove, as this type of additional data is better supported via VRL remap + `StructuredData`? - fn add_log_source(&self) -> String { - let get_value = |s| self.value_by_key(s).unwrap_or_default(); - - [ - "namespace_name=", get_value("kubernetes.namespace_name").as_str(), - ", container_name=", get_value("kubernetes.container_name").as_str(), - ", pod_name=", get_value("kubernetes.pod_name").as_str(), - ", message=" - ].concat() - } } // @@ -319,7 +295,6 @@ struct Tag { msg_id: Option } -// NOTE: `.as_deref()` usage below avoids requiring `self.clone()` impl Tag { // Roughly equivalent - RFC 5424 fields can compose the start of // an RFC 3164 MSG part (TAG + CONTENT fields): @@ -356,12 +331,6 @@ struct StructuredData { } // Used by `SyslogMessage::encode()` -/* - Adapted `format_structured_data_rfc5424` method from: - https://github.com/vectordotdev/vector/blob/fafe8c50a4721fa3ddbea34e0641d3c145f14388/src/sources/syslog.rs#L1548-L1563 - - No notable change in logic, uses `NIL_VALUE` constant, and adapts method to struct instead of free-standing. -*/ impl StructuredData { fn encode(&self) -> String { if self.elements.is_empty() { @@ -383,12 +352,6 @@ impl StructuredData { } // Used by `ConfigDecanter::decant_config()` -/* - Adapted `structured_data_from_fields()` method from: - https://github.com/vectordotdev/vector/blob/fafe8c50a4721fa3ddbea34e0641d3c145f14388/src/sources/syslog.rs#L1439-L1454 - - Refactored to `impl From` that uses `flat_map()` instead to collect K/V tuples into a `HashMap`. 
-*/ impl From for StructuredData { fn from(fields: ObjectMap) -> Self { let elements = fields.into_iter().flat_map(|(sd_id, value)| { @@ -406,14 +369,6 @@ impl From for StructuredData { } // Only used as helper to support `StructuredData::from()` -/* - Adapted `value_to_string()` method from: - https://github.com/vectordotdev/vector/blob/fafe8c50a4721fa3ddbea34e0641d3c145f14388/src/sources/syslog.rs#L1569-L1579 - https://github.com/vectordotdev/vrl/blob/main/src/value/value/convert.rs - https://github.com/vectordotdev/vrl/blob/main/src/value/value/display.rs - - Simplified via `match` expression which seems better suited for this logic. -*/ fn value_to_string(v: Value) -> String { match v { Value::Bytes(bytes) => String::from_utf8_lossy(&bytes).to_string(), @@ -434,9 +389,6 @@ struct Pri { impl Pri { fn from_str_variants(facility_variant: &str, severity_variant: &str) -> Self { - // The original PR had `deserialize_*()` methods parsed a value to a `u8` or stored a field key as a `String` - // Later the equivalent `get_num_*()` method would retrieve the `u8` value or lookup the field key for the actual value, - // otherwise it'd fallback to the default Facility/Severity value. // This approach instead parses a string of the name or ordinal representation, // any reference via field key lookup should have already happened by this point. let facility = Facility::into_variant(&facility_variant).unwrap_or(Facility::User); @@ -505,7 +457,8 @@ enum Severity { } // Additionally support variants from string-based integers: -// Parse a string name, with fallback for parsing a string ordinal number. +// Attempts to parse a string for ordinal mapping first, otherwise try the variant name. +// NOTE: No error handling in place, invalid config will fallback to default during `decant_config()`. impl Facility { fn into_variant(variant_name: &str) -> Option { let s = variant_name.to_ascii_lowercase(); @@ -518,7 +471,7 @@ impl Facility { } // NOTE: The `strum` crate does not provide traits, -// requiring copy/paste of the prior impl instead. +// requiring copy/paste to repeat the previous impl for this enum too. impl Severity { fn into_variant(variant_name: &str) -> Option { let s = variant_name.to_ascii_lowercase(); From f8be8d96bca68e71451004131122329ce54d56bf Mon Sep 17 00:00:00 2001 From: polarathene <5098581+polarathene@users.noreply.github.com> Date: Mon, 18 Mar 2024 17:03:27 +1300 Subject: [PATCH 09/24] chore: DRY `into_variant()` via `akin` crate This should be simple and lightweight enough to justify for the DRY benefit? This way the method doesn't need to be duplicated redundantly. That was required because there is no trait for `FromRepr` provided via `strum`. That would require a similar amount of lines for the small duplication here. The `akin` macro duplicates the `impl` block for each value in the `&enums` array. 
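For reference, what the macro expands to per enum — a runnable sketch with the enum trimmed to two variants (the real enums carry the full name/ordinal tables from the RFC):

use std::str::FromStr;
use strum::{EnumString, FromRepr};

#[derive(Debug, PartialEq, EnumString, FromRepr)]
#[strum(serialize_all = "kebab-case")]
enum Facility { Kern = 0, User = 1 }

// Hand-expanded form of the `akin!` block for one of the two enums;
// the `Severity` copy is identical apart from the type name.
impl Facility {
    fn into_variant(variant_name: &str) -> Option<Self> {
        let s = variant_name.to_ascii_lowercase();
        s.parse::<usize>().map_or_else(
            |_| Self::from_str(&s).ok(),
            |num| Self::from_repr(num),
        )
    }
}

fn main() {
    assert_eq!(Facility::into_variant("User"), Some(Facility::User)); // name (case-insensitive via lowercasing)
    assert_eq!(Facility::into_variant("0"), Some(Facility::Kern));    // string ordinal
    assert_eq!(Facility::into_variant("unknown"), None);              // caller falls back to the enum default
}
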
--- lib/codecs/Cargo.toml | 3 ++- lib/codecs/src/encoding/format/syslog.rs | 30 +++++++++--------------- 2 files changed, 13 insertions(+), 20 deletions(-) diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml index d4d12e966994b..cc51ad692ae7b 100644 --- a/lib/codecs/Cargo.toml +++ b/lib/codecs/Cargo.toml @@ -10,6 +10,7 @@ name = "generate-avro-fixtures" path = "tests/bin/generate-avro-fixtures.rs" [dependencies] +akin = { version = "0.4", optional = true } apache-avro = { version = "0.16.0", default-features = false } bytes = { version = "1", default-features = false } chrono.workspace = true @@ -49,4 +50,4 @@ rstest = "0.18.2" vrl.workspace = true [features] -syslog = ["dep:syslog_loose", "dep:strum"] +syslog = ["dep:syslog_loose", "dep:strum", "dep:akin"] diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index cc8a7dbb72a33..1f0450cb69e31 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -8,6 +8,7 @@ use vector_config::configurable_component; use std::collections::HashMap; use std::str::FromStr; use strum::{FromRepr, EnumString}; +use akin::akin; /// Config used to build a `SyslogSerializer`. #[configurable_component] @@ -459,26 +460,17 @@ enum Severity { // Additionally support variants from string-based integers: // Attempts to parse a string for ordinal mapping first, otherwise try the variant name. // NOTE: No error handling in place, invalid config will fallback to default during `decant_config()`. -impl Facility { - fn into_variant(variant_name: &str) -> Option { - let s = variant_name.to_ascii_lowercase(); - - s.parse::().map_or_else( - |_| Self::from_str(&s).ok(), - |num| Self::from_repr(num), - ) - } -} +akin! { + let &enums = [Facility, Severity]; -// NOTE: The `strum` crate does not provide traits, -// requiring copy/paste to repeat the previous impl for this enum too. -impl Severity { - fn into_variant(variant_name: &str) -> Option { - let s = variant_name.to_ascii_lowercase(); + impl *enums { + fn into_variant(variant_name: &str) -> Option { + let s = variant_name.to_ascii_lowercase(); - s.parse::().map_or_else( - |_| Self::from_str(&s).ok(), - |num| Self::from_repr(num), - ) + s.parse::().map_or_else( + |_| Self::from_str(&s).ok(), + |num| Self::from_repr(num), + ) + } } } From 536028731b2767148e85e77e14092037272984a6 Mon Sep 17 00:00:00 2001 From: polarathene <5098581+polarathene@users.noreply.github.com> Date: Wed, 20 Mar 2024 17:44:54 +1300 Subject: [PATCH 10/24] chore: Minor revisions - `ConfigDecanter::get_message()` replaces the fallback method in favor of `to_string_lossy()` (a dedicated equivalent for converting `Value` type to a String type (_technically it is a CoW str, hence the follow-up with `to_string()`_)). - This also encodes the value better, especially for the default `log_namespace: false` as the message value (when `String`) is not quote wrapped, which matches the behaviour of the `text` encoder output. - Additionally uses the `LogEvent` method `get_message()` directly from `lib/vector-core/src/event /log_event.rs`. This can better retrieve the log message regardless of the `log_namespace` setting. - Encoding of RFC 5424 fields has changed to inline the `version` constant directly, instead of via a redundant variable. If there's ever multiple versions that need to be supported, it could be addressed then. 
- The RFC 5424 timestamp has a max precision of microseconds, thus this should be rounded and `AutoSi` can be used (_or `Micros` if it should have fixed padding instead of truncating trailing `000`_). --- lib/codecs/src/encoding/format/syslog.rs | 28 +++++++++++++----------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index 1f0450cb69e31..23b11acb74c30 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -1,7 +1,7 @@ use bytes::{BufMut, BytesMut}; use tokio_util::codec::Encoder; use vector_core::{config::DataType, event::{Event, LogEvent}, schema}; -use chrono::{DateTime, SecondsFormat, Local}; +use chrono::{DateTime, SecondsFormat, Local, SubsecRound}; use vrl::value::{ObjectMap, Value}; use vector_config::configurable_component; @@ -177,23 +177,23 @@ impl ConfigDecanter { } fn get_message(&self, config: &SyslogSerializerConfig) -> String { - let mut message = String::new(); - // `payload_key` configures where to source the value for the syslog `message`: + // - Not configured => Encodes the default log message. // - Field key (Valid) => Get value by lookup (value_by_key) // - Field key (Invalid) => Empty string (unwrap_or_default) - // - Not configured => JSON encoded `LogEvent` (fallback?) - // - // Q: Was the JSON fallback intended by the original PR author only for debugging? - // Roughly equivalent to using `payload_key: .` (in YAML config)? + + // Ref: + // `log.get_message()`: + // https://github.com/vectordotdev/vector/blob/ad6a48efc0f79b2c18a5c1394e5d8603fdfd1bab/lib/vector-core/src/event/log_event.rs#L532-L541 + // `v.to_string_lossy()`: + // https://github.com/vectordotdev/vrl/blob/f2d71cd26cb8270230f531945d7dee4929235905/src/value/value/serde.rs#L34-L55 let payload = if config.payload_key.is_empty() { - serde_json::to_string(&self.log).ok() + self.log.get_message().map(|v| v.to_string_lossy().to_string() ) } else { self.value_by_key(&config.payload_key) }; - message.push_str(&payload.unwrap_or_default()); - message + payload.unwrap_or_default() } } @@ -242,6 +242,7 @@ impl SyslogMessage { SyslogRFC::Rfc3164 => { // TIMESTAMP field format: // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2 + // https://docs.rs/chrono/latest/chrono/format/strftime/index.html let timestamp = self.timestamp.format("%b %e %H:%M:%S").to_string(); // MSG part begins with TAG field + optional context: // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.3 @@ -262,13 +263,14 @@ impl SyslogMessage { SyslogRFC::Rfc5424 => { // HEADER part fields: // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2 - let version = SYSLOG_V1; - let timestamp = self.timestamp.to_rfc3339_opts(SecondsFormat::Millis, true); + // TIME-FRAC max length is 6 digits (microseconds): + // https://datatracker.ietf.org/doc/html/rfc5424#section-6 + let timestamp = self.timestamp.round_subsecs(6).to_rfc3339_opts(SecondsFormat::AutoSi, true); let tag = self.tag.encode_rfc_5424(); let sd = structured_data.as_deref().unwrap_or(NIL_VALUE); [ - version, + SYSLOG_V1, timestamp.as_str(), hostname, &tag, From 38d0d611eb42eb9ae22cbeb0383482b4dd2150c5 Mon Sep 17 00:00:00 2001 From: polarathene <5098581+polarathene@users.noreply.github.com> Date: Wed, 20 Mar 2024 17:25:46 +1300 Subject: [PATCH 11/24] chore: Switch from `DateTime` to `DateTime` - The original PR author appears to have relied on a hard-coded timestamp key here. 
- `DateTime` would render the timestamp field with the local timezone offset, but other than that `DateTime` would seem more consistent with usage in Vector, especially since any original TZ context is lost by this point? - Notes adjusted accordingly, with added TODO query for each encoding mode to potentially support configurable timezone. --- lib/codecs/src/encoding/format/syslog.rs | 53 ++++++++++++++++-------- 1 file changed, 35 insertions(+), 18 deletions(-) diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index 23b11acb74c30..3588e99e02bf3 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -1,7 +1,7 @@ use bytes::{BufMut, BytesMut}; use tokio_util::codec::Encoder; use vector_core::{config::DataType, event::{Event, LogEvent}, schema}; -use chrono::{DateTime, SecondsFormat, Local, SubsecRound}; +use chrono::{DateTime, SecondsFormat, SubsecRound, Utc}; use vrl::value::{ObjectMap, Value}; use vector_config::configurable_component; @@ -154,25 +154,37 @@ impl ConfigDecanter { .map(StructuredData::from) } - fn get_timestamp(&self) -> DateTime:: { - // Q: Was this Timestamp key hard-coded to the needs of the original PR author? + fn get_timestamp(&self) -> DateTime:: { + // Q: Should the timestamp source be configurable? (eg: Select a field from the `remap` transform) // - // Key `@timestamp` depends on input: - // https://vector.dev/guides/level-up/managing-schemas/#example-custom-timestamp-field - // https://vector.dev/docs/about/under-the-hood/architecture/data-model/log/#timestamps - // NOTE: Log schema key renaming is unavailable when Log namespacing is enabled: - // https://vector.dev/docs/reference/configuration/global-options/#log_schema + // Concerns: + // - A source with `log_namespace: true` seems to cause `get_timstamp()` to return `None`? + // Does not seem to retrieve `%vector.ingest_timestamp`? + // - A sink type `console` with `timestamp_format: unix_ms` converts a `Value::Timestamp` prior to the encoder logic + // to `Value::Integer(i64)` instead, which won't match this condition. // - // NOTE: Log namespacing has metadata `%vector.ingest_timestamp` from a source (file/demo_logs) instead of `timestamp`. - // As a `payload_key` it will not respect config `encoding.timestamp_format`, but does when - // using the parent object (`%vector`). Inputs without namespacing respect that config setting. - if let Some(Value::Timestamp(timestamp)) = self.log.get("@timestamp") { - // Q: Utc type returned is changed to Local? - // - Could otherwise return `*timestamp` as-is? Why is Local conversion necessary? - DateTime::::from(*timestamp) + // NOTE: + // Vector always manages `Value::Timestamp` as `DateTime`, any prior TZ information context is always dropped. + // If restoring the TZ for a log is important, it could be handled via a remap transform? 
+        //
+        // Ref:
+        // `log.get_timestamp()`:
+        // https://github.com/vectordotdev/vector/blob/ad6a48efc0f79b2c18a5c1394e5d8603fdfd1bab/lib/vector-core/src/event/log_event.rs#L543-L552
+        if let Some(Value::Timestamp(timestamp)) = self.log.get_timestamp() {
+            *timestamp
         } else {
-            // NOTE: Local time is encouraged by RFC 5424 when creating a fallback timestamp for RFC 3164
-            Local::now()
+            // NOTE:
+            // When timezone information is missing Vector handles conversion to UTC by assuming the local TZ:
+            // https://vector.dev/docs/about/under-the-hood/architecture/data-model/log/#time-zones
+            // There is a global option for which TZ to assume (where the default is local TZ):
+            // https://vector.dev/docs/reference/configuration/global-options/#timezone
+            // https://github.com/vectordotdev/vector/blob/58a4a2ef52e606c0f9b9fa975cf114b661300584/lib/vector-core/src/config/global_options.rs#L233-L236
+            // https://github.com/vectordotdev/vrl/blob/c010300710a00191cd406e57cd0f3e001923d598/src/compiler/datetime.rs#L88-L95
+            // VRL remap can also override that:
+            // https://vector.dev/docs/reference/configuration/transforms/remap/#timezone
+            // Vector's `syslog` source type also uses `Utc::now()` internally as a fallback:
+            // https://github.com/vectordotdev/vector/blob/58a4a2ef52e606c0f9b9fa975cf114b661300584/src/sources/syslog.rs#L430-L438
+            Utc::now()
         }
     }
 
@@ -223,7 +235,7 @@ pub enum SyslogRFC {
 #[derive(Default, Debug)]
 struct SyslogMessage {
     pri: Pri,
-    timestamp: DateTime::<Local>,
+    timestamp: DateTime::<Utc>,
     hostname: Option<String>,
     tag: Tag,
     structured_data: Option<StructuredData>,
@@ -243,6 +255,10 @@ impl SyslogMessage {
                 // TIMESTAMP field format:
                 // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.2
                 // https://docs.rs/chrono/latest/chrono/format/strftime/index.html
+                //
+                // TODO: Should this remain as UTC or adjust to the local TZ of the environment (or Vector config)?
+                // RFC 5424 suggests (when adapting for RFC 3164) to present a timestamp with the local TZ of the log source:
+                // https://www.rfc-editor.org/rfc/rfc5424#appendix-A.1
                 let timestamp = self.timestamp.format("%b %e %H:%M:%S").to_string();
                 // MSG part begins with TAG field + optional context:
                 // https://datatracker.ietf.org/doc/html/rfc3164#section-4.1.3
@@ -265,6 +281,7 @@ impl SyslogMessage {
                 // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2
                 // TIME-FRAC max length is 6 digits (microseconds):
                 // https://datatracker.ietf.org/doc/html/rfc5424#section-6
+                // TODO: Likewise for RFC 5424, as UTC the offset will always render as `Z` if not configurable.
                 let timestamp = self.timestamp.round_subsecs(6).to_rfc3339_opts(SecondsFormat::AutoSi, true);
                 let tag = self.tag.encode_rfc_5424();
                 let sd = structured_data.as_deref().unwrap_or(NIL_VALUE);

From 7ef97fb85832936fabe45ec43f6b35ab58414402 Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Wed, 20 Mar 2024 19:45:15 +1300
Subject: [PATCH 12/24] chore: Adopt a separate options config struct + minor revisions

- Move encoder config settings under a single `syslog` config field.
  This better mirrors configuration options for existing encoders like Avro and CSV.
- `ConfigDecanter::value_by_key()` appears to accomplish roughly the same as the existing helper method `to_string_lossy()`. Prefer that instead.
  This also makes the `StructuredData` helper `value_to_string()` redundant, at a glance?
- Added some reference for the priority value `PRIVAL` (see the sketch below).
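A quick sketch of the `PRIVAL` composition referenced above (RFC 5424 section 6.2.1: the priority value is `facility * 8 + severity`; not part of the patch, and the bare `u8` parameters stand in for the `Facility`/`Severity` enums in this file):

    // e.g. facility `user` (1) with severity `informational` (6) => <14>
    fn encode_pri(facility: u8, severity: u8) -> String {
        format!("<{}>", facility * 8 + severity)
    }

    fn main() {
        assert_eq!(encode_pri(1, 6), "<14>");
    }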
- `Pri::from_str_variants()` uses the existing defaults for fallback; communicate that more clearly.
  The contextual note is no longer useful, removed.
---
 lib/codecs/src/encoding/format/syslog.rs | 78 ++++++++++++++----------
 1 file changed, 47 insertions(+), 31 deletions(-)

diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs
index 3588e99e02bf3..d27f47025b8f2 100644
--- a/lib/codecs/src/encoding/format/syslog.rs
+++ b/lib/codecs/src/encoding/format/syslog.rs
@@ -12,11 +12,37 @@ use akin::akin;
 
 /// Config used to build a `SyslogSerializer`.
 #[configurable_component]
+#[derive(Clone, Debug, Default)]
+#[serde(default)]
+pub struct SyslogSerializerConfig {
+    /// Options for the Syslog serializer.
+    pub syslog: SyslogSerializerOptions
+}
+
+impl SyslogSerializerConfig {
+    /// Build the `SyslogSerializer` from this configuration.
+    pub fn build(&self) -> SyslogSerializer {
+        SyslogSerializer::new(&self)
+    }
+
+    /// The data type of events that are accepted by `SyslogSerializer`.
+    pub fn input_type(&self) -> DataType {
+        DataType::Log
+    }
+
+    /// The schema required by the serializer.
+    pub fn schema_requirement(&self) -> schema::Requirement {
+        schema::Requirement::empty()
+    }
+}
+
+/// Syslog serializer options.
+#[configurable_component]
+#[derive(Clone, Debug, Default)]
 // Serde default makes all config keys optional.
 // Each field assigns either a fixed value, or field name (lookup field key to retrieve dynamic value per `LogEvent`).
 #[serde(default)]
-#[derive(Clone, Debug, Default)]
-pub struct SyslogSerializerConfig {
+pub struct SyslogSerializerOptions {
     /// RFC
     rfc: SyslogRFC,
     /// Facility
@@ -37,23 +63,6 @@ pub struct SyslogSerializerOptions {
     // Q: The majority of the fields above pragmatically only make sense as config for keys to query?
 }
 
-impl SyslogSerializerConfig {
-    /// Build the `SyslogSerializer` from this configuration.
-    pub fn build(&self) -> SyslogSerializer {
-        SyslogSerializer::new(&self)
-    }
-
-    /// The data type of events that are accepted by `SyslogSerializer`.
-    pub fn input_type(&self) -> DataType {
-        DataType::Log
-    }
-
-    /// The schema required by the serializer.
-    pub fn schema_requirement(&self) -> schema::Requirement {
-        schema::Requirement::empty()
-    }
-}
-
 /// Serializer that converts an `Event` to bytes using the Syslog format.
 #[derive(Debug, Clone)]
 pub struct SyslogSerializer {
@@ -72,10 +81,10 @@ impl Encoder<Event> for SyslogSerializer {
 
     fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> {
         if let Event::Log(log_event) = event {
-            let syslog_message = ConfigDecanter::new(log_event).decant_config(&self.config);
+            let syslog_message = ConfigDecanter::new(log_event).decant_config(&self.config.syslog);
 
             let vec = syslog_message
-                .encode(&self.config.rfc)
+                .encode(&self.config.syslog.rfc)
                 .as_bytes()
                 .to_vec();
             buffer.put_slice(&vec);
@@ -85,7 +94,7 @@ impl Encoder<Event> for SyslogSerializer {
     }
 }
 
-// Adapts a `LogEvent` into a `SyslogMessage` based on config from `SyslogSerializerConfig`:
+// Adapts a `LogEvent` into a `SyslogMessage` based on config from `SyslogSerializerOptions`:
 // - Splits off the responsibility of encoding logic to `SyslogMessage` (which is not dependent upon Vector types).
 // - Majority of methods are only needed to support the `decant_config()` operation.
 struct ConfigDecanter {
@@ -99,7 +108,7 @@ impl ConfigDecanter {
         }
     }
 
-    fn decant_config(&self, config: &SyslogSerializerConfig) -> SyslogMessage {
+    fn decant_config(&self, config: &SyslogSerializerOptions) -> SyslogMessage {
         let x = |v| self.replace_if_proxied(v).unwrap_or_default();
         let facility = x(&config.facility);
         let severity = x(&config.severity);
@@ -142,9 +151,8 @@ impl ConfigDecanter {
     }
 
     fn value_by_key(&self, field_key: &str) -> Option<String> {
-        self.log.get(field_key).and_then(|field_value| {
-            let bytes = field_value.coerce_to_bytes();
-            String::from_utf8(bytes.to_vec()).ok()
+        self.log.get(field_key).map(|field_value| {
+            field_value.to_string_lossy().to_string()
         })
     }
 
@@ -188,7 +196,7 @@ impl ConfigDecanter {
         }
     }
 
-    fn get_message(&self, config: &SyslogSerializerConfig) -> String {
+    fn get_message(&self, config: &SyslogSerializerOptions) -> String {
         // `payload_key` configures where to source the value for the syslog `message`:
         // - Not configured => Encodes the default log message.
         // - Field key (Valid) => Get value by lookup (value_by_key)
@@ -389,6 +397,12 @@ impl From<ObjectMap> for StructuredData {
 }
 
 // Only used as helper to support `StructuredData::from()`
+//
+// TODO:
+// This method could be replaced in favor of calling `v.to_string_lossy().to_string()`? (As was done elsewhere in this file):
+// https://github.com/vectordotdev/vrl/blob/f2d71cd26cb8270230f531945d7dee4929235905/src/value/value/serde.rs#L34-L55
+// Timestamp value is handled the same way via `timestamp_to_string()`:
+// https://github.com/vectordotdev/vrl/blob/f2d71cd26cb8270230f531945d7dee4929235905/src/value/value.rs#L175-L179
 fn value_to_string(v: Value) -> String {
     match v {
         Value::Bytes(bytes) => String::from_utf8_lossy(&bytes).to_string(),
@@ -401,6 +415,10 @@ fn value_to_string(v: Value) -> String {
 
 // Facility + Severity support
 //
+// PRIVAL:
+// https://www.rfc-editor.org/rfc/rfc5424#section-6.2.1
+// > The number contained within these angle brackets is known as the Priority value (PRIVAL)
+//   and represents both the Facility and Severity.
 #[derive(Default, Debug)]
 struct Pri {
     facility: Facility,
@@ -409,10 +427,8 @@ struct Pri {
 
 impl Pri {
     fn from_str_variants(facility_variant: &str, severity_variant: &str) -> Self {
-        // This approach instead parses a string of the name or ordinal representation,
-        // any reference via field key lookup should have already happened by this point.
-        let facility = Facility::into_variant(&facility_variant).unwrap_or(Facility::User);
-        let severity = Severity::into_variant(&severity_variant).unwrap_or(Severity::Informational);
+        let facility = Facility::into_variant(&facility_variant).unwrap_or_default();
+        let severity = Severity::into_variant(&severity_variant).unwrap_or_default();
 
         Self {
             facility,

From 34e735d7ff4443807868ddaacb58ffd0204aaff0 Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Mon, 1 Apr 2024 14:40:14 +1300
Subject: [PATCH 13/24] chore: Switch from `String` to deserialize `Facility` + `Severity` enums

To better communicate the allowed values, these two config fields can change from the `String` type to their appropriate enum type.

- This relies on serde to deserialize the config value to the enum, which adds a bit more noise to grok.
- It does make `Pri::from_str_variants()` redundant, while the `into_variant()` methods are refactored to `deserialize()` with a proper error message emitted to match what serde would normally emit for failed enum variant deserialization.
- A drawback of this change is that these two config fields lost the ability to reference a different value path in the `LogEvent`. That'll be addressed in a future commit. --- lib/codecs/src/encoding/format/syslog.rs | 66 ++++++++++++++---------- 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs index d27f47025b8f2..00918a6ce2139 100644 --- a/lib/codecs/src/encoding/format/syslog.rs +++ b/lib/codecs/src/encoding/format/syslog.rs @@ -6,9 +6,14 @@ use vrl::value::{ObjectMap, Value}; use vector_config::configurable_component; use std::collections::HashMap; + +// All of this block is to support the Facility + Severity enums with convenience of string or ordinal config value: use std::str::FromStr; -use strum::{FromRepr, EnumString}; +use strum::{FromRepr, EnumString, VariantNames}; +// `akin` macro for DRY impl to share with both enums due to lack of a `FromRepr` trait: use akin::akin; +// Custom deserialization with serde needed: +use serde::{Deserializer, de::Error}; /// Config used to build a `SyslogSerializer`. #[configurable_component] @@ -46,9 +51,11 @@ pub struct SyslogSerializerOptions { /// RFC rfc: SyslogRFC, /// Facility - facility: String, + #[serde(deserialize_with = "Facility::deserialize")] + facility: Facility, /// Severity - severity: String, + #[serde(deserialize_with = "Severity::deserialize")] + severity: Severity, /// App Name app_name: Option, @@ -109,17 +116,16 @@ impl ConfigDecanter { } fn decant_config(&self, config: &SyslogSerializerOptions) -> SyslogMessage { - let x = |v| self.replace_if_proxied(v).unwrap_or_default(); - let facility = x(&config.facility); - let severity = x(&config.severity); - let y = |v| self.replace_if_proxied_opt(v); let app_name = y(&config.app_name).unwrap_or("vector".to_owned()); let proc_id = y(&config.proc_id); let msg_id = y(&config.msg_id); SyslogMessage { - pri: Pri::from_str_variants(&facility, &severity), + pri: Pri { + facility: config.facility, + severity: config.severity, + }, timestamp: self.get_timestamp(), hostname: self.value_by_key("hostname"), tag: Tag { @@ -426,16 +432,6 @@ struct Pri { } impl Pri { - fn from_str_variants(facility_variant: &str, severity_variant: &str) -> Self { - let facility = Facility::into_variant(&facility_variant).unwrap_or_default(); - let severity = Severity::into_variant(&severity_variant).unwrap_or_default(); - - Self { - facility, - severity, - } - } - // The last paragraph describes how to compose the enums into `PRIVAL`: // https://datatracker.ietf.org/doc/html/rfc5424#section-6.2.1 fn encode(&self) -> String { @@ -445,10 +441,15 @@ impl Pri { } // Facility + Severity mapping from Name => Ordinal number: +// NOTE: +// - `configurable_component(no_deser)` is used to match the existing functionality to support deserializing config with ordinal mapping. +// - `EnumString` with `strum(serialize_all = "kebab-case")` provides the `FromStr` support, while `FromRepr` handles ordinal support. +// - `VariantNames` assists with generating the equivalent `de::Error::unknown_variant` serde error message. 
 /// Syslog facility
-#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)]
+#[derive(Default, Debug, EnumString, FromRepr, VariantNames, Copy, Clone)]
 #[strum(serialize_all = "kebab-case")]
+#[configurable_component(no_deser)]
 enum Facility {
     Kern = 0,
     #[default]
     User = 1,
     Mail = 2,
     Daemon = 3,
     Auth = 4,
     Syslog = 5,
     LPR = 6,
     News = 7,
     UUCP = 8,
     Cron = 9,
     AuthPriv = 10,
     FTP = 11,
     NTP = 12,
     Security = 13,
     Console = 14,
     SolarisCron = 15,
     Local0 = 16,
     Local1 = 17,
     Local2 = 18,
     Local3 = 19,
     Local4 = 20,
     Local5 = 21,
     Local6 = 22,
     Local7 = 23,
 }
 
 /// Syslog severity
-#[derive(Default, Debug, EnumString, FromRepr, Copy, Clone)]
+#[derive(Default, Debug, EnumString, FromRepr, VariantNames, Copy, Clone)]
 #[strum(serialize_all = "kebab-case")]
+#[configurable_component(no_deser)]
 enum Severity {
     Emergency = 0,
     Alert = 1,
     Critical = 2,
     Error = 3,
     Warning = 4,
     Notice = 5,
     #[default]
     Informational = 6,
     Debug = 7,
 }
 
 // Additionally support variants from string-based integers:
 // Attempts to parse a string for ordinal mapping first, otherwise try the variant name.
-// NOTE: No error handling in place, invalid config will fallback to default during `decant_config()`.
+// NOTE:
+// - While `serde(rename_all = "kebab-case")` attribute would deserialize like `FromStr` + `EnumString`, config input must strictly match.
+// - To retain support for ordinal config input, a custom deserialize method is needed (as `derive(Deserialize)` is too basic):
+//   - Error message should roughly match `de::Error::unknown_variant`
+
 akin! {
     let &enums = [Facility, Severity];
 
     impl *enums {
-        fn into_variant(variant_name: &str) -> Option<Self> {
-            let s = variant_name.to_ascii_lowercase();
-
-            s.parse::<usize>().map_or_else(
-                |_| Self::from_str(&s).ok(),
+        fn deserialize<'de, D>(deserializer: D) -> Result<Self, D::Error>
+        where
+            D: Deserializer<'de>,
+        {
+            let value = String::deserialize(deserializer)?;
+
+            value.parse::<usize>().map_or_else(
+                |_| Self::from_str(&value.to_ascii_lowercase()).ok(),
                 |num| Self::from_repr(num),
-            )
+            ).ok_or(format!(
+                "Unknown variant `{value}`, expected one of `{variants}`",
+                variants=Self::VARIANTS.join("`, `")
+            )).map_err(D::Error::custom)
         }
     }
 }

From 2ac3da2db28f3da1df8d088de6b2d44dc7567416 Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Mon, 1 Apr 2024 15:04:52 +1300
Subject: [PATCH 14/24] fix: Support deserializing config value that is a number type

In a YAML config a string can optionally be wrapped with quotes, while a number that isn't quote wrapped will be treated as a number type. Support was previously limited to string numbers; this change adds flexibility for config using ordinal values in YAML regardless of quote usage.

The previous `Self::into_variant(&s)` logic could have been used instead of bringing in `serde-aux`, but the external helper attribute approach seems easier to grok/follow, as the intermediary container still seems required for a terse implementation.

The match statement uses a reference (_which requires a deref for `from_repr`_) to appease the borrow checker for the later borrow needed by `value` in the error message.
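A minimal standalone serde sketch of the gap this commit closes (not part of the patch; plain `serde` + `serde_json` without `serde-aux`, with `usize`/`String` standing in for the real `Facility`/`Severity` lookup). With a plain untagged enum, a quoted ordinal lands in the `String` arm:

    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    #[serde(untagged)]
    enum NumberOrString {
        Number(usize),
        String(String),
    }

    fn main() {
        for raw in ["6", "\"6\"", "\"informational\""] {
            let parsed: NumberOrString = serde_json::from_str(raw).unwrap();
            println!("{raw} => {parsed:?}");
        }
        // 6               => Number(6)
        // "6"             => String("6")   <- quoted ordinal; `deserialize_number_from_string`
        //                                     on the `Number` field folds this into Number(6)
        // "informational" => String("informational")
    }

The same applies to YAML scalars, e.g. `severity: 6` versus `severity: "6"`.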
---
 lib/codecs/Cargo.toml | 4 +++-
 lib/codecs/src/encoding/format/syslog.rs | 26 ++++++++++++++++++------
 2 files changed, 23 insertions(+), 7 deletions(-)

diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml
index cc51ad692ae7b..aab13651415a9 100644
--- a/lib/codecs/Cargo.toml
+++ b/lib/codecs/Cargo.toml
@@ -16,6 +16,7 @@ bytes = { version = "1", default-features = false }
 chrono.workspace = true
 csv-core = { version = "0.1.10", default-features = false }
 derivative = { version = "2", default-features = false }
+derive_more = { version = "0.99", optional = true }
 dyn-clone = { version = "1", default-features = false }
 lookup = { package = "vector-lookup", path = "../vector-lookup", default-features = false, features = ["test"] }
 memchr = { version = "2", default-features = false }
@@ -25,6 +26,7 @@ prost = { version = "0.12.3", default-features = false, features = ["std"] }
 prost-reflect = { version = "0.13", default-features = false, features = ["serde"] }
 regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] }
 serde.workspace = true
+serde-aux = { version = "4.5", optional = true }
 serde_json.workspace = true
 smallvec = { version = "1", default-features = false, features = ["union"] }
 snafu = { version = "0.7.5", default-features = false, features = ["futures"] }
@@ -50,4 +52,4 @@ rstest = "0.18.2"
 vrl.workspace = true
 
 [features]
-syslog = ["dep:syslog_loose", "dep:strum", "dep:akin"]
+syslog = ["dep:syslog_loose", "dep:strum", "dep:akin", "dep:derive_more", "dep:serde-aux"]

diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs
index 00918a6ce2139..fdfd026f24712 100644
--- a/lib/codecs/src/encoding/format/syslog.rs
+++ b/lib/codecs/src/encoding/format/syslog.rs
@@ -13,7 +13,8 @@
 use strum::{FromRepr, EnumString, VariantNames};
 // `akin` macro for DRY impl to share with both enums due to lack of a `FromRepr` trait:
 use akin::akin;
 // Custom deserialization with serde needed:
-use serde::{Deserializer, de::Error};
+use serde::{Deserialize, Deserializer, de::Error};
+use serde_aux::field_attributes::deserialize_number_from_string;
 
 /// Config used to build a `SyslogSerializer`.
 #[configurable_component]
@@ -509,15 +510,28 @@
         where
             D: Deserializer<'de>,
         {
-            let value = String::deserialize(deserializer)?;
+            let value = NumberOrString::deserialize(deserializer)?;
+            let variant: Option<Self> = match &value {
+                NumberOrString::Number(num) => Self::from_repr(*num),
+                NumberOrString::String(s) => Self::from_str(&s.to_ascii_lowercase()).ok(),
+            };
 
-            value.parse::<usize>().map_or_else(
-                |_| Self::from_str(&value.to_ascii_lowercase()).ok(),
-                |num| Self::from_repr(num),
-            ).ok_or(format!(
+            variant.ok_or_else(|| format!(
                 "Unknown variant `{value}`, expected one of `{variants}`",
                 variants=Self::VARIANTS.join("`, `")
             )).map_err(D::Error::custom)
         }
     }
 }
+
+// An intermediary container to deserialize config value into.
+// Ensures that a string number is properly deserialized to the `usize` variant.
+#[derive(derive_more::Display, Deserialize)]
+#[serde(untagged)]
+enum NumberOrString {
+    Number(
+        #[serde(deserialize_with = "deserialize_number_from_string")]
+        usize
+    ),
+    String(String)
+}

From 7ba64bee325bd2869b4535abb8bd94b4ad0819cf Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Mon, 1 Apr 2024 15:10:06 +1300
Subject: [PATCH 15/24] chore: Add doc comments for enum variants to appease Vector requirement

This seems redundant given the context? Mostly adds unnecessary noise.
Could probably `impl Configurable` or similar to try to work around the requirement.
The metadata description could generate the variant list, similarly to how it's handled for the error message?
---
 lib/codecs/src/encoding/format/syslog.rs | 33 ++++++++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs
index fdfd026f24712..03d841d1cb23e 100644
--- a/lib/codecs/src/encoding/format/syslog.rs
+++ b/lib/codecs/src/encoding/format/syslog.rs
@@ -443,6 +443,7 @@ impl Pri {
 
 // Facility + Severity mapping from Name => Ordinal number:
 // NOTE:
+// - Vector component enforces variant doc-comments, even though it's pointless for these enums?
 // - `configurable_component(no_deser)` is used to match the existing functionality to support deserializing config with ordinal mapping.
 // - `EnumString` with `strum(serialize_all = "kebab-case")` provides the `FromStr` support, while `FromRepr` handles ordinal support.
 // - `VariantNames` assists with generating the equivalent `de::Error::unknown_variant` serde error message.
@@ -452,30 +453,54 @@ impl Pri {
 #[strum(serialize_all = "kebab-case")]
 #[configurable_component(no_deser)]
 enum Facility {
+    /// Kern
     Kern = 0,
+    /// User
     #[default]
     User = 1,
+    /// Mail
     Mail = 2,
+    /// Daemon
     Daemon = 3,
+    /// Auth
     Auth = 4,
+    /// Syslog
     Syslog = 5,
+    /// LPR
     LPR = 6,
+    /// News
     News = 7,
+    /// UUCP
     UUCP = 8,
+    /// Cron
     Cron = 9,
+    /// AuthPriv
     AuthPriv = 10,
+    /// FTP
     FTP = 11,
+    /// NTP
     NTP = 12,
+    /// Security
     Security = 13,
+    /// Console
     Console = 14,
+    /// SolarisCron
     SolarisCron = 15,
+    /// Local0
     Local0 = 16,
+    /// Local1
     Local1 = 17,
+    /// Local2
     Local2 = 18,
+    /// Local3
     Local3 = 19,
+    /// Local4
     Local4 = 20,
+    /// Local5
     Local5 = 21,
+    /// Local6
     Local6 = 22,
+    /// Local7
     Local7 = 23,
 }
 
@@ -484,14 +509,22 @@ enum Facility {
 #[strum(serialize_all = "kebab-case")]
 #[configurable_component(no_deser)]
 enum Severity {
+    /// Emergency
     Emergency = 0,
+    /// Alert
     Alert = 1,
+    /// Critical
     Critical = 2,
+    /// Error
     Error = 3,
+    /// Warning
     Warning = 4,
+    /// Notice
     Notice = 5,
+    /// Informational
     #[default]
     Informational = 6,
+    /// Debug
     Debug = 7,
 }

From ed202bbd71d8ce4e632c97c451ec30b4d0c5dc13 Mon Sep 17 00:00:00 2001
From: polarathene <5098581+polarathene@users.noreply.github.com>
Date: Mon, 1 Apr 2024 18:44:19 +1300
Subject: [PATCH 16/24] chore: Use `snafu` for error message

Not sure if this is worthwhile, but it adopts the error-message convention I've seen elsewhere by managing messages via Snafu.
---
 lib/codecs/src/encoding/format/syslog.rs | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/lib/codecs/src/encoding/format/syslog.rs b/lib/codecs/src/encoding/format/syslog.rs
index 03d841d1cb23e..64ed40eb08377 100644
--- a/lib/codecs/src/encoding/format/syslog.rs
+++ b/lib/codecs/src/encoding/format/syslog.rs
@@ -15,6 +15,7 @@ use akin::akin;
 // Custom deserialization with serde needed:
 use serde::{Deserialize, Deserializer, de::Error};
 use serde_aux::field_attributes::deserialize_number_from_string;
+use snafu::{Snafu, OptionExt};
 
 /// Config used to build a `SyslogSerializer`.
 #[configurable_component]
@@ -549,10 +550,10 @@ akin!
{ NumberOrString::String(s) => Self::from_str(&s.to_ascii_lowercase()).ok(), }; - variant.ok_or_else(|| format!( - "Unknown variant `{value}`, expected one of `{variants}`", - variants=Self::VARIANTS.join("`, `") - )).map_err(D::Error::custom) + variant.with_context(|| InvalidVariantSnafu { + input: value.to_string(), + variants: Self::VARIANTS.join("`, `"), + }).map_err(D::Error::custom) } } } @@ -568,3 +569,9 @@ enum NumberOrString { ), String(String) } + +#[derive(Debug, Snafu)] +enum StrumDeserializeError { + #[snafu(display("Unknown variant `{}`, expected one of `{}`", input, variants))] + InvalidVariant { input: String, variants: String }, +} From c295b7d4b0d763cb5505808264ae17fb6368249c Mon Sep 17 00:00:00 2001 From: Vitalii Parfonov Date: Mon, 18 Aug 2025 16:28:44 +0300 Subject: [PATCH 17/24] merge with master Signed-off-by: Vitalii Parfonov --- .github/CODEOWNERS | 115 +- .github/DISCUSSION_TEMPLATE/q-a.yml | 33 + .github/ISSUE_TEMPLATE/bug.yml | 1 + .github/ISSUE_TEMPLATE/feature.yml | 1 + .github/ISSUE_TEMPLATE/minor-release.md | 94 +- .github/ISSUE_TEMPLATE/patch-release.md | 41 +- .github/PULL_REQUEST_TEMPLATE.md | 56 +- .github/SUPPORT.md | 24 +- .github/actions/spelling/allow.txt | 503 +- .github/actions/spelling/excludes.txt | 113 - .github/actions/spelling/expect.txt | 863 +- .github/actions/spelling/only.txt | 5 + .github/actions/spelling/patterns.txt | 6 + .github/audit.yml | 7 +- .github/dependabot.yml | 38 +- .github/semantic.yml | 254 - .github/workflows/build_preview_sites.yml | 10 +- .github/workflows/changelog.yaml | 108 +- .github/workflows/changes.yml | 114 +- .github/workflows/ci-integration-review.yml | 195 + ...ment-trigger.yml => ci-review-trigger.yml} | 81 +- .github/workflows/cli.yml | 46 +- .github/workflows/compilation-timings.yml | 50 +- .github/workflows/component_features.yml | 46 +- .github/workflows/create_preview_sites.yml | 228 +- .github/workflows/cross.yml | 61 +- .github/workflows/custom_builds.yml | 4 + .github/workflows/deny.yml | 48 +- .github/workflows/e2e.yml | 49 +- .github/workflows/environment.yml | 50 +- .github/workflows/gardener_issue_comment.yml | 4 +- .github/workflows/gardener_open_issue.yml | 4 +- .github/workflows/gardener_open_pr.yml | 10 +- .../gardener_remove_waiting_author.yml | 14 +- .github/workflows/install-sh.yml | 49 +- .github/workflows/integration-comment.yml | 538 - .github/workflows/integration-test.yml | 12 +- .github/workflows/integration.yml | 418 +- .github/workflows/k8s_e2e.yml | 119 +- .github/workflows/labeler.yml | 4 +- .github/workflows/master_merge_queue.yml | 10 +- .github/workflows/misc.yml | 47 +- .github/workflows/msrv.yml | 18 +- .github/workflows/nightly.yml | 6 +- .github/workflows/preview_site_trigger.yml | 67 +- .github/workflows/protobuf.yml | 13 +- .github/workflows/publish.yml | 339 +- .github/workflows/regression.yml | 726 +- .github/workflows/release.yml | 4 + .github/workflows/scorecard.yml | 73 + .github/workflows/semantic.yml | 254 + .github/workflows/spelling.yml | 62 +- .github/workflows/test.yml | 241 +- .github/workflows/unit_mac.yml | 55 +- .github/workflows/unit_windows.yml | 40 +- .github/workflows/workload_checks.yml | 210 - .gitignore | 68 +- CONTRIBUTING.md | 55 +- Cargo.lock | 6861 ++++--- Cargo.toml | 534 +- LICENSE-3rdparty.csv | 235 +- Makefile | 13 +- PRIVACY.md | 4 +- README.md | 151 +- RELEASES.md | 23 +- STYLE.md | 11 +- Tiltfile | 2 +- VERSIONING.md | 2 +- aqua/aqua.yaml | 14 + benches/batch.rs | 57 +- benches/codecs/encoder.rs | 2 +- benches/codecs/newline_bytes.rs | 4 
+- benches/distribution_statistic.rs | 15 +- benches/dnstap/mod.rs | 27 +- benches/enrichment_tables.rs | 216 +- benches/http.rs | 10 +- benches/languages.rs | 16 +- benches/lua.rs | 7 +- benches/metrics_snapshot.rs | 2 +- benches/remap.rs | 6 +- benches/transform/common.rs | 2 +- benches/transform/dedupe.rs | 45 +- benches/transform/reduce.rs | 19 +- benches/transform/route.rs | 7 +- buf.work.yaml | 4 - buf.yaml | 19 + build.rs | 49 +- ...aws_sink_retry_logic_update.enhancement.md | 3 + ...1155_http_sink_templateable_uri.feature.md | 4 + .../18445_armv6_binaries.enhancement.md | 3 - .../18863_k8s_logs_rotate_wait.enhancement.md | 3 - changelog.d/19813_add_mqtt_sink.feature.md | 3 - ...press_onezone_storage_class.enhancement.md | 3 - changelog.d/19892_dnstap_over_tcp.breaking.md | 3 - ...21_missing_dns_record_types.enhancement.md | 3 - changelog.d/19937_edns_ede_support.feature.md | 3 - ...0006_improve_greptimedb_tls.enhancement.md | 3 - .../20032_gzip_zlib_performance.fix.md | 3 - ...0035_dnstap_lowercase_hostnames.feature.md | 4 - ..._http_sink_header_templates.enhancement.md | 3 + ...70_utilization_metric_periodic_emit.fix.md | 3 + ...22764_clickhouse_query_settings.feature.md | 3 + ...e_bounded_date_range_search.enhancement.md | 3 + ..._uri_provided_basic_auth_opensearch.fix.md | 3 + ..._postgres_sink_healthcheck_disabled.fix.md | 3 + changelog.d/23220_hyphens_in_secrets.fix.md | 3 + ...23279_logs_sources_output_logs.breaking.md | 4 + ...7_fix_nats_sink_ignored_healthcheck.fix.md | 3 + changelog.d/23361_interval_ms_readable.fix.md | 3 + ...c-tag-values-full-unmodifiable-tags.fix.md | 3 + ...gative_buffer_size_and_event_gauges.fix.md | 3 + ..._dedupe_transform_time_settings.feature.md | 3 + .../6491_add_websocket_source.feature.md | 3 + ...upport_max_bytes_memory_buffers.feature.md | 3 + changelog.d/README.md | 19 +- changelog.d/aws_credentials_process.fix.md | 2 - changelog.d/batch-newline.fix.md | 3 + .../clickhouse_acknowledgements.breaking.md | 1 - ...atadog_agent_ddtags_parsing.enhancement.md | 4 - .../deprecate_strict_env_vars.breaking.md | 4 - .../fix_concurrent_map_potential_hang.fix.md | 3 + .../fix_metric_set_memory_growth.feature.md | 6 + ...include_paths_glob_patterns.enhancement.md | 3 - changelog.d/log_to_metric_config.fix.md | 3 + changelog.d/pulsar_source.feature.md | 3 - changelog.d/redis_sentinel_in_sink.feature.md | 3 + changelog.d/redis_zadd_in_sink.feature.md | 3 + .../splunk_hec_logs_raw_timestamp.fix.md | 1 - .../splunk_hec_received_event_bytes.fix.md | 3 - ...s_in_unsigned_int_templates.enhancement.md | 3 + clippy.toml | 4 +- config/examples/environment_variables.yaml | 6 +- config/examples/es_s3_hybrid.yaml | 4 +- config/examples/wrapped_json.yaml | 12 +- config/vector.yaml | 2 + cue.mod/module.cue | 3 + deny.toml | 33 +- distribution/docker/README.md | 6 +- distribution/docker/alpine/Dockerfile | 10 +- distribution/docker/debian/Dockerfile | 5 + .../docker/distroless-libc/Dockerfile | 7 +- .../docker/distroless-static/Dockerfile | 7 +- distribution/init.d/vector | 3 +- distribution/install.sh | 44 +- .../kubernetes/vector-agent/README.md | 2 +- .../kubernetes/vector-agent/configmap.yaml | 3 +- .../kubernetes/vector-agent/daemonset.yaml | 6 +- .../vector-agent/kustomization.yaml | 1 + .../kubernetes/vector-agent/rbac.yaml | 6 +- .../vector-agent/service-headless.yaml | 3 +- .../vector-agent/serviceaccount.yaml | 3 +- .../kubernetes/vector-aggregator/README.md | 2 +- .../vector-aggregator/configmap.yaml | 3 +- .../vector-aggregator/service-headless.yaml | 3 +- 
.../kubernetes/vector-aggregator/service.yaml | 3 +- .../vector-aggregator/serviceaccount.yaml | 3 +- .../vector-aggregator/statefulset.yaml | 5 +- .../vector-stateless-aggregator/README.md | 2 +- .../configmap.yaml | 3 +- .../deployment.yaml | 5 +- .../service-headless.yaml | 3 +- .../vector-stateless-aggregator/service.yaml | 3 +- .../serviceaccount.yaml | 3 +- distribution/systemd/vector.service | 2 +- docs/DEPRECATIONS.md | 5 +- docs/DEVELOPING.md | 6 +- docs/DOCUMENTING.md | 8 +- docs/REVIEWING.md | 4 +- docs/specs/component.md | 10 +- docs/tutorials/sinks/1_basic_sink.md | 2 +- docs/tutorials/sinks/2_http_sink.md | 51 +- lib/codecs/Cargo.toml | 34 +- lib/codecs/src/common/length_delimited.rs | 66 + lib/codecs/src/common/mod.rs | 2 +- lib/codecs/src/common/protobuf.rs | 36 - lib/codecs/src/decoding/format/avro.rs | 6 +- lib/codecs/src/decoding/format/gelf.rs | 43 +- lib/codecs/src/decoding/format/influxdb.rs | 212 + lib/codecs/src/decoding/format/json.rs | 4 +- lib/codecs/src/decoding/format/mod.rs | 2 + lib/codecs/src/decoding/format/native.rs | 2 +- lib/codecs/src/decoding/format/native_json.rs | 6 +- lib/codecs/src/decoding/format/protobuf.rs | 127 +- lib/codecs/src/decoding/format/syslog.rs | 2 +- lib/codecs/src/decoding/format/vrl.rs | 2 +- .../decoding/framing/character_delimited.rs | 9 +- .../src/decoding/framing/chunked_gelf.rs | 1265 ++ .../src/decoding/framing/length_delimited.rs | 105 +- lib/codecs/src/decoding/framing/mod.rs | 33 +- .../src/decoding/framing/octet_counting.rs | 11 +- lib/codecs/src/decoding/mod.rs | 109 +- lib/codecs/src/encoding/format/avro.rs | 2 +- lib/codecs/src/encoding/format/cef.rs | 542 + lib/codecs/src/encoding/format/csv.rs | 32 +- lib/codecs/src/encoding/format/gelf.rs | 13 +- lib/codecs/src/encoding/format/json.rs | 263 +- lib/codecs/src/encoding/format/logfmt.rs | 9 +- lib/codecs/src/encoding/format/mod.rs | 4 +- lib/codecs/src/encoding/format/native.rs | 2 +- lib/codecs/src/encoding/format/native_json.rs | 19 +- lib/codecs/src/encoding/format/protobuf.rs | 417 +- lib/codecs/src/encoding/format/raw_message.rs | 7 - lib/codecs/src/encoding/format/text.rs | 3 +- lib/codecs/src/encoding/framing/bytes.rs | 10 +- .../encoding/framing/character_delimited.rs | 1 + .../src/encoding/framing/length_delimited.rs | 85 +- .../src/encoding/framing/newline_delimited.rs | 11 +- lib/codecs/src/encoding/mod.rs | 88 +- lib/codecs/src/gelf.rs | 27 +- lib/codecs/src/lib.rs | 2 +- .../tests/data/native_encoding/json/0000.json | 2 +- .../tests/data/native_encoding/json/0001.json | 2 +- .../tests/data/native_encoding/json/0002.json | 2 +- .../tests/data/native_encoding/json/0003.json | 2 +- .../tests/data/native_encoding/json/0004.json | 2 +- .../tests/data/native_encoding/json/0005.json | 2 +- .../tests/data/native_encoding/json/0006.json | 2 +- .../tests/data/native_encoding/json/0007.json | 2 +- .../tests/data/native_encoding/json/0008.json | 2 +- .../tests/data/native_encoding/json/0009.json | 2 +- .../tests/data/native_encoding/json/0010.json | 2 +- .../tests/data/native_encoding/json/0011.json | 2 +- .../tests/data/native_encoding/json/0012.json | 2 +- .../tests/data/native_encoding/json/0013.json | 2 +- .../tests/data/native_encoding/json/0014.json | 2 +- .../tests/data/native_encoding/json/0015.json | 2 +- .../tests/data/native_encoding/json/0016.json | 2 +- .../tests/data/native_encoding/json/0017.json | 2 +- .../tests/data/native_encoding/json/0018.json | 2 +- .../tests/data/native_encoding/json/0019.json | 2 +- .../tests/data/native_encoding/json/0020.json 
| 2 +- .../tests/data/native_encoding/json/0021.json | 2 +- .../tests/data/native_encoding/json/0022.json | 2 +- .../tests/data/native_encoding/json/0023.json | 2 +- .../tests/data/native_encoding/json/0024.json | 2 +- .../tests/data/native_encoding/json/0025.json | 2 +- .../tests/data/native_encoding/json/0026.json | 2 +- .../tests/data/native_encoding/json/0027.json | 2 +- .../tests/data/native_encoding/json/0028.json | 2 +- .../tests/data/native_encoding/json/0029.json | 2 +- .../tests/data/native_encoding/json/0030.json | 2 +- .../tests/data/native_encoding/json/0031.json | 2 +- .../tests/data/native_encoding/json/0032.json | 2 +- .../tests/data/native_encoding/json/0033.json | 2 +- .../tests/data/native_encoding/json/0034.json | 2 +- .../tests/data/native_encoding/json/0035.json | 2 +- .../tests/data/native_encoding/json/0036.json | 2 +- .../tests/data/native_encoding/json/0037.json | 2 +- .../tests/data/native_encoding/json/0038.json | 2 +- .../tests/data/native_encoding/json/0039.json | 2 +- .../tests/data/native_encoding/json/0040.json | 2 +- .../tests/data/native_encoding/json/0041.json | 2 +- .../tests/data/native_encoding/json/0042.json | 2 +- .../tests/data/native_encoding/json/0043.json | 2 +- .../tests/data/native_encoding/json/0044.json | 2 +- .../tests/data/native_encoding/json/0045.json | 2 +- .../tests/data/native_encoding/json/0046.json | 2 +- .../tests/data/native_encoding/json/0047.json | 2 +- .../tests/data/native_encoding/json/0048.json | 2 +- .../tests/data/native_encoding/json/0049.json | 2 +- .../tests/data/native_encoding/json/0050.json | 2 +- .../tests/data/native_encoding/json/0051.json | 2 +- .../tests/data/native_encoding/json/0052.json | 2 +- .../tests/data/native_encoding/json/0053.json | 2 +- .../tests/data/native_encoding/json/0054.json | 2 +- .../tests/data/native_encoding/json/0055.json | 2 +- .../tests/data/native_encoding/json/0056.json | 2 +- .../tests/data/native_encoding/json/0057.json | 2 +- .../tests/data/native_encoding/json/0058.json | 2 +- .../tests/data/native_encoding/json/0059.json | 2 +- .../tests/data/native_encoding/json/0060.json | 2 +- .../tests/data/native_encoding/json/0061.json | 2 +- .../tests/data/native_encoding/json/0062.json | 2 +- .../tests/data/native_encoding/json/0063.json | 2 +- .../tests/data/native_encoding/json/0064.json | 2 +- .../tests/data/native_encoding/json/0065.json | 2 +- .../tests/data/native_encoding/json/0066.json | 2 +- .../tests/data/native_encoding/json/0067.json | 2 +- .../tests/data/native_encoding/json/0068.json | 2 +- .../tests/data/native_encoding/json/0069.json | 2 +- .../tests/data/native_encoding/json/0070.json | 2 +- .../tests/data/native_encoding/json/0071.json | 2 +- .../tests/data/native_encoding/json/0072.json | 2 +- .../tests/data/native_encoding/json/0073.json | 2 +- .../tests/data/native_encoding/json/0074.json | 2 +- .../tests/data/native_encoding/json/0075.json | 2 +- .../tests/data/native_encoding/json/0076.json | 2 +- .../tests/data/native_encoding/json/0077.json | 2 +- .../tests/data/native_encoding/json/0078.json | 2 +- .../tests/data/native_encoding/json/0079.json | 2 +- .../tests/data/native_encoding/json/0080.json | 2 +- .../tests/data/native_encoding/json/0081.json | 2 +- .../tests/data/native_encoding/json/0082.json | 2 +- .../tests/data/native_encoding/json/0083.json | 2 +- .../tests/data/native_encoding/json/0084.json | 2 +- .../tests/data/native_encoding/json/0085.json | 2 +- .../tests/data/native_encoding/json/0086.json | 2 +- .../tests/data/native_encoding/json/0087.json | 2 
+- .../tests/data/native_encoding/json/0088.json | 2 +- .../tests/data/native_encoding/json/0089.json | 2 +- .../tests/data/native_encoding/json/0090.json | 2 +- .../tests/data/native_encoding/json/0091.json | 2 +- .../tests/data/native_encoding/json/0092.json | 2 +- .../tests/data/native_encoding/json/0093.json | 2 +- .../tests/data/native_encoding/json/0094.json | 2 +- .../tests/data/native_encoding/json/0095.json | 2 +- .../tests/data/native_encoding/json/0096.json | 2 +- .../tests/data/native_encoding/json/0097.json | 2 +- .../tests/data/native_encoding/json/0098.json | 2 +- .../tests/data/native_encoding/json/0099.json | 2 +- .../tests/data/native_encoding/json/0100.json | 2 +- .../tests/data/native_encoding/json/0101.json | 2 +- .../tests/data/native_encoding/json/0102.json | 2 +- .../tests/data/native_encoding/json/0103.json | 2 +- .../tests/data/native_encoding/json/0104.json | 2 +- .../tests/data/native_encoding/json/0105.json | 2 +- .../tests/data/native_encoding/json/0106.json | 2 +- .../tests/data/native_encoding/json/0107.json | 2 +- .../tests/data/native_encoding/json/0108.json | 2 +- .../tests/data/native_encoding/json/0109.json | 2 +- .../tests/data/native_encoding/json/0110.json | 2 +- .../tests/data/native_encoding/json/0111.json | 2 +- .../tests/data/native_encoding/json/0112.json | 2 +- .../tests/data/native_encoding/json/0113.json | 2 +- .../tests/data/native_encoding/json/0114.json | 2 +- .../tests/data/native_encoding/json/0115.json | 2 +- .../tests/data/native_encoding/json/0116.json | 2 +- .../tests/data/native_encoding/json/0117.json | 2 +- .../tests/data/native_encoding/json/0118.json | 2 +- .../tests/data/native_encoding/json/0119.json | 2 +- .../tests/data/native_encoding/json/0120.json | 2 +- .../tests/data/native_encoding/json/0121.json | 2 +- .../tests/data/native_encoding/json/0122.json | 2 +- .../tests/data/native_encoding/json/0123.json | 2 +- .../tests/data/native_encoding/json/0124.json | 2 +- .../tests/data/native_encoding/json/0125.json | 2 +- .../tests/data/native_encoding/json/0126.json | 2 +- .../tests/data/native_encoding/json/0127.json | 2 +- .../tests/data/native_encoding/json/0128.json | 2 +- .../tests/data/native_encoding/json/0129.json | 2 +- .../tests/data/native_encoding/json/0130.json | 2 +- .../tests/data/native_encoding/json/0131.json | 2 +- .../tests/data/native_encoding/json/0132.json | 2 +- .../tests/data/native_encoding/json/0133.json | 2 +- .../tests/data/native_encoding/json/0134.json | 2 +- .../tests/data/native_encoding/json/0135.json | 2 +- .../tests/data/native_encoding/json/0136.json | 2 +- .../tests/data/native_encoding/json/0137.json | 2 +- .../tests/data/native_encoding/json/0138.json | 2 +- .../tests/data/native_encoding/json/0139.json | 2 +- .../tests/data/native_encoding/json/0140.json | 2 +- .../tests/data/native_encoding/json/0141.json | 2 +- .../tests/data/native_encoding/json/0142.json | 2 +- .../tests/data/native_encoding/json/0143.json | 2 +- .../tests/data/native_encoding/json/0144.json | 2 +- .../tests/data/native_encoding/json/0145.json | 2 +- .../tests/data/native_encoding/json/0146.json | 2 +- .../tests/data/native_encoding/json/0147.json | 2 +- .../tests/data/native_encoding/json/0148.json | 2 +- .../tests/data/native_encoding/json/0149.json | 2 +- .../tests/data/native_encoding/json/0150.json | 2 +- .../tests/data/native_encoding/json/0151.json | 2 +- .../tests/data/native_encoding/json/0152.json | 2 +- .../tests/data/native_encoding/json/0153.json | 2 +- .../tests/data/native_encoding/json/0154.json | 2 +- 
.../tests/data/native_encoding/json/0155.json | 2 +- .../tests/data/native_encoding/json/0156.json | 2 +- .../tests/data/native_encoding/json/0157.json | 2 +- .../tests/data/native_encoding/json/0158.json | 2 +- .../tests/data/native_encoding/json/0159.json | 2 +- .../tests/data/native_encoding/json/0160.json | 2 +- .../tests/data/native_encoding/json/0161.json | 2 +- .../tests/data/native_encoding/json/0162.json | 2 +- .../tests/data/native_encoding/json/0163.json | 2 +- .../tests/data/native_encoding/json/0164.json | 2 +- .../tests/data/native_encoding/json/0165.json | 2 +- .../tests/data/native_encoding/json/0166.json | 2 +- .../tests/data/native_encoding/json/0167.json | 2 +- .../tests/data/native_encoding/json/0168.json | 2 +- .../tests/data/native_encoding/json/0169.json | 2 +- .../tests/data/native_encoding/json/0170.json | 2 +- .../tests/data/native_encoding/json/0171.json | 2 +- .../tests/data/native_encoding/json/0172.json | 2 +- .../tests/data/native_encoding/json/0173.json | 2 +- .../tests/data/native_encoding/json/0174.json | 2 +- .../tests/data/native_encoding/json/0175.json | 2 +- .../tests/data/native_encoding/json/0176.json | 2 +- .../tests/data/native_encoding/json/0177.json | 2 +- .../tests/data/native_encoding/json/0178.json | 2 +- .../tests/data/native_encoding/json/0179.json | 2 +- .../tests/data/native_encoding/json/0181.json | 2 +- .../tests/data/native_encoding/json/0182.json | 2 +- .../tests/data/native_encoding/json/0183.json | 2 +- .../tests/data/native_encoding/json/0184.json | 2 +- .../tests/data/native_encoding/json/0185.json | 2 +- .../tests/data/native_encoding/json/0186.json | 2 +- .../tests/data/native_encoding/json/0187.json | 2 +- .../tests/data/native_encoding/json/0188.json | 2 +- .../tests/data/native_encoding/json/0189.json | 2 +- .../tests/data/native_encoding/json/0190.json | 2 +- .../tests/data/native_encoding/json/0191.json | 2 +- .../tests/data/native_encoding/json/0192.json | 2 +- .../tests/data/native_encoding/json/0193.json | 2 +- .../tests/data/native_encoding/json/0194.json | 2 +- .../tests/data/native_encoding/json/0195.json | 2 +- .../tests/data/native_encoding/json/0196.json | 2 +- .../tests/data/native_encoding/json/0197.json | 2 +- .../tests/data/native_encoding/json/0198.json | 2 +- .../tests/data/native_encoding/json/0199.json | 2 +- .../tests/data/native_encoding/json/0200.json | 2 +- .../tests/data/native_encoding/json/0201.json | 2 +- .../tests/data/native_encoding/json/0202.json | 2 +- .../tests/data/native_encoding/json/0203.json | 2 +- .../tests/data/native_encoding/json/0204.json | 2 +- .../tests/data/native_encoding/json/0205.json | 2 +- .../tests/data/native_encoding/json/0206.json | 2 +- .../tests/data/native_encoding/json/0207.json | 2 +- .../tests/data/native_encoding/json/0208.json | 2 +- .../tests/data/native_encoding/json/0209.json | 2 +- .../tests/data/native_encoding/json/0210.json | 2 +- .../tests/data/native_encoding/json/0211.json | 2 +- .../tests/data/native_encoding/json/0212.json | 2 +- .../tests/data/native_encoding/json/0213.json | 2 +- .../tests/data/native_encoding/json/0214.json | 2 +- .../tests/data/native_encoding/json/0215.json | 2 +- .../tests/data/native_encoding/json/0216.json | 2 +- .../tests/data/native_encoding/json/0217.json | 2 +- .../tests/data/native_encoding/json/0218.json | 2 +- .../tests/data/native_encoding/json/0219.json | 2 +- .../tests/data/native_encoding/json/0220.json | 2 +- .../tests/data/native_encoding/json/0221.json | 2 +- .../tests/data/native_encoding/json/0222.json | 2 +- 
.../tests/data/native_encoding/json/0223.json | 2 +- .../tests/data/native_encoding/json/0224.json | 2 +- .../tests/data/native_encoding/json/0225.json | 2 +- .../tests/data/native_encoding/json/0226.json | 2 +- .../tests/data/native_encoding/json/0227.json | 2 +- .../tests/data/native_encoding/json/0228.json | 2 +- .../tests/data/native_encoding/json/0229.json | 2 +- .../tests/data/native_encoding/json/0230.json | 2 +- .../tests/data/native_encoding/json/0231.json | 2 +- .../tests/data/native_encoding/json/0232.json | 2 +- .../tests/data/native_encoding/json/0233.json | 2 +- .../tests/data/native_encoding/json/0234.json | 2 +- .../tests/data/native_encoding/json/0235.json | 2 +- .../tests/data/native_encoding/json/0236.json | 2 +- .../tests/data/native_encoding/json/0237.json | 2 +- .../tests/data/native_encoding/json/0238.json | 2 +- .../tests/data/native_encoding/json/0239.json | 2 +- .../tests/data/native_encoding/json/0240.json | 2 +- .../tests/data/native_encoding/json/0241.json | 2 +- .../tests/data/native_encoding/json/0242.json | 2 +- .../tests/data/native_encoding/json/0243.json | 2 +- .../tests/data/native_encoding/json/0244.json | 2 +- .../tests/data/native_encoding/json/0245.json | 2 +- .../tests/data/native_encoding/json/0246.json | 2 +- .../tests/data/native_encoding/json/0247.json | 2 +- .../tests/data/native_encoding/json/0248.json | 2 +- .../tests/data/native_encoding/json/0249.json | 2 +- .../tests/data/native_encoding/json/0250.json | 2 +- .../tests/data/native_encoding/json/0251.json | 2 +- .../tests/data/native_encoding/json/0252.json | 2 +- .../tests/data/native_encoding/json/0253.json | 2 +- .../tests/data/native_encoding/json/0254.json | 2 +- .../tests/data/native_encoding/json/0255.json | 2 +- .../tests/data/native_encoding/json/0256.json | 2 +- .../tests/data/native_encoding/json/0257.json | 2 +- .../tests/data/native_encoding/json/0258.json | 2 +- .../tests/data/native_encoding/json/0259.json | 2 +- .../tests/data/native_encoding/json/0260.json | 2 +- .../tests/data/native_encoding/json/0261.json | 2 +- .../tests/data/native_encoding/json/0262.json | 2 +- .../tests/data/native_encoding/json/0263.json | 2 +- .../tests/data/native_encoding/json/0264.json | 2 +- .../tests/data/native_encoding/json/0265.json | 2 +- .../tests/data/native_encoding/json/0266.json | 2 +- .../tests/data/native_encoding/json/0267.json | 2 +- .../tests/data/native_encoding/json/0268.json | 2 +- .../tests/data/native_encoding/json/0269.json | 2 +- .../tests/data/native_encoding/json/0270.json | 2 +- .../tests/data/native_encoding/json/0271.json | 2 +- .../tests/data/native_encoding/json/0272.json | 2 +- .../tests/data/native_encoding/json/0273.json | 2 +- .../tests/data/native_encoding/json/0274.json | 2 +- .../tests/data/native_encoding/json/0275.json | 2 +- .../tests/data/native_encoding/json/0276.json | 2 +- .../tests/data/native_encoding/json/0277.json | 2 +- .../tests/data/native_encoding/json/0278.json | 2 +- .../tests/data/native_encoding/json/0279.json | 2 +- .../tests/data/native_encoding/json/0280.json | 2 +- .../tests/data/native_encoding/json/0281.json | 2 +- .../tests/data/native_encoding/json/0282.json | 2 +- .../tests/data/native_encoding/json/0283.json | 2 +- .../tests/data/native_encoding/json/0284.json | 2 +- .../tests/data/native_encoding/json/0285.json | 2 +- .../tests/data/native_encoding/json/0286.json | 2 +- .../tests/data/native_encoding/json/0287.json | 2 +- .../tests/data/native_encoding/json/0288.json | 2 +- .../tests/data/native_encoding/json/0289.json | 2 +- 
.../tests/data/native_encoding/json/0290.json | 2 +- .../tests/data/native_encoding/json/0291.json | 2 +- .../tests/data/native_encoding/json/0292.json | 2 +- .../tests/data/native_encoding/json/0293.json | 2 +- .../tests/data/native_encoding/json/0294.json | 2 +- .../tests/data/native_encoding/json/0295.json | 2 +- .../tests/data/native_encoding/json/0296.json | 2 +- .../tests/data/native_encoding/json/0297.json | 2 +- .../tests/data/native_encoding/json/0298.json | 2 +- .../tests/data/native_encoding/json/0299.json | 2 +- .../tests/data/native_encoding/json/0300.json | 2 +- .../tests/data/native_encoding/json/0301.json | 2 +- .../tests/data/native_encoding/json/0302.json | 2 +- .../tests/data/native_encoding/json/0303.json | 2 +- .../tests/data/native_encoding/json/0304.json | 2 +- .../tests/data/native_encoding/json/0305.json | 2 +- .../tests/data/native_encoding/json/0306.json | 2 +- .../tests/data/native_encoding/json/0307.json | 2 +- .../tests/data/native_encoding/json/0308.json | 2 +- .../tests/data/native_encoding/json/0309.json | 2 +- .../tests/data/native_encoding/json/0310.json | 2 +- .../tests/data/native_encoding/json/0311.json | 2 +- .../tests/data/native_encoding/json/0312.json | 2 +- .../tests/data/native_encoding/json/0313.json | 2 +- .../tests/data/native_encoding/json/0314.json | 2 +- .../tests/data/native_encoding/json/0315.json | 2 +- .../tests/data/native_encoding/json/0316.json | 2 +- .../tests/data/native_encoding/json/0317.json | 2 +- .../tests/data/native_encoding/json/0318.json | 2 +- .../tests/data/native_encoding/json/0319.json | 2 +- .../tests/data/native_encoding/json/0320.json | 2 +- .../tests/data/native_encoding/json/0321.json | 2 +- .../tests/data/native_encoding/json/0322.json | 2 +- .../tests/data/native_encoding/json/0323.json | 2 +- .../tests/data/native_encoding/json/0324.json | 2 +- .../tests/data/native_encoding/json/0325.json | 2 +- .../tests/data/native_encoding/json/0326.json | 2 +- .../tests/data/native_encoding/json/0327.json | 2 +- .../tests/data/native_encoding/json/0328.json | 2 +- .../tests/data/native_encoding/json/0329.json | 2 +- .../tests/data/native_encoding/json/0330.json | 2 +- .../tests/data/native_encoding/json/0331.json | 2 +- .../tests/data/native_encoding/json/0332.json | 2 +- .../tests/data/native_encoding/json/0333.json | 2 +- .../tests/data/native_encoding/json/0334.json | 2 +- .../tests/data/native_encoding/json/0335.json | 2 +- .../tests/data/native_encoding/json/0336.json | 2 +- .../tests/data/native_encoding/json/0338.json | 2 +- .../tests/data/native_encoding/json/0339.json | 2 +- .../tests/data/native_encoding/json/0340.json | 2 +- .../tests/data/native_encoding/json/0341.json | 2 +- .../tests/data/native_encoding/json/0342.json | 2 +- .../tests/data/native_encoding/json/0343.json | 2 +- .../tests/data/native_encoding/json/0344.json | 2 +- .../tests/data/native_encoding/json/0345.json | 2 +- .../tests/data/native_encoding/json/0346.json | 2 +- .../tests/data/native_encoding/json/0347.json | 2 +- .../tests/data/native_encoding/json/0348.json | 2 +- .../tests/data/native_encoding/json/0349.json | 2 +- .../tests/data/native_encoding/json/0350.json | 2 +- .../tests/data/native_encoding/json/0351.json | 2 +- .../tests/data/native_encoding/json/0352.json | 2 +- .../tests/data/native_encoding/json/0353.json | 2 +- .../tests/data/native_encoding/json/0354.json | 2 +- .../tests/data/native_encoding/json/0355.json | 2 +- .../tests/data/native_encoding/json/0356.json | 2 +- .../tests/data/native_encoding/json/0357.json | 2 +- 
 .../tests/data/native_encoding/json/0358.json | 2 +-
 [... 659 more entries of the form ".../tests/data/native_encoding/json/NNNN.json | 2 +-", covering 0359 through 1022; 0394, 0617, 0773, 0804, and 0938 are not in the list ...]
 .../tests/data/native_encoding/json/1023.json | 2 +-
 .../native_encoding/json/pre-v41/0000.json    | 1 +
 [... 799 more entries of the form ".../native_encoding/json/pre-v41/NNNN.json | 1 +", covering 0001 through 0799 ...]
 .../native_encoding/json/pre-v41/0800.json    | 1 +
.../native_encoding/json/pre-v41/0801.json | 1 + .../native_encoding/json/pre-v41/0802.json | 1 + .../native_encoding/json/pre-v41/0803.json | 1 + .../native_encoding/json/pre-v41/0804.json | 1 + .../native_encoding/json/pre-v41/0805.json | 1 + .../native_encoding/json/pre-v41/0806.json | 1 + .../native_encoding/json/pre-v41/0807.json | 1 + .../native_encoding/json/pre-v41/0808.json | 1 + .../native_encoding/json/pre-v41/0809.json | 1 + .../native_encoding/json/pre-v41/0810.json | 1 + .../native_encoding/json/pre-v41/0811.json | 1 + .../native_encoding/json/pre-v41/0812.json | 1 + .../native_encoding/json/pre-v41/0813.json | 1 + .../native_encoding/json/pre-v41/0814.json | 1 + .../native_encoding/json/pre-v41/0815.json | 1 + .../native_encoding/json/pre-v41/0816.json | 1 + .../native_encoding/json/pre-v41/0817.json | 1 + .../native_encoding/json/pre-v41/0818.json | 1 + .../native_encoding/json/pre-v41/0819.json | 1 + .../native_encoding/json/pre-v41/0820.json | 1 + .../native_encoding/json/pre-v41/0821.json | 1 + .../native_encoding/json/pre-v41/0822.json | 1 + .../native_encoding/json/pre-v41/0823.json | 1 + .../native_encoding/json/pre-v41/0824.json | 1 + .../native_encoding/json/pre-v41/0825.json | 1 + .../native_encoding/json/pre-v41/0826.json | 1 + .../native_encoding/json/pre-v41/0827.json | 1 + .../native_encoding/json/pre-v41/0828.json | 1 + .../native_encoding/json/pre-v41/0829.json | 1 + .../native_encoding/json/pre-v41/0830.json | 1 + .../native_encoding/json/pre-v41/0831.json | 1 + .../native_encoding/json/pre-v41/0832.json | 1 + .../native_encoding/json/pre-v41/0833.json | 1 + .../native_encoding/json/pre-v41/0834.json | 1 + .../native_encoding/json/pre-v41/0835.json | 1 + .../native_encoding/json/pre-v41/0836.json | 1 + .../native_encoding/json/pre-v41/0837.json | 1 + .../native_encoding/json/pre-v41/0838.json | 1 + .../native_encoding/json/pre-v41/0839.json | 1 + .../native_encoding/json/pre-v41/0840.json | 1 + .../native_encoding/json/pre-v41/0841.json | 1 + .../native_encoding/json/pre-v41/0842.json | 1 + .../native_encoding/json/pre-v41/0843.json | 1 + .../native_encoding/json/pre-v41/0844.json | 1 + .../native_encoding/json/pre-v41/0845.json | 1 + .../native_encoding/json/pre-v41/0846.json | 1 + .../native_encoding/json/pre-v41/0847.json | 1 + .../native_encoding/json/pre-v41/0848.json | 1 + .../native_encoding/json/pre-v41/0849.json | 1 + .../native_encoding/json/pre-v41/0850.json | 1 + .../native_encoding/json/pre-v41/0851.json | 1 + .../native_encoding/json/pre-v41/0852.json | 1 + .../native_encoding/json/pre-v41/0853.json | 1 + .../native_encoding/json/pre-v41/0854.json | 1 + .../native_encoding/json/pre-v41/0855.json | 1 + .../native_encoding/json/pre-v41/0856.json | 1 + .../native_encoding/json/pre-v41/0857.json | 1 + .../native_encoding/json/pre-v41/0858.json | 1 + .../native_encoding/json/pre-v41/0859.json | 1 + .../native_encoding/json/pre-v41/0860.json | 1 + .../native_encoding/json/pre-v41/0861.json | 1 + .../native_encoding/json/pre-v41/0862.json | 1 + .../native_encoding/json/pre-v41/0863.json | 1 + .../native_encoding/json/pre-v41/0864.json | 1 + .../native_encoding/json/pre-v41/0865.json | 1 + .../native_encoding/json/pre-v41/0866.json | 1 + .../native_encoding/json/pre-v41/0867.json | 1 + .../native_encoding/json/pre-v41/0868.json | 1 + .../native_encoding/json/pre-v41/0869.json | 1 + .../native_encoding/json/pre-v41/0870.json | 1 + .../native_encoding/json/pre-v41/0871.json | 1 + .../native_encoding/json/pre-v41/0872.json | 1 + 
.../native_encoding/json/pre-v41/0873.json | 1 + .../native_encoding/json/pre-v41/0874.json | 1 + .../native_encoding/json/pre-v41/0875.json | 1 + .../native_encoding/json/pre-v41/0876.json | 1 + .../native_encoding/json/pre-v41/0877.json | 1 + .../native_encoding/json/pre-v41/0878.json | 1 + .../native_encoding/json/pre-v41/0879.json | 1 + .../native_encoding/json/pre-v41/0880.json | 1 + .../native_encoding/json/pre-v41/0881.json | 1 + .../native_encoding/json/pre-v41/0882.json | 1 + .../native_encoding/json/pre-v41/0883.json | 1 + .../native_encoding/json/pre-v41/0884.json | 1 + .../native_encoding/json/pre-v41/0885.json | 1 + .../native_encoding/json/pre-v41/0886.json | 1 + .../native_encoding/json/pre-v41/0887.json | 1 + .../native_encoding/json/pre-v41/0888.json | 1 + .../native_encoding/json/pre-v41/0889.json | 1 + .../native_encoding/json/pre-v41/0890.json | 1 + .../native_encoding/json/pre-v41/0891.json | 1 + .../native_encoding/json/pre-v41/0892.json | 1 + .../native_encoding/json/pre-v41/0893.json | 1 + .../native_encoding/json/pre-v41/0894.json | 1 + .../native_encoding/json/pre-v41/0895.json | 1 + .../native_encoding/json/pre-v41/0896.json | 1 + .../native_encoding/json/pre-v41/0897.json | 1 + .../native_encoding/json/pre-v41/0898.json | 1 + .../native_encoding/json/pre-v41/0899.json | 1 + .../native_encoding/json/pre-v41/0900.json | 1 + .../native_encoding/json/pre-v41/0901.json | 1 + .../native_encoding/json/pre-v41/0902.json | 1 + .../native_encoding/json/pre-v41/0903.json | 1 + .../native_encoding/json/pre-v41/0904.json | 1 + .../native_encoding/json/pre-v41/0905.json | 1 + .../native_encoding/json/pre-v41/0906.json | 1 + .../native_encoding/json/pre-v41/0907.json | 1 + .../native_encoding/json/pre-v41/0908.json | 1 + .../native_encoding/json/pre-v41/0909.json | 1 + .../native_encoding/json/pre-v41/0910.json | 1 + .../native_encoding/json/pre-v41/0911.json | 1 + .../native_encoding/json/pre-v41/0912.json | 1 + .../native_encoding/json/pre-v41/0913.json | 1 + .../native_encoding/json/pre-v41/0914.json | 1 + .../native_encoding/json/pre-v41/0915.json | 1 + .../native_encoding/json/pre-v41/0916.json | 1 + .../native_encoding/json/pre-v41/0917.json | 1 + .../native_encoding/json/pre-v41/0918.json | 1 + .../native_encoding/json/pre-v41/0919.json | 1 + .../native_encoding/json/pre-v41/0920.json | 1 + .../native_encoding/json/pre-v41/0921.json | 1 + .../native_encoding/json/pre-v41/0922.json | 1 + .../native_encoding/json/pre-v41/0923.json | 1 + .../native_encoding/json/pre-v41/0924.json | 1 + .../native_encoding/json/pre-v41/0925.json | 1 + .../native_encoding/json/pre-v41/0926.json | 1 + .../native_encoding/json/pre-v41/0927.json | 1 + .../native_encoding/json/pre-v41/0928.json | 1 + .../native_encoding/json/pre-v41/0929.json | 1 + .../native_encoding/json/pre-v41/0930.json | 1 + .../native_encoding/json/pre-v41/0931.json | 1 + .../native_encoding/json/pre-v41/0932.json | 1 + .../native_encoding/json/pre-v41/0933.json | 1 + .../native_encoding/json/pre-v41/0934.json | 1 + .../native_encoding/json/pre-v41/0935.json | 1 + .../native_encoding/json/pre-v41/0936.json | 1 + .../native_encoding/json/pre-v41/0937.json | 1 + .../native_encoding/json/pre-v41/0938.json | 1 + .../native_encoding/json/pre-v41/0939.json | 1 + .../native_encoding/json/pre-v41/0940.json | 1 + .../native_encoding/json/pre-v41/0941.json | 1 + .../native_encoding/json/pre-v41/0942.json | 1 + .../native_encoding/json/pre-v41/0943.json | 1 + .../native_encoding/json/pre-v41/0944.json | 1 + 
.../native_encoding/json/pre-v41/0945.json | 1 + .../native_encoding/json/pre-v41/0946.json | 1 + .../native_encoding/json/pre-v41/0947.json | 1 + .../native_encoding/json/pre-v41/0948.json | 1 + .../native_encoding/json/pre-v41/0949.json | 1 + .../native_encoding/json/pre-v41/0950.json | 1 + .../native_encoding/json/pre-v41/0951.json | 1 + .../native_encoding/json/pre-v41/0952.json | 1 + .../native_encoding/json/pre-v41/0953.json | 1 + .../native_encoding/json/pre-v41/0954.json | 1 + .../native_encoding/json/pre-v41/0955.json | 1 + .../native_encoding/json/pre-v41/0956.json | 1 + .../native_encoding/json/pre-v41/0957.json | 1 + .../native_encoding/json/pre-v41/0958.json | 1 + .../native_encoding/json/pre-v41/0959.json | 1 + .../native_encoding/json/pre-v41/0960.json | 1 + .../native_encoding/json/pre-v41/0961.json | 1 + .../native_encoding/json/pre-v41/0962.json | 1 + .../native_encoding/json/pre-v41/0963.json | 1 + .../native_encoding/json/pre-v41/0964.json | 1 + .../native_encoding/json/pre-v41/0965.json | 1 + .../native_encoding/json/pre-v41/0966.json | 1 + .../native_encoding/json/pre-v41/0967.json | 1 + .../native_encoding/json/pre-v41/0968.json | 1 + .../native_encoding/json/pre-v41/0969.json | 1 + .../native_encoding/json/pre-v41/0970.json | 1 + .../native_encoding/json/pre-v41/0971.json | 1 + .../native_encoding/json/pre-v41/0972.json | 1 + .../native_encoding/json/pre-v41/0973.json | 1 + .../native_encoding/json/pre-v41/0974.json | 1 + .../native_encoding/json/pre-v41/0975.json | 1 + .../native_encoding/json/pre-v41/0976.json | 1 + .../native_encoding/json/pre-v41/0977.json | 1 + .../native_encoding/json/pre-v41/0978.json | 1 + .../native_encoding/json/pre-v41/0979.json | 1 + .../native_encoding/json/pre-v41/0980.json | 1 + .../native_encoding/json/pre-v41/0981.json | 1 + .../native_encoding/json/pre-v41/0982.json | 1 + .../native_encoding/json/pre-v41/0983.json | 1 + .../native_encoding/json/pre-v41/0984.json | 1 + .../native_encoding/json/pre-v41/0985.json | 1 + .../native_encoding/json/pre-v41/0986.json | 1 + .../native_encoding/json/pre-v41/0987.json | 1 + .../native_encoding/json/pre-v41/0988.json | 1 + .../native_encoding/json/pre-v41/0989.json | 1 + .../native_encoding/json/pre-v41/0990.json | 1 + .../native_encoding/json/pre-v41/0991.json | 1 + .../native_encoding/json/pre-v41/0992.json | 1 + .../native_encoding/json/pre-v41/0993.json | 1 + .../native_encoding/json/pre-v41/0994.json | 1 + .../native_encoding/json/pre-v41/0995.json | 1 + .../native_encoding/json/pre-v41/0996.json | 1 + .../native_encoding/json/pre-v41/0997.json | 1 + .../native_encoding/json/pre-v41/0998.json | 1 + .../native_encoding/json/pre-v41/0999.json | 1 + .../native_encoding/json/pre-v41/1000.json | 1 + .../native_encoding/json/pre-v41/1001.json | 1 + .../native_encoding/json/pre-v41/1002.json | 1 + .../native_encoding/json/pre-v41/1003.json | 1 + .../native_encoding/json/pre-v41/1004.json | 1 + .../native_encoding/json/pre-v41/1005.json | 1 + .../native_encoding/json/pre-v41/1006.json | 1 + .../native_encoding/json/pre-v41/1007.json | 1 + .../native_encoding/json/pre-v41/1008.json | 1 + .../native_encoding/json/pre-v41/1009.json | 1 + .../native_encoding/json/pre-v41/1010.json | 1 + .../native_encoding/json/pre-v41/1011.json | 1 + .../native_encoding/json/pre-v41/1012.json | 1 + .../native_encoding/json/pre-v41/1013.json | 1 + .../native_encoding/json/pre-v41/1014.json | 1 + .../native_encoding/json/pre-v41/1015.json | 1 + .../native_encoding/json/pre-v41/1016.json | 1 + 
.../native_encoding/json/pre-v41/1017.json | 1 + .../native_encoding/json/pre-v41/1018.json | 1 + .../native_encoding/json/pre-v41/1019.json | 1 + .../native_encoding/json/pre-v41/1020.json | 1 + .../native_encoding/json/pre-v41/1021.json | 1 + .../native_encoding/json/pre-v41/1022.json | 1 + .../native_encoding/json/pre-v41/1023.json | 1 + .../tests/data/native_encoding/proto/0000.pb | Bin 73 -> 160 bytes .../tests/data/native_encoding/proto/0001.pb | Bin 51 -> 71 bytes .../tests/data/native_encoding/proto/0002.pb | Bin 73 -> 695 bytes .../tests/data/native_encoding/proto/0003.pb | Bin 34 -> 956 bytes .../tests/data/native_encoding/proto/0004.pb | Bin 398 -> 2306 bytes .../tests/data/native_encoding/proto/0005.pb | Bin 1720 -> 131 bytes .../tests/data/native_encoding/proto/0006.pb | Bin 1856 -> 365 bytes .../tests/data/native_encoding/proto/0007.pb | Bin 121 -> 2163 bytes .../tests/data/native_encoding/proto/0008.pb | Bin 32 -> 210 bytes .../tests/data/native_encoding/proto/0009.pb | Bin 476 -> 315 bytes .../tests/data/native_encoding/proto/0010.pb | Bin 1996 -> 60 bytes .../tests/data/native_encoding/proto/0011.pb | Bin 89 -> 66 bytes .../tests/data/native_encoding/proto/0012.pb | Bin 128 -> 1686 bytes .../tests/data/native_encoding/proto/0013.pb | Bin 2794 -> 48 bytes .../tests/data/native_encoding/proto/0014.pb | Bin 29 -> 2075 bytes .../tests/data/native_encoding/proto/0015.pb | Bin 137 -> 104 bytes .../tests/data/native_encoding/proto/0016.pb | Bin 168 -> 646 bytes .../tests/data/native_encoding/proto/0017.pb | Bin 3611 -> 32 bytes .../tests/data/native_encoding/proto/0018.pb | Bin 96 -> 48 bytes .../tests/data/native_encoding/proto/0019.pb | Bin 32 -> 579 bytes .../tests/data/native_encoding/proto/0020.pb | Bin 351 -> 2406 bytes .../tests/data/native_encoding/proto/0021.pb | Bin 58 -> 57 bytes .../tests/data/native_encoding/proto/0022.pb | Bin 2281 -> 112 bytes .../tests/data/native_encoding/proto/0023.pb | Bin 1228 -> 144 bytes .../tests/data/native_encoding/proto/0024.pb | Bin 156 -> 69 bytes .../tests/data/native_encoding/proto/0025.pb | Bin 335 -> 1433 bytes .../tests/data/native_encoding/proto/0026.pb | Bin 32 -> 80 bytes .../tests/data/native_encoding/proto/0027.pb | Bin 80 -> 50 bytes .../tests/data/native_encoding/proto/0028.pb | Bin 2095 -> 964 bytes .../tests/data/native_encoding/proto/0029.pb | Bin 2228 -> 32 bytes .../tests/data/native_encoding/proto/0030.pb | Bin 1925 -> 64 bytes .../tests/data/native_encoding/proto/0031.pb | Bin 51 -> 72 bytes .../tests/data/native_encoding/proto/0032.pb | Bin 1149 -> 105 bytes .../tests/data/native_encoding/proto/0033.pb | Bin 501 -> 112 bytes .../tests/data/native_encoding/proto/0034.pb | Bin 405 -> 59 bytes .../tests/data/native_encoding/proto/0035.pb | Bin 74 -> 88 bytes .../tests/data/native_encoding/proto/0036.pb | Bin 22 -> 62 bytes .../tests/data/native_encoding/proto/0037.pb | Bin 92 -> 50 bytes .../tests/data/native_encoding/proto/0038.pb | Bin 1746 -> 195 bytes .../tests/data/native_encoding/proto/0039.pb | Bin 251 -> 89 bytes .../tests/data/native_encoding/proto/0040.pb | Bin 30 -> 100 bytes .../tests/data/native_encoding/proto/0041.pb | Bin 43 -> 113 bytes .../tests/data/native_encoding/proto/0042.pb | Bin 210 -> 809 bytes .../tests/data/native_encoding/proto/0043.pb | Bin 61 -> 70 bytes .../tests/data/native_encoding/proto/0044.pb | Bin 2111 -> 57 bytes .../tests/data/native_encoding/proto/0045.pb | Bin 52 -> 32 bytes .../tests/data/native_encoding/proto/0046.pb | Bin 310 -> 74 bytes 
.../tests/data/native_encoding/proto/0047.pb | Bin 72 -> 53 bytes .../tests/data/native_encoding/proto/0048.pb | Bin 1722 -> 232 bytes .../tests/data/native_encoding/proto/0049.pb | Bin 275 -> 822 bytes .../tests/data/native_encoding/proto/0050.pb | Bin 14 -> 491 bytes .../tests/data/native_encoding/proto/0051.pb | Bin 2402 -> 347 bytes .../tests/data/native_encoding/proto/0052.pb | Bin 51 -> 271 bytes .../tests/data/native_encoding/proto/0053.pb | Bin 14 -> 177 bytes .../tests/data/native_encoding/proto/0054.pb | Bin 258 -> 6035 bytes .../tests/data/native_encoding/proto/0055.pb | Bin 77 -> 38 bytes .../tests/data/native_encoding/proto/0056.pb | Bin 60 -> 91 bytes .../tests/data/native_encoding/proto/0057.pb | Bin 128 -> 1269 bytes .../tests/data/native_encoding/proto/0058.pb | Bin 100 -> 60 bytes .../tests/data/native_encoding/proto/0059.pb | Bin 450 -> 106 bytes .../tests/data/native_encoding/proto/0060.pb | Bin 1821 -> 116 bytes .../tests/data/native_encoding/proto/0061.pb | Bin 2538 -> 53 bytes .../tests/data/native_encoding/proto/0062.pb | Bin 46 -> 1388 bytes .../tests/data/native_encoding/proto/0063.pb | Bin 1614 -> 667 bytes .../tests/data/native_encoding/proto/0064.pb | Bin 356 -> 109 bytes .../tests/data/native_encoding/proto/0065.pb | Bin 572 -> 510 bytes .../tests/data/native_encoding/proto/0066.pb | Bin 617 -> 32 bytes .../tests/data/native_encoding/proto/0067.pb | Bin 1950 -> 140 bytes .../tests/data/native_encoding/proto/0068.pb | Bin 57 -> 306 bytes .../tests/data/native_encoding/proto/0069.pb | Bin 490 -> 80 bytes .../tests/data/native_encoding/proto/0070.pb | Bin 387 -> 1457 bytes .../tests/data/native_encoding/proto/0071.pb | Bin 31 -> 32 bytes .../tests/data/native_encoding/proto/0072.pb | Bin 68 -> 140 bytes .../tests/data/native_encoding/proto/0073.pb | Bin 1198 -> 32 bytes .../tests/data/native_encoding/proto/0074.pb | Bin 561 -> 231 bytes .../tests/data/native_encoding/proto/0075.pb | Bin 70 -> 79 bytes .../tests/data/native_encoding/proto/0076.pb | Bin 929 -> 43 bytes .../tests/data/native_encoding/proto/0077.pb | Bin 71 -> 221 bytes .../tests/data/native_encoding/proto/0078.pb | Bin 950 -> 1447 bytes .../tests/data/native_encoding/proto/0079.pb | Bin 25 -> 76 bytes .../tests/data/native_encoding/proto/0080.pb | 12 +- .../tests/data/native_encoding/proto/0081.pb | Bin 129 -> 57 bytes .../tests/data/native_encoding/proto/0082.pb | Bin 449 -> 363 bytes .../tests/data/native_encoding/proto/0083.pb | Bin 202 -> 1934 bytes .../tests/data/native_encoding/proto/0084.pb | Bin 173 -> 99 bytes .../tests/data/native_encoding/proto/0085.pb | Bin 4123 -> 307 bytes .../tests/data/native_encoding/proto/0086.pb | Bin 47 -> 92 bytes .../tests/data/native_encoding/proto/0087.pb | Bin 200 -> 85 bytes .../tests/data/native_encoding/proto/0088.pb | Bin 37 -> 329 bytes .../tests/data/native_encoding/proto/0089.pb | Bin 97 -> 68 bytes .../tests/data/native_encoding/proto/0090.pb | Bin 48 -> 50 bytes .../tests/data/native_encoding/proto/0091.pb | Bin 33 -> 89 bytes .../tests/data/native_encoding/proto/0092.pb | Bin 40 -> 182 bytes .../tests/data/native_encoding/proto/0093.pb | Bin 229 -> 60 bytes .../tests/data/native_encoding/proto/0094.pb | Bin 52 -> 46 bytes .../tests/data/native_encoding/proto/0095.pb | Bin 31 -> 257 bytes .../tests/data/native_encoding/proto/0096.pb | Bin 1678 -> 337 bytes .../tests/data/native_encoding/proto/0097.pb | Bin 55 -> 1683 bytes .../tests/data/native_encoding/proto/0098.pb | Bin 403 -> 512 bytes .../tests/data/native_encoding/proto/0099.pb | Bin 464 -> 
142 bytes .../tests/data/native_encoding/proto/0100.pb | Bin 56 -> 100 bytes .../tests/data/native_encoding/proto/0101.pb | Bin 108 -> 363 bytes .../tests/data/native_encoding/proto/0102.pb | Bin 98 -> 494 bytes .../tests/data/native_encoding/proto/0103.pb | Bin 13014 -> 178 bytes .../tests/data/native_encoding/proto/0104.pb | Bin 62 -> 58 bytes .../tests/data/native_encoding/proto/0105.pb | Bin 38 -> 112 bytes .../tests/data/native_encoding/proto/0106.pb | Bin 42 -> 304 bytes .../tests/data/native_encoding/proto/0107.pb | Bin 2373 -> 97 bytes .../tests/data/native_encoding/proto/0108.pb | Bin 397 -> 1322 bytes .../tests/data/native_encoding/proto/0109.pb | Bin 142 -> 89 bytes .../tests/data/native_encoding/proto/0110.pb | Bin 47 -> 515 bytes .../tests/data/native_encoding/proto/0111.pb | Bin 1779 -> 287 bytes .../tests/data/native_encoding/proto/0112.pb | Bin 236 -> 42 bytes .../tests/data/native_encoding/proto/0113.pb | Bin 69 -> 247 bytes .../tests/data/native_encoding/proto/0114.pb | Bin 212 -> 802 bytes .../tests/data/native_encoding/proto/0115.pb | Bin 43 -> 5448 bytes .../tests/data/native_encoding/proto/0116.pb | Bin 76 -> 32 bytes .../tests/data/native_encoding/proto/0117.pb | Bin 171 -> 61 bytes .../tests/data/native_encoding/proto/0118.pb | Bin 51 -> 127 bytes .../tests/data/native_encoding/proto/0119.pb | Bin 144 -> 357 bytes .../tests/data/native_encoding/proto/0120.pb | Bin 1423 -> 85 bytes .../tests/data/native_encoding/proto/0121.pb | Bin 59 -> 198 bytes .../tests/data/native_encoding/proto/0122.pb | Bin 14 -> 88 bytes .../tests/data/native_encoding/proto/0123.pb | Bin 63 -> 2071 bytes .../tests/data/native_encoding/proto/0124.pb | Bin 14 -> 107 bytes .../tests/data/native_encoding/proto/0125.pb | Bin 112 -> 817 bytes .../tests/data/native_encoding/proto/0126.pb | Bin 164 -> 225 bytes .../tests/data/native_encoding/proto/0127.pb | Bin 555 -> 46 bytes .../tests/data/native_encoding/proto/0128.pb | Bin 338 -> 79 bytes .../tests/data/native_encoding/proto/0129.pb | Bin 418 -> 64 bytes .../tests/data/native_encoding/proto/0130.pb | Bin 243 -> 91 bytes .../tests/data/native_encoding/proto/0131.pb | Bin 252 -> 1103 bytes .../tests/data/native_encoding/proto/0132.pb | Bin 127 -> 141 bytes .../tests/data/native_encoding/proto/0133.pb | Bin 9336 -> 46 bytes .../tests/data/native_encoding/proto/0134.pb | Bin 131 -> 60 bytes .../tests/data/native_encoding/proto/0135.pb | Bin 59 -> 107 bytes .../tests/data/native_encoding/proto/0136.pb | Bin 42 -> 226 bytes .../tests/data/native_encoding/proto/0137.pb | Bin 1029 -> 46 bytes .../tests/data/native_encoding/proto/0138.pb | Bin 489 -> 63 bytes .../tests/data/native_encoding/proto/0139.pb | Bin 72 -> 2333 bytes .../tests/data/native_encoding/proto/0140.pb | Bin 102 -> 1208 bytes .../tests/data/native_encoding/proto/0141.pb | Bin 64 -> 618 bytes .../tests/data/native_encoding/proto/0142.pb | Bin 75 -> 2606 bytes .../tests/data/native_encoding/proto/0143.pb | Bin 100 -> 2548 bytes .../tests/data/native_encoding/proto/0144.pb | Bin 175 -> 54 bytes .../tests/data/native_encoding/proto/0145.pb | Bin 64 -> 1274 bytes .../tests/data/native_encoding/proto/0146.pb | Bin 118 -> 345 bytes .../tests/data/native_encoding/proto/0147.pb | 24 +- .../tests/data/native_encoding/proto/0148.pb | Bin 162 -> 106 bytes .../tests/data/native_encoding/proto/0149.pb | Bin 68 -> 56 bytes .../tests/data/native_encoding/proto/0150.pb | Bin 168 -> 54 bytes .../tests/data/native_encoding/proto/0151.pb | Bin 269 -> 279 bytes .../tests/data/native_encoding/proto/0152.pb | 
Bin 691 -> 1925 bytes .../tests/data/native_encoding/proto/0153.pb | Bin 14 -> 2001 bytes .../tests/data/native_encoding/proto/0154.pb | Bin 495 -> 223 bytes .../tests/data/native_encoding/proto/0155.pb | Bin 81 -> 475 bytes .../tests/data/native_encoding/proto/0156.pb | Bin 2163 -> 284 bytes .../tests/data/native_encoding/proto/0157.pb | Bin 5335 -> 1089 bytes .../tests/data/native_encoding/proto/0158.pb | Bin 150 -> 768 bytes .../tests/data/native_encoding/proto/0159.pb | Bin 34 -> 532 bytes .../tests/data/native_encoding/proto/0160.pb | Bin 169 -> 128 bytes .../tests/data/native_encoding/proto/0161.pb | Bin 275 -> 1947 bytes .../tests/data/native_encoding/proto/0162.pb | Bin 1308 -> 79 bytes .../tests/data/native_encoding/proto/0163.pb | Bin 247 -> 88 bytes .../tests/data/native_encoding/proto/0164.pb | Bin 56 -> 311 bytes .../tests/data/native_encoding/proto/0165.pb | Bin 81 -> 1437 bytes .../tests/data/native_encoding/proto/0166.pb | Bin 485 -> 140 bytes .../tests/data/native_encoding/proto/0167.pb | Bin 516 -> 535 bytes .../tests/data/native_encoding/proto/0168.pb | Bin 131 -> 116 bytes .../tests/data/native_encoding/proto/0169.pb | Bin 13614 -> 129 bytes .../tests/data/native_encoding/proto/0170.pb | Bin 49 -> 678 bytes .../tests/data/native_encoding/proto/0171.pb | Bin 101 -> 77 bytes .../tests/data/native_encoding/proto/0172.pb | Bin 265 -> 6109 bytes .../tests/data/native_encoding/proto/0173.pb | Bin 2481 -> 100 bytes .../tests/data/native_encoding/proto/0174.pb | Bin 139 -> 176 bytes .../tests/data/native_encoding/proto/0175.pb | Bin 137 -> 61 bytes .../tests/data/native_encoding/proto/0176.pb | Bin 165 -> 1616 bytes .../tests/data/native_encoding/proto/0177.pb | Bin 203 -> 129 bytes .../tests/data/native_encoding/proto/0178.pb | Bin 269 -> 67 bytes .../tests/data/native_encoding/proto/0179.pb | Bin 271 -> 136 bytes .../tests/data/native_encoding/proto/0180.pb | Bin 315 -> 32 bytes .../tests/data/native_encoding/proto/0181.pb | Bin 336 -> 50 bytes .../tests/data/native_encoding/proto/0182.pb | Bin 95 -> 648 bytes .../tests/data/native_encoding/proto/0183.pb | Bin 196 -> 87 bytes .../tests/data/native_encoding/proto/0184.pb | Bin 14 -> 581 bytes .../tests/data/native_encoding/proto/0185.pb | Bin 63 -> 10780 bytes .../tests/data/native_encoding/proto/0186.pb | Bin 82 -> 2683 bytes .../tests/data/native_encoding/proto/0187.pb | Bin 41 -> 62 bytes .../tests/data/native_encoding/proto/0188.pb | Bin 66 -> 271 bytes .../tests/data/native_encoding/proto/0189.pb | Bin 75 -> 399 bytes .../tests/data/native_encoding/proto/0190.pb | Bin 2053 -> 1414 bytes .../tests/data/native_encoding/proto/0191.pb | Bin 1393 -> 82 bytes .../tests/data/native_encoding/proto/0192.pb | Bin 125 -> 260 bytes .../tests/data/native_encoding/proto/0193.pb | Bin 249 -> 47 bytes .../tests/data/native_encoding/proto/0194.pb | Bin 46 -> 52 bytes .../tests/data/native_encoding/proto/0195.pb | Bin 383 -> 64 bytes .../tests/data/native_encoding/proto/0196.pb | Bin 10043 -> 78 bytes .../tests/data/native_encoding/proto/0197.pb | Bin 1202 -> 891 bytes .../tests/data/native_encoding/proto/0198.pb | Bin 87 -> 70 bytes .../tests/data/native_encoding/proto/0199.pb | Bin 328 -> 64 bytes .../tests/data/native_encoding/proto/0200.pb | Bin 230 -> 262 bytes .../tests/data/native_encoding/proto/0201.pb | Bin 52 -> 62 bytes .../tests/data/native_encoding/proto/0202.pb | Bin 1982 -> 88 bytes .../tests/data/native_encoding/proto/0203.pb | Bin 420 -> 71 bytes .../tests/data/native_encoding/proto/0204.pb | Bin 53 -> 66 bytes 
.../tests/data/native_encoding/proto/0205.pb | Bin 4178 -> 126 bytes .../tests/data/native_encoding/proto/0206.pb | Bin 74 -> 44 bytes .../tests/data/native_encoding/proto/0207.pb | Bin 348 -> 48 bytes .../tests/data/native_encoding/proto/0208.pb | Bin 49 -> 431 bytes .../tests/data/native_encoding/proto/0209.pb | Bin 23 -> 140 bytes .../tests/data/native_encoding/proto/0210.pb | Bin 38 -> 67 bytes .../tests/data/native_encoding/proto/0211.pb | Bin 1597 -> 46 bytes .../tests/data/native_encoding/proto/0212.pb | Bin 380 -> 470 bytes .../tests/data/native_encoding/proto/0213.pb | Bin 222 -> 67 bytes .../tests/data/native_encoding/proto/0214.pb | Bin 954 -> 75 bytes .../tests/data/native_encoding/proto/0215.pb | Bin 90 -> 75 bytes .../tests/data/native_encoding/proto/0216.pb | Bin 8329 -> 13570 bytes .../tests/data/native_encoding/proto/0217.pb | Bin 8284 -> 2062 bytes .../tests/data/native_encoding/proto/0218.pb | Bin 38 -> 504 bytes .../tests/data/native_encoding/proto/0219.pb | Bin 28 -> 70 bytes .../tests/data/native_encoding/proto/0220.pb | Bin 1550 -> 11504 bytes .../tests/data/native_encoding/proto/0221.pb | Bin 231 -> 268 bytes .../tests/data/native_encoding/proto/0222.pb | Bin 38 -> 1350 bytes .../tests/data/native_encoding/proto/0223.pb | Bin 587 -> 44 bytes .../tests/data/native_encoding/proto/0224.pb | Bin 175 -> 14647 bytes .../tests/data/native_encoding/proto/0225.pb | Bin 2522 -> 176 bytes .../tests/data/native_encoding/proto/0226.pb | Bin 367 -> 888 bytes .../tests/data/native_encoding/proto/0227.pb | Bin 673 -> 1980 bytes .../tests/data/native_encoding/proto/0228.pb | Bin 14 -> 365 bytes .../tests/data/native_encoding/proto/0229.pb | Bin 340 -> 85 bytes .../tests/data/native_encoding/proto/0230.pb | Bin 177 -> 331 bytes .../tests/data/native_encoding/proto/0231.pb | Bin 103 -> 80 bytes .../tests/data/native_encoding/proto/0232.pb | Bin 58 -> 367 bytes .../tests/data/native_encoding/proto/0233.pb | Bin 113 -> 119 bytes .../tests/data/native_encoding/proto/0234.pb | Bin 98 -> 169 bytes .../tests/data/native_encoding/proto/0235.pb | Bin 854 -> 1774 bytes .../tests/data/native_encoding/proto/0236.pb | Bin 88 -> 150 bytes .../tests/data/native_encoding/proto/0237.pb | Bin 29 -> 136 bytes .../tests/data/native_encoding/proto/0238.pb | Bin 12653 -> 66 bytes .../tests/data/native_encoding/proto/0239.pb | Bin 1993 -> 172 bytes .../tests/data/native_encoding/proto/0240.pb | Bin 40 -> 68 bytes .../tests/data/native_encoding/proto/0241.pb | Bin 2184 -> 149 bytes .../tests/data/native_encoding/proto/0242.pb | Bin 12105 -> 1549 bytes .../tests/data/native_encoding/proto/0243.pb | Bin 34 -> 337 bytes .../tests/data/native_encoding/proto/0244.pb | Bin 318 -> 2081 bytes .../tests/data/native_encoding/proto/0245.pb | Bin 1754 -> 109 bytes .../tests/data/native_encoding/proto/0246.pb | Bin 6060 -> 211 bytes .../tests/data/native_encoding/proto/0247.pb | Bin 393 -> 128 bytes .../tests/data/native_encoding/proto/0248.pb | Bin 2335 -> 72 bytes .../tests/data/native_encoding/proto/0249.pb | Bin 101 -> 1490 bytes .../tests/data/native_encoding/proto/0250.pb | Bin 171 -> 98 bytes .../tests/data/native_encoding/proto/0251.pb | Bin 168 -> 150 bytes .../tests/data/native_encoding/proto/0252.pb | Bin 68 -> 48 bytes .../tests/data/native_encoding/proto/0253.pb | Bin 76 -> 873 bytes .../tests/data/native_encoding/proto/0254.pb | Bin 2017 -> 32 bytes .../tests/data/native_encoding/proto/0255.pb | Bin 156 -> 1448 bytes .../tests/data/native_encoding/proto/0256.pb | Bin 41 -> 97 bytes 
.../tests/data/native_encoding/proto/0257.pb | Bin 1624 -> 394 bytes .../tests/data/native_encoding/proto/0258.pb | Bin 60 -> 178 bytes .../tests/data/native_encoding/proto/0259.pb | Bin 32 -> 59 bytes .../tests/data/native_encoding/proto/0260.pb | Bin 214 -> 1408 bytes .../tests/data/native_encoding/proto/0261.pb | Bin 1988 -> 44 bytes .../tests/data/native_encoding/proto/0262.pb | Bin 66 -> 57 bytes .../tests/data/native_encoding/proto/0263.pb | Bin 443 -> 32 bytes .../tests/data/native_encoding/proto/0264.pb | Bin 222 -> 4774 bytes .../tests/data/native_encoding/proto/0265.pb | Bin 1166 -> 578 bytes .../tests/data/native_encoding/proto/0266.pb | Bin 330 -> 68 bytes .../tests/data/native_encoding/proto/0267.pb | Bin 38 -> 51 bytes .../tests/data/native_encoding/proto/0268.pb | Bin 173 -> 85 bytes .../tests/data/native_encoding/proto/0269.pb | Bin 822 -> 9195 bytes .../tests/data/native_encoding/proto/0270.pb | Bin 7382 -> 210 bytes .../tests/data/native_encoding/proto/0271.pb | Bin 57 -> 172 bytes .../tests/data/native_encoding/proto/0272.pb | Bin 32 -> 64 bytes .../tests/data/native_encoding/proto/0273.pb | Bin 554 -> 123 bytes .../tests/data/native_encoding/proto/0274.pb | Bin 218 -> 1777 bytes .../tests/data/native_encoding/proto/0275.pb | Bin 48 -> 1784 bytes .../tests/data/native_encoding/proto/0276.pb | Bin 4484 -> 73 bytes .../tests/data/native_encoding/proto/0277.pb | Bin 2389 -> 12125 bytes .../tests/data/native_encoding/proto/0278.pb | Bin 56 -> 118 bytes .../tests/data/native_encoding/proto/0279.pb | Bin 11194 -> 2328 bytes .../tests/data/native_encoding/proto/0280.pb | Bin 85 -> 1443 bytes .../tests/data/native_encoding/proto/0281.pb | Bin 197 -> 127 bytes .../tests/data/native_encoding/proto/0282.pb | Bin 102 -> 71 bytes .../tests/data/native_encoding/proto/0283.pb | Bin 132 -> 1725 bytes .../tests/data/native_encoding/proto/0284.pb | Bin 52 -> 136 bytes .../tests/data/native_encoding/proto/0285.pb | Bin 28 -> 1017 bytes .../tests/data/native_encoding/proto/0286.pb | Bin 9096 -> 46 bytes .../tests/data/native_encoding/proto/0287.pb | Bin 293 -> 594 bytes .../tests/data/native_encoding/proto/0288.pb | Bin 207 -> 597 bytes .../tests/data/native_encoding/proto/0289.pb | Bin 40 -> 967 bytes .../tests/data/native_encoding/proto/0290.pb | Bin 150 -> 57 bytes .../tests/data/native_encoding/proto/0291.pb | Bin 77 -> 2269 bytes .../tests/data/native_encoding/proto/0292.pb | Bin 542 -> 32 bytes .../tests/data/native_encoding/proto/0293.pb | Bin 289 -> 68 bytes .../tests/data/native_encoding/proto/0294.pb | Bin 1763 -> 123 bytes .../tests/data/native_encoding/proto/0295.pb | Bin 14 -> 1313 bytes .../tests/data/native_encoding/proto/0296.pb | Bin 535 -> 32 bytes .../tests/data/native_encoding/proto/0297.pb | Bin 102 -> 49 bytes .../tests/data/native_encoding/proto/0298.pb | Bin 27 -> 59 bytes .../tests/data/native_encoding/proto/0299.pb | Bin 6730 -> 514 bytes .../tests/data/native_encoding/proto/0300.pb | Bin 186 -> 60 bytes .../tests/data/native_encoding/proto/0301.pb | Bin 2314 -> 75 bytes .../tests/data/native_encoding/proto/0302.pb | Bin 889 -> 613 bytes .../tests/data/native_encoding/proto/0303.pb | Bin 14 -> 1035 bytes .../tests/data/native_encoding/proto/0304.pb | Bin 193 -> 89 bytes .../tests/data/native_encoding/proto/0305.pb | Bin 36 -> 13677 bytes .../tests/data/native_encoding/proto/0306.pb | Bin 38 -> 178 bytes .../tests/data/native_encoding/proto/0307.pb | Bin 26 -> 71 bytes .../tests/data/native_encoding/proto/0308.pb | Bin 34 -> 2181 bytes 
.../tests/data/native_encoding/proto/0309.pb | Bin 49 -> 71 bytes .../tests/data/native_encoding/proto/0310.pb | Bin 2082 -> 63 bytes .../tests/data/native_encoding/proto/0311.pb | Bin 1383 -> 48 bytes .../tests/data/native_encoding/proto/0312.pb | Bin 58 -> 2022 bytes .../tests/data/native_encoding/proto/0313.pb | Bin 538 -> 93 bytes .../tests/data/native_encoding/proto/0314.pb | Bin 90 -> 2183 bytes .../tests/data/native_encoding/proto/0315.pb | Bin 56 -> 13267 bytes .../tests/data/native_encoding/proto/0316.pb | Bin 304 -> 545 bytes .../tests/data/native_encoding/proto/0317.pb | Bin 24 -> 276 bytes .../tests/data/native_encoding/proto/0318.pb | Bin 9268 -> 104 bytes .../tests/data/native_encoding/proto/0319.pb | Bin 25 -> 1757 bytes .../tests/data/native_encoding/proto/0320.pb | Bin 33 -> 72 bytes .../tests/data/native_encoding/proto/0321.pb | Bin 41 -> 81 bytes .../tests/data/native_encoding/proto/0322.pb | Bin 195 -> 138 bytes .../tests/data/native_encoding/proto/0323.pb | Bin 1372 -> 2602 bytes .../tests/data/native_encoding/proto/0324.pb | Bin 1191 -> 32 bytes .../tests/data/native_encoding/proto/0325.pb | Bin 367 -> 253 bytes .../tests/data/native_encoding/proto/0326.pb | Bin 1608 -> 219 bytes .../tests/data/native_encoding/proto/0327.pb | Bin 264 -> 280 bytes .../tests/data/native_encoding/proto/0328.pb | Bin 31 -> 451 bytes .../tests/data/native_encoding/proto/0329.pb | Bin 88 -> 78 bytes .../tests/data/native_encoding/proto/0330.pb | Bin 1067 -> 66 bytes .../tests/data/native_encoding/proto/0331.pb | Bin 23 -> 1807 bytes .../tests/data/native_encoding/proto/0332.pb | Bin 529 -> 66 bytes .../tests/data/native_encoding/proto/0333.pb | Bin 88 -> 76 bytes .../tests/data/native_encoding/proto/0334.pb | Bin 63 -> 67 bytes .../tests/data/native_encoding/proto/0335.pb | Bin 6163 -> 2560 bytes .../tests/data/native_encoding/proto/0336.pb | Bin 11142 -> 55 bytes .../tests/data/native_encoding/proto/0337.pb | Bin 449 -> 32 bytes .../tests/data/native_encoding/proto/0338.pb | Bin 2039 -> 59 bytes .../tests/data/native_encoding/proto/0339.pb | Bin 161 -> 308 bytes .../tests/data/native_encoding/proto/0340.pb | Bin 98 -> 53 bytes .../tests/data/native_encoding/proto/0341.pb | Bin 1311 -> 243 bytes .../tests/data/native_encoding/proto/0342.pb | Bin 2866 -> 202 bytes .../tests/data/native_encoding/proto/0343.pb | Bin 75 -> 1144 bytes .../tests/data/native_encoding/proto/0344.pb | Bin 442 -> 339 bytes .../tests/data/native_encoding/proto/0345.pb | Bin 64 -> 1071 bytes .../tests/data/native_encoding/proto/0346.pb | Bin 116 -> 44 bytes .../tests/data/native_encoding/proto/0347.pb | Bin 44 -> 113 bytes .../tests/data/native_encoding/proto/0348.pb | Bin 28 -> 213 bytes .../tests/data/native_encoding/proto/0349.pb | Bin 71 -> 57 bytes .../tests/data/native_encoding/proto/0350.pb | Bin 2378 -> 79 bytes .../tests/data/native_encoding/proto/0351.pb | Bin 36 -> 38 bytes .../tests/data/native_encoding/proto/0352.pb | Bin 664 -> 72 bytes .../tests/data/native_encoding/proto/0353.pb | Bin 968 -> 78 bytes .../tests/data/native_encoding/proto/0354.pb | Bin 1217 -> 50 bytes .../tests/data/native_encoding/proto/0355.pb | Bin 71 -> 103 bytes .../tests/data/native_encoding/proto/0356.pb | Bin 85 -> 193 bytes .../tests/data/native_encoding/proto/0357.pb | Bin 51 -> 55 bytes .../tests/data/native_encoding/proto/0358.pb | Bin 12099 -> 32 bytes .../tests/data/native_encoding/proto/0359.pb | Bin 1536 -> 12512 bytes .../tests/data/native_encoding/proto/0360.pb | Bin 1765 -> 32 bytes 
.../tests/data/native_encoding/proto/0361.pb | Bin 2324 -> 79 bytes .../tests/data/native_encoding/proto/0362.pb | Bin 67 -> 122 bytes .../tests/data/native_encoding/proto/0363.pb | Bin 64 -> 337 bytes .../tests/data/native_encoding/proto/0364.pb | Bin 354 -> 341 bytes .../tests/data/native_encoding/proto/0365.pb | Bin 99 -> 100 bytes .../tests/data/native_encoding/proto/0366.pb | Bin 72 -> 2190 bytes .../tests/data/native_encoding/proto/0367.pb | Bin 14 -> 1967 bytes .../tests/data/native_encoding/proto/0368.pb | Bin 99 -> 69 bytes .../tests/data/native_encoding/proto/0369.pb | Bin 162 -> 131 bytes .../tests/data/native_encoding/proto/0370.pb | Bin 1529 -> 32 bytes .../tests/data/native_encoding/proto/0371.pb | Bin 52 -> 66 bytes .../tests/data/native_encoding/proto/0372.pb | Bin 14 -> 1968 bytes .../tests/data/native_encoding/proto/0373.pb | Bin 258 -> 8401 bytes .../tests/data/native_encoding/proto/0374.pb | Bin 48 -> 141 bytes .../tests/data/native_encoding/proto/0375.pb | Bin 111 -> 123 bytes .../tests/data/native_encoding/proto/0376.pb | Bin 62 -> 12156 bytes .../tests/data/native_encoding/proto/0377.pb | Bin 1051 -> 2024 bytes .../tests/data/native_encoding/proto/0378.pb | Bin 234 -> 45 bytes .../tests/data/native_encoding/proto/0379.pb | Bin 94 -> 119 bytes .../tests/data/native_encoding/proto/0380.pb | Bin 782 -> 601 bytes .../tests/data/native_encoding/proto/0381.pb | Bin 14 -> 71 bytes .../tests/data/native_encoding/proto/0382.pb | Bin 397 -> 14341 bytes .../tests/data/native_encoding/proto/0383.pb | Bin 55 -> 79 bytes .../tests/data/native_encoding/proto/0384.pb | Bin 48 -> 63 bytes .../tests/data/native_encoding/proto/0385.pb | Bin 42 -> 134 bytes .../tests/data/native_encoding/proto/0386.pb | Bin 152 -> 79 bytes .../tests/data/native_encoding/proto/0387.pb | Bin 64 -> 116 bytes .../tests/data/native_encoding/proto/0388.pb | Bin 6585 -> 1274 bytes .../tests/data/native_encoding/proto/0389.pb | Bin 86 -> 91 bytes .../tests/data/native_encoding/proto/0390.pb | Bin 72 -> 236 bytes .../tests/data/native_encoding/proto/0391.pb | Bin 50 -> 11413 bytes .../tests/data/native_encoding/proto/0392.pb | Bin 42 -> 38 bytes .../tests/data/native_encoding/proto/0393.pb | Bin 991 -> 46 bytes .../tests/data/native_encoding/proto/0394.pb | Bin 72 -> 46 bytes .../tests/data/native_encoding/proto/0395.pb | Bin 122 -> 119 bytes .../tests/data/native_encoding/proto/0396.pb | Bin 223 -> 52 bytes .../tests/data/native_encoding/proto/0397.pb | Bin 512 -> 48 bytes .../tests/data/native_encoding/proto/0398.pb | Bin 85 -> 10738 bytes .../tests/data/native_encoding/proto/0399.pb | Bin 51 -> 65 bytes .../tests/data/native_encoding/proto/0400.pb | Bin 28 -> 1686 bytes .../tests/data/native_encoding/proto/0401.pb | Bin 1764 -> 80 bytes .../tests/data/native_encoding/proto/0402.pb | Bin 50 -> 216 bytes .../tests/data/native_encoding/proto/0403.pb | Bin 334 -> 32 bytes .../tests/data/native_encoding/proto/0404.pb | Bin 32 -> 457 bytes .../tests/data/native_encoding/proto/0405.pb | Bin 40 -> 51 bytes .../tests/data/native_encoding/proto/0406.pb | Bin 10597 -> 1652 bytes .../tests/data/native_encoding/proto/0407.pb | Bin 60 -> 111 bytes .../tests/data/native_encoding/proto/0408.pb | Bin 1832 -> 94 bytes .../tests/data/native_encoding/proto/0409.pb | Bin 143 -> 50 bytes .../tests/data/native_encoding/proto/0410.pb | Bin 51 -> 84 bytes .../tests/data/native_encoding/proto/0411.pb | Bin 44 -> 114 bytes .../tests/data/native_encoding/proto/0412.pb | Bin 38 -> 191 bytes .../tests/data/native_encoding/proto/0413.pb | 
Bin 2110 -> 4144 bytes .../tests/data/native_encoding/proto/0414.pb | Bin 1809 -> 63 bytes .../tests/data/native_encoding/proto/0415.pb | Bin 1009 -> 436 bytes .../tests/data/native_encoding/proto/0416.pb | Bin 37 -> 3142 bytes .../tests/data/native_encoding/proto/0417.pb | Bin 28 -> 8958 bytes .../tests/data/native_encoding/proto/0418.pb | Bin 29 -> 215 bytes .../tests/data/native_encoding/proto/0419.pb | Bin 2177 -> 141 bytes .../tests/data/native_encoding/proto/0420.pb | Bin 11712 -> 349 bytes .../tests/data/native_encoding/proto/0421.pb | Bin 419 -> 1487 bytes .../tests/data/native_encoding/proto/0422.pb | Bin 90 -> 48 bytes .../tests/data/native_encoding/proto/0423.pb | Bin 467 -> 2081 bytes .../tests/data/native_encoding/proto/0424.pb | Bin 74 -> 529 bytes .../tests/data/native_encoding/proto/0425.pb | Bin 9830 -> 47 bytes .../tests/data/native_encoding/proto/0426.pb | Bin 2248 -> 355 bytes .../tests/data/native_encoding/proto/0427.pb | Bin 1568 -> 1679 bytes .../tests/data/native_encoding/proto/0428.pb | Bin 64 -> 77 bytes .../tests/data/native_encoding/proto/0429.pb | Bin 5626 -> 212 bytes .../tests/data/native_encoding/proto/0430.pb | Bin 414 -> 65 bytes .../tests/data/native_encoding/proto/0431.pb | Bin 348 -> 149 bytes .../tests/data/native_encoding/proto/0432.pb | Bin 2241 -> 32 bytes .../tests/data/native_encoding/proto/0433.pb | Bin 56 -> 529 bytes .../tests/data/native_encoding/proto/0434.pb | Bin 166 -> 91 bytes .../tests/data/native_encoding/proto/0435.pb | Bin 1878 -> 78 bytes .../tests/data/native_encoding/proto/0436.pb | Bin 132 -> 75 bytes .../tests/data/native_encoding/proto/0437.pb | Bin 2356 -> 11775 bytes .../tests/data/native_encoding/proto/0438.pb | Bin 2064 -> 69 bytes .../tests/data/native_encoding/proto/0439.pb | Bin 32 -> 452 bytes .../tests/data/native_encoding/proto/0440.pb | Bin 116 -> 2522 bytes .../tests/data/native_encoding/proto/0441.pb | Bin 7252 -> 32 bytes .../tests/data/native_encoding/proto/0442.pb | Bin 146 -> 117 bytes .../tests/data/native_encoding/proto/0443.pb | Bin 1067 -> 32 bytes .../tests/data/native_encoding/proto/0444.pb | Bin 304 -> 96 bytes .../tests/data/native_encoding/proto/0445.pb | Bin 114 -> 64 bytes .../tests/data/native_encoding/proto/0446.pb | Bin 30 -> 667 bytes .../tests/data/native_encoding/proto/0447.pb | Bin 2071 -> 1065 bytes .../tests/data/native_encoding/proto/0448.pb | Bin 571 -> 1938 bytes .../tests/data/native_encoding/proto/0449.pb | Bin 28 -> 597 bytes .../tests/data/native_encoding/proto/0450.pb | Bin 768 -> 365 bytes .../tests/data/native_encoding/proto/0451.pb | Bin 12251 -> 69 bytes .../tests/data/native_encoding/proto/0452.pb | Bin 103 -> 1146 bytes .../tests/data/native_encoding/proto/0453.pb | Bin 38 -> 589 bytes .../tests/data/native_encoding/proto/0454.pb | Bin 793 -> 141 bytes .../tests/data/native_encoding/proto/0455.pb | Bin 655 -> 55 bytes .../tests/data/native_encoding/proto/0456.pb | Bin 2044 -> 1580 bytes .../tests/data/native_encoding/proto/0457.pb | Bin 34 -> 306 bytes .../tests/data/native_encoding/proto/0458.pb | Bin 180 -> 228 bytes .../tests/data/native_encoding/proto/0459.pb | Bin 13417 -> 363 bytes .../tests/data/native_encoding/proto/0460.pb | Bin 277 -> 364 bytes .../tests/data/native_encoding/proto/0461.pb | Bin 79 -> 2155 bytes .../tests/data/native_encoding/proto/0462.pb | Bin 14 -> 74 bytes .../tests/data/native_encoding/proto/0463.pb | Bin 1278 -> 9708 bytes .../tests/data/native_encoding/proto/0464.pb | Bin 46 -> 175 bytes .../tests/data/native_encoding/proto/0465.pb | Bin 28 -> 97 
bytes .../tests/data/native_encoding/proto/0466.pb | Bin 56 -> 69 bytes .../tests/data/native_encoding/proto/0467.pb | Bin 435 -> 449 bytes .../tests/data/native_encoding/proto/0468.pb | Bin 59 -> 174 bytes .../tests/data/native_encoding/proto/0469.pb | Bin 47 -> 62 bytes .../tests/data/native_encoding/proto/0470.pb | Bin 48 -> 4707 bytes .../tests/data/native_encoding/proto/0471.pb | Bin 35 -> 446 bytes .../tests/data/native_encoding/proto/0472.pb | Bin 2009 -> 236 bytes .../tests/data/native_encoding/proto/0473.pb | Bin 1130 -> 81 bytes .../tests/data/native_encoding/proto/0474.pb | Bin 33 -> 213 bytes .../tests/data/native_encoding/proto/0475.pb | Bin 14 -> 1237 bytes .../tests/data/native_encoding/proto/0476.pb | Bin 518 -> 58 bytes .../tests/data/native_encoding/proto/0477.pb | Bin 51 -> 401 bytes .../tests/data/native_encoding/proto/0478.pb | Bin 90 -> 111 bytes .../tests/data/native_encoding/proto/0479.pb | Bin 67 -> 89 bytes .../tests/data/native_encoding/proto/0480.pb | Bin 40 -> 92 bytes .../tests/data/native_encoding/proto/0481.pb | Bin 99 -> 367 bytes .../tests/data/native_encoding/proto/0482.pb | Bin 7261 -> 7113 bytes .../tests/data/native_encoding/proto/0483.pb | Bin 42 -> 405 bytes .../tests/data/native_encoding/proto/0484.pb | Bin 55 -> 2625 bytes .../tests/data/native_encoding/proto/0485.pb | Bin 201 -> 61 bytes .../tests/data/native_encoding/proto/0486.pb | Bin 77 -> 333 bytes .../tests/data/native_encoding/proto/0487.pb | Bin 13791 -> 92 bytes .../tests/data/native_encoding/proto/0488.pb | Bin 907 -> 167 bytes .../tests/data/native_encoding/proto/0489.pb | Bin 1409 -> 188 bytes .../tests/data/native_encoding/proto/0490.pb | Bin 1790 -> 122 bytes .../tests/data/native_encoding/proto/0491.pb | Bin 194 -> 119 bytes .../tests/data/native_encoding/proto/0492.pb | Bin 69 -> 94 bytes .../tests/data/native_encoding/proto/0493.pb | Bin 12432 -> 65 bytes .../tests/data/native_encoding/proto/0494.pb | Bin 75 -> 152 bytes .../tests/data/native_encoding/proto/0495.pb | Bin 1467 -> 5053 bytes .../tests/data/native_encoding/proto/0496.pb | Bin 52 -> 74 bytes .../tests/data/native_encoding/proto/0497.pb | Bin 589 -> 143 bytes .../tests/data/native_encoding/proto/0498.pb | Bin 166 -> 32 bytes .../tests/data/native_encoding/proto/0499.pb | Bin 231 -> 2494 bytes .../tests/data/native_encoding/proto/0500.pb | Bin 113 -> 85 bytes .../tests/data/native_encoding/proto/0501.pb | Bin 105 -> 60 bytes .../tests/data/native_encoding/proto/0502.pb | Bin 472 -> 371 bytes .../tests/data/native_encoding/proto/0503.pb | Bin 1125 -> 12191 bytes .../tests/data/native_encoding/proto/0504.pb | Bin 111 -> 114 bytes .../tests/data/native_encoding/proto/0505.pb | Bin 73 -> 11564 bytes .../tests/data/native_encoding/proto/0506.pb | Bin 269 -> 53 bytes .../tests/data/native_encoding/proto/0507.pb | Bin 7057 -> 57 bytes .../tests/data/native_encoding/proto/0508.pb | Bin 343 -> 78 bytes .../tests/data/native_encoding/proto/0509.pb | Bin 229 -> 2149 bytes .../tests/data/native_encoding/proto/0510.pb | Bin 391 -> 228 bytes .../tests/data/native_encoding/proto/0511.pb | Bin 221 -> 5035 bytes .../tests/data/native_encoding/proto/0512.pb | Bin 85 -> 52 bytes .../tests/data/native_encoding/proto/0513.pb | Bin 21 -> 66 bytes .../tests/data/native_encoding/proto/0514.pb | Bin 27 -> 168 bytes .../tests/data/native_encoding/proto/0515.pb | Bin 963 -> 1861 bytes .../tests/data/native_encoding/proto/0516.pb | Bin 236 -> 369 bytes .../tests/data/native_encoding/proto/0517.pb | Bin 584 -> 40 bytes 
.../tests/data/native_encoding/proto/0518.pb | Bin 198 -> 152 bytes .../tests/data/native_encoding/proto/0519.pb | Bin 51 -> 1669 bytes .../tests/data/native_encoding/proto/0520.pb | Bin 79 -> 840 bytes .../tests/data/native_encoding/proto/0521.pb | Bin 97 -> 230 bytes .../tests/data/native_encoding/proto/0522.pb | Bin 41 -> 218 bytes .../tests/data/native_encoding/proto/0523.pb | Bin 14 -> 371 bytes .../tests/data/native_encoding/proto/0524.pb | Bin 74 -> 159 bytes .../tests/data/native_encoding/proto/0525.pb | Bin 477 -> 284 bytes .../tests/data/native_encoding/proto/0526.pb | Bin 2251 -> 285 bytes .../tests/data/native_encoding/proto/0527.pb | Bin 14 -> 79 bytes .../tests/data/native_encoding/proto/0528.pb | Bin 110 -> 1102 bytes .../tests/data/native_encoding/proto/0529.pb | Bin 61 -> 145 bytes .../tests/data/native_encoding/proto/0530.pb | Bin 1940 -> 32 bytes .../tests/data/native_encoding/proto/0531.pb | Bin 264 -> 85 bytes .../tests/data/native_encoding/proto/0532.pb | Bin 1372 -> 1970 bytes .../tests/data/native_encoding/proto/0533.pb | Bin 97 -> 141 bytes .../tests/data/native_encoding/proto/0534.pb | Bin 161 -> 116 bytes .../tests/data/native_encoding/proto/0535.pb | Bin 4675 -> 98 bytes .../tests/data/native_encoding/proto/0536.pb | Bin 73 -> 879 bytes .../tests/data/native_encoding/proto/0537.pb | Bin 518 -> 943 bytes .../tests/data/native_encoding/proto/0538.pb | Bin 2418 -> 938 bytes .../tests/data/native_encoding/proto/0539.pb | Bin 33 -> 107 bytes .../tests/data/native_encoding/proto/0540.pb | Bin 1347 -> 195 bytes .../tests/data/native_encoding/proto/0541.pb | Bin 1922 -> 1054 bytes .../tests/data/native_encoding/proto/0542.pb | Bin 100 -> 488 bytes .../tests/data/native_encoding/proto/0543.pb | Bin 32 -> 1525 bytes .../tests/data/native_encoding/proto/0544.pb | Bin 155 -> 48 bytes .../tests/data/native_encoding/proto/0545.pb | Bin 14 -> 265 bytes .../tests/data/native_encoding/proto/0546.pb | Bin 2203 -> 32 bytes .../tests/data/native_encoding/proto/0547.pb | Bin 167 -> 111 bytes .../tests/data/native_encoding/proto/0548.pb | Bin 27 -> 8789 bytes .../tests/data/native_encoding/proto/0549.pb | Bin 438 -> 440 bytes .../tests/data/native_encoding/proto/0550.pb | Bin 138 -> 82 bytes .../tests/data/native_encoding/proto/0551.pb | Bin 476 -> 158 bytes .../tests/data/native_encoding/proto/0552.pb | Bin 78 -> 97 bytes .../tests/data/native_encoding/proto/0553.pb | Bin 57 -> 2187 bytes .../tests/data/native_encoding/proto/0554.pb | Bin 87 -> 32 bytes .../tests/data/native_encoding/proto/0555.pb | Bin 54 -> 52 bytes .../tests/data/native_encoding/proto/0556.pb | Bin 72 -> 182 bytes .../tests/data/native_encoding/proto/0557.pb | Bin 2755 -> 11618 bytes .../tests/data/native_encoding/proto/0558.pb | Bin 86 -> 48 bytes .../tests/data/native_encoding/proto/0559.pb | Bin 1453 -> 5708 bytes .../tests/data/native_encoding/proto/0560.pb | Bin 47 -> 1740 bytes .../tests/data/native_encoding/proto/0561.pb | Bin 149 -> 2189 bytes .../tests/data/native_encoding/proto/0562.pb | Bin 75 -> 32 bytes .../tests/data/native_encoding/proto/0563.pb | Bin 69 -> 32 bytes .../tests/data/native_encoding/proto/0564.pb | Bin 322 -> 11297 bytes .../tests/data/native_encoding/proto/0565.pb | Bin 98 -> 69 bytes .../tests/data/native_encoding/proto/0566.pb | Bin 1268 -> 124 bytes .../tests/data/native_encoding/proto/0567.pb | Bin 14 -> 772 bytes .../tests/data/native_encoding/proto/0568.pb | Bin 8977 -> 69 bytes .../tests/data/native_encoding/proto/0569.pb | Bin 376 -> 1981 bytes 
.../tests/data/native_encoding/proto/0570.pb | Bin 106 -> 120 bytes .../tests/data/native_encoding/proto/0571.pb | Bin 501 -> 10840 bytes .../tests/data/native_encoding/proto/0572.pb | Bin 178 -> 11899 bytes .../tests/data/native_encoding/proto/0573.pb | Bin 224 -> 177 bytes .../tests/data/native_encoding/proto/0574.pb | Bin 2309 -> 85 bytes .../tests/data/native_encoding/proto/0575.pb | Bin 284 -> 611 bytes .../tests/data/native_encoding/proto/0576.pb | Bin 111 -> 51 bytes .../tests/data/native_encoding/proto/0577.pb | Bin 68 -> 159 bytes .../tests/data/native_encoding/proto/0578.pb | Bin 75 -> 83 bytes .../tests/data/native_encoding/proto/0579.pb | Bin 91 -> 703 bytes .../tests/data/native_encoding/proto/0580.pb | Bin 14 -> 80 bytes .../tests/data/native_encoding/proto/0581.pb | Bin 25 -> 999 bytes .../tests/data/native_encoding/proto/0582.pb | Bin 104 -> 32 bytes .../tests/data/native_encoding/proto/0583.pb | Bin 10776 -> 63 bytes .../tests/data/native_encoding/proto/0584.pb | Bin 81 -> 116 bytes .../tests/data/native_encoding/proto/0585.pb | Bin 603 -> 477 bytes .../tests/data/native_encoding/proto/0586.pb | Bin 232 -> 644 bytes .../tests/data/native_encoding/proto/0587.pb | Bin 115 -> 529 bytes .../tests/data/native_encoding/proto/0588.pb | Bin 179 -> 53 bytes .../tests/data/native_encoding/proto/0589.pb | Bin 59 -> 84 bytes .../tests/data/native_encoding/proto/0590.pb | Bin 29 -> 48 bytes .../tests/data/native_encoding/proto/0591.pb | Bin 116 -> 99 bytes .../tests/data/native_encoding/proto/0592.pb | Bin 159 -> 176 bytes .../tests/data/native_encoding/proto/0593.pb | Bin 1835 -> 119 bytes .../tests/data/native_encoding/proto/0594.pb | Bin 40 -> 603 bytes .../tests/data/native_encoding/proto/0595.pb | Bin 50 -> 189 bytes .../tests/data/native_encoding/proto/0596.pb | Bin 5468 -> 221 bytes .../tests/data/native_encoding/proto/0597.pb | Bin 960 -> 1855 bytes .../tests/data/native_encoding/proto/0598.pb | Bin 32 -> 2133 bytes .../tests/data/native_encoding/proto/0599.pb | Bin 1906 -> 86 bytes .../tests/data/native_encoding/proto/0600.pb | Bin 2003 -> 792 bytes .../tests/data/native_encoding/proto/0601.pb | Bin 172 -> 277 bytes .../tests/data/native_encoding/proto/0602.pb | Bin 46 -> 513 bytes .../tests/data/native_encoding/proto/0603.pb | Bin 36 -> 128 bytes .../tests/data/native_encoding/proto/0604.pb | Bin 30 -> 2402 bytes .../tests/data/native_encoding/proto/0605.pb | Bin 42 -> 179 bytes .../tests/data/native_encoding/proto/0606.pb | Bin 791 -> 116 bytes .../tests/data/native_encoding/proto/0607.pb | Bin 64 -> 100 bytes .../tests/data/native_encoding/proto/0608.pb | Bin 36 -> 74 bytes .../tests/data/native_encoding/proto/0609.pb | Bin 118 -> 95 bytes .../tests/data/native_encoding/proto/0610.pb | Bin 14 -> 145 bytes .../tests/data/native_encoding/proto/0611.pb | Bin 8715 -> 74 bytes .../tests/data/native_encoding/proto/0612.pb | Bin 247 -> 73 bytes .../tests/data/native_encoding/proto/0613.pb | Bin 236 -> 32 bytes .../tests/data/native_encoding/proto/0614.pb | Bin 44 -> 146 bytes .../tests/data/native_encoding/proto/0615.pb | Bin 2515 -> 82 bytes .../tests/data/native_encoding/proto/0616.pb | Bin 294 -> 468 bytes .../tests/data/native_encoding/proto/0617.pb | Bin 30 -> 32 bytes .../tests/data/native_encoding/proto/0618.pb | Bin 14 -> 83 bytes .../tests/data/native_encoding/proto/0619.pb | Bin 423 -> 121 bytes .../tests/data/native_encoding/proto/0620.pb | Bin 55 -> 359 bytes .../tests/data/native_encoding/proto/0621.pb | Bin 1339 -> 173 bytes 
.../tests/data/native_encoding/proto/0622.pb | Bin 36 -> 65 bytes .../tests/data/native_encoding/proto/0623.pb | Bin 54 -> 61 bytes .../tests/data/native_encoding/proto/0624.pb | Bin 2025 -> 2198 bytes .../tests/data/native_encoding/proto/0625.pb | Bin 1812 -> 77 bytes .../tests/data/native_encoding/proto/0626.pb | Bin 61 -> 46 bytes .../tests/data/native_encoding/proto/0627.pb | Bin 420 -> 1907 bytes .../tests/data/native_encoding/proto/0628.pb | Bin 976 -> 32 bytes .../tests/data/native_encoding/proto/0629.pb | Bin 259 -> 69 bytes .../tests/data/native_encoding/proto/0630.pb | Bin 881 -> 95 bytes .../tests/data/native_encoding/proto/0631.pb | Bin 534 -> 115 bytes .../tests/data/native_encoding/proto/0632.pb | Bin 101 -> 32 bytes .../tests/data/native_encoding/proto/0633.pb | Bin 128 -> 261 bytes .../tests/data/native_encoding/proto/0634.pb | Bin 30 -> 1020 bytes .../tests/data/native_encoding/proto/0635.pb | Bin 519 -> 424 bytes .../tests/data/native_encoding/proto/0636.pb | Bin 229 -> 157 bytes .../tests/data/native_encoding/proto/0637.pb | Bin 72 -> 65 bytes .../tests/data/native_encoding/proto/0638.pb | Bin 43 -> 129 bytes .../tests/data/native_encoding/proto/0639.pb | Bin 1160 -> 415 bytes .../tests/data/native_encoding/proto/0640.pb | Bin 88 -> 372 bytes .../tests/data/native_encoding/proto/0641.pb | Bin 60 -> 657 bytes .../tests/data/native_encoding/proto/0642.pb | Bin 85 -> 32 bytes .../tests/data/native_encoding/proto/0643.pb | Bin 97 -> 86 bytes .../tests/data/native_encoding/proto/0644.pb | Bin 14 -> 1886 bytes .../tests/data/native_encoding/proto/0645.pb | Bin 67 -> 48 bytes .../tests/data/native_encoding/proto/0646.pb | Bin 139 -> 32 bytes .../tests/data/native_encoding/proto/0647.pb | Bin 1014 -> 90 bytes .../tests/data/native_encoding/proto/0648.pb | Bin 109 -> 71 bytes .../tests/data/native_encoding/proto/0649.pb | Bin 53 -> 4187 bytes .../tests/data/native_encoding/proto/0650.pb | Bin 440 -> 47 bytes .../tests/data/native_encoding/proto/0651.pb | Bin 2647 -> 43 bytes .../tests/data/native_encoding/proto/0652.pb | Bin 206 -> 1599 bytes .../tests/data/native_encoding/proto/0653.pb | Bin 84 -> 81 bytes .../tests/data/native_encoding/proto/0654.pb | Bin 40 -> 463 bytes .../tests/data/native_encoding/proto/0655.pb | Bin 574 -> 83 bytes .../tests/data/native_encoding/proto/0656.pb | Bin 129 -> 41 bytes .../tests/data/native_encoding/proto/0657.pb | Bin 189 -> 882 bytes .../tests/data/native_encoding/proto/0658.pb | Bin 69 -> 848 bytes .../tests/data/native_encoding/proto/0659.pb | Bin 1535 -> 2000 bytes .../tests/data/native_encoding/proto/0660.pb | Bin 105 -> 295 bytes .../tests/data/native_encoding/proto/0661.pb | Bin 1469 -> 51 bytes .../tests/data/native_encoding/proto/0662.pb | Bin 75 -> 32 bytes .../tests/data/native_encoding/proto/0663.pb | Bin 14 -> 2453 bytes .../tests/data/native_encoding/proto/0664.pb | Bin 28 -> 1829 bytes .../tests/data/native_encoding/proto/0665.pb | Bin 54 -> 1241 bytes .../tests/data/native_encoding/proto/0666.pb | Bin 650 -> 68 bytes .../tests/data/native_encoding/proto/0667.pb | Bin 409 -> 65 bytes .../tests/data/native_encoding/proto/0668.pb | Bin 161 -> 67 bytes .../tests/data/native_encoding/proto/0669.pb | Bin 1439 -> 66 bytes .../tests/data/native_encoding/proto/0670.pb | Bin 2276 -> 54 bytes .../tests/data/native_encoding/proto/0671.pb | Bin 8092 -> 46 bytes .../tests/data/native_encoding/proto/0672.pb | Bin 653 -> 2062 bytes .../tests/data/native_encoding/proto/0673.pb | Bin 2478 -> 9593 bytes 
.../tests/data/native_encoding/proto/0674.pb | Bin 20 -> 83 bytes .../tests/data/native_encoding/proto/0675.pb | Bin 3765 -> 105 bytes .../tests/data/native_encoding/proto/0676.pb | Bin 1587 -> 2747 bytes .../tests/data/native_encoding/proto/0677.pb | Bin 14 -> 67 bytes .../tests/data/native_encoding/proto/0678.pb | Bin 65 -> 220 bytes .../tests/data/native_encoding/proto/0679.pb | Bin 4860 -> 50 bytes .../tests/data/native_encoding/proto/0680.pb | Bin 30 -> 59 bytes .../tests/data/native_encoding/proto/0681.pb | Bin 1896 -> 88 bytes .../tests/data/native_encoding/proto/0682.pb | Bin 41 -> 51 bytes .../tests/data/native_encoding/proto/0683.pb | Bin 216 -> 12208 bytes .../tests/data/native_encoding/proto/0684.pb | Bin 56 -> 71 bytes .../tests/data/native_encoding/proto/0685.pb | Bin 154 -> 61 bytes .../tests/data/native_encoding/proto/0686.pb | Bin 59 -> 395 bytes .../tests/data/native_encoding/proto/0687.pb | Bin 82 -> 84 bytes .../tests/data/native_encoding/proto/0688.pb | Bin 1897 -> 71 bytes .../tests/data/native_encoding/proto/0689.pb | Bin 654 -> 208 bytes .../tests/data/native_encoding/proto/0690.pb | Bin 32 -> 207 bytes .../tests/data/native_encoding/proto/0691.pb | Bin 116 -> 72 bytes .../tests/data/native_encoding/proto/0692.pb | Bin 991 -> 112 bytes .../tests/data/native_encoding/proto/0693.pb | Bin 334 -> 103 bytes .../tests/data/native_encoding/proto/0694.pb | Bin 95 -> 32 bytes .../tests/data/native_encoding/proto/0695.pb | Bin 87 -> 2263 bytes .../tests/data/native_encoding/proto/0696.pb | Bin 32 -> 108 bytes .../tests/data/native_encoding/proto/0697.pb | Bin 73 -> 898 bytes .../tests/data/native_encoding/proto/0698.pb | Bin 66 -> 2341 bytes .../tests/data/native_encoding/proto/0699.pb | Bin 2075 -> 83 bytes .../tests/data/native_encoding/proto/0700.pb | Bin 305 -> 113 bytes .../tests/data/native_encoding/proto/0701.pb | Bin 2540 -> 230 bytes .../tests/data/native_encoding/proto/0702.pb | Bin 811 -> 32 bytes .../tests/data/native_encoding/proto/0703.pb | Bin 7344 -> 9657 bytes .../tests/data/native_encoding/proto/0704.pb | Bin 104 -> 89 bytes .../tests/data/native_encoding/proto/0705.pb | Bin 55 -> 1313 bytes .../tests/data/native_encoding/proto/0706.pb | Bin 53 -> 1782 bytes .../tests/data/native_encoding/proto/0707.pb | Bin 1295 -> 545 bytes .../tests/data/native_encoding/proto/0708.pb | Bin 14 -> 63 bytes .../tests/data/native_encoding/proto/0709.pb | Bin 78 -> 255 bytes .../tests/data/native_encoding/proto/0710.pb | Bin 30 -> 809 bytes .../tests/data/native_encoding/proto/0711.pb | Bin 23 -> 979 bytes .../tests/data/native_encoding/proto/0712.pb | Bin 52 -> 142 bytes .../tests/data/native_encoding/proto/0713.pb | Bin 33 -> 75 bytes .../tests/data/native_encoding/proto/0714.pb | Bin 1577 -> 174 bytes .../tests/data/native_encoding/proto/0715.pb | Bin 142 -> 959 bytes .../tests/data/native_encoding/proto/0716.pb | Bin 81 -> 64 bytes .../tests/data/native_encoding/proto/0717.pb | Bin 53 -> 67 bytes .../tests/data/native_encoding/proto/0718.pb | Bin 71 -> 946 bytes .../tests/data/native_encoding/proto/0719.pb | Bin 117 -> 1688 bytes .../tests/data/native_encoding/proto/0720.pb | Bin 583 -> 67 bytes .../tests/data/native_encoding/proto/0721.pb | Bin 10217 -> 112 bytes .../tests/data/native_encoding/proto/0722.pb | Bin 2120 -> 89 bytes .../tests/data/native_encoding/proto/0723.pb | Bin 128 -> 166 bytes .../tests/data/native_encoding/proto/0724.pb | Bin 139 -> 1335 bytes .../tests/data/native_encoding/proto/0725.pb | Bin 100 -> 109 bytes 
.../tests/data/native_encoding/proto/0726.pb | Bin 254 -> 46 bytes .../tests/data/native_encoding/proto/0727.pb | Bin 2656 -> 79 bytes .../tests/data/native_encoding/proto/0728.pb | Bin 55 -> 1746 bytes .../tests/data/native_encoding/proto/0729.pb | Bin 1209 -> 122 bytes .../tests/data/native_encoding/proto/0730.pb | Bin 209 -> 446 bytes .../tests/data/native_encoding/proto/0731.pb | Bin 225 -> 96 bytes .../tests/data/native_encoding/proto/0732.pb | Bin 538 -> 659 bytes .../tests/data/native_encoding/proto/0733.pb | Bin 97 -> 32 bytes .../tests/data/native_encoding/proto/0734.pb | Bin 507 -> 1401 bytes .../tests/data/native_encoding/proto/0735.pb | Bin 52 -> 95 bytes .../tests/data/native_encoding/proto/0736.pb | Bin 75 -> 270 bytes .../tests/data/native_encoding/proto/0737.pb | Bin 360 -> 83 bytes .../tests/data/native_encoding/proto/0738.pb | Bin 120 -> 44 bytes .../tests/data/native_encoding/proto/0739.pb | Bin 630 -> 253 bytes .../tests/data/native_encoding/proto/0740.pb | Bin 100 -> 1116 bytes .../tests/data/native_encoding/proto/0741.pb | Bin 45 -> 64 bytes .../tests/data/native_encoding/proto/0742.pb | Bin 14 -> 336 bytes .../tests/data/native_encoding/proto/0743.pb | Bin 2110 -> 1291 bytes .../tests/data/native_encoding/proto/0744.pb | Bin 56 -> 302 bytes .../tests/data/native_encoding/proto/0745.pb | Bin 58 -> 99 bytes .../tests/data/native_encoding/proto/0746.pb | Bin 605 -> 1626 bytes .../tests/data/native_encoding/proto/0747.pb | Bin 52 -> 41 bytes .../tests/data/native_encoding/proto/0748.pb | Bin 206 -> 227 bytes .../tests/data/native_encoding/proto/0749.pb | Bin 14 -> 118 bytes .../tests/data/native_encoding/proto/0750.pb | Bin 174 -> 1480 bytes .../tests/data/native_encoding/proto/0751.pb | Bin 95 -> 70 bytes .../tests/data/native_encoding/proto/0752.pb | Bin 2263 -> 2474 bytes .../tests/data/native_encoding/proto/0753.pb | Bin 50 -> 384 bytes .../tests/data/native_encoding/proto/0754.pb | Bin 14 -> 8070 bytes .../tests/data/native_encoding/proto/0755.pb | Bin 2625 -> 65 bytes .../tests/data/native_encoding/proto/0756.pb | Bin 36 -> 773 bytes .../tests/data/native_encoding/proto/0757.pb | Bin 739 -> 48 bytes .../tests/data/native_encoding/proto/0758.pb | Bin 566 -> 318 bytes .../tests/data/native_encoding/proto/0759.pb | Bin 2168 -> 554 bytes .../tests/data/native_encoding/proto/0760.pb | Bin 43 -> 50 bytes .../tests/data/native_encoding/proto/0761.pb | Bin 1743 -> 77 bytes .../tests/data/native_encoding/proto/0762.pb | Bin 65 -> 89 bytes .../tests/data/native_encoding/proto/0763.pb | Bin 50 -> 1316 bytes .../tests/data/native_encoding/proto/0764.pb | Bin 54 -> 253 bytes .../tests/data/native_encoding/proto/0765.pb | Bin 125 -> 67 bytes .../tests/data/native_encoding/proto/0766.pb | Bin 2337 -> 398 bytes .../tests/data/native_encoding/proto/0767.pb | Bin 508 -> 69 bytes .../tests/data/native_encoding/proto/0768.pb | Bin 57 -> 655 bytes .../tests/data/native_encoding/proto/0769.pb | Bin 212 -> 32 bytes .../tests/data/native_encoding/proto/0770.pb | Bin 11060 -> 1811 bytes .../tests/data/native_encoding/proto/0771.pb | Bin 8656 -> 2048 bytes .../tests/data/native_encoding/proto/0772.pb | Bin 599 -> 89 bytes .../tests/data/native_encoding/proto/0773.pb | Bin 30 -> 50 bytes .../tests/data/native_encoding/proto/0774.pb | Bin 28 -> 75 bytes .../tests/data/native_encoding/proto/0775.pb | Bin 14 -> 54 bytes .../tests/data/native_encoding/proto/0776.pb | Bin 2218 -> 6957 bytes .../tests/data/native_encoding/proto/0777.pb | Bin 1448 -> 64 bytes 
.../tests/data/native_encoding/proto/0778.pb | Bin 53 -> 71 bytes .../tests/data/native_encoding/proto/0779.pb | Bin 53 -> 61 bytes .../tests/data/native_encoding/proto/0780.pb | Bin 2202 -> 188 bytes .../tests/data/native_encoding/proto/0781.pb | Bin 39 -> 141 bytes .../tests/data/native_encoding/proto/0782.pb | Bin 42 -> 148 bytes .../tests/data/native_encoding/proto/0783.pb | Bin 67 -> 56 bytes .../tests/data/native_encoding/proto/0784.pb | Bin 39 -> 84 bytes .../tests/data/native_encoding/proto/0785.pb | Bin 14 -> 72 bytes .../tests/data/native_encoding/proto/0786.pb | Bin 901 -> 87 bytes .../tests/data/native_encoding/proto/0787.pb | Bin 194 -> 472 bytes .../tests/data/native_encoding/proto/0788.pb | Bin 28 -> 226 bytes .../tests/data/native_encoding/proto/0789.pb | Bin 28 -> 514 bytes .../tests/data/native_encoding/proto/0790.pb | Bin 338 -> 579 bytes .../tests/data/native_encoding/proto/0791.pb | Bin 50 -> 2137 bytes .../tests/data/native_encoding/proto/0792.pb | Bin 298 -> 567 bytes .../tests/data/native_encoding/proto/0793.pb | Bin 44 -> 1846 bytes .../tests/data/native_encoding/proto/0794.pb | Bin 115 -> 42 bytes .../tests/data/native_encoding/proto/0795.pb | Bin 1528 -> 88 bytes .../tests/data/native_encoding/proto/0796.pb | Bin 180 -> 12156 bytes .../tests/data/native_encoding/proto/0797.pb | Bin 742 -> 84 bytes .../tests/data/native_encoding/proto/0798.pb | Bin 35 -> 12265 bytes .../tests/data/native_encoding/proto/0799.pb | Bin 42 -> 110 bytes .../tests/data/native_encoding/proto/0800.pb | Bin 540 -> 92 bytes .../tests/data/native_encoding/proto/0801.pb | Bin 262 -> 285 bytes .../tests/data/native_encoding/proto/0802.pb | Bin 527 -> 189 bytes .../tests/data/native_encoding/proto/0803.pb | Bin 3466 -> 2688 bytes .../tests/data/native_encoding/proto/0804.pb | Bin 14 -> 52 bytes .../tests/data/native_encoding/proto/0805.pb | Bin 231 -> 193 bytes .../tests/data/native_encoding/proto/0806.pb | Bin 2113 -> 95 bytes .../tests/data/native_encoding/proto/0807.pb | Bin 296 -> 1401 bytes .../tests/data/native_encoding/proto/0808.pb | Bin 2141 -> 98 bytes .../tests/data/native_encoding/proto/0809.pb | Bin 411 -> 550 bytes .../tests/data/native_encoding/proto/0810.pb | Bin 63 -> 1192 bytes .../tests/data/native_encoding/proto/0811.pb | Bin 28 -> 103 bytes .../tests/data/native_encoding/proto/0812.pb | Bin 98 -> 154 bytes .../tests/data/native_encoding/proto/0813.pb | Bin 289 -> 101 bytes .../tests/data/native_encoding/proto/0814.pb | Bin 203 -> 646 bytes .../tests/data/native_encoding/proto/0815.pb | Bin 203 -> 206 bytes .../tests/data/native_encoding/proto/0816.pb | Bin 14 -> 2067 bytes .../tests/data/native_encoding/proto/0817.pb | Bin 78 -> 173 bytes .../tests/data/native_encoding/proto/0818.pb | Bin 514 -> 48 bytes .../tests/data/native_encoding/proto/0819.pb | Bin 39 -> 103 bytes .../tests/data/native_encoding/proto/0820.pb | Bin 13186 -> 89 bytes .../tests/data/native_encoding/proto/0821.pb | Bin 1110 -> 43 bytes .../tests/data/native_encoding/proto/0822.pb | Bin 86 -> 59 bytes .../tests/data/native_encoding/proto/0823.pb | Bin 62 -> 118 bytes .../tests/data/native_encoding/proto/0824.pb | Bin 14 -> 230 bytes .../tests/data/native_encoding/proto/0825.pb | Bin 31 -> 85 bytes .../tests/data/native_encoding/proto/0826.pb | Bin 32 -> 54 bytes .../tests/data/native_encoding/proto/0827.pb | Bin 67 -> 166 bytes .../tests/data/native_encoding/proto/0828.pb | Bin 62 -> 322 bytes .../tests/data/native_encoding/proto/0829.pb | Bin 632 -> 32 bytes .../tests/data/native_encoding/proto/0830.pb | 
Bin 158 -> 52 bytes .../tests/data/native_encoding/proto/0831.pb | Bin 155 -> 84 bytes .../tests/data/native_encoding/proto/0832.pb | Bin 39 -> 263 bytes .../tests/data/native_encoding/proto/0833.pb | Bin 47 -> 193 bytes .../tests/data/native_encoding/proto/0834.pb | Bin 9092 -> 2967 bytes .../tests/data/native_encoding/proto/0835.pb | Bin 46 -> 184 bytes .../tests/data/native_encoding/proto/0836.pb | Bin 814 -> 288 bytes .../tests/data/native_encoding/proto/0837.pb | Bin 1541 -> 81 bytes .../tests/data/native_encoding/proto/0838.pb | Bin 40 -> 78 bytes .../tests/data/native_encoding/proto/0839.pb | Bin 1151 -> 2142 bytes .../tests/data/native_encoding/proto/0840.pb | Bin 119 -> 157 bytes .../tests/data/native_encoding/proto/0841.pb | Bin 41 -> 11506 bytes .../tests/data/native_encoding/proto/0842.pb | Bin 28 -> 142 bytes .../tests/data/native_encoding/proto/0843.pb | Bin 14 -> 60 bytes .../tests/data/native_encoding/proto/0844.pb | Bin 2151 -> 1849 bytes .../tests/data/native_encoding/proto/0845.pb | Bin 97 -> 404 bytes .../tests/data/native_encoding/proto/0846.pb | Bin 94 -> 2172 bytes .../tests/data/native_encoding/proto/0847.pb | Bin 173 -> 64 bytes .../tests/data/native_encoding/proto/0848.pb | Bin 43 -> 433 bytes .../tests/data/native_encoding/proto/0849.pb | Bin 1312 -> 65 bytes .../tests/data/native_encoding/proto/0850.pb | Bin 683 -> 251 bytes .../tests/data/native_encoding/proto/0851.pb | Bin 226 -> 46 bytes .../tests/data/native_encoding/proto/0852.pb | Bin 139 -> 669 bytes .../tests/data/native_encoding/proto/0853.pb | Bin 50 -> 50 bytes .../tests/data/native_encoding/proto/0854.pb | Bin 53 -> 10402 bytes .../tests/data/native_encoding/proto/0855.pb | Bin 179 -> 415 bytes .../tests/data/native_encoding/proto/0856.pb | Bin 51 -> 381 bytes .../tests/data/native_encoding/proto/0857.pb | Bin 5303 -> 54 bytes .../tests/data/native_encoding/proto/0858.pb | Bin 65 -> 1572 bytes .../tests/data/native_encoding/proto/0859.pb | Bin 267 -> 1315 bytes .../tests/data/native_encoding/proto/0860.pb | Bin 120 -> 1006 bytes .../tests/data/native_encoding/proto/0861.pb | Bin 2051 -> 559 bytes .../tests/data/native_encoding/proto/0862.pb | Bin 484 -> 68 bytes .../tests/data/native_encoding/proto/0863.pb | Bin 54 -> 74 bytes .../tests/data/native_encoding/proto/0864.pb | Bin 45 -> 66 bytes .../tests/data/native_encoding/proto/0865.pb | Bin 3880 -> 10579 bytes .../tests/data/native_encoding/proto/0866.pb | Bin 103 -> 92 bytes .../tests/data/native_encoding/proto/0867.pb | Bin 1146 -> 81 bytes .../tests/data/native_encoding/proto/0868.pb | Bin 493 -> 1566 bytes .../tests/data/native_encoding/proto/0869.pb | Bin 683 -> 43 bytes .../tests/data/native_encoding/proto/0870.pb | Bin 729 -> 57 bytes .../tests/data/native_encoding/proto/0871.pb | Bin 172 -> 469 bytes .../tests/data/native_encoding/proto/0872.pb | Bin 368 -> 100 bytes .../tests/data/native_encoding/proto/0873.pb | Bin 61 -> 32 bytes .../tests/data/native_encoding/proto/0874.pb | Bin 212 -> 61 bytes .../tests/data/native_encoding/proto/0875.pb | Bin 32 -> 89 bytes .../tests/data/native_encoding/proto/0876.pb | Bin 7437 -> 75 bytes .../tests/data/native_encoding/proto/0877.pb | Bin 14 -> 134 bytes .../tests/data/native_encoding/proto/0878.pb | Bin 476 -> 317 bytes .../tests/data/native_encoding/proto/0879.pb | Bin 2384 -> 1080 bytes .../tests/data/native_encoding/proto/0880.pb | Bin 571 -> 1019 bytes .../tests/data/native_encoding/proto/0881.pb | Bin 83 -> 2166 bytes .../tests/data/native_encoding/proto/0882.pb | Bin 2175 -> 803 bytes 
.../tests/data/native_encoding/proto/0883.pb | Bin 56 -> 1194 bytes .../tests/data/native_encoding/proto/0884.pb | Bin 39 -> 146 bytes .../tests/data/native_encoding/proto/0885.pb | Bin 1330 -> 475 bytes .../tests/data/native_encoding/proto/0886.pb | Bin 42 -> 2318 bytes .../tests/data/native_encoding/proto/0887.pb | Bin 319 -> 2606 bytes .../tests/data/native_encoding/proto/0888.pb | Bin 1809 -> 257 bytes .../tests/data/native_encoding/proto/0889.pb | Bin 367 -> 124 bytes .../tests/data/native_encoding/proto/0890.pb | Bin 39 -> 313 bytes .../tests/data/native_encoding/proto/0891.pb | Bin 28 -> 1494 bytes .../tests/data/native_encoding/proto/0892.pb | Bin 760 -> 7875 bytes .../tests/data/native_encoding/proto/0893.pb | Bin 14791 -> 332 bytes .../tests/data/native_encoding/proto/0894.pb | Bin 96 -> 46 bytes .../tests/data/native_encoding/proto/0895.pb | Bin 13848 -> 75 bytes .../tests/data/native_encoding/proto/0896.pb | Bin 53 -> 292 bytes .../tests/data/native_encoding/proto/0897.pb | Bin 417 -> 70 bytes .../tests/data/native_encoding/proto/0898.pb | Bin 2064 -> 337 bytes .../tests/data/native_encoding/proto/0899.pb | Bin 33 -> 211 bytes .../tests/data/native_encoding/proto/0900.pb | Bin 93 -> 283 bytes .../tests/data/native_encoding/proto/0901.pb | Bin 213 -> 47 bytes .../tests/data/native_encoding/proto/0902.pb | Bin 77 -> 809 bytes .../tests/data/native_encoding/proto/0903.pb | Bin 41 -> 193 bytes .../tests/data/native_encoding/proto/0904.pb | Bin 42 -> 109 bytes .../tests/data/native_encoding/proto/0905.pb | Bin 56 -> 1488 bytes .../tests/data/native_encoding/proto/0906.pb | Bin 28 -> 416 bytes .../tests/data/native_encoding/proto/0907.pb | Bin 338 -> 477 bytes .../tests/data/native_encoding/proto/0908.pb | Bin 79 -> 97 bytes .../tests/data/native_encoding/proto/0909.pb | Bin 14 -> 38 bytes .../tests/data/native_encoding/proto/0910.pb | Bin 2086 -> 276 bytes .../tests/data/native_encoding/proto/0911.pb | Bin 2330 -> 138 bytes .../tests/data/native_encoding/proto/0912.pb | Bin 142 -> 48 bytes .../tests/data/native_encoding/proto/0913.pb | Bin 74 -> 1924 bytes .../tests/data/native_encoding/proto/0914.pb | Bin 97 -> 57 bytes .../tests/data/native_encoding/proto/0915.pb | Bin 70 -> 1290 bytes .../tests/data/native_encoding/proto/0916.pb | Bin 104 -> 398 bytes .../tests/data/native_encoding/proto/0917.pb | Bin 920 -> 132 bytes .../tests/data/native_encoding/proto/0918.pb | Bin 108 -> 794 bytes .../tests/data/native_encoding/proto/0919.pb | Bin 11526 -> 159 bytes .../tests/data/native_encoding/proto/0920.pb | Bin 12144 -> 171 bytes .../tests/data/native_encoding/proto/0921.pb | Bin 38 -> 48 bytes .../tests/data/native_encoding/proto/0922.pb | Bin 700 -> 13234 bytes .../tests/data/native_encoding/proto/0923.pb | Bin 1875 -> 2001 bytes .../tests/data/native_encoding/proto/0924.pb | Bin 1379 -> 225 bytes .../tests/data/native_encoding/proto/0925.pb | Bin 36 -> 380 bytes .../tests/data/native_encoding/proto/0926.pb | Bin 81 -> 312 bytes .../tests/data/native_encoding/proto/0927.pb | Bin 80 -> 116 bytes .../tests/data/native_encoding/proto/0928.pb | Bin 2606 -> 76 bytes .../tests/data/native_encoding/proto/0929.pb | Bin 126 -> 299 bytes .../tests/data/native_encoding/proto/0930.pb | Bin 65 -> 54 bytes .../tests/data/native_encoding/proto/0931.pb | Bin 10373 -> 50 bytes .../tests/data/native_encoding/proto/0932.pb | Bin 46 -> 174 bytes .../tests/data/native_encoding/proto/0933.pb | Bin 10646 -> 32 bytes .../tests/data/native_encoding/proto/0934.pb | Bin 43 -> 61 bytes 
.../tests/data/native_encoding/proto/0935.pb | Bin 138 -> 422 bytes .../tests/data/native_encoding/proto/0936.pb | Bin 43 -> 117 bytes .../tests/data/native_encoding/proto/0937.pb | Bin 949 -> 185 bytes .../tests/data/native_encoding/proto/0938.pb | Bin 26 -> 48 bytes .../tests/data/native_encoding/proto/0939.pb | Bin 1437 -> 2132 bytes .../tests/data/native_encoding/proto/0940.pb | Bin 60 -> 633 bytes .../tests/data/native_encoding/proto/0941.pb | Bin 86 -> 71 bytes .../tests/data/native_encoding/proto/0942.pb | Bin 74 -> 140 bytes .../tests/data/native_encoding/proto/0943.pb | Bin 14 -> 45 bytes .../tests/data/native_encoding/proto/0944.pb | Bin 204 -> 48 bytes .../tests/data/native_encoding/proto/0945.pb | Bin 2053 -> 102 bytes .../tests/data/native_encoding/proto/0946.pb | Bin 62 -> 90 bytes .../tests/data/native_encoding/proto/0947.pb | Bin 134 -> 56 bytes .../tests/data/native_encoding/proto/0948.pb | Bin 70 -> 113 bytes .../tests/data/native_encoding/proto/0949.pb | Bin 58 -> 90 bytes .../tests/data/native_encoding/proto/0950.pb | Bin 662 -> 102 bytes .../tests/data/native_encoding/proto/0951.pb | Bin 14 -> 350 bytes .../tests/data/native_encoding/proto/0952.pb | Bin 7215 -> 487 bytes .../tests/data/native_encoding/proto/0953.pb | Bin 226 -> 171 bytes .../tests/data/native_encoding/proto/0954.pb | Bin 1070 -> 176 bytes .../tests/data/native_encoding/proto/0955.pb | Bin 35 -> 359 bytes .../tests/data/native_encoding/proto/0956.pb | Bin 146 -> 57 bytes .../tests/data/native_encoding/proto/0957.pb | Bin 107 -> 319 bytes .../tests/data/native_encoding/proto/0958.pb | Bin 61 -> 280 bytes .../tests/data/native_encoding/proto/0959.pb | Bin 144 -> 4726 bytes .../tests/data/native_encoding/proto/0960.pb | Bin 1622 -> 78 bytes .../tests/data/native_encoding/proto/0961.pb | Bin 14 -> 149 bytes .../tests/data/native_encoding/proto/0962.pb | Bin 57 -> 10923 bytes .../tests/data/native_encoding/proto/0963.pb | Bin 127 -> 639 bytes .../tests/data/native_encoding/proto/0964.pb | Bin 91 -> 1679 bytes .../tests/data/native_encoding/proto/0965.pb | Bin 30 -> 48 bytes .../tests/data/native_encoding/proto/0966.pb | Bin 414 -> 11324 bytes .../tests/data/native_encoding/proto/0967.pb | Bin 13407 -> 272 bytes .../tests/data/native_encoding/proto/0968.pb | Bin 98 -> 110 bytes .../tests/data/native_encoding/proto/0969.pb | Bin 1318 -> 99 bytes .../tests/data/native_encoding/proto/0970.pb | Bin 231 -> 358 bytes .../tests/data/native_encoding/proto/0971.pb | Bin 2832 -> 543 bytes .../tests/data/native_encoding/proto/0972.pb | Bin 59 -> 593 bytes .../tests/data/native_encoding/proto/0973.pb | Bin 35 -> 287 bytes .../tests/data/native_encoding/proto/0974.pb | Bin 1248 -> 2559 bytes .../tests/data/native_encoding/proto/0975.pb | Bin 205 -> 249 bytes .../tests/data/native_encoding/proto/0976.pb | Bin 40 -> 211 bytes .../tests/data/native_encoding/proto/0977.pb | Bin 282 -> 108 bytes .../tests/data/native_encoding/proto/0978.pb | Bin 1832 -> 6507 bytes .../tests/data/native_encoding/proto/0979.pb | Bin 2105 -> 4772 bytes .../tests/data/native_encoding/proto/0980.pb | Bin 67 -> 46 bytes .../tests/data/native_encoding/proto/0981.pb | Bin 22 -> 1053 bytes .../tests/data/native_encoding/proto/0982.pb | Bin 470 -> 525 bytes .../tests/data/native_encoding/proto/0983.pb | Bin 134 -> 1423 bytes .../tests/data/native_encoding/proto/0984.pb | Bin 1835 -> 567 bytes .../tests/data/native_encoding/proto/0985.pb | Bin 42 -> 154 bytes .../tests/data/native_encoding/proto/0986.pb | Bin 1002 -> 405 bytes 
.../tests/data/native_encoding/proto/0987.pb | Bin 128 -> 93 bytes .../tests/data/native_encoding/proto/0988.pb | Bin 5875 -> 53 bytes .../tests/data/native_encoding/proto/0989.pb | Bin 69 -> 82 bytes .../tests/data/native_encoding/proto/0990.pb | Bin 5750 -> 1310 bytes .../tests/data/native_encoding/proto/0991.pb | Bin 164 -> 304 bytes .../tests/data/native_encoding/proto/0992.pb | Bin 439 -> 201 bytes .../tests/data/native_encoding/proto/0993.pb | Bin 14 -> 13068 bytes .../tests/data/native_encoding/proto/0994.pb | Bin 2292 -> 1156 bytes .../tests/data/native_encoding/proto/0995.pb | Bin 121 -> 62 bytes .../tests/data/native_encoding/proto/0996.pb | Bin 49 -> 85 bytes .../tests/data/native_encoding/proto/0997.pb | Bin 34 -> 203 bytes .../tests/data/native_encoding/proto/0998.pb | Bin 200 -> 80 bytes .../tests/data/native_encoding/proto/0999.pb | Bin 445 -> 12348 bytes .../tests/data/native_encoding/proto/1000.pb | Bin 242 -> 2140 bytes .../tests/data/native_encoding/proto/1001.pb | Bin 78 -> 53 bytes .../tests/data/native_encoding/proto/1002.pb | Bin 144 -> 1986 bytes .../tests/data/native_encoding/proto/1003.pb | Bin 121 -> 451 bytes .../tests/data/native_encoding/proto/1004.pb | Bin 14 -> 107 bytes .../tests/data/native_encoding/proto/1005.pb | Bin 46 -> 6381 bytes .../tests/data/native_encoding/proto/1006.pb | Bin 60 -> 110 bytes .../tests/data/native_encoding/proto/1007.pb | Bin 217 -> 384 bytes .../tests/data/native_encoding/proto/1008.pb | Bin 2177 -> 307 bytes .../tests/data/native_encoding/proto/1009.pb | Bin 117 -> 381 bytes .../tests/data/native_encoding/proto/1010.pb | Bin 58 -> 5904 bytes .../tests/data/native_encoding/proto/1011.pb | Bin 179 -> 95 bytes .../tests/data/native_encoding/proto/1012.pb | Bin 40 -> 111 bytes .../tests/data/native_encoding/proto/1013.pb | Bin 475 -> 96 bytes .../tests/data/native_encoding/proto/1014.pb | Bin 77 -> 51 bytes .../tests/data/native_encoding/proto/1015.pb | Bin 131 -> 96 bytes .../tests/data/native_encoding/proto/1016.pb | Bin 245 -> 32 bytes .../tests/data/native_encoding/proto/1017.pb | Bin 88 -> 558 bytes .../tests/data/native_encoding/proto/1018.pb | Bin 87 -> 146 bytes .../tests/data/native_encoding/proto/1019.pb | Bin 2627 -> 82 bytes .../tests/data/native_encoding/proto/1020.pb | Bin 2236 -> 1795 bytes .../tests/data/native_encoding/proto/1021.pb | Bin 80 -> 71 bytes .../tests/data/native_encoding/proto/1022.pb | Bin 207 -> 237 bytes .../tests/data/native_encoding/proto/1023.pb | Bin 41 -> 100 bytes .../native_encoding/proto/pre-v41/0000.pb | 6 + .../native_encoding/proto/pre-v41/0001.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0002.pb | Bin 0 -> 73 bytes .../native_encoding/proto/pre-v41/0003.pb | Bin 0 -> 34 bytes .../native_encoding/proto/pre-v41/0004.pb | Bin 0 -> 398 bytes .../native_encoding/proto/pre-v41/0005.pb | Bin 0 -> 1720 bytes .../native_encoding/proto/pre-v41/0006.pb | Bin 0 -> 1856 bytes .../native_encoding/proto/pre-v41/0007.pb | Bin 0 -> 121 bytes .../native_encoding/proto/pre-v41/0008.pb | Bin 0 -> 32 bytes .../native_encoding/proto/pre-v41/0009.pb | Bin 0 -> 476 bytes .../native_encoding/proto/pre-v41/0010.pb | Bin 0 -> 1996 bytes .../native_encoding/proto/pre-v41/0011.pb | Bin 0 -> 89 bytes .../native_encoding/proto/pre-v41/0012.pb | Bin 0 -> 128 bytes .../native_encoding/proto/pre-v41/0013.pb | Bin 0 -> 2794 bytes .../native_encoding/proto/pre-v41/0014.pb | Bin 0 -> 29 bytes .../native_encoding/proto/pre-v41/0015.pb | Bin 0 -> 137 bytes .../native_encoding/proto/pre-v41/0016.pb | Bin 0 -> 168 
bytes .../native_encoding/proto/pre-v41/0017.pb | Bin 0 -> 3611 bytes .../native_encoding/proto/pre-v41/0018.pb | Bin 0 -> 96 bytes .../native_encoding/proto/pre-v41/0019.pb | Bin 0 -> 32 bytes .../native_encoding/proto/pre-v41/0020.pb | Bin 0 -> 351 bytes .../native_encoding/proto/pre-v41/0021.pb | Bin 0 -> 58 bytes .../native_encoding/proto/pre-v41/0022.pb | Bin 0 -> 2281 bytes .../native_encoding/proto/pre-v41/0023.pb | Bin 0 -> 1228 bytes .../native_encoding/proto/pre-v41/0024.pb | Bin 0 -> 156 bytes .../native_encoding/proto/pre-v41/0025.pb | Bin 0 -> 335 bytes .../native_encoding/proto/pre-v41/0026.pb | 6 + .../native_encoding/proto/pre-v41/0027.pb | Bin 0 -> 80 bytes .../native_encoding/proto/pre-v41/0028.pb | 30 + .../native_encoding/proto/pre-v41/0029.pb | Bin 0 -> 2228 bytes .../native_encoding/proto/pre-v41/0030.pb | Bin 0 -> 1925 bytes .../native_encoding/proto/pre-v41/0031.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0032.pb | Bin 0 -> 1149 bytes .../native_encoding/proto/pre-v41/0033.pb | Bin 0 -> 501 bytes .../native_encoding/proto/pre-v41/0034.pb | Bin 0 -> 405 bytes .../native_encoding/proto/pre-v41/0035.pb | Bin 0 -> 74 bytes .../native_encoding/proto/pre-v41/0036.pb | 6 + .../native_encoding/proto/pre-v41/0037.pb | Bin 0 -> 92 bytes .../native_encoding/proto/pre-v41/0038.pb | Bin 0 -> 1746 bytes .../native_encoding/proto/pre-v41/0039.pb | Bin 0 -> 251 bytes .../native_encoding/proto/pre-v41/0040.pb | Bin 0 -> 30 bytes .../native_encoding/proto/pre-v41/0041.pb | Bin 0 -> 43 bytes .../native_encoding/proto/pre-v41/0042.pb | Bin 0 -> 210 bytes .../native_encoding/proto/pre-v41/0043.pb | 9 + .../native_encoding/proto/pre-v41/0044.pb | Bin 0 -> 2111 bytes .../native_encoding/proto/pre-v41/0045.pb | Bin 0 -> 52 bytes .../native_encoding/proto/pre-v41/0046.pb | Bin 0 -> 310 bytes .../native_encoding/proto/pre-v41/0047.pb | Bin 0 -> 72 bytes .../native_encoding/proto/pre-v41/0048.pb | Bin 0 -> 1722 bytes .../native_encoding/proto/pre-v41/0049.pb | Bin 0 -> 275 bytes .../native_encoding/proto/pre-v41/0050.pb | 5 + .../native_encoding/proto/pre-v41/0051.pb | Bin 0 -> 2402 bytes .../native_encoding/proto/pre-v41/0052.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0053.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0054.pb | 8 + .../native_encoding/proto/pre-v41/0055.pb | Bin 0 -> 77 bytes .../native_encoding/proto/pre-v41/0056.pb | Bin 0 -> 60 bytes .../native_encoding/proto/pre-v41/0057.pb | Bin 0 -> 128 bytes .../native_encoding/proto/pre-v41/0058.pb | Bin 0 -> 100 bytes .../native_encoding/proto/pre-v41/0059.pb | Bin 0 -> 450 bytes .../native_encoding/proto/pre-v41/0060.pb | Bin 0 -> 1821 bytes .../native_encoding/proto/pre-v41/0061.pb | Bin 0 -> 2538 bytes .../native_encoding/proto/pre-v41/0062.pb | Bin 0 -> 46 bytes .../native_encoding/proto/pre-v41/0063.pb | Bin 0 -> 1614 bytes .../native_encoding/proto/pre-v41/0064.pb | Bin 0 -> 356 bytes .../native_encoding/proto/pre-v41/0065.pb | Bin 0 -> 572 bytes .../native_encoding/proto/pre-v41/0066.pb | Bin 0 -> 617 bytes .../native_encoding/proto/pre-v41/0067.pb | Bin 0 -> 1950 bytes .../native_encoding/proto/pre-v41/0068.pb | Bin 0 -> 57 bytes .../native_encoding/proto/pre-v41/0069.pb | Bin 0 -> 490 bytes .../native_encoding/proto/pre-v41/0070.pb | Bin 0 -> 387 bytes .../native_encoding/proto/pre-v41/0071.pb | Bin 0 -> 31 bytes .../native_encoding/proto/pre-v41/0072.pb | Bin 0 -> 68 bytes .../native_encoding/proto/pre-v41/0073.pb | Bin 0 -> 1198 bytes .../native_encoding/proto/pre-v41/0074.pb | Bin 0 
-> 561 bytes .../native_encoding/proto/pre-v41/0075.pb | Bin 0 -> 70 bytes .../native_encoding/proto/pre-v41/0076.pb | Bin 0 -> 929 bytes .../native_encoding/proto/pre-v41/0077.pb | Bin 0 -> 71 bytes .../native_encoding/proto/pre-v41/0078.pb | Bin 0 -> 950 bytes .../native_encoding/proto/pre-v41/0079.pb | Bin 0 -> 25 bytes .../native_encoding/proto/pre-v41/0080.pb | 10 + .../native_encoding/proto/pre-v41/0081.pb | Bin 0 -> 129 bytes .../native_encoding/proto/pre-v41/0082.pb | Bin 0 -> 449 bytes .../native_encoding/proto/pre-v41/0083.pb | Bin 0 -> 202 bytes .../native_encoding/proto/pre-v41/0084.pb | Bin 0 -> 173 bytes .../native_encoding/proto/pre-v41/0085.pb | Bin 0 -> 4123 bytes .../native_encoding/proto/pre-v41/0086.pb | Bin 0 -> 47 bytes .../native_encoding/proto/pre-v41/0087.pb | Bin 0 -> 200 bytes .../native_encoding/proto/pre-v41/0088.pb | Bin 0 -> 37 bytes .../native_encoding/proto/pre-v41/0089.pb | Bin 0 -> 97 bytes .../native_encoding/proto/pre-v41/0090.pb | Bin 0 -> 48 bytes .../native_encoding/proto/pre-v41/0091.pb | Bin 0 -> 33 bytes .../native_encoding/proto/pre-v41/0092.pb | Bin 0 -> 40 bytes .../native_encoding/proto/pre-v41/0093.pb | Bin 0 -> 229 bytes .../native_encoding/proto/pre-v41/0094.pb | Bin 0 -> 52 bytes .../native_encoding/proto/pre-v41/0095.pb | Bin 0 -> 31 bytes .../native_encoding/proto/pre-v41/0096.pb | Bin 0 -> 1678 bytes .../native_encoding/proto/pre-v41/0097.pb | Bin 0 -> 55 bytes .../native_encoding/proto/pre-v41/0098.pb | Bin 0 -> 403 bytes .../native_encoding/proto/pre-v41/0099.pb | Bin 0 -> 464 bytes .../native_encoding/proto/pre-v41/0100.pb | Bin 0 -> 56 bytes .../native_encoding/proto/pre-v41/0101.pb | Bin 0 -> 108 bytes .../native_encoding/proto/pre-v41/0102.pb | Bin 0 -> 98 bytes .../native_encoding/proto/pre-v41/0103.pb | Bin 0 -> 13014 bytes .../native_encoding/proto/pre-v41/0104.pb | 7 + .../native_encoding/proto/pre-v41/0105.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0106.pb | Bin 0 -> 42 bytes .../native_encoding/proto/pre-v41/0107.pb | Bin 0 -> 2373 bytes .../native_encoding/proto/pre-v41/0108.pb | Bin 0 -> 397 bytes .../native_encoding/proto/pre-v41/0109.pb | Bin 0 -> 142 bytes .../native_encoding/proto/pre-v41/0110.pb | Bin 0 -> 47 bytes .../native_encoding/proto/pre-v41/0111.pb | Bin 0 -> 1779 bytes .../native_encoding/proto/pre-v41/0112.pb | Bin 0 -> 236 bytes .../native_encoding/proto/pre-v41/0113.pb | Bin 0 -> 69 bytes .../native_encoding/proto/pre-v41/0114.pb | Bin 0 -> 212 bytes .../native_encoding/proto/pre-v41/0115.pb | Bin 0 -> 43 bytes .../native_encoding/proto/pre-v41/0116.pb | Bin 0 -> 76 bytes .../native_encoding/proto/pre-v41/0117.pb | Bin 0 -> 171 bytes .../native_encoding/proto/pre-v41/0118.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0119.pb | Bin 0 -> 144 bytes .../native_encoding/proto/pre-v41/0120.pb | Bin 0 -> 1423 bytes .../native_encoding/proto/pre-v41/0121.pb | Bin 0 -> 59 bytes .../native_encoding/proto/pre-v41/0122.pb | 5 + .../native_encoding/proto/pre-v41/0123.pb | Bin 0 -> 63 bytes .../native_encoding/proto/pre-v41/0124.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0125.pb | Bin 0 -> 112 bytes .../native_encoding/proto/pre-v41/0126.pb | Bin 0 -> 164 bytes .../native_encoding/proto/pre-v41/0127.pb | Bin 0 -> 555 bytes .../native_encoding/proto/pre-v41/0128.pb | Bin 0 -> 338 bytes .../native_encoding/proto/pre-v41/0129.pb | Bin 0 -> 418 bytes .../native_encoding/proto/pre-v41/0130.pb | Bin 0 -> 243 bytes .../native_encoding/proto/pre-v41/0131.pb | Bin 0 -> 252 bytes 
.../native_encoding/proto/pre-v41/0132.pb | Bin 0 -> 127 bytes .../native_encoding/proto/pre-v41/0133.pb | Bin 0 -> 9336 bytes .../native_encoding/proto/pre-v41/0134.pb | Bin 0 -> 131 bytes .../native_encoding/proto/pre-v41/0135.pb | Bin 0 -> 59 bytes .../native_encoding/proto/pre-v41/0136.pb | Bin 0 -> 42 bytes .../native_encoding/proto/pre-v41/0137.pb | Bin 0 -> 1029 bytes .../native_encoding/proto/pre-v41/0138.pb | Bin 0 -> 489 bytes .../native_encoding/proto/pre-v41/0139.pb | Bin 0 -> 72 bytes .../native_encoding/proto/pre-v41/0140.pb | Bin 0 -> 102 bytes .../native_encoding/proto/pre-v41/0141.pb | Bin 0 -> 64 bytes .../native_encoding/proto/pre-v41/0142.pb | Bin 0 -> 75 bytes .../native_encoding/proto/pre-v41/0143.pb | Bin 0 -> 100 bytes .../native_encoding/proto/pre-v41/0144.pb | Bin 0 -> 175 bytes .../native_encoding/proto/pre-v41/0145.pb | Bin 0 -> 64 bytes .../native_encoding/proto/pre-v41/0146.pb | Bin 0 -> 118 bytes .../native_encoding/proto/pre-v41/0147.pb | 18 + .../native_encoding/proto/pre-v41/0148.pb | Bin 0 -> 162 bytes .../native_encoding/proto/pre-v41/0149.pb | Bin 0 -> 68 bytes .../native_encoding/proto/pre-v41/0150.pb | Bin 0 -> 168 bytes .../native_encoding/proto/pre-v41/0151.pb | Bin 0 -> 269 bytes .../native_encoding/proto/pre-v41/0152.pb | Bin 0 -> 691 bytes .../native_encoding/proto/pre-v41/0153.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0154.pb | Bin 0 -> 495 bytes .../native_encoding/proto/pre-v41/0155.pb | Bin 0 -> 81 bytes .../native_encoding/proto/pre-v41/0156.pb | Bin 0 -> 2163 bytes .../native_encoding/proto/pre-v41/0157.pb | Bin 0 -> 5335 bytes .../native_encoding/proto/pre-v41/0158.pb | Bin 0 -> 150 bytes .../native_encoding/proto/pre-v41/0159.pb | Bin 0 -> 34 bytes .../native_encoding/proto/pre-v41/0160.pb | Bin 0 -> 169 bytes .../native_encoding/proto/pre-v41/0161.pb | Bin 0 -> 275 bytes .../native_encoding/proto/pre-v41/0162.pb | Bin 0 -> 1308 bytes .../native_encoding/proto/pre-v41/0163.pb | Bin 0 -> 247 bytes .../native_encoding/proto/pre-v41/0164.pb | Bin 0 -> 56 bytes .../native_encoding/proto/pre-v41/0165.pb | Bin 0 -> 81 bytes .../native_encoding/proto/pre-v41/0166.pb | Bin 0 -> 485 bytes .../native_encoding/proto/pre-v41/0167.pb | Bin 0 -> 516 bytes .../native_encoding/proto/pre-v41/0168.pb | Bin 0 -> 131 bytes .../native_encoding/proto/pre-v41/0169.pb | Bin 0 -> 13614 bytes .../native_encoding/proto/pre-v41/0170.pb | Bin 0 -> 49 bytes .../native_encoding/proto/pre-v41/0171.pb | Bin 0 -> 101 bytes .../native_encoding/proto/pre-v41/0172.pb | Bin 0 -> 265 bytes .../native_encoding/proto/pre-v41/0173.pb | Bin 0 -> 2481 bytes .../native_encoding/proto/pre-v41/0174.pb | Bin 0 -> 139 bytes .../native_encoding/proto/pre-v41/0175.pb | Bin 0 -> 137 bytes .../native_encoding/proto/pre-v41/0176.pb | Bin 0 -> 165 bytes .../native_encoding/proto/pre-v41/0177.pb | Bin 0 -> 203 bytes .../native_encoding/proto/pre-v41/0178.pb | Bin 0 -> 269 bytes .../native_encoding/proto/pre-v41/0179.pb | Bin 0 -> 271 bytes .../native_encoding/proto/pre-v41/0180.pb | Bin 0 -> 315 bytes .../native_encoding/proto/pre-v41/0181.pb | Bin 0 -> 336 bytes .../native_encoding/proto/pre-v41/0182.pb | Bin 0 -> 95 bytes .../native_encoding/proto/pre-v41/0183.pb | Bin 0 -> 196 bytes .../native_encoding/proto/pre-v41/0184.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0185.pb | Bin 0 -> 63 bytes .../native_encoding/proto/pre-v41/0186.pb | Bin 0 -> 82 bytes .../native_encoding/proto/pre-v41/0187.pb | Bin 0 -> 41 bytes .../native_encoding/proto/pre-v41/0188.pb | Bin 0 
-> 66 bytes .../native_encoding/proto/pre-v41/0189.pb | Bin 0 -> 75 bytes .../native_encoding/proto/pre-v41/0190.pb | Bin 0 -> 2053 bytes .../native_encoding/proto/pre-v41/0191.pb | Bin 0 -> 1393 bytes .../native_encoding/proto/pre-v41/0192.pb | Bin 0 -> 125 bytes .../native_encoding/proto/pre-v41/0193.pb | Bin 0 -> 249 bytes .../native_encoding/proto/pre-v41/0194.pb | Bin 0 -> 46 bytes .../native_encoding/proto/pre-v41/0195.pb | Bin 0 -> 383 bytes .../native_encoding/proto/pre-v41/0196.pb | Bin 0 -> 10043 bytes .../native_encoding/proto/pre-v41/0197.pb | Bin 0 -> 1202 bytes .../native_encoding/proto/pre-v41/0198.pb | Bin 0 -> 87 bytes .../native_encoding/proto/pre-v41/0199.pb | Bin 0 -> 328 bytes .../native_encoding/proto/pre-v41/0200.pb | Bin 0 -> 230 bytes .../native_encoding/proto/pre-v41/0201.pb | 8 + .../native_encoding/proto/pre-v41/0202.pb | Bin 0 -> 1982 bytes .../native_encoding/proto/pre-v41/0203.pb | Bin 0 -> 420 bytes .../native_encoding/proto/pre-v41/0204.pb | Bin 0 -> 53 bytes .../native_encoding/proto/pre-v41/0205.pb | Bin 0 -> 4178 bytes .../native_encoding/proto/pre-v41/0206.pb | Bin 0 -> 74 bytes .../native_encoding/proto/pre-v41/0207.pb | Bin 0 -> 348 bytes .../native_encoding/proto/pre-v41/0208.pb | Bin 0 -> 49 bytes .../native_encoding/proto/pre-v41/0209.pb | Bin 0 -> 23 bytes .../native_encoding/proto/pre-v41/0210.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0211.pb | Bin 0 -> 1597 bytes .../native_encoding/proto/pre-v41/0212.pb | Bin 0 -> 380 bytes .../native_encoding/proto/pre-v41/0213.pb | Bin 0 -> 222 bytes .../native_encoding/proto/pre-v41/0214.pb | Bin 0 -> 954 bytes .../native_encoding/proto/pre-v41/0215.pb | 6 + .../native_encoding/proto/pre-v41/0216.pb | Bin 0 -> 8329 bytes .../native_encoding/proto/pre-v41/0217.pb | Bin 0 -> 8284 bytes .../native_encoding/proto/pre-v41/0218.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0219.pb | Bin 0 -> 28 bytes .../native_encoding/proto/pre-v41/0220.pb | Bin 0 -> 1550 bytes .../native_encoding/proto/pre-v41/0221.pb | Bin 0 -> 231 bytes .../native_encoding/proto/pre-v41/0222.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0223.pb | Bin 0 -> 587 bytes .../native_encoding/proto/pre-v41/0224.pb | Bin 0 -> 175 bytes .../native_encoding/proto/pre-v41/0225.pb | Bin 0 -> 2522 bytes .../native_encoding/proto/pre-v41/0226.pb | Bin 0 -> 367 bytes .../native_encoding/proto/pre-v41/0227.pb | Bin 0 -> 673 bytes .../native_encoding/proto/pre-v41/0228.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0229.pb | Bin 0 -> 340 bytes .../native_encoding/proto/pre-v41/0230.pb | Bin 0 -> 177 bytes .../native_encoding/proto/pre-v41/0231.pb | Bin 0 -> 103 bytes .../native_encoding/proto/pre-v41/0232.pb | 8 + .../native_encoding/proto/pre-v41/0233.pb | Bin 0 -> 113 bytes .../native_encoding/proto/pre-v41/0234.pb | Bin 0 -> 98 bytes .../native_encoding/proto/pre-v41/0235.pb | Bin 0 -> 854 bytes .../native_encoding/proto/pre-v41/0236.pb | Bin 0 -> 88 bytes .../native_encoding/proto/pre-v41/0237.pb | Bin 0 -> 29 bytes .../native_encoding/proto/pre-v41/0238.pb | Bin 0 -> 12653 bytes .../native_encoding/proto/pre-v41/0239.pb | Bin 0 -> 1993 bytes .../native_encoding/proto/pre-v41/0240.pb | Bin 0 -> 40 bytes .../native_encoding/proto/pre-v41/0241.pb | Bin 0 -> 2184 bytes .../native_encoding/proto/pre-v41/0242.pb | Bin 0 -> 12105 bytes .../native_encoding/proto/pre-v41/0243.pb | Bin 0 -> 34 bytes .../native_encoding/proto/pre-v41/0244.pb | Bin 0 -> 318 bytes .../native_encoding/proto/pre-v41/0245.pb | Bin 0 -> 1754 
bytes .../native_encoding/proto/pre-v41/0246.pb | Bin 0 -> 6060 bytes .../native_encoding/proto/pre-v41/0247.pb | Bin 0 -> 393 bytes .../native_encoding/proto/pre-v41/0248.pb | Bin 0 -> 2335 bytes .../native_encoding/proto/pre-v41/0249.pb | Bin 0 -> 101 bytes .../native_encoding/proto/pre-v41/0250.pb | Bin 0 -> 171 bytes .../native_encoding/proto/pre-v41/0251.pb | Bin 0 -> 168 bytes .../native_encoding/proto/pre-v41/0252.pb | Bin 0 -> 68 bytes .../native_encoding/proto/pre-v41/0253.pb | Bin 0 -> 76 bytes .../native_encoding/proto/pre-v41/0254.pb | Bin 0 -> 2017 bytes .../native_encoding/proto/pre-v41/0255.pb | Bin 0 -> 156 bytes .../native_encoding/proto/pre-v41/0256.pb | 6 + .../native_encoding/proto/pre-v41/0257.pb | Bin 0 -> 1624 bytes .../native_encoding/proto/pre-v41/0258.pb | Bin 0 -> 60 bytes .../native_encoding/proto/pre-v41/0259.pb | 4 + .../native_encoding/proto/pre-v41/0260.pb | Bin 0 -> 214 bytes .../native_encoding/proto/pre-v41/0261.pb | Bin 0 -> 1988 bytes .../native_encoding/proto/pre-v41/0262.pb | Bin 0 -> 66 bytes .../native_encoding/proto/pre-v41/0263.pb | Bin 0 -> 443 bytes .../native_encoding/proto/pre-v41/0264.pb | Bin 0 -> 222 bytes .../native_encoding/proto/pre-v41/0265.pb | Bin 0 -> 1166 bytes .../native_encoding/proto/pre-v41/0266.pb | Bin 0 -> 330 bytes .../native_encoding/proto/pre-v41/0267.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0268.pb | Bin 0 -> 173 bytes .../native_encoding/proto/pre-v41/0269.pb | Bin 0 -> 822 bytes .../native_encoding/proto/pre-v41/0270.pb | Bin 0 -> 7382 bytes .../native_encoding/proto/pre-v41/0271.pb | Bin 0 -> 57 bytes .../native_encoding/proto/pre-v41/0272.pb | Bin 0 -> 32 bytes .../native_encoding/proto/pre-v41/0273.pb | Bin 0 -> 554 bytes .../native_encoding/proto/pre-v41/0274.pb | Bin 0 -> 218 bytes .../native_encoding/proto/pre-v41/0275.pb | Bin 0 -> 48 bytes .../native_encoding/proto/pre-v41/0276.pb | Bin 0 -> 4484 bytes .../native_encoding/proto/pre-v41/0277.pb | Bin 0 -> 2389 bytes .../native_encoding/proto/pre-v41/0278.pb | Bin 0 -> 56 bytes .../native_encoding/proto/pre-v41/0279.pb | Bin 0 -> 11194 bytes .../native_encoding/proto/pre-v41/0280.pb | Bin 0 -> 85 bytes .../native_encoding/proto/pre-v41/0281.pb | Bin 0 -> 197 bytes .../native_encoding/proto/pre-v41/0282.pb | Bin 0 -> 102 bytes .../native_encoding/proto/pre-v41/0283.pb | Bin 0 -> 132 bytes .../native_encoding/proto/pre-v41/0284.pb | Bin 0 -> 52 bytes .../native_encoding/proto/pre-v41/0285.pb | 4 + .../native_encoding/proto/pre-v41/0286.pb | Bin 0 -> 9096 bytes .../native_encoding/proto/pre-v41/0287.pb | Bin 0 -> 293 bytes .../native_encoding/proto/pre-v41/0288.pb | Bin 0 -> 207 bytes .../native_encoding/proto/pre-v41/0289.pb | Bin 0 -> 40 bytes .../native_encoding/proto/pre-v41/0290.pb | Bin 0 -> 150 bytes .../native_encoding/proto/pre-v41/0291.pb | Bin 0 -> 77 bytes .../native_encoding/proto/pre-v41/0292.pb | Bin 0 -> 542 bytes .../native_encoding/proto/pre-v41/0293.pb | Bin 0 -> 289 bytes .../native_encoding/proto/pre-v41/0294.pb | Bin 0 -> 1763 bytes .../native_encoding/proto/pre-v41/0295.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0296.pb | Bin 0 -> 535 bytes .../native_encoding/proto/pre-v41/0297.pb | Bin 0 -> 102 bytes .../native_encoding/proto/pre-v41/0298.pb | Bin 0 -> 27 bytes .../native_encoding/proto/pre-v41/0299.pb | Bin 0 -> 6730 bytes .../native_encoding/proto/pre-v41/0300.pb | Bin 0 -> 186 bytes .../native_encoding/proto/pre-v41/0301.pb | Bin 0 -> 2314 bytes .../native_encoding/proto/pre-v41/0302.pb | Bin 0 -> 889 bytes 
.../native_encoding/proto/pre-v41/0303.pb | 5 + .../native_encoding/proto/pre-v41/0304.pb | Bin 0 -> 193 bytes .../native_encoding/proto/pre-v41/0305.pb | Bin 0 -> 36 bytes .../native_encoding/proto/pre-v41/0306.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0307.pb | 7 + .../native_encoding/proto/pre-v41/0308.pb | Bin 0 -> 34 bytes .../native_encoding/proto/pre-v41/0309.pb | Bin 0 -> 49 bytes .../native_encoding/proto/pre-v41/0310.pb | Bin 0 -> 2082 bytes .../native_encoding/proto/pre-v41/0311.pb | Bin 0 -> 1383 bytes .../native_encoding/proto/pre-v41/0312.pb | Bin 0 -> 58 bytes .../native_encoding/proto/pre-v41/0313.pb | Bin 0 -> 538 bytes .../native_encoding/proto/pre-v41/0314.pb | Bin 0 -> 90 bytes .../native_encoding/proto/pre-v41/0315.pb | Bin 0 -> 56 bytes .../native_encoding/proto/pre-v41/0316.pb | Bin 0 -> 304 bytes .../native_encoding/proto/pre-v41/0317.pb | Bin 0 -> 24 bytes .../native_encoding/proto/pre-v41/0318.pb | Bin 0 -> 9268 bytes .../native_encoding/proto/pre-v41/0319.pb | Bin 0 -> 25 bytes .../native_encoding/proto/pre-v41/0320.pb | Bin 0 -> 33 bytes .../native_encoding/proto/pre-v41/0321.pb | Bin 0 -> 41 bytes .../native_encoding/proto/pre-v41/0322.pb | Bin 0 -> 195 bytes .../native_encoding/proto/pre-v41/0323.pb | Bin 0 -> 1372 bytes .../native_encoding/proto/pre-v41/0324.pb | Bin 0 -> 1191 bytes .../native_encoding/proto/pre-v41/0325.pb | Bin 0 -> 367 bytes .../native_encoding/proto/pre-v41/0326.pb | Bin 0 -> 1608 bytes .../native_encoding/proto/pre-v41/0327.pb | Bin 0 -> 264 bytes .../native_encoding/proto/pre-v41/0328.pb | Bin 0 -> 31 bytes .../native_encoding/proto/pre-v41/0329.pb | Bin 0 -> 88 bytes .../native_encoding/proto/pre-v41/0330.pb | Bin 0 -> 1067 bytes .../native_encoding/proto/pre-v41/0331.pb | Bin 0 -> 23 bytes .../native_encoding/proto/pre-v41/0332.pb | Bin 0 -> 529 bytes .../native_encoding/proto/pre-v41/0333.pb | Bin 0 -> 88 bytes .../native_encoding/proto/pre-v41/0334.pb | Bin 0 -> 63 bytes .../native_encoding/proto/pre-v41/0335.pb | Bin 0 -> 6163 bytes .../native_encoding/proto/pre-v41/0336.pb | Bin 0 -> 11142 bytes .../native_encoding/proto/pre-v41/0337.pb | 8 + .../native_encoding/proto/pre-v41/0338.pb | Bin 0 -> 2039 bytes .../native_encoding/proto/pre-v41/0339.pb | Bin 0 -> 161 bytes .../native_encoding/proto/pre-v41/0340.pb | Bin 0 -> 98 bytes .../native_encoding/proto/pre-v41/0341.pb | Bin 0 -> 1311 bytes .../native_encoding/proto/pre-v41/0342.pb | Bin 0 -> 2866 bytes .../native_encoding/proto/pre-v41/0343.pb | Bin 0 -> 75 bytes .../native_encoding/proto/pre-v41/0344.pb | Bin 0 -> 442 bytes .../native_encoding/proto/pre-v41/0345.pb | Bin 0 -> 64 bytes .../native_encoding/proto/pre-v41/0346.pb | Bin 0 -> 116 bytes .../native_encoding/proto/pre-v41/0347.pb | Bin 0 -> 44 bytes .../native_encoding/proto/pre-v41/0348.pb | Bin 0 -> 28 bytes .../native_encoding/proto/pre-v41/0349.pb | Bin 0 -> 71 bytes .../native_encoding/proto/pre-v41/0350.pb | Bin 0 -> 2378 bytes .../native_encoding/proto/pre-v41/0351.pb | Bin 0 -> 36 bytes .../native_encoding/proto/pre-v41/0352.pb | Bin 0 -> 664 bytes .../native_encoding/proto/pre-v41/0353.pb | Bin 0 -> 968 bytes .../native_encoding/proto/pre-v41/0354.pb | Bin 0 -> 1217 bytes .../native_encoding/proto/pre-v41/0355.pb | Bin 0 -> 71 bytes .../native_encoding/proto/pre-v41/0356.pb | Bin 0 -> 85 bytes .../native_encoding/proto/pre-v41/0357.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0358.pb | Bin 0 -> 12099 bytes .../native_encoding/proto/pre-v41/0359.pb | Bin 0 -> 1536 bytes 
.../native_encoding/proto/pre-v41/0360.pb | Bin 0 -> 1765 bytes .../native_encoding/proto/pre-v41/0361.pb | Bin 0 -> 2324 bytes .../native_encoding/proto/pre-v41/0362.pb | Bin 0 -> 67 bytes .../native_encoding/proto/pre-v41/0363.pb | Bin 0 -> 64 bytes .../native_encoding/proto/pre-v41/0364.pb | Bin 0 -> 354 bytes .../native_encoding/proto/pre-v41/0365.pb | Bin 0 -> 99 bytes .../native_encoding/proto/pre-v41/0366.pb | Bin 0 -> 72 bytes .../native_encoding/proto/pre-v41/0367.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0368.pb | 16 + .../native_encoding/proto/pre-v41/0369.pb | Bin 0 -> 162 bytes .../native_encoding/proto/pre-v41/0370.pb | Bin 0 -> 1529 bytes .../native_encoding/proto/pre-v41/0371.pb | Bin 0 -> 52 bytes .../native_encoding/proto/pre-v41/0372.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0373.pb | Bin 0 -> 258 bytes .../native_encoding/proto/pre-v41/0374.pb | Bin 0 -> 48 bytes .../native_encoding/proto/pre-v41/0375.pb | Bin 0 -> 111 bytes .../native_encoding/proto/pre-v41/0376.pb | Bin 0 -> 62 bytes .../native_encoding/proto/pre-v41/0377.pb | Bin 0 -> 1051 bytes .../native_encoding/proto/pre-v41/0378.pb | 6 + .../native_encoding/proto/pre-v41/0379.pb | Bin 0 -> 94 bytes .../native_encoding/proto/pre-v41/0380.pb | Bin 0 -> 782 bytes .../native_encoding/proto/pre-v41/0381.pb | Bin 0 -> 14 bytes .../native_encoding/proto/pre-v41/0382.pb | Bin 0 -> 397 bytes .../native_encoding/proto/pre-v41/0383.pb | Bin 0 -> 55 bytes .../native_encoding/proto/pre-v41/0384.pb | Bin 0 -> 48 bytes .../native_encoding/proto/pre-v41/0385.pb | Bin 0 -> 42 bytes .../native_encoding/proto/pre-v41/0386.pb | Bin 0 -> 152 bytes .../native_encoding/proto/pre-v41/0387.pb | Bin 0 -> 64 bytes .../native_encoding/proto/pre-v41/0388.pb | Bin 0 -> 6585 bytes .../native_encoding/proto/pre-v41/0389.pb | Bin 0 -> 86 bytes .../native_encoding/proto/pre-v41/0390.pb | Bin 0 -> 72 bytes .../native_encoding/proto/pre-v41/0391.pb | 9 + .../native_encoding/proto/pre-v41/0392.pb | Bin 0 -> 42 bytes .../native_encoding/proto/pre-v41/0393.pb | Bin 0 -> 991 bytes .../native_encoding/proto/pre-v41/0394.pb | 16 + .../native_encoding/proto/pre-v41/0395.pb | Bin 0 -> 122 bytes .../native_encoding/proto/pre-v41/0396.pb | Bin 0 -> 223 bytes .../native_encoding/proto/pre-v41/0397.pb | Bin 0 -> 512 bytes .../native_encoding/proto/pre-v41/0398.pb | Bin 0 -> 85 bytes .../native_encoding/proto/pre-v41/0399.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0400.pb | Bin 0 -> 28 bytes .../native_encoding/proto/pre-v41/0401.pb | Bin 0 -> 1764 bytes .../native_encoding/proto/pre-v41/0402.pb | Bin 0 -> 50 bytes .../native_encoding/proto/pre-v41/0403.pb | Bin 0 -> 334 bytes .../native_encoding/proto/pre-v41/0404.pb | 4 + .../native_encoding/proto/pre-v41/0405.pb | Bin 0 -> 40 bytes .../native_encoding/proto/pre-v41/0406.pb | Bin 0 -> 10597 bytes .../native_encoding/proto/pre-v41/0407.pb | Bin 0 -> 60 bytes .../native_encoding/proto/pre-v41/0408.pb | Bin 0 -> 1832 bytes .../native_encoding/proto/pre-v41/0409.pb | Bin 0 -> 143 bytes .../native_encoding/proto/pre-v41/0410.pb | Bin 0 -> 51 bytes .../native_encoding/proto/pre-v41/0411.pb | Bin 0 -> 44 bytes .../native_encoding/proto/pre-v41/0412.pb | Bin 0 -> 38 bytes .../native_encoding/proto/pre-v41/0413.pb | Bin 0 -> 2110 bytes .../native_encoding/proto/pre-v41/0414.pb | Bin 0 -> 1809 bytes .../native_encoding/proto/pre-v41/0415.pb | Bin 0 -> 1009 bytes .../native_encoding/proto/pre-v41/0416.pb | Bin 0 -> 37 bytes .../native_encoding/proto/pre-v41/0417.pb | Bin 0 -> 
28 bytes
 .../native_encoding/proto/pre-v41/0418.pb | Bin 0 -> 29 bytes
 .../native_encoding/proto/pre-v41/0419.pb | Bin 0 -> 2177 bytes
 .../native_encoding/proto/pre-v41/0420.pb | Bin 0 -> 11712 bytes
 .../native_encoding/proto/pre-v41/0421.pb | Bin 0 -> 419 bytes
 .../native_encoding/proto/pre-v41/0422.pb | Bin 0 -> 90 bytes
 .../native_encoding/proto/pre-v41/0423.pb | Bin 0 -> 467 bytes
 .../native_encoding/proto/pre-v41/0424.pb | Bin 0 -> 74 bytes
 .../native_encoding/proto/pre-v41/0425.pb | Bin 0 -> 9830 bytes
 .../native_encoding/proto/pre-v41/0426.pb | Bin 0 -> 2248 bytes
 .../native_encoding/proto/pre-v41/0427.pb | Bin 0 -> 1568 bytes
 .../native_encoding/proto/pre-v41/0428.pb | Bin 0 -> 64 bytes
 .../native_encoding/proto/pre-v41/0429.pb | Bin 0 -> 5626 bytes
 .../native_encoding/proto/pre-v41/0430.pb | Bin 0 -> 414 bytes
 .../native_encoding/proto/pre-v41/0431.pb | Bin 0 -> 348 bytes
 .../native_encoding/proto/pre-v41/0432.pb | Bin 0 -> 2241 bytes
 .../native_encoding/proto/pre-v41/0433.pb | Bin 0 -> 56 bytes
 .../native_encoding/proto/pre-v41/0434.pb | Bin 0 -> 166 bytes
 .../native_encoding/proto/pre-v41/0435.pb | Bin 0 -> 1878 bytes
 .../native_encoding/proto/pre-v41/0436.pb | Bin 0 -> 132 bytes
 .../native_encoding/proto/pre-v41/0437.pb | Bin 0 -> 2356 bytes
 .../native_encoding/proto/pre-v41/0438.pb | Bin 0 -> 2064 bytes
 .../native_encoding/proto/pre-v41/0439.pb | Bin 0 -> 32 bytes
 .../native_encoding/proto/pre-v41/0440.pb | Bin 0 -> 116 bytes
 .../native_encoding/proto/pre-v41/0441.pb | Bin 0 -> 7252 bytes
 .../native_encoding/proto/pre-v41/0442.pb | Bin 0 -> 146 bytes
 .../native_encoding/proto/pre-v41/0443.pb | Bin 0 -> 1067 bytes
 .../native_encoding/proto/pre-v41/0444.pb | Bin 0 -> 304 bytes
 .../native_encoding/proto/pre-v41/0445.pb | Bin 0 -> 114 bytes
 .../native_encoding/proto/pre-v41/0446.pb | 6 +
 .../native_encoding/proto/pre-v41/0447.pb | Bin 0 -> 2071 bytes
 .../native_encoding/proto/pre-v41/0448.pb | Bin 0 -> 571 bytes
 .../native_encoding/proto/pre-v41/0449.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0450.pb | Bin 0 -> 768 bytes
 .../native_encoding/proto/pre-v41/0451.pb | Bin 0 -> 12251 bytes
 .../native_encoding/proto/pre-v41/0452.pb | Bin 0 -> 103 bytes
 .../native_encoding/proto/pre-v41/0453.pb | Bin 0 -> 38 bytes
 .../native_encoding/proto/pre-v41/0454.pb | Bin 0 -> 793 bytes
 .../native_encoding/proto/pre-v41/0455.pb | Bin 0 -> 655 bytes
 .../native_encoding/proto/pre-v41/0456.pb | Bin 0 -> 2044 bytes
 .../native_encoding/proto/pre-v41/0457.pb | 8 +
 .../native_encoding/proto/pre-v41/0458.pb | Bin 0 -> 180 bytes
 .../native_encoding/proto/pre-v41/0459.pb | Bin 0 -> 13417 bytes
 .../native_encoding/proto/pre-v41/0460.pb | Bin 0 -> 277 bytes
 .../native_encoding/proto/pre-v41/0461.pb | Bin 0 -> 79 bytes
 .../native_encoding/proto/pre-v41/0462.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0463.pb | Bin 0 -> 1278 bytes
 .../native_encoding/proto/pre-v41/0464.pb | Bin 0 -> 46 bytes
 .../native_encoding/proto/pre-v41/0465.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0466.pb | Bin 0 -> 56 bytes
 .../native_encoding/proto/pre-v41/0467.pb | Bin 0 -> 435 bytes
 .../native_encoding/proto/pre-v41/0468.pb | Bin 0 -> 59 bytes
 .../native_encoding/proto/pre-v41/0469.pb | Bin 0 -> 47 bytes
 .../native_encoding/proto/pre-v41/0470.pb | Bin 0 -> 48 bytes
 .../native_encoding/proto/pre-v41/0471.pb | Bin 0 -> 35 bytes
 .../native_encoding/proto/pre-v41/0472.pb | Bin 0 -> 2009 bytes
 .../native_encoding/proto/pre-v41/0473.pb | Bin 0 -> 1130 bytes
 .../native_encoding/proto/pre-v41/0474.pb | Bin 0 -> 33 bytes
 .../native_encoding/proto/pre-v41/0475.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0476.pb | Bin 0 -> 518 bytes
 .../native_encoding/proto/pre-v41/0477.pb | Bin 0 -> 51 bytes
 .../native_encoding/proto/pre-v41/0478.pb | Bin 0 -> 90 bytes
 .../native_encoding/proto/pre-v41/0479.pb | Bin 0 -> 67 bytes
 .../native_encoding/proto/pre-v41/0480.pb | Bin 0 -> 40 bytes
 .../native_encoding/proto/pre-v41/0481.pb | Bin 0 -> 99 bytes
 .../native_encoding/proto/pre-v41/0482.pb | Bin 0 -> 7261 bytes
 .../native_encoding/proto/pre-v41/0483.pb | Bin 0 -> 42 bytes
 .../native_encoding/proto/pre-v41/0484.pb | Bin 0 -> 55 bytes
 .../native_encoding/proto/pre-v41/0485.pb | Bin 0 -> 201 bytes
 .../native_encoding/proto/pre-v41/0486.pb | Bin 0 -> 77 bytes
 .../native_encoding/proto/pre-v41/0487.pb | Bin 0 -> 13791 bytes
 .../native_encoding/proto/pre-v41/0488.pb | Bin 0 -> 907 bytes
 .../native_encoding/proto/pre-v41/0489.pb | Bin 0 -> 1409 bytes
 .../native_encoding/proto/pre-v41/0490.pb | Bin 0 -> 1790 bytes
 .../native_encoding/proto/pre-v41/0491.pb | Bin 0 -> 194 bytes
 .../native_encoding/proto/pre-v41/0492.pb | Bin 0 -> 69 bytes
 .../native_encoding/proto/pre-v41/0493.pb | Bin 0 -> 12432 bytes
 .../native_encoding/proto/pre-v41/0494.pb | 11 +
 .../native_encoding/proto/pre-v41/0495.pb | Bin 0 -> 1467 bytes
 .../native_encoding/proto/pre-v41/0496.pb | 7 +
 .../native_encoding/proto/pre-v41/0497.pb | Bin 0 -> 589 bytes
 .../native_encoding/proto/pre-v41/0498.pb | Bin 0 -> 166 bytes
 .../native_encoding/proto/pre-v41/0499.pb | Bin 0 -> 231 bytes
 .../native_encoding/proto/pre-v41/0500.pb | Bin 0 -> 113 bytes
 .../native_encoding/proto/pre-v41/0501.pb | Bin 0 -> 105 bytes
 .../native_encoding/proto/pre-v41/0502.pb | Bin 0 -> 472 bytes
 .../native_encoding/proto/pre-v41/0503.pb | Bin 0 -> 1125 bytes
 .../native_encoding/proto/pre-v41/0504.pb | Bin 0 -> 111 bytes
 .../native_encoding/proto/pre-v41/0505.pb | Bin 0 -> 73 bytes
 .../native_encoding/proto/pre-v41/0506.pb | Bin 0 -> 269 bytes
 .../native_encoding/proto/pre-v41/0507.pb | Bin 0 -> 7057 bytes
 .../native_encoding/proto/pre-v41/0508.pb | Bin 0 -> 343 bytes
 .../native_encoding/proto/pre-v41/0509.pb | Bin 0 -> 229 bytes
 .../native_encoding/proto/pre-v41/0510.pb | Bin 0 -> 391 bytes
 .../native_encoding/proto/pre-v41/0511.pb | Bin 0 -> 221 bytes
 .../native_encoding/proto/pre-v41/0512.pb | Bin 0 -> 85 bytes
 .../native_encoding/proto/pre-v41/0513.pb | Bin 0 -> 21 bytes
 .../native_encoding/proto/pre-v41/0514.pb | Bin 0 -> 27 bytes
 .../native_encoding/proto/pre-v41/0515.pb | Bin 0 -> 963 bytes
 .../native_encoding/proto/pre-v41/0516.pb | Bin 0 -> 236 bytes
 .../native_encoding/proto/pre-v41/0517.pb | Bin 0 -> 584 bytes
 .../native_encoding/proto/pre-v41/0518.pb | Bin 0 -> 198 bytes
 .../native_encoding/proto/pre-v41/0519.pb | Bin 0 -> 51 bytes
 .../native_encoding/proto/pre-v41/0520.pb | Bin 0 -> 79 bytes
 .../native_encoding/proto/pre-v41/0521.pb | Bin 0 -> 97 bytes
 .../native_encoding/proto/pre-v41/0522.pb | Bin 0 -> 41 bytes
 .../native_encoding/proto/pre-v41/0523.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0524.pb | Bin 0 -> 74 bytes
 .../native_encoding/proto/pre-v41/0525.pb | Bin 0 -> 477 bytes
 .../native_encoding/proto/pre-v41/0526.pb | Bin 0 -> 2251 bytes
 .../native_encoding/proto/pre-v41/0527.pb | 5 +
 .../native_encoding/proto/pre-v41/0528.pb | Bin 0 -> 110 bytes
 .../native_encoding/proto/pre-v41/0529.pb | Bin 0 -> 61 bytes
 .../native_encoding/proto/pre-v41/0530.pb | Bin 0 -> 1940 bytes
 .../native_encoding/proto/pre-v41/0531.pb | Bin 0 -> 264 bytes
 .../native_encoding/proto/pre-v41/0532.pb | Bin 0 -> 1372 bytes
 .../native_encoding/proto/pre-v41/0533.pb | Bin 0 -> 97 bytes
 .../native_encoding/proto/pre-v41/0534.pb | Bin 0 -> 161 bytes
 .../native_encoding/proto/pre-v41/0535.pb | Bin 0 -> 4675 bytes
 .../native_encoding/proto/pre-v41/0536.pb | Bin 0 -> 73 bytes
 .../native_encoding/proto/pre-v41/0537.pb | Bin 0 -> 518 bytes
 .../native_encoding/proto/pre-v41/0538.pb | Bin 0 -> 2418 bytes
 .../native_encoding/proto/pre-v41/0539.pb | Bin 0 -> 33 bytes
 .../native_encoding/proto/pre-v41/0540.pb | Bin 0 -> 1347 bytes
 .../native_encoding/proto/pre-v41/0541.pb | Bin 0 -> 1922 bytes
 .../native_encoding/proto/pre-v41/0542.pb | Bin 0 -> 100 bytes
 .../native_encoding/proto/pre-v41/0543.pb | Bin 0 -> 32 bytes
 .../native_encoding/proto/pre-v41/0544.pb | Bin 0 -> 155 bytes
 .../native_encoding/proto/pre-v41/0545.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0546.pb | Bin 0 -> 2203 bytes
 .../native_encoding/proto/pre-v41/0547.pb | Bin 0 -> 167 bytes
 .../native_encoding/proto/pre-v41/0548.pb | Bin 0 -> 27 bytes
 .../native_encoding/proto/pre-v41/0549.pb | Bin 0 -> 438 bytes
 .../native_encoding/proto/pre-v41/0550.pb | Bin 0 -> 138 bytes
 .../native_encoding/proto/pre-v41/0551.pb | Bin 0 -> 476 bytes
 .../native_encoding/proto/pre-v41/0552.pb | Bin 0 -> 78 bytes
 .../native_encoding/proto/pre-v41/0553.pb | Bin 0 -> 57 bytes
 .../native_encoding/proto/pre-v41/0554.pb | 17 +
 .../native_encoding/proto/pre-v41/0555.pb | Bin 0 -> 54 bytes
 .../native_encoding/proto/pre-v41/0556.pb | Bin 0 -> 72 bytes
 .../native_encoding/proto/pre-v41/0557.pb | Bin 0 -> 2755 bytes
 .../native_encoding/proto/pre-v41/0558.pb | Bin 0 -> 86 bytes
 .../native_encoding/proto/pre-v41/0559.pb | Bin 0 -> 1453 bytes
 .../native_encoding/proto/pre-v41/0560.pb | Bin 0 -> 47 bytes
 .../native_encoding/proto/pre-v41/0561.pb | Bin 0 -> 149 bytes
 .../native_encoding/proto/pre-v41/0562.pb | Bin 0 -> 75 bytes
 .../native_encoding/proto/pre-v41/0563.pb | Bin 0 -> 69 bytes
 .../native_encoding/proto/pre-v41/0564.pb | Bin 0 -> 322 bytes
 .../native_encoding/proto/pre-v41/0565.pb | Bin 0 -> 98 bytes
 .../native_encoding/proto/pre-v41/0566.pb | Bin 0 -> 1268 bytes
 .../native_encoding/proto/pre-v41/0567.pb | 5 +
 .../native_encoding/proto/pre-v41/0568.pb | Bin 0 -> 8977 bytes
 .../native_encoding/proto/pre-v41/0569.pb | Bin 0 -> 376 bytes
 .../native_encoding/proto/pre-v41/0570.pb | Bin 0 -> 106 bytes
 .../native_encoding/proto/pre-v41/0571.pb | Bin 0 -> 501 bytes
 .../native_encoding/proto/pre-v41/0572.pb | 6 +
 .../native_encoding/proto/pre-v41/0573.pb | Bin 0 -> 224 bytes
 .../native_encoding/proto/pre-v41/0574.pb | Bin 0 -> 2309 bytes
 .../native_encoding/proto/pre-v41/0575.pb | Bin 0 -> 284 bytes
 .../native_encoding/proto/pre-v41/0576.pb | Bin 0 -> 111 bytes
 .../native_encoding/proto/pre-v41/0577.pb | Bin 0 -> 68 bytes
 .../native_encoding/proto/pre-v41/0578.pb | Bin 0 -> 75 bytes
 .../native_encoding/proto/pre-v41/0579.pb | Bin 0 -> 91 bytes
 .../native_encoding/proto/pre-v41/0580.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0581.pb | Bin 0 -> 25 bytes
 .../native_encoding/proto/pre-v41/0582.pb | Bin 0 -> 104 bytes
 .../native_encoding/proto/pre-v41/0583.pb | Bin 0 -> 10776 bytes
 .../native_encoding/proto/pre-v41/0584.pb | Bin 0 -> 81 bytes
 .../native_encoding/proto/pre-v41/0585.pb | Bin 0 -> 603 bytes
 .../native_encoding/proto/pre-v41/0586.pb | Bin 0 -> 232 bytes
 .../native_encoding/proto/pre-v41/0587.pb | Bin 0 -> 115 bytes
 .../native_encoding/proto/pre-v41/0588.pb | Bin 0 -> 179 bytes
 .../native_encoding/proto/pre-v41/0589.pb | Bin 0 -> 59 bytes
 .../native_encoding/proto/pre-v41/0590.pb | Bin 0 -> 29 bytes
 .../native_encoding/proto/pre-v41/0591.pb | 20 +
 .../native_encoding/proto/pre-v41/0592.pb | Bin 0 -> 159 bytes
 .../native_encoding/proto/pre-v41/0593.pb | Bin 0 -> 1835 bytes
 .../native_encoding/proto/pre-v41/0594.pb | Bin 0 -> 40 bytes
 .../native_encoding/proto/pre-v41/0595.pb | Bin 0 -> 50 bytes
 .../native_encoding/proto/pre-v41/0596.pb | Bin 0 -> 5468 bytes
 .../native_encoding/proto/pre-v41/0597.pb | Bin 0 -> 960 bytes
 .../native_encoding/proto/pre-v41/0598.pb | Bin 0 -> 32 bytes
 .../native_encoding/proto/pre-v41/0599.pb | Bin 0 -> 1906 bytes
 .../native_encoding/proto/pre-v41/0600.pb | Bin 0 -> 2003 bytes
 .../native_encoding/proto/pre-v41/0601.pb | Bin 0 -> 172 bytes
 .../native_encoding/proto/pre-v41/0602.pb | Bin 0 -> 46 bytes
 .../native_encoding/proto/pre-v41/0603.pb | Bin 0 -> 36 bytes
 .../native_encoding/proto/pre-v41/0604.pb | Bin 0 -> 30 bytes
 .../native_encoding/proto/pre-v41/0605.pb | Bin 0 -> 42 bytes
 .../native_encoding/proto/pre-v41/0606.pb | Bin 0 -> 791 bytes
 .../native_encoding/proto/pre-v41/0607.pb | Bin 0 -> 64 bytes
 .../native_encoding/proto/pre-v41/0608.pb | Bin 0 -> 36 bytes
 .../native_encoding/proto/pre-v41/0609.pb | Bin 0 -> 118 bytes
 .../native_encoding/proto/pre-v41/0610.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0611.pb | Bin 0 -> 8715 bytes
 .../native_encoding/proto/pre-v41/0612.pb | Bin 0 -> 247 bytes
 .../native_encoding/proto/pre-v41/0613.pb | Bin 0 -> 236 bytes
 .../native_encoding/proto/pre-v41/0614.pb | Bin 0 -> 44 bytes
 .../native_encoding/proto/pre-v41/0615.pb | Bin 0 -> 2515 bytes
 .../native_encoding/proto/pre-v41/0616.pb | Bin 0 -> 294 bytes
 .../native_encoding/proto/pre-v41/0617.pb | 6 +
 .../native_encoding/proto/pre-v41/0618.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0619.pb | Bin 0 -> 423 bytes
 .../native_encoding/proto/pre-v41/0620.pb | Bin 0 -> 55 bytes
 .../native_encoding/proto/pre-v41/0621.pb | Bin 0 -> 1339 bytes
 .../native_encoding/proto/pre-v41/0622.pb | Bin 0 -> 36 bytes
 .../native_encoding/proto/pre-v41/0623.pb | 6 +
 .../native_encoding/proto/pre-v41/0624.pb | Bin 0 -> 2025 bytes
 .../native_encoding/proto/pre-v41/0625.pb | Bin 0 -> 1812 bytes
 .../native_encoding/proto/pre-v41/0626.pb | Bin 0 -> 61 bytes
 .../native_encoding/proto/pre-v41/0627.pb | Bin 0 -> 420 bytes
 .../native_encoding/proto/pre-v41/0628.pb | Bin 0 -> 976 bytes
 .../native_encoding/proto/pre-v41/0629.pb | Bin 0 -> 259 bytes
 .../native_encoding/proto/pre-v41/0630.pb | Bin 0 -> 881 bytes
 .../native_encoding/proto/pre-v41/0631.pb | Bin 0 -> 534 bytes
 .../native_encoding/proto/pre-v41/0632.pb | Bin 0 -> 101 bytes
 .../native_encoding/proto/pre-v41/0633.pb | Bin 0 -> 128 bytes
 .../native_encoding/proto/pre-v41/0634.pb | 6 +
 .../native_encoding/proto/pre-v41/0635.pb | Bin 0 -> 519 bytes
 .../native_encoding/proto/pre-v41/0636.pb | Bin 0 -> 229 bytes
 .../native_encoding/proto/pre-v41/0637.pb | Bin 0 -> 72 bytes
 .../native_encoding/proto/pre-v41/0638.pb | Bin 0 -> 43 bytes
 .../native_encoding/proto/pre-v41/0639.pb | Bin 0 -> 1160 bytes
 .../native_encoding/proto/pre-v41/0640.pb | Bin 0 -> 88 bytes
 .../native_encoding/proto/pre-v41/0641.pb | Bin 0 -> 60 bytes
 .../native_encoding/proto/pre-v41/0642.pb | Bin 0 -> 85 bytes
 .../native_encoding/proto/pre-v41/0643.pb | Bin 0 -> 97 bytes
 .../native_encoding/proto/pre-v41/0644.pb | 5 +
 .../native_encoding/proto/pre-v41/0645.pb | Bin 0 -> 67 bytes
 .../native_encoding/proto/pre-v41/0646.pb | Bin 0 -> 139 bytes
 .../native_encoding/proto/pre-v41/0647.pb | Bin 0 -> 1014 bytes
 .../native_encoding/proto/pre-v41/0648.pb | Bin 0 -> 109 bytes
 .../native_encoding/proto/pre-v41/0649.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0650.pb | Bin 0 -> 440 bytes
 .../native_encoding/proto/pre-v41/0651.pb | Bin 0 -> 2647 bytes
 .../native_encoding/proto/pre-v41/0652.pb | Bin 0 -> 206 bytes
 .../native_encoding/proto/pre-v41/0653.pb | Bin 0 -> 84 bytes
 .../native_encoding/proto/pre-v41/0654.pb | Bin 0 -> 40 bytes
 .../native_encoding/proto/pre-v41/0655.pb | Bin 0 -> 574 bytes
 .../native_encoding/proto/pre-v41/0656.pb | Bin 0 -> 129 bytes
 .../native_encoding/proto/pre-v41/0657.pb | 6 +
 .../native_encoding/proto/pre-v41/0658.pb | Bin 0 -> 69 bytes
 .../native_encoding/proto/pre-v41/0659.pb | Bin 0 -> 1535 bytes
 .../native_encoding/proto/pre-v41/0660.pb | Bin 0 -> 105 bytes
 .../native_encoding/proto/pre-v41/0661.pb | Bin 0 -> 1469 bytes
 .../native_encoding/proto/pre-v41/0662.pb | Bin 0 -> 75 bytes
 .../native_encoding/proto/pre-v41/0663.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0664.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0665.pb | Bin 0 -> 54 bytes
 .../native_encoding/proto/pre-v41/0666.pb | Bin 0 -> 650 bytes
 .../native_encoding/proto/pre-v41/0667.pb | Bin 0 -> 409 bytes
 .../native_encoding/proto/pre-v41/0668.pb | Bin 0 -> 161 bytes
 .../native_encoding/proto/pre-v41/0669.pb | Bin 0 -> 1439 bytes
 .../native_encoding/proto/pre-v41/0670.pb | Bin 0 -> 2276 bytes
 .../native_encoding/proto/pre-v41/0671.pb | Bin 0 -> 8092 bytes
 .../native_encoding/proto/pre-v41/0672.pb | Bin 0 -> 653 bytes
 .../native_encoding/proto/pre-v41/0673.pb | Bin 0 -> 2478 bytes
 .../native_encoding/proto/pre-v41/0674.pb | Bin 0 -> 20 bytes
 .../native_encoding/proto/pre-v41/0675.pb | Bin 0 -> 3765 bytes
 .../native_encoding/proto/pre-v41/0676.pb | Bin 0 -> 1587 bytes
 .../native_encoding/proto/pre-v41/0677.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0678.pb | Bin 0 -> 65 bytes
 .../native_encoding/proto/pre-v41/0679.pb | Bin 0 -> 4860 bytes
 .../native_encoding/proto/pre-v41/0680.pb | 6 +
 .../native_encoding/proto/pre-v41/0681.pb | Bin 0 -> 1896 bytes
 .../native_encoding/proto/pre-v41/0682.pb | Bin 0 -> 41 bytes
 .../native_encoding/proto/pre-v41/0683.pb | Bin 0 -> 216 bytes
 .../native_encoding/proto/pre-v41/0684.pb | 12 +
 .../native_encoding/proto/pre-v41/0685.pb | Bin 0 -> 154 bytes
 .../native_encoding/proto/pre-v41/0686.pb | Bin 0 -> 59 bytes
 .../native_encoding/proto/pre-v41/0687.pb | Bin 0 -> 82 bytes
 .../native_encoding/proto/pre-v41/0688.pb | Bin 0 -> 1897 bytes
 .../native_encoding/proto/pre-v41/0689.pb | Bin 0 -> 654 bytes
 .../native_encoding/proto/pre-v41/0690.pb | 4 +
 .../native_encoding/proto/pre-v41/0691.pb | Bin 0 -> 116 bytes
 .../native_encoding/proto/pre-v41/0692.pb | Bin 0 -> 991 bytes
 .../native_encoding/proto/pre-v41/0693.pb | Bin 0 -> 334 bytes
 .../native_encoding/proto/pre-v41/0694.pb | Bin 0 -> 95 bytes
 .../native_encoding/proto/pre-v41/0695.pb | Bin 0 -> 87 bytes
 .../native_encoding/proto/pre-v41/0696.pb | 4 +
 .../native_encoding/proto/pre-v41/0697.pb | Bin 0 -> 73 bytes
 .../native_encoding/proto/pre-v41/0698.pb | Bin 0 -> 66 bytes
 .../native_encoding/proto/pre-v41/0699.pb | Bin 0 -> 2075 bytes
 .../native_encoding/proto/pre-v41/0700.pb | Bin 0 -> 305 bytes
 .../native_encoding/proto/pre-v41/0701.pb | Bin 0 -> 2540 bytes
 .../native_encoding/proto/pre-v41/0702.pb | Bin 0 -> 811 bytes
 .../native_encoding/proto/pre-v41/0703.pb | Bin 0 -> 7344 bytes
 .../native_encoding/proto/pre-v41/0704.pb | Bin 0 -> 104 bytes
 .../native_encoding/proto/pre-v41/0705.pb | Bin 0 -> 55 bytes
 .../native_encoding/proto/pre-v41/0706.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0707.pb | Bin 0 -> 1295 bytes
 .../native_encoding/proto/pre-v41/0708.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0709.pb | Bin 0 -> 78 bytes
 .../native_encoding/proto/pre-v41/0710.pb | 6 +
 .../native_encoding/proto/pre-v41/0711.pb | Bin 0 -> 23 bytes
 .../native_encoding/proto/pre-v41/0712.pb | Bin 0 -> 52 bytes
 .../native_encoding/proto/pre-v41/0713.pb | Bin 0 -> 33 bytes
 .../native_encoding/proto/pre-v41/0714.pb | Bin 0 -> 1577 bytes
 .../native_encoding/proto/pre-v41/0715.pb | Bin 0 -> 142 bytes
 .../native_encoding/proto/pre-v41/0716.pb | Bin 0 -> 81 bytes
 .../native_encoding/proto/pre-v41/0717.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0718.pb | Bin 0 -> 71 bytes
 .../native_encoding/proto/pre-v41/0719.pb | Bin 0 -> 117 bytes
 .../native_encoding/proto/pre-v41/0720.pb | Bin 0 -> 583 bytes
 .../native_encoding/proto/pre-v41/0721.pb | Bin 0 -> 10217 bytes
 .../native_encoding/proto/pre-v41/0722.pb | Bin 0 -> 2120 bytes
 .../native_encoding/proto/pre-v41/0723.pb | Bin 0 -> 128 bytes
 .../native_encoding/proto/pre-v41/0724.pb | Bin 0 -> 139 bytes
 .../native_encoding/proto/pre-v41/0725.pb | Bin 0 -> 100 bytes
 .../native_encoding/proto/pre-v41/0726.pb | Bin 0 -> 254 bytes
 .../native_encoding/proto/pre-v41/0727.pb | Bin 0 -> 2656 bytes
 .../native_encoding/proto/pre-v41/0728.pb | Bin 0 -> 55 bytes
 .../native_encoding/proto/pre-v41/0729.pb | Bin 0 -> 1209 bytes
 .../native_encoding/proto/pre-v41/0730.pb | Bin 0 -> 209 bytes
 .../native_encoding/proto/pre-v41/0731.pb | Bin 0 -> 225 bytes
 .../native_encoding/proto/pre-v41/0732.pb | Bin 0 -> 538 bytes
 .../native_encoding/proto/pre-v41/0733.pb | Bin 0 -> 97 bytes
 .../native_encoding/proto/pre-v41/0734.pb | Bin 0 -> 507 bytes
 .../native_encoding/proto/pre-v41/0735.pb | Bin 0 -> 52 bytes
 .../native_encoding/proto/pre-v41/0736.pb | Bin 0 -> 75 bytes
 .../native_encoding/proto/pre-v41/0737.pb | Bin 0 -> 360 bytes
 .../native_encoding/proto/pre-v41/0738.pb | Bin 0 -> 120 bytes
 .../native_encoding/proto/pre-v41/0739.pb | Bin 0 -> 630 bytes
 .../native_encoding/proto/pre-v41/0740.pb | Bin 0 -> 100 bytes
 .../native_encoding/proto/pre-v41/0741.pb | Bin 0 -> 45 bytes
 .../native_encoding/proto/pre-v41/0742.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0743.pb | Bin 0 -> 2110 bytes
 .../native_encoding/proto/pre-v41/0744.pb | Bin 0 -> 56 bytes
 .../native_encoding/proto/pre-v41/0745.pb | Bin 0 -> 58 bytes
 .../native_encoding/proto/pre-v41/0746.pb | Bin 0 -> 605 bytes
 .../native_encoding/proto/pre-v41/0747.pb | Bin 0 -> 52 bytes
 .../native_encoding/proto/pre-v41/0748.pb | Bin 0 -> 206 bytes
 .../native_encoding/proto/pre-v41/0749.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0750.pb | Bin 0 -> 174 bytes
 .../native_encoding/proto/pre-v41/0751.pb | Bin 0 -> 95 bytes
 .../native_encoding/proto/pre-v41/0752.pb | Bin 0 -> 2263 bytes
 .../native_encoding/proto/pre-v41/0753.pb | Bin 0 -> 50 bytes
 .../native_encoding/proto/pre-v41/0754.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0755.pb | Bin 0 -> 2625 bytes
 .../native_encoding/proto/pre-v41/0756.pb | Bin 0 -> 36 bytes
 .../native_encoding/proto/pre-v41/0757.pb | Bin 0 -> 739 bytes
 .../native_encoding/proto/pre-v41/0758.pb | Bin 0 -> 566 bytes
 .../native_encoding/proto/pre-v41/0759.pb | Bin 0 -> 2168 bytes
 .../native_encoding/proto/pre-v41/0760.pb | Bin 0 -> 43 bytes
 .../native_encoding/proto/pre-v41/0761.pb | Bin 0 -> 1743 bytes
 .../native_encoding/proto/pre-v41/0762.pb | Bin 0 -> 65 bytes
 .../native_encoding/proto/pre-v41/0763.pb | Bin 0 -> 50 bytes
 .../native_encoding/proto/pre-v41/0764.pb | Bin 0 -> 54 bytes
 .../native_encoding/proto/pre-v41/0765.pb | Bin 0 -> 125 bytes
 .../native_encoding/proto/pre-v41/0766.pb | Bin 0 -> 2337 bytes
 .../native_encoding/proto/pre-v41/0767.pb | Bin 0 -> 508 bytes
 .../native_encoding/proto/pre-v41/0768.pb | Bin 0 -> 57 bytes
 .../native_encoding/proto/pre-v41/0769.pb | 9 +
 .../native_encoding/proto/pre-v41/0770.pb | Bin 0 -> 11060 bytes
 .../native_encoding/proto/pre-v41/0771.pb | Bin 0 -> 8656 bytes
 .../native_encoding/proto/pre-v41/0772.pb | Bin 0 -> 599 bytes
 .../native_encoding/proto/pre-v41/0773.pb | 6 +
 .../native_encoding/proto/pre-v41/0774.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0775.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0776.pb | Bin 0 -> 2218 bytes
 .../native_encoding/proto/pre-v41/0777.pb | Bin 0 -> 1448 bytes
 .../native_encoding/proto/pre-v41/0778.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0779.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0780.pb | Bin 0 -> 2202 bytes
 .../native_encoding/proto/pre-v41/0781.pb | Bin 0 -> 39 bytes
 .../native_encoding/proto/pre-v41/0782.pb | Bin 0 -> 42 bytes
 .../native_encoding/proto/pre-v41/0783.pb | Bin 0 -> 67 bytes
 .../native_encoding/proto/pre-v41/0784.pb | Bin 0 -> 39 bytes
 .../native_encoding/proto/pre-v41/0785.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0786.pb | Bin 0 -> 901 bytes
 .../native_encoding/proto/pre-v41/0787.pb | Bin 0 -> 194 bytes
 .../native_encoding/proto/pre-v41/0788.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0789.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0790.pb | Bin 0 -> 338 bytes
 .../native_encoding/proto/pre-v41/0791.pb | Bin 0 -> 50 bytes
 .../native_encoding/proto/pre-v41/0792.pb | Bin 0 -> 298 bytes
 .../native_encoding/proto/pre-v41/0793.pb | Bin 0 -> 44 bytes
 .../native_encoding/proto/pre-v41/0794.pb | Bin 0 -> 115 bytes
 .../native_encoding/proto/pre-v41/0795.pb | Bin 0 -> 1528 bytes
 .../native_encoding/proto/pre-v41/0796.pb | Bin 0 -> 180 bytes
 .../native_encoding/proto/pre-v41/0797.pb | Bin 0 -> 742 bytes
 .../native_encoding/proto/pre-v41/0798.pb | Bin 0 -> 35 bytes
 .../native_encoding/proto/pre-v41/0799.pb | 6 +
 .../native_encoding/proto/pre-v41/0800.pb | Bin 0 -> 540 bytes
 .../native_encoding/proto/pre-v41/0801.pb | Bin 0 -> 262 bytes
 .../native_encoding/proto/pre-v41/0802.pb | Bin 0 -> 527 bytes
 .../native_encoding/proto/pre-v41/0803.pb | Bin 0 -> 3466 bytes
 .../native_encoding/proto/pre-v41/0804.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0805.pb | Bin 0 -> 231 bytes
 .../native_encoding/proto/pre-v41/0806.pb | Bin 0 -> 2113 bytes
 .../native_encoding/proto/pre-v41/0807.pb | Bin 0 -> 296 bytes
 .../native_encoding/proto/pre-v41/0808.pb | Bin 0 -> 2141 bytes
 .../native_encoding/proto/pre-v41/0809.pb | Bin 0 -> 411 bytes
 .../native_encoding/proto/pre-v41/0810.pb | Bin 0 -> 63 bytes
 .../native_encoding/proto/pre-v41/0811.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0812.pb | Bin 0 -> 98 bytes
 .../native_encoding/proto/pre-v41/0813.pb | Bin 0 -> 289 bytes
 .../native_encoding/proto/pre-v41/0814.pb | Bin 0 -> 203 bytes
 .../native_encoding/proto/pre-v41/0815.pb | Bin 0 -> 203 bytes
 .../native_encoding/proto/pre-v41/0816.pb | 5 +
 .../native_encoding/proto/pre-v41/0817.pb | Bin 0 -> 78 bytes
 .../native_encoding/proto/pre-v41/0818.pb | Bin 0 -> 514 bytes
 .../native_encoding/proto/pre-v41/0819.pb | Bin 0 -> 39 bytes
 .../native_encoding/proto/pre-v41/0820.pb | Bin 0 -> 13186 bytes
 .../native_encoding/proto/pre-v41/0821.pb | Bin 0 -> 1110 bytes
 .../native_encoding/proto/pre-v41/0822.pb | Bin 0 -> 86 bytes
 .../native_encoding/proto/pre-v41/0823.pb | Bin 0 -> 62 bytes
 .../native_encoding/proto/pre-v41/0824.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0825.pb | Bin 0 -> 31 bytes
 .../native_encoding/proto/pre-v41/0826.pb | Bin 0 -> 32 bytes
 .../native_encoding/proto/pre-v41/0827.pb | Bin 0 -> 67 bytes
 .../native_encoding/proto/pre-v41/0828.pb | Bin 0 -> 62 bytes
 .../native_encoding/proto/pre-v41/0829.pb | Bin 0 -> 632 bytes
 .../native_encoding/proto/pre-v41/0830.pb | 11 +
 .../native_encoding/proto/pre-v41/0831.pb | Bin 0 -> 155 bytes
 .../native_encoding/proto/pre-v41/0832.pb | Bin 0 -> 39 bytes
 .../native_encoding/proto/pre-v41/0833.pb | Bin 0 -> 47 bytes
 .../native_encoding/proto/pre-v41/0834.pb | Bin 0 -> 9092 bytes
 .../native_encoding/proto/pre-v41/0835.pb | Bin 0 -> 46 bytes
 .../native_encoding/proto/pre-v41/0836.pb | Bin 0 -> 814 bytes
 .../native_encoding/proto/pre-v41/0837.pb | Bin 0 -> 1541 bytes
 .../native_encoding/proto/pre-v41/0838.pb | Bin 0 -> 40 bytes
 .../native_encoding/proto/pre-v41/0839.pb | Bin 0 -> 1151 bytes
 .../native_encoding/proto/pre-v41/0840.pb | Bin 0 -> 119 bytes
 .../native_encoding/proto/pre-v41/0841.pb | Bin 0 -> 41 bytes
 .../native_encoding/proto/pre-v41/0842.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0843.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0844.pb | Bin 0 -> 2151 bytes
 .../native_encoding/proto/pre-v41/0845.pb | Bin 0 -> 97 bytes
 .../native_encoding/proto/pre-v41/0846.pb | Bin 0 -> 94 bytes
 .../native_encoding/proto/pre-v41/0847.pb | Bin 0 -> 173 bytes
 .../native_encoding/proto/pre-v41/0848.pb | Bin 0 -> 43 bytes
 .../native_encoding/proto/pre-v41/0849.pb | Bin 0 -> 1312 bytes
 .../native_encoding/proto/pre-v41/0850.pb | Bin 0 -> 683 bytes
 .../native_encoding/proto/pre-v41/0851.pb | Bin 0 -> 226 bytes
 .../native_encoding/proto/pre-v41/0852.pb | Bin 0 -> 139 bytes
 .../native_encoding/proto/pre-v41/0853.pb | Bin 0 -> 50 bytes
 .../native_encoding/proto/pre-v41/0854.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0855.pb | Bin 0 -> 179 bytes
 .../native_encoding/proto/pre-v41/0856.pb | Bin 0 -> 51 bytes
 .../native_encoding/proto/pre-v41/0857.pb | Bin 0 -> 5303 bytes
 .../native_encoding/proto/pre-v41/0858.pb | Bin 0 -> 65 bytes
 .../native_encoding/proto/pre-v41/0859.pb | Bin 0 -> 267 bytes
 .../native_encoding/proto/pre-v41/0860.pb | Bin 0 -> 120 bytes
 .../native_encoding/proto/pre-v41/0861.pb | Bin 0 -> 2051 bytes
 .../native_encoding/proto/pre-v41/0862.pb | Bin 0 -> 484 bytes
 .../native_encoding/proto/pre-v41/0863.pb | 12 +
 .../native_encoding/proto/pre-v41/0864.pb | Bin 0 -> 45 bytes
 .../native_encoding/proto/pre-v41/0865.pb | Bin 0 -> 3880 bytes
 .../native_encoding/proto/pre-v41/0866.pb | Bin 0 -> 103 bytes
 .../native_encoding/proto/pre-v41/0867.pb | Bin 0 -> 1146 bytes
 .../native_encoding/proto/pre-v41/0868.pb | Bin 0 -> 493 bytes
 .../native_encoding/proto/pre-v41/0869.pb | Bin 0 -> 683 bytes
 .../native_encoding/proto/pre-v41/0870.pb | Bin 0 -> 729 bytes
 .../native_encoding/proto/pre-v41/0871.pb | Bin 0 -> 172 bytes
 .../native_encoding/proto/pre-v41/0872.pb | Bin 0 -> 368 bytes
 .../native_encoding/proto/pre-v41/0873.pb | Bin 0 -> 61 bytes
 .../native_encoding/proto/pre-v41/0874.pb | Bin 0 -> 212 bytes
 .../native_encoding/proto/pre-v41/0875.pb | Bin 0 -> 32 bytes
 .../native_encoding/proto/pre-v41/0876.pb | Bin 0 -> 7437 bytes
 .../native_encoding/proto/pre-v41/0877.pb | 5 +
 .../native_encoding/proto/pre-v41/0878.pb | Bin 0 -> 476 bytes
 .../native_encoding/proto/pre-v41/0879.pb | Bin 0 -> 2384 bytes
 .../native_encoding/proto/pre-v41/0880.pb | Bin 0 -> 571 bytes
 .../native_encoding/proto/pre-v41/0881.pb | Bin 0 -> 83 bytes
 .../native_encoding/proto/pre-v41/0882.pb | Bin 0 -> 2175 bytes
 .../native_encoding/proto/pre-v41/0883.pb | Bin 0 -> 56 bytes
 .../native_encoding/proto/pre-v41/0884.pb | Bin 0 -> 39 bytes
 .../native_encoding/proto/pre-v41/0885.pb | Bin 0 -> 1330 bytes
 .../native_encoding/proto/pre-v41/0886.pb | Bin 0 -> 42 bytes
 .../native_encoding/proto/pre-v41/0887.pb | Bin 0 -> 319 bytes
 .../native_encoding/proto/pre-v41/0888.pb | Bin 0 -> 1809 bytes
 .../native_encoding/proto/pre-v41/0889.pb | Bin 0 -> 367 bytes
 .../native_encoding/proto/pre-v41/0890.pb | Bin 0 -> 39 bytes
 .../native_encoding/proto/pre-v41/0891.pb | Bin 0 -> 28 bytes
 .../native_encoding/proto/pre-v41/0892.pb | Bin 0 -> 760 bytes
 .../native_encoding/proto/pre-v41/0893.pb | Bin 0 -> 14791 bytes
 .../native_encoding/proto/pre-v41/0894.pb | Bin 0 -> 96 bytes
 .../native_encoding/proto/pre-v41/0895.pb | Bin 0 -> 13848 bytes
 .../native_encoding/proto/pre-v41/0896.pb | Bin 0 -> 53 bytes
 .../native_encoding/proto/pre-v41/0897.pb | Bin 0 -> 417 bytes
 .../native_encoding/proto/pre-v41/0898.pb | Bin 0 -> 2064 bytes
 .../native_encoding/proto/pre-v41/0899.pb | Bin 0 -> 33 bytes
 .../native_encoding/proto/pre-v41/0900.pb | Bin 0 -> 93 bytes
 .../native_encoding/proto/pre-v41/0901.pb | Bin 0 -> 213 bytes
 .../native_encoding/proto/pre-v41/0902.pb | Bin 0 -> 77 bytes
 .../native_encoding/proto/pre-v41/0903.pb | Bin 0 -> 41 bytes
 .../native_encoding/proto/pre-v41/0904.pb | Bin 0 -> 42 bytes
 .../native_encoding/proto/pre-v41/0905.pb | Bin 0 -> 56 bytes
 .../native_encoding/proto/pre-v41/0906.pb | 4 +
 .../native_encoding/proto/pre-v41/0907.pb | Bin 0 -> 338 bytes
 .../native_encoding/proto/pre-v41/0908.pb | Bin 0 -> 79 bytes
 .../native_encoding/proto/pre-v41/0909.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0910.pb | Bin 0 -> 2086 bytes
 .../native_encoding/proto/pre-v41/0911.pb | Bin 0 -> 2330 bytes
 .../native_encoding/proto/pre-v41/0912.pb | Bin 0 -> 142 bytes
 .../native_encoding/proto/pre-v41/0913.pb | Bin 0 -> 74 bytes
 .../native_encoding/proto/pre-v41/0914.pb | Bin 0 -> 97 bytes
 .../native_encoding/proto/pre-v41/0915.pb | Bin 0 -> 70 bytes
 .../native_encoding/proto/pre-v41/0916.pb | Bin 0 -> 104 bytes
 .../native_encoding/proto/pre-v41/0917.pb | Bin 0 -> 920 bytes
 .../native_encoding/proto/pre-v41/0918.pb | Bin 0 -> 108 bytes
 .../native_encoding/proto/pre-v41/0919.pb | Bin 0 -> 11526 bytes
 .../native_encoding/proto/pre-v41/0920.pb | Bin 0 -> 12144 bytes
 .../native_encoding/proto/pre-v41/0921.pb | Bin 0 -> 38 bytes
 .../native_encoding/proto/pre-v41/0922.pb | Bin 0 -> 700 bytes
 .../native_encoding/proto/pre-v41/0923.pb | Bin 0 -> 1875 bytes
 .../native_encoding/proto/pre-v41/0924.pb | Bin 0 -> 1379 bytes
 .../native_encoding/proto/pre-v41/0925.pb | Bin 0 -> 36 bytes
 .../native_encoding/proto/pre-v41/0926.pb | Bin 0 -> 81 bytes
 .../native_encoding/proto/pre-v41/0927.pb | Bin 0 -> 80 bytes
 .../native_encoding/proto/pre-v41/0928.pb | Bin 0 -> 2606 bytes
 .../native_encoding/proto/pre-v41/0929.pb | Bin 0 -> 126 bytes
 .../native_encoding/proto/pre-v41/0930.pb | Bin 0 -> 65 bytes
 .../native_encoding/proto/pre-v41/0931.pb | Bin 0 -> 10373 bytes
 .../native_encoding/proto/pre-v41/0932.pb | Bin 0 -> 46 bytes
 .../native_encoding/proto/pre-v41/0933.pb | Bin 0 -> 10646 bytes
 .../native_encoding/proto/pre-v41/0934.pb | Bin 0 -> 43 bytes
 .../native_encoding/proto/pre-v41/0935.pb | Bin 0 -> 138 bytes
 .../native_encoding/proto/pre-v41/0936.pb | Bin 0 -> 43 bytes
 .../native_encoding/proto/pre-v41/0937.pb | Bin 0 -> 949 bytes
 .../native_encoding/proto/pre-v41/0938.pb | Bin 0 -> 26 bytes
 .../native_encoding/proto/pre-v41/0939.pb | Bin 0 -> 1437 bytes
 .../native_encoding/proto/pre-v41/0940.pb | 10 +
 .../native_encoding/proto/pre-v41/0941.pb | Bin 0 -> 86 bytes
 .../native_encoding/proto/pre-v41/0942.pb | Bin 0 -> 74 bytes
 .../native_encoding/proto/pre-v41/0943.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0944.pb | Bin 0 -> 204 bytes
 .../native_encoding/proto/pre-v41/0945.pb | Bin 0 -> 2053 bytes
 .../native_encoding/proto/pre-v41/0946.pb | Bin 0 -> 62 bytes
 .../native_encoding/proto/pre-v41/0947.pb | Bin 0 -> 134 bytes
 .../native_encoding/proto/pre-v41/0948.pb | Bin 0 -> 70 bytes
 .../native_encoding/proto/pre-v41/0949.pb | Bin 0 -> 58 bytes
 .../native_encoding/proto/pre-v41/0950.pb | Bin 0 -> 662 bytes
 .../native_encoding/proto/pre-v41/0951.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0952.pb | Bin 0 -> 7215 bytes
 .../native_encoding/proto/pre-v41/0953.pb | Bin 0 -> 226 bytes
 .../native_encoding/proto/pre-v41/0954.pb | Bin 0 -> 1070 bytes
 .../native_encoding/proto/pre-v41/0955.pb | Bin 0 -> 35 bytes
 .../native_encoding/proto/pre-v41/0956.pb | Bin 0 -> 146 bytes
 .../native_encoding/proto/pre-v41/0957.pb | Bin 0 -> 107 bytes
 .../native_encoding/proto/pre-v41/0958.pb | Bin 0 -> 61 bytes
 .../native_encoding/proto/pre-v41/0959.pb | Bin 0 -> 144 bytes
 .../native_encoding/proto/pre-v41/0960.pb | Bin 0 -> 1622 bytes
 .../native_encoding/proto/pre-v41/0961.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0962.pb | Bin 0 -> 57 bytes
 .../native_encoding/proto/pre-v41/0963.pb | Bin 0 -> 127 bytes
 .../native_encoding/proto/pre-v41/0964.pb | Bin 0 -> 91 bytes
 .../native_encoding/proto/pre-v41/0965.pb | Bin 0 -> 30 bytes
 .../native_encoding/proto/pre-v41/0966.pb | Bin 0 -> 414 bytes
 .../native_encoding/proto/pre-v41/0967.pb | Bin 0 -> 13407 bytes
 .../native_encoding/proto/pre-v41/0968.pb | Bin 0 -> 98 bytes
 .../native_encoding/proto/pre-v41/0969.pb | Bin 0 -> 1318 bytes
 .../native_encoding/proto/pre-v41/0970.pb | Bin 0 -> 231 bytes
 .../native_encoding/proto/pre-v41/0971.pb | Bin 0 -> 2832 bytes
 .../native_encoding/proto/pre-v41/0972.pb | Bin 0 -> 59 bytes
 .../native_encoding/proto/pre-v41/0973.pb | Bin 0 -> 35 bytes
 .../native_encoding/proto/pre-v41/0974.pb | Bin 0 -> 1248 bytes
 .../native_encoding/proto/pre-v41/0975.pb | Bin 0 -> 205 bytes
 .../native_encoding/proto/pre-v41/0976.pb | Bin 0 -> 40 bytes
 .../native_encoding/proto/pre-v41/0977.pb | Bin 0 -> 282 bytes
 .../native_encoding/proto/pre-v41/0978.pb | Bin 0 -> 1832 bytes
 .../native_encoding/proto/pre-v41/0979.pb | Bin 0 -> 2105 bytes
 .../native_encoding/proto/pre-v41/0980.pb | Bin 0 -> 67 bytes
 .../native_encoding/proto/pre-v41/0981.pb | Bin 0 -> 22 bytes
 .../native_encoding/proto/pre-v41/0982.pb | Bin 0 -> 470 bytes
 .../native_encoding/proto/pre-v41/0983.pb | Bin 0 -> 134 bytes
 .../native_encoding/proto/pre-v41/0984.pb | Bin 0 -> 1835 bytes
 .../native_encoding/proto/pre-v41/0985.pb | Bin 0 -> 42 bytes
 .../native_encoding/proto/pre-v41/0986.pb | Bin 0 -> 1002 bytes
 .../native_encoding/proto/pre-v41/0987.pb | Bin 0 -> 128 bytes
 .../native_encoding/proto/pre-v41/0988.pb | Bin 0 -> 5875 bytes
 .../native_encoding/proto/pre-v41/0989.pb | Bin 0 -> 69 bytes
 .../native_encoding/proto/pre-v41/0990.pb | Bin 0 -> 5750 bytes
 .../native_encoding/proto/pre-v41/0991.pb | Bin 0 -> 164 bytes
 .../native_encoding/proto/pre-v41/0992.pb | Bin 0 -> 439 bytes
 .../native_encoding/proto/pre-v41/0993.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/0994.pb | Bin 0 -> 2292 bytes
 .../native_encoding/proto/pre-v41/0995.pb | Bin 0 -> 121 bytes
 .../native_encoding/proto/pre-v41/0996.pb | Bin 0 -> 49 bytes
 .../native_encoding/proto/pre-v41/0997.pb | Bin 0 -> 34 bytes
 .../native_encoding/proto/pre-v41/0998.pb | Bin 0 -> 200 bytes
 .../native_encoding/proto/pre-v41/0999.pb | Bin 0 -> 445 bytes
 .../native_encoding/proto/pre-v41/1000.pb | Bin 0 -> 242 bytes
 .../native_encoding/proto/pre-v41/1001.pb | Bin 0 -> 78 bytes
 .../native_encoding/proto/pre-v41/1002.pb | Bin 0 -> 144 bytes
 .../native_encoding/proto/pre-v41/1003.pb | Bin 0 -> 121 bytes
 .../native_encoding/proto/pre-v41/1004.pb | Bin 0 -> 14 bytes
 .../native_encoding/proto/pre-v41/1005.pb | Bin 0 -> 46 bytes
 .../native_encoding/proto/pre-v41/1006.pb | Bin 0 -> 60 bytes
 .../native_encoding/proto/pre-v41/1007.pb | Bin 0 -> 217 bytes
 .../native_encoding/proto/pre-v41/1008.pb | Bin 0 -> 2177 bytes
 .../native_encoding/proto/pre-v41/1009.pb | Bin 0 -> 117 bytes
 .../native_encoding/proto/pre-v41/1010.pb | Bin 0 -> 58 bytes
 .../native_encoding/proto/pre-v41/1011.pb | Bin 0 -> 179 bytes
 .../native_encoding/proto/pre-v41/1012.pb | Bin 0 -> 40 bytes
 .../native_encoding/proto/pre-v41/1013.pb | Bin 0 -> 475 bytes
 .../native_encoding/proto/pre-v41/1014.pb | Bin 0 -> 77 bytes
 .../native_encoding/proto/pre-v41/1015.pb | Bin 0 -> 131 bytes
 .../native_encoding/proto/pre-v41/1016.pb | Bin 0 -> 245 bytes
 .../native_encoding/proto/pre-v41/1017.pb | Bin 0 -> 88 bytes
 .../native_encoding/proto/pre-v41/1018.pb | Bin 0 -> 87 bytes
 .../native_encoding/proto/pre-v41/1019.pb | Bin 0 -> 2627 bytes
 .../native_encoding/proto/pre-v41/1020.pb | Bin 0 -> 2236 bytes
 .../native_encoding/proto/pre-v41/1021.pb | Bin 0 -> 80 bytes
 .../native_encoding/proto/pre-v41/1022.pb | Bin 0 -> 207 bytes
 .../native_encoding/proto/pre-v41/1023.pb | Bin 0 -> 41 bytes
 .../tests/data/protobuf/protos/test.desc | Bin 1258 -> 0 bytes
 .../tests/data/protobuf/protos/test.proto | 61 -
 lib/codecs/tests/native.rs | 37 +-
 lib/dnsmsg-parser/Cargo.toml | 8 +-
 lib/dnsmsg-parser/src/dns_message.rs | 1 -
 lib/dnsmsg-parser/src/dns_message_parser.rs | 341 +-
 lib/dnsmsg-parser/src/ede.rs | 7 +-
 lib/dnstap-parser/Cargo.toml | 27 +
 lib/dnstap-parser/build.rs | 8 +
 lib/dnstap-parser/proto/dnstap.proto | 343 +
 lib/dnstap-parser/src/internal_events.rs | 20 +
 lib/dnstap-parser/src/lib.rs | 12 +
 .../dnstap-parser/src}/parser.rs | 143 +-
 .../dnstap-parser/src}/schema.rs | 8 +-
 lib/dnstap-parser/src/vrl_functions/mod.rs | 7 +
 .../src/vrl_functions/parse_dnstap.rs | 399 +
 lib/docs-renderer/Cargo.toml | 5 +-
 lib/docs-renderer/src/main.rs | 3 +-
 lib/docs-renderer/src/renderer.rs | 4 +-
 lib/enrichment/Cargo.toml | 4 +-
 .../src/find_enrichment_table_records.rs | 17 +
 .../src/get_enrichment_table_record.rs | 18 +
 lib/enrichment/src/lib.rs | 12 +
 lib/enrichment/src/tables.rs | 38 +-
 lib/enrichment/src/test_util.rs | 2 +
 lib/enrichment/src/vrl_util.rs | 24 +-
 lib/fakedata/Cargo.toml | 4 +-
 lib/fakedata/src/logs.rs | 8 +-
 lib/file-source/Cargo.toml | 25 +-
 lib/file-source/src/buffer.rs | 55 +-
 lib/file-source/src/checkpointer.rs | 8 +-
 lib/file-source/src/file_server.rs | 47 +-
 lib/file-source/src/file_watcher/mod.rs | 57 +-
 .../src/file_watcher/tests/experiment.rs | 15 +-
 .../tests/experiment_no_truncations.rs | 20 +-
 lib/file-source/src/fingerprinter.rs | 310 +-
 lib/file-source/src/internal_events.rs | 9 +
 lib/file-source/src/metadata_ext.rs | 2 +-
 lib/k8s-e2e-tests/Cargo.toml | 10 +-
 lib/k8s-e2e-tests/src/lib.rs | 27 +-
 lib/k8s-e2e-tests/src/metrics.rs | 6 +-
 lib/k8s-e2e-tests/tests/vector-agent.rs | 6 +-
 lib/k8s-test-framework/Cargo.toml | 4 +-
 .../src/helm_values_file.rs | 2 +-
 lib/k8s-test-framework/src/pod.rs | 3 -
 lib/k8s-test-framework/src/port_forward.rs | 2 +-
 lib/k8s-test-framework/src/reader.rs | 2 +-
 lib/k8s-test-framework/src/temp_file.rs | 2 +-
 lib/k8s-test-framework/src/util.rs | 12 +-
 .../src/wait_for_resource.rs | 2 +-
 lib/loki-logproto/Cargo.toml | 8 +-
 lib/loki-logproto/build.rs | 4 +-
 lib/loki-logproto/proto/README | 1 +
 lib/loki-logproto/proto/logproto.proto | 571 +-
 lib/loki-logproto/proto/push.proto | 56 +
 lib/loki-logproto/proto/stats.proto | 238 +-
 lib/loki-logproto/src/lib.rs | 27 +-
 lib/opentelemetry-proto/Cargo.toml | 12 +-
 lib/opentelemetry-proto/build.rs | 4 +
 lib/opentelemetry-proto/src/common.rs | 94 +
 lib/opentelemetry-proto/src/lib.rs | 5 +-
 .../src/{convert.rs => logs.rs} | 114 +-
 lib/opentelemetry-proto/src/metrics.rs | 441 +
 lib/opentelemetry-proto/src/proto.rs | 24 +
 .../opentelemetry/proto/README.md | 7 +-
 .../opentelemetry/proto/trace/v1/trace.proto | 8 +-
 lib/opentelemetry-proto/src/spans.rs | 155 +
 lib/portpicker/Cargo.toml | 2 +-
 lib/portpicker/src/lib.rs | 6 +-
 lib/prometheus-parser/Cargo.toml | 11 +-
 lib/prometheus-parser/build.rs | 2 +-
 lib/prometheus-parser/src/line.rs | 73 +-
 lib/tracing-limit/Cargo.toml | 8 +-
 lib/tracing-limit/benches/limit.rs | 14 +-
 lib/tracing-limit/examples/basic.rs | 6 +-
 lib/tracing-limit/examples/by_span.rs | 1 -
 lib/tracing-limit/src/lib.rs | 59 +-
 lib/vector-api-client/Cargo.toml | 18 +-
 lib/vector-api-client/src/gql/components.rs | 10 +-
 lib/vector-api-client/src/gql/health.rs | 3 -
 lib/vector-api-client/src/gql/meta.rs | 3 -
 lib/vector-api-client/src/lib.rs | 3 +-
 lib/vector-api-client/src/subscription.rs | 19 +-
 lib/vector-api-client/src/test/mod.rs | 3 -
 lib/vector-buffers/Cargo.toml | 55 +-
 lib/vector-buffers/benches/common.rs | 28 +-
 lib/vector-buffers/benches/sized_records.rs | 45 +-
 lib/vector-buffers/examples/buffer_perf.rs | 21 +-
 lib/vector-buffers/src/buffer_usage_data.rs | 7 +-
 lib/vector-buffers/src/cast_utils.rs | 19 +
 lib/vector-buffers/src/config.rs | 102 +-
 lib/vector-buffers/src/internal_events.rs | 348 +-
 lib/vector-buffers/src/lib.rs | 8 +-
 lib/vector-buffers/src/test/helpers.rs | 5 +-
 lib/vector-buffers/src/test/messages.rs | 32 +-
 lib/vector-buffers/src/test/variant.rs | 42 +-
 lib/vector-buffers/src/topology/builder.rs | 30 +-
 .../src/topology/channel/limited_queue.rs | 160 +-
 .../src/topology/channel/receiver.rs | 9 +-
 .../src/topology/channel/sender.rs | 2 +-
 lib/vector-buffers/src/topology/test_util.rs | 70 +-
 .../src/variants/disk_v2/common.rs | 2 +-
 lib/vector-buffers/src/variants/disk_v2/io.rs | 5 -
 .../src/variants/disk_v2/mod.rs | 2 +-
 .../src/variants/disk_v2/reader.rs | 2 +-
 .../src/variants/disk_v2/record.rs | 9 +-
 .../src/variants/disk_v2/tests/basic.rs | 16 +-
 .../variants/disk_v2/tests/known_errors.rs | 28 +-
 .../src/variants/disk_v2/tests/mod.rs | 13 +-
 .../disk_v2/tests/model/filesystem.rs | 3 -
 .../src/variants/disk_v2/tests/size_limits.rs | 10 +-
 .../src/variants/disk_v2/writer.rs | 14 +-
 lib/vector-buffers/src/variants/in_memory.rs | 20 +-
 lib/vector-common/Cargo.toml | 34 +-
 lib/vector-common/src/byte_size_of.rs | 6 +-
 lib/vector-common/src/config.rs | 6 +
 lib/vector-common/src/constants.rs | 3 +
 lib/vector-common/src/finalization.rs | 2 +-
 {src/config => lib/vector-common/src}/id.rs | 8 +-
 .../src/internal_event/bytes_received.rs | 4 +-
 .../src/internal_event/bytes_sent.rs | 4 +-
 .../src/internal_event/cached_event.rs | 4 +-
 .../component_events_dropped.rs | 10 +-
 .../src/internal_event/events_received.rs | 8 +-
 .../src/internal_event/events_sent.rs | 14 +-
 lib/vector-common/src/internal_event/mod.rs | 4 +-
 .../src/internal_event/prelude.rs | 1 +
 .../src/internal_event/service.rs | 13 +-
 lib/vector-common/src/lib.rs | 4 +
 lib/vector-common/src/request_metadata.rs | 2 +-
 lib/vector-common/src/shutdown.rs | 43 +-
 lib/vector-config-common/Cargo.toml | 5 +-
 lib/vector-config-common/src/constants.rs | 10 +
 .../src/human_friendly.rs | 8 +-
 lib/vector-config-common/src/lib.rs | 26 -
 .../src/schema/json_schema.rs | 7 +-
 lib/vector-config-common/src/validation.rs | 41 +-
 lib/vector-config-macros/Cargo.toml | 2 +-
 lib/vector-config-macros/src/ast/container.rs | 4 +
 lib/vector-config-macros/src/ast/field.rs | 7 +-
 lib/vector-config-macros/src/ast/variant.rs | 4 +-
 lib/vector-config-macros/src/attrs.rs | 6 +-
 .../src/component_name.rs | 14 +-
 lib/vector-config-macros/src/configurable.rs | 152 +-
 .../src/configurable_component.rs | 32 +-
 lib/vector-config-macros/src/lib.rs | 2 +
 lib/vector-config/Cargo.toml | 15 +-
 .../src/component/description.rs | 9 +-
 lib/vector-config/src/component/marker.rs | 9 +
 lib/vector-config/src/component/mod.rs | 8 +-
 lib/vector-config/src/external/serde_with.rs | 10 +
 lib/vector-config/src/external/vrl.rs | 41 +-
 lib/vector-config/src/lib.rs | 2 +-
 lib/vector-config/src/schema/helpers.rs | 2 +-
 .../src/schema/parser/component.rs | 6 +-
 lib/vector-config/src/schema/parser/query.rs | 40 +-
 .../src/schema/visitors/inline_single.rs | 167 +-
 .../src/schema/visitors/merge.rs | 2 +-
 .../src/schema/visitors/scoped_visit.rs | 2 +-
 .../src/schema/visitors/unevaluated.rs | 42 +-
 lib/vector-config/src/stdlib.rs | 27 +-
 .../tests/integration/configurable_string.rs | 8 +-
 lib/vector-config/tests/integration/smoke.rs | 13 +-
 lib/vector-core/Cargo.toml | 100 +-
 lib/vector-core/build.rs | 5 +-
 lib/vector-core/proto/event.proto | 1 +
 lib/vector-core/src/config/global_options.rs | 112 +-
 lib/vector-core/src/config/log_schema.rs | 6 +-
 .../src/config/metrics_expiration.rs | 196 +
 lib/vector-core/src/config/mod.rs | 82 +-
 lib/vector-core/src/config/proxy.rs | 21 +-
 lib/vector-core/src/config/telemetry.rs | 96 +-
 lib/vector-core/src/event/discriminant.rs | 37 +-
 lib/vector-core/src/event/log_event.rs | 103 +-
 lib/vector-core/src/event/lua/event.rs | 14 +-
 lib/vector-core/src/event/lua/log.rs | 8 +-
 lib/vector-core/src/event/lua/metric.rs | 68 +-
 lib/vector-core/src/event/lua/util.rs | 6 +-
 lib/vector-core/src/event/metadata.rs | 179 +-
 lib/vector-core/src/event/metric/mod.rs | 2 +-
 lib/vector-core/src/event/metric/value.rs | 2 +-
 lib/vector-core/src/event/proto.rs | 30 +-
 lib/vector-core/src/event/ref.rs | 2 +-
 lib/vector-core/src/event/test/common.rs | 6 +-
 lib/vector-core/src/event/test/mod.rs | 5 +-
 lib/vector-core/src/event/test/size_of.rs | 12 +-
 .../src/event/util/log/all_fields.rs | 188 +-
 lib/vector-core/src/event/util/log/mod.rs | 5 +-
 lib/vector-core/src/event/vrl_target.rs | 408 +-
 lib/vector-core/src/fanout.rs | 8 +-
 lib/vector-core/src/ipallowlist.rs | 18 +-
 lib/vector-core/src/metrics/ddsketch.rs | 8 +-
 lib/vector-core/src/metrics/metric_matcher.rs | 462 +
 lib/vector-core/src/metrics/mod.rs | 195 +-
 lib/vector-core/src/metrics/recency.rs | 68 +-
 lib/vector-core/src/metrics/recorder.rs | 53 +-
 lib/vector-core/src/schema/definition.rs | 4 +-
 lib/vector-core/src/schema/meaning.rs | 3 +
 lib/vector-core/src/sink.rs | 5 +-
 lib/vector-core/src/tls/incoming.rs | 25 +-
 lib/vector-core/src/tls/mod.rs | 16 +-
 lib/vector-core/src/tls/settings.rs | 93 +-
 lib/vector-core/src/transform/mod.rs | 9 +-
 lib/vector-lib/Cargo.toml | 8 +-
 lib/vector-lib/src/lib.rs | 11 +-
 lib/vector-stream/Cargo.toml | 17 +-
 lib/vector-stream/src/concurrent_map.rs | 24 +-
 lib/vector-stream/src/driver.rs | 20 +-
 lib/vector-stream/src/partitioned_batcher.rs | 10 +-
 lib/vector-tap/Cargo.toml | 34 +
 lib/vector-tap/src/controller.rs | 471 +
 lib/vector-tap/src/lib.rs | 261 +
 .../vector-tap/src}/notification.rs | 54 +-
 lib/vector-tap/src/topology.rs | 34 +
 lib/vector-vrl/functions/src/get_secret.rs | 3 +-
 lib/vector-vrl/functions/src/remove_secret.rs | 5 +-
 lib/vector-vrl/functions/src/set_secret.rs | 6 +-
 .../functions/src/set_semantic_meaning.rs | 2 +-
 lib/vector-vrl/tests/Cargo.toml | 13 +-
 .../resources/json-schema_definition.json | 1 +
 .../resources/protobuf_descriptor_set.desc | Bin 0 -> 1183 bytes
 .../tests/resources/public_suffix_list.dat | 15786 ++++++++++++++++
 lib/vector-vrl/tests/src/docs.rs | 3 +-
 lib/vector-vrl/tests/src/main.rs | 4 +-
 lib/vector-vrl/tests/src/test_enrichment.rs | 2 +
 lib/vector-vrl/web-playground/Cargo.toml | 7 +-
 lib/vector-vrl/web-playground/README.md | 4 +-
 lib/vector-vrl/web-playground/build.rs | 67 +-
 .../public/assets/dd_icon_rgb.svg | 41 +
 .../web-playground/public/index.css | 615 +-
 .../web-playground/public/index.html | 74 +-
 lib/vector-vrl/web-playground/public/index.js | 11 +-
 lib/vector-vrl/web-playground/src/lib.rs | 60 +-
 license-tool.toml | 5 +
 proto/buf.yaml | 7 -
 proto/dnstap.proto | 330 -
 .../google/LICENSE-Apache-2.0.txt | 0
 proto/{ => third-party}/google/README | 0
 .../google/api/annotations.proto | 0
 .../{ => third-party}/google/api/client.proto | 0
 .../google/api/field_behavior.proto | 0
 proto/{ => third-party}/google/api/http.proto | 0
 .../google/api/resource.proto | 0
 .../{ => third-party}/google/protobuf/LICENSE | 0
 .../google/protobuf/any.proto | 0
 .../google/protobuf/descriptor.proto | 0
 .../google/protobuf/empty.proto | 0
 .../google/protobuf/timestamp.proto | 0
 .../google/pubsub/v1/pubsub.proto | 0
 .../{ => third-party}/google/rpc/status.proto | 0
 .../proto => proto/vector}/buf.yaml | 0
 proto/{ => vector}/dd_metric.proto | 0
 proto/{ => vector}/dd_trace.proto | 0
 proto/{ => vector}/ddsketch_full.proto | 0
 proto/{ => vector}/vector.proto | 0
 regression/Dockerfile | 15 +-
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 3 +
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 3 +
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 4 +
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 4 +
 .../enterprise_http_to_http/data/.gitkeep | 1 -
 .../lading/lading.yaml | 16 -
 .../vector/vector.yaml | 45 -
 .../cases/file_to_blackhole/data/.gitkeep | 1 -
 .../cases/file_to_blackhole/experiment.yaml | 10 +
 .../file_to_blackhole/lading/lading.yaml | 24 +-
 .../file_to_blackhole/vector/vector.yaml | 2 +-
 .../fluent_elasticsearch/experiment.yaml | 10 +
 .../fluent_elasticsearch/lading/lading.yaml | 7 +-
 .../fluent_elasticsearch/vector/vector.yaml | 2 +-
 .../cases/http_elasticsearch/experiment.yaml | 10 +
 .../http_elasticsearch/lading/lading.yaml | 4 +
 .../http_text_to_http_json/experiment.yaml | 10 +
 .../http_text_to_http_json/lading/lading.yaml | 4 +
 .../http_text_to_http_json/vector/vector.yaml | 8 +
 .../cases/http_to_http_acks/data/.gitkeep | 1 -
 .../cases/http_to_http_acks/experiment.yaml | 10 +
 .../http_to_http_acks/lading/lading.yaml | 4 +
 .../cases/http_to_http_json/data/.gitkeep | 1 -
 .../cases/http_to_http_json/experiment.yaml | 10 +
 .../http_to_http_json/lading/lading.yaml | 4 +
 .../cases/http_to_http_noack/data/.gitkeep | 1 -
 .../cases/http_to_http_noack/experiment.yaml | 10 +
 .../http_to_http_noack/lading/lading.yaml | 4 +
 regression/cases/http_to_s3/data/.gitkeep | 1 -
 regression/cases/http_to_s3/experiment.yaml | 10 +
 .../cases/http_to_s3/lading/lading.yaml | 4 +
 .../otlp_grpc_to_blackhole/experiment.yaml | 10 +
 .../otlp_grpc_to_blackhole/lading/lading.yaml | 5 +-
 .../otlp_http_to_blackhole/experiment.yaml | 9 +
 .../otlp_http_to_blackhole/lading/lading.yaml | 4 +
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 5 +-
 .../data/.gitkeep | 1 -
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 4 +
 .../cases/splunk_hec_route_s3/experiment.yaml | 10 +
 .../splunk_hec_route_s3/lading/lading.yaml | 4 +
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 4 +
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 4 +
 .../cases/syslog_humio_logs/experiment.yaml | 10 +
 .../syslog_humio_logs/lading/lading.yaml | 7 +-
 .../syslog_humio_logs/vector/vector.yaml | 2 +-
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 7 +-
 .../vector/vector.yaml | 2 +-
 .../experiment.yaml | 10 +
 .../lading/lading.yaml | 7 +-
 .../vector/vector.yaml | 2 +-
 .../experiment.yaml | 9 +
 .../lading/lading.yaml | 7 +-
 regression/cases/syslog_loki/experiment.yaml | 10 +
 .../cases/syslog_loki/lading/lading.yaml | 7 +-
 .../cases/syslog_loki/vector/vector.yaml | 2 +-
 .../experiment.yaml | 9 +
 .../lading/lading.yaml | 7 +-
 .../vector/vector.yaml | 2 +-
 .../syslog_splunk_hec_logs/experiment.yaml | 10 +
 .../syslog_splunk_hec_logs/lading/lading.yaml | 7 +-
 .../syslog_splunk_hec_logs/vector/vector.yaml | 2 +-
 regression/config.yaml | 37 +
 regression/metadata.source | 2 -
 ...6-1999-api-extensions-for-lua-transform.md | 44 +-
 rfcs/2020-05-25-2692-more-usable-logevents.md | 2 +-
 rfcs/2020-07-28-3642-jmx_rfc.md | 2 +-
 rfcs/2020-08-21-3092-apache-metrics-source.md | 3 +-
 rfcs/2020-08-26-3191-host-metrics.md | 2 +-
 rfcs/2020-08-27-3603-postgres-metrics.md | 2 +-
 rfcs/2020-08-31-3640-nginx-metrics-source.md | 2 +-
 rfcs/2020-08-31-3641-mongo-metrics.md | 2 +-
 ...-accept-metrics-in-datadog-agent-source.md | 2 +-
 ...-07-28-13691-registered-internal-events.md | 2 +-
 rust-toolchain.toml | 2 +-
 scripts/build-docker.sh | 31 +-
 scripts/check-events | 4 +-
 scripts/check-style.sh | 3 +-
 scripts/check_changelog_fragments.sh | 35 +-
 scripts/ci-free-disk-space.sh | 57 +
 scripts/ci-int-e2e-test.sh | 31 -
 scripts/cross/bootstrap-centos.sh | 9 -
 scripts/cross/entrypoint-centos.sh | 4 -
 .../cross/x86_64-unknown-linux-gnu.dockerfile | 8 +-
 scripts/e2e/Dockerfile | 21 +-
 scripts/e2e/datadog-logs/compose.yaml | 62 +-
 scripts/e2e/datadog-metrics/compose.yaml | 18 +-
 scripts/e2e/datadog-metrics/test.yaml | 1 +
 scripts/e2e/opentelemetry-logs/README.md | 25 +
 scripts/e2e/opentelemetry-logs/compose.yaml | 70 +
 .../opentelemetry-logs/generator/Dockerfile | 4 +
 .../generator/logs_generator.py | 125 +
 .../generator/requirements.txt | 1 +
 scripts/e2e/opentelemetry-logs/test.yaml | 24 +
 scripts/ensure-wasm-pack-installed.sh | 13 -
 scripts/ensure-wasm-target-installed.sh | 2 +-
 scripts/environment/Dockerfile | 4 +-
 scripts/environment/binstall.sh | 75 +
 ...otstrap-macos-10.sh => bootstrap-macos.sh} | 13 +-
 ...ntu-20.04.sh => bootstrap-ubuntu-24.04.sh} | 64 +-
 ...ws-2019.ps1 => bootstrap-windows-2022.ps1} | 13 +-
 scripts/environment/entrypoint.sh | 15 +-
 scripts/environment/install-protoc.sh | 6 +-
 scripts/environment/prepare.sh | 187 +-
 scripts/environment/release-flags.sh | 2 +-
 scripts/environment/setup-helm.sh | 2 +-
 scripts/generate-component-docs.rb | 243 +-
 scripts/generate-release-cue.rb | 120 +-
 scripts/int-e2e-test.sh | 54 +
 scripts/integration/Dockerfile | 4 +-
 scripts/integration/README.md | 2 +-
 scripts/integration/amqp/compose.yaml | 3 +-
 scripts/integration/aws/compose.yaml | 8 +-
 scripts/integration/aws/test.yaml | 2 +-
 scripts/integration/databend/test.yaml | 4 +-
 .../integration/datadog-agent/compose.yaml | 1 +
 scripts/integration/datadog-logs/test.yaml | 2 +-
 scripts/integration/eventstoredb/test.yaml | 2 +-
 scripts/integration/gcp/compose.yaml | 2 +-
 scripts/integration/greptimedb/test.yaml | 3 +-
 scripts/integration/humio/compose.yaml | 2 +-
 scripts/integration/humio/test.yaml | 2 +-
 scripts/integration/kafka/compose.yaml | 25 +-
 scripts/integration/mqtt/test.yaml | 1 +
 scripts/integration/nats/test.yaml | 2 +-
 scripts/integration/postgres/test.yaml | 2 +
 scripts/integration/pulsar/compose.yaml | 14 +-
 scripts/integration/pulsar/test.yaml | 2 +-
 scripts/integration/redis/compose.yaml | 23 +-
 scripts/integration/redis/test.yaml | 3 +-
 scripts/integration/shutdown/compose.yaml | 25 +-
 scripts/test-e2e-kubernetes.sh | 2 +-
 scripts/util/commit.rb | 166 -
 scripts/util/conventional_commit.rb | 97 -
 scripts/util/git_log_commit.rb | 122 -
 scripts/util/printer.rb | 74 -
 scripts/util/release.rb | 38 -
 scripts/util/version.rb | 45 -
 scripts/verify-install.sh | 4 +-
 src/api/mod.rs | 15 +-
 src/api/schema/components/mod.rs | 29 +-
 src/api/schema/components/state.rs | 8 +-
 src/api/schema/events/encoding.rs | 2 +-
 src/api/schema/events/log.rs | 3 +-
 src/api/schema/events/metric.rs | 8 +-
 src/api/schema/events/mod.rs | 50 +-
 src/api/schema/events/output.rs | 73 +-
 src/api/schema/events/trace.rs | 3 +-
 src/api/schema/filter.rs | 2 +-
 src/api/schema/gen.rs | 2 +-
 src/api/schema/health.rs | 2 +-
 src/api/schema/metrics/filter.rs | 7 +-
 src/api/schema/metrics/host.rs | 28 +
 src/api/schema/metrics/mod.rs | 42 +-
 src/api/schema/metrics/sink/generic.rs | 2 +-
 src/api/schema/metrics/source/file.rs | 8 +-
 src/api/schema/metrics/source/generic.rs | 2 +-
 src/api/schema/metrics/transform/generic.rs | 2 +-
 src/api/schema/mod.rs | 2 +-
 src/api/schema/relay.rs | 2 +-
 src/api/server.rs | 11 +-
 src/api/tap.rs | 1028 -
 src/api/tests.rs | 342 +-
 src/app.rs | 201 +-
 src/async_read.rs | 2 +-
 src/aws/auth.rs | 181 +-
 src/aws/mod.rs | 84 +-
 src/aws/region.rs | 4 +-
 src/aws/timeout.rs | 86 +
 src/cli.rs | 67 +-
 src/codecs/encoding/config.rs | 14 +-
 src/codecs/encoding/encoder.rs | 30 +-
 src/codecs/encoding/transformer.rs | 2 +-
 src/codecs/ready_frames.rs | 2 +-
 src/common/backoff.rs | 81 +
 src/common/datadog.rs | 109 +-
 src/common/expansion.rs | 48 +
 src/{sources/util => common}/http/error.rs | 5 +
 src/common/http/mod.rs | 12 +
 src/common/http/server_auth.rs | 604 +
 src/common/mod.rs | 20 +-
 src/common/mqtt.rs | 123 +
 src/common/s3.rs | 11 +-
 src/common/sqs.rs | 2 +-
 src/common/websocket.rs | 241 +
 src/components/validation/mod.rs | 31 +-
 src/components/validation/resources/event.rs | 18 +-
 src/components/validation/resources/http.rs | 319 +-
 src/components/validation/resources/mod.rs | 58 +-
 src/components/validation/runner/config.rs | 25 +-
 src/components/validation/runner/io.rs | 5 +-
 src/components/validation/runner/mod.rs | 51 +-
 .../validators/component_spec/mod.rs | 7 +-
 src/conditions/datadog_search.rs | 971 +-
 src/conditions/vrl.rs | 12 +-
 src/config/api.rs | 31 +-
 src/config/builder.rs | 348 +-
 src/config/cmd.rs | 33 +-
 src/config/compiler.rs | 51 +-
 src/config/diff.rs | 87 +-
 src/config/dot_graph.rs | 29 +
 src/config/enrichment_table.rs | 113 +-
 src/config/enterprise.rs | 1005 -
 src/config/format.rs | 63 +-
 src/config/graph.rs | 179 +-
 src/config/loading/config_builder.rs | 8 +-
 src/config/loading/loader.rs | 74 +-
 src/config/loading/mod.rs | 101 +-
 src/config/loading/secret.rs | 69 +-
 src/config/loading/secret_backend_example.rs | 2 +-
 src/config/loading/source.rs | 4 +-
 src/config/mod.rs | 254 +-
 src/config/provider.rs | 2 -
 src/config/schema.rs | 5 +-
 src/config/secret.rs | 2 +-
 src/config/sink.rs | 58 +-
 src/config/source.rs | 25 +-
 src/config/transform.rs | 29 +-
 src/config/unit_test/mod.rs | 57 +-
 src/config/unit_test/tests.rs | 79 +-
 src/config/unit_test/unit_test_components.rs | 11 +-
 src/config/validation.rs | 50 +-
 src/config/vars.rs | 95 +-
 src/config/watcher.rs | 306 +-
 src/convert_config.rs | 8 +-
 src/docker.rs | 63 +-
 src/encoding_transcode.rs | 10 +-
 src/enrichment_tables/file.rs | 774 +-
 src/enrichment_tables/geoip.rs | 93 +-
 src/enrichment_tables/memory/config.rs | 203 +
 .../memory/internal_events.rs | 154 +
 src/enrichment_tables/memory/mod.rs | 9 +
 src/enrichment_tables/memory/source.rs | 138 +
 src/enrichment_tables/memory/table.rs | 889 +
 src/enrichment_tables/mmdb.rs | 280 +
 src/enrichment_tables/mod.rs | 66 +-
 src/extra_context.rs | 2 +-
 src/gcp.rs | 41 +-
 src/generate.rs | 92 +-
 src/generate_schema.rs | 45 +-
 src/graph.rs | 109 +-
 src/http.rs | 199 +-
 src/internal_events/adaptive_concurrency.rs | 20 +-
 src/internal_events/aggregate.rs | 6 +-
 src/internal_events/amqp.rs | 25 +-
 src/internal_events/apache_metrics.rs | 19 +-
 src/internal_events/api.rs | 2 +-
 src/internal_events/aws.rs | 5 +-
 src/internal_events/aws_cloudwatch_logs.rs | 6 +-
 src/internal_events/aws_ec2_metadata.rs | 11 +-
 src/internal_events/aws_ecs_metrics.rs | 25 +-
 src/internal_events/aws_kinesis.rs | 7 +-
 src/internal_events/aws_kinesis_firehose.rs | 17 +-
 src/internal_events/aws_sqs.rs | 132 +-
 src/internal_events/batch.rs | 7 +-
 src/internal_events/codecs.rs | 41 +-
 src/internal_events/common.rs | 36 +-
 src/internal_events/conditions.rs | 8 +-
 src/internal_events/datadog_agent.rs | 27 +
 src/internal_events/datadog_metrics.rs | 8 +-
 src/internal_events/datadog_traces.rs | 14 +-
 src/internal_events/dnstap.rs | 24 +-
 src/internal_events/docker_logs.rs | 39 +-
 src/internal_events/encoding_transcode.rs | 7 +-
 src/internal_events/eventstoredb_metrics.rs | 14 +-
 src/internal_events/exec.rs | 57 +-
 src/internal_events/file.rs | 196 +-
 src/internal_events/file_descriptor.rs | 5 +-
 src/internal_events/fluent.rs | 11 +-
 src/internal_events/gcp_pubsub.rs | 21 +-
 src/internal_events/grpc.rs | 18 +-
 src/internal_events/heartbeat.rs | 12 +-
 src/internal_events/host_metrics.rs | 20 +-
 src/internal_events/http.rs | 49 +-
 src/internal_events/http_client.rs | 42 +-
 src/internal_events/http_client_source.rs | 24 +-
 src/internal_events/influxdb.rs | 6 +-
 src/internal_events/internal_logs.rs | 16 +-
 src/internal_events/internal_metrics.rs | 21 -
 src/internal_events/journald.rs | 33 +-
 src/internal_events/kafka.rs | 91 +-
 src/internal_events/kubernetes_logs.rs | 100 +-
 src/internal_events/log_to_metric.rs | 33 +-
 src/internal_events/logplex.rs | 9 +-
 src/internal_events/loki.rs | 41 +-
 src/internal_events/lua.rs | 23 +-
 src/internal_events/metric_to_log.rs | 6 +-
 src/internal_events/mod.rs | 28 +-
 src/internal_events/mongodb_metrics.rs | 30 +-
 src/internal_events/mqtt.rs | 7 +-
 src/internal_events/nginx_metrics.rs | 30 +-
 src/internal_events/open.rs | 4 +-
 src/internal_events/parser.rs | 18 +-
 src/internal_events/postgresql_metrics.rs | 9 +-
 src/internal_events/prelude.rs | 3 +-
 src/internal_events/process.rs | 36 +-
 src/internal_events/prometheus.rs | 22 +-
 src/internal_events/pulsar.rs | 28 +-
 src/internal_events/redis.rs | 7 +-
 src/internal_events/reduce.rs | 31 +-
 src/internal_events/remap.rs | 7 +-
 src/internal_events/sematext_metrics.rs | 16 +-
 src/internal_events/socket.rs | 87 +-
 src/internal_events/splunk_hec.rs | 60 +-
 src/internal_events/statsd_sink.rs | 9 +-
 src/internal_events/tag_cardinality_limit.rs | 14 +-
 src/internal_events/tcp.rs | 31 +-
 src/internal_events/template.rs | 13 +-
 src/internal_events/throttle.rs | 2 +-
 src/internal_events/udp.rs | 10 +-
 src/internal_events/unix.rs | 39 +-
 src/internal_events/websocket.rs | 205 +-
 src/internal_events/websocket_server.rs | 135 +
 src/internal_events/window.rs | 14 +
 src/internal_events/windows.rs | 36 +-
 .../allocations/allocator/stack.rs | 2 +-
 .../allocator/tracing_allocator.rs | 90 +-
 src/internal_telemetry/allocations/mod.rs | 47 +-
 src/kafka.rs | 2 +-
 src/kubernetes/reflector.rs | 67 +-
 src/lib.rs | 36 +-
 src/line_agg.rs | 15 +-
 src/list.rs | 8 +-
 src/nats.rs | 2 +-
 src/providers/http.rs | 37 +-
 src/secrets/aws_secrets_manager.rs | 106 +
 src/secrets/directory.rs | 52 +
 src/secrets/exec.rs | 14 +-
 src/secrets/file.rs | 46 +
 src/secrets/mod.rs | 73 +-
 src/secrets/test.rs | 2 +-
 src/serde.rs | 2 +-
 src/signal.rs | 23 +-
 src/sinks/amqp/channel.rs | 86 +
 src/sinks/amqp/config.rs | 181 +-
 src/sinks/amqp/encoder.rs | 2 +-
 src/sinks/amqp/integration_tests.rs | 141 +-
 src/sinks/amqp/mod.rs | 5 +-
 src/sinks/amqp/service.rs | 28 +-
 src/sinks/amqp/sink.rs | 25 +-
 src/sinks/appsignal/config.rs | 4 +-
 src/sinks/appsignal/encoder.rs | 9 +-
 src/sinks/appsignal/integration_tests.rs | 8 +-
 src/sinks/appsignal/tests.rs | 6 +-
 src/sinks/aws_cloudwatch_logs/config.rs | 29 +-
 .../aws_cloudwatch_logs/integration_tests.rs | 213 +-
 src/sinks/aws_cloudwatch_logs/request.rs | 22 +-
 .../aws_cloudwatch_logs/request_builder.rs | 32 +-
 src/sinks/aws_cloudwatch_logs/service.rs | 30 +-
 .../integration_tests.rs | 6 +-
 src/sinks/aws_cloudwatch_metrics/mod.rs | 7 +-
 src/sinks/aws_kinesis/firehose/config.rs | 6 +-
 .../aws_kinesis/firehose/integration_tests.rs | 18 +-
 src/sinks/aws_kinesis/firehose/record.rs | 3 +-
 src/sinks/aws_kinesis/record.rs | 8 +-
 src/sinks/aws_kinesis/sink.rs | 38 +-
 src/sinks/aws_kinesis/streams/config.rs | 11 +-
 .../aws_kinesis/streams/integration_tests.rs | 19 +-
 src/sinks/aws_kinesis/streams/record.rs | 3 +-
 src/sinks/aws_s3/config.rs | 36 +-
 src/sinks/aws_s3/integration_tests.rs | 22 +-
 src/sinks/aws_s3/sink.rs | 14 +-
 src/sinks/aws_s_s/client.rs | 7 +-
 src/sinks/aws_s_s/config.rs | 2 -
 src/sinks/aws_s_s/sink.rs | 9 -
 src/sinks/aws_s_s/sns/client.rs | 1 -
 src/sinks/aws_s_s/sns/config.rs | 6 +-
 src/sinks/aws_s_s/sns/integration_tests.rs | 8 +-
 src/sinks/aws_s_s/sqs/client.rs | 1 -
 src/sinks/aws_s_s/sqs/config.rs | 4 +-
 src/sinks/aws_s_s/sqs/integration_tests.rs | 4 +-
 src/sinks/axiom.rs | 107 +-
 src/sinks/azure_blob/config.rs | 24 +-
 src/sinks/azure_blob/integration_tests.rs | 21 +-
 src/sinks/azure_blob/request_builder.rs | 11 +-
 src/sinks/azure_blob/test.rs | 8 +-
 src/sinks/azure_common/service.rs | 2 +-
 src/sinks/azure_common/sink.rs | 19 +-
 src/sinks/azure_monitor_logs/config.rs | 2 +-
 src/sinks/azure_monitor_logs/service.rs | 27 +-
 src/sinks/azure_monitor_logs/sink.rs | 7 +-
 src/sinks/clickhouse/config.rs | 68 +-
 src/sinks/clickhouse/integration_tests.rs | 54 +-
 src/sinks/clickhouse/mod.rs | 3 +-
 src/sinks/clickhouse/service.rs | 119 +-
 src/sinks/console/sink.rs | 2 +-
 src/sinks/databend/api.rs | 218 -
 src/sinks/databend/config.rs | 155 +-
 src/sinks/databend/encoding.rs | 37 +
 src/sinks/databend/error.rs | 56 -
 src/sinks/databend/integration_tests.rs | 88 +-
 src/sinks/databend/mod.rs | 2 -
 src/sinks/databend/service.rs | 154 +-
 src/sinks/datadog/events/config.rs | 24 +-
 src/sinks/datadog/events/tests.rs | 4 +-
 src/sinks/datadog/logs/config.rs | 70 +-
 src/sinks/datadog/logs/integration_tests.rs | 2 +-
 src/sinks/datadog/logs/mod.rs | 6 +-
 src/sinks/datadog/logs/service.rs | 22 +-
 src/sinks/datadog/logs/sink.rs | 476 +-
 src/sinks/datadog/logs/tests.rs | 109 +-
 src/sinks/datadog/metrics/config.rs | 17 +-
 src/sinks/datadog/metrics/encoder.rs | 9 +-
 .../datadog/metrics/integration_tests.rs | 6 +-
 src/sinks/datadog/metrics/request_builder.rs | 11 +-
 src/sinks/datadog/metrics/tests.rs | 4 +-
 src/sinks/datadog/mod.rs | 43 +-
 src/sinks/datadog/test_utils.rs | 4 +-
 src/sinks/datadog/traces/apm_stats/flusher.rs | 1 -
 src/sinks/datadog/traces/config.rs | 17 +-
 src/sinks/datadog/traces/request_builder.rs | 3 +-
 src/sinks/datadog/traces/service.rs | 4 +-
 src/sinks/datadog/traces/sink.rs | 4 +-
 src/sinks/datadog/traces/tests.rs | 4 +-
 src/sinks/elasticsearch/common.rs | 263 +-
 src/sinks/elasticsearch/config.rs | 78 +-
 src/sinks/elasticsearch/encoder.rs | 199 +-
 src/sinks/elasticsearch/integration_tests.rs | 25 +-
 src/sinks/elasticsearch/mod.rs | 166 +-
 src/sinks/elasticsearch/retry.rs | 18 +-
 src/sinks/elasticsearch/service.rs | 29 +-
 src/sinks/elasticsearch/sink.rs | 39 +-
 src/sinks/elasticsearch/tests.rs | 263 +-
 src/sinks/file/bytes_path.rs | 2 +-
 src/sinks/file/mod.rs | 160 +-
 src/sinks/gcp/cloud_storage.rs | 21 +-
 src/sinks/gcp/mod.rs | 1 -
 src/sinks/gcp/pubsub.rs | 57 +-
 src/sinks/gcp/stackdriver/logs/config.rs | 37 +-
 src/sinks/gcp/stackdriver/logs/encoder.rs | 55 +-
 src/sinks/gcp/stackdriver/logs/tests.rs | 53 +-
 src/sinks/gcp/stackdriver/metrics/config.rs | 2 +-
 src/sinks/gcp/stackdriver/metrics/tests.rs | 6 +-
 .../chronicle_unstructured.rs | 229 +-
 src/sinks/gcp_chronicle/compression.rs | 133 +
 src/sinks/gcp_chronicle/mod.rs | 5 +
 src/sinks/gcp_chronicle/partitioner.rs | 77 +
 src/sinks/gcp_chronicle/sink.rs | 86 +
 src/sinks/gcs_common/config.rs | 7 +-
 src/sinks/gcs_common/service.rs | 38 +-
 src/sinks/gcs_common/sink.rs | 19 +-
 src/sinks/greptimedb/integration_tests.rs | 88 -
 src/sinks/greptimedb/logs/config.rs | 202 +
 .../greptimedb/logs/http_request_builder.rs | 294 +
 .../greptimedb/logs/integration_tests.rs | 148 +
 src/sinks/greptimedb/logs/mod.rs | 11 +
 src/sinks/greptimedb/logs/sink.rs | 88 +
 src/sinks/greptimedb/{ => metrics}/batch.rs | 12 +-
 src/sinks/greptimedb/metrics/config.rs | 182 +
 .../greptimedb/metrics/integration_tests.rs | 148 +
 src/sinks/greptimedb/metrics/mod.rs | 30 +
 src/sinks/greptimedb/metrics/request.rs | 99 +
 .../{ => metrics}/request_builder.rs | 216 +-
 src/sinks/greptimedb/metrics/service.rs | 139 +
 src/sinks/greptimedb/{ => metrics}/sink.rs | 30 +-
 src/sinks/greptimedb/mod.rs | 167 +-
 src/sinks/greptimedb/service.rs | 181 -
 src/sinks/honeycomb/config.rs | 31 +-
 src/sinks/honeycomb/encoder.rs | 19 +-
src/sinks/honeycomb/request_builder.rs | 3 +- src/sinks/honeycomb/service.rs | 9 +- src/sinks/honeycomb/tests.rs | 6 +- src/sinks/http/batch.rs | 2 +- src/sinks/http/config.rs | 123 +- src/sinks/http/encoder.rs | 2 +- src/sinks/http/request_builder.rs | 22 +- src/sinks/http/service.rs | 51 +- src/sinks/http/sink.rs | 74 +- src/sinks/http/tests.rs | 368 +- src/sinks/humio/logs.rs | 46 +- src/sinks/humio/metrics.rs | 7 +- src/sinks/influxdb/logs.rs | 13 +- src/sinks/influxdb/metrics.rs | 29 +- src/sinks/influxdb/mod.rs | 27 +- src/sinks/kafka/config.rs | 171 +- src/sinks/kafka/service.rs | 38 +- src/sinks/kafka/sink.rs | 64 +- src/sinks/kafka/tests.rs | 62 +- src/sinks/keep/config.rs | 171 + src/sinks/keep/encoder.rs | 50 + src/sinks/keep/mod.rs | 10 + src/sinks/keep/request_builder.rs | 48 + src/sinks/keep/service.rs | 33 + src/sinks/keep/sink.rs | 77 + src/sinks/loki/config.rs | 48 +- src/sinks/loki/event.rs | 19 +- src/sinks/loki/healthcheck.rs | 2 +- src/sinks/loki/integration_tests.rs | 4 +- src/sinks/loki/sink.rs | 245 +- src/sinks/loki/tests.rs | 62 +- src/sinks/mezmo.rs | 7 +- src/sinks/mod.rs | 19 +- src/sinks/mqtt/config.rs | 88 +- src/sinks/mqtt/integration_tests.rs | 18 +- src/sinks/mqtt/request_builder.rs | 2 +- src/sinks/mqtt/service.rs | 4 +- src/sinks/mqtt/sink.rs | 46 +- src/sinks/nats/config.rs | 105 +- src/sinks/nats/integration_tests.rs | 55 +- src/sinks/nats/mod.rs | 2 + src/sinks/nats/request_builder.rs | 2 +- src/sinks/nats/service.rs | 9 +- src/sinks/nats/sink.rs | 10 +- src/sinks/new_relic/config.rs | 2 +- src/sinks/new_relic/encoding.rs | 3 +- src/sinks/new_relic/mod.rs | 14 +- src/sinks/new_relic/model.rs | 216 +- src/sinks/new_relic/sink.rs | 2 +- src/sinks/new_relic/tests.rs | 316 +- src/sinks/opentelemetry/mod.rs | 95 + src/sinks/papertrail.rs | 10 +- src/sinks/postgres/config.rs | 152 + src/sinks/postgres/integration_tests.rs | 511 + src/sinks/postgres/mod.rs | 7 + src/sinks/postgres/service.rs | 166 + src/sinks/postgres/sink.rs | 47 + src/sinks/prelude.rs | 2 +- src/sinks/prometheus/collector.rs | 29 +- src/sinks/prometheus/exporter.rs | 331 +- src/sinks/prometheus/remote_write/config.rs | 17 +- .../remote_write/integration_tests.rs | 6 +- src/sinks/prometheus/remote_write/mod.rs | 5 +- src/sinks/prometheus/remote_write/service.rs | 6 +- src/sinks/prometheus/remote_write/sink.rs | 4 +- src/sinks/prometheus/remote_write/tests.rs | 12 +- src/sinks/pulsar/config.rs | 117 +- src/sinks/pulsar/encoder.rs | 2 +- src/sinks/pulsar/integration_tests.rs | 31 +- src/sinks/pulsar/service.rs | 6 +- src/sinks/pulsar/sink.rs | 6 +- src/sinks/redis/config.rs | 221 +- src/sinks/redis/integration_tests.rs | 492 +- src/sinks/redis/mod.rs | 31 +- src/sinks/redis/request_builder.rs | 12 +- src/sinks/redis/service.rs | 39 +- src/sinks/redis/sink.rs | 311 +- src/sinks/redis/tests.rs | 59 +- src/sinks/s3_common/config.rs | 87 +- src/sinks/s3_common/partitioner.rs | 39 +- src/sinks/s3_common/service.rs | 19 +- src/sinks/s3_common/sink.rs | 23 +- src/sinks/sematext/logs.rs | 2 +- src/sinks/sematext/metrics.rs | 4 +- src/sinks/socket.rs | 186 +- src/sinks/splunk_hec/common/service.rs | 15 +- src/sinks/splunk_hec/common/util.rs | 30 +- src/sinks/splunk_hec/logs/config.rs | 55 +- .../splunk_hec/logs/integration_tests.rs | 25 +- src/sinks/splunk_hec/logs/sink.rs | 127 +- src/sinks/splunk_hec/logs/tests.rs | 188 +- src/sinks/splunk_hec/metrics/config.rs | 9 +- src/sinks/splunk_hec/metrics/sink.rs | 1 - src/sinks/splunk_hec/metrics/tests.rs | 2 +- src/sinks/statsd/config.rs | 2 +- 
src/sinks/statsd/encoder.rs | 17 +- src/sinks/statsd/request_builder.rs | 14 +- .../util/adaptive_concurrency/controller.rs | 18 +- src/sinks/util/adaptive_concurrency/mod.rs | 8 +- .../util/adaptive_concurrency/service.rs | 2 +- src/sinks/util/adaptive_concurrency/tests.rs | 72 +- src/sinks/util/batch.rs | 6 +- src/sinks/util/buffer/compression.rs | 86 +- src/sinks/util/buffer/metrics/mod.rs | 14 +- src/sinks/util/buffer/metrics/normalize.rs | 229 +- src/sinks/util/buffer/mod.rs | 16 +- src/sinks/util/builder.rs | 20 +- src/sinks/util/datagram.rs | 106 + src/sinks/util/encoding.rs | 161 +- src/sinks/util/http.rs | 214 +- src/sinks/util/mod.rs | 5 +- src/sinks/util/normalizer.rs | 10 +- src/sinks/util/partitioner.rs | 38 +- src/sinks/util/request_builder.rs | 4 +- src/sinks/util/retries.rs | 164 +- src/sinks/util/service.rs | 49 +- src/sinks/util/service/concurrency.rs | 13 +- src/sinks/util/service/health.rs | 14 +- src/sinks/util/service/net/mod.rs | 7 +- src/sinks/util/service/net/tcp.rs | 3 +- src/sinks/util/service/net/unix.rs | 36 +- src/sinks/util/sink.rs | 4 +- src/sinks/util/snappy.rs | 2 +- src/sinks/util/socket_bytes_sink.rs | 4 +- src/sinks/util/tcp.rs | 54 +- src/sinks/util/test.rs | 2 +- src/sinks/util/udp.rs | 77 +- src/sinks/util/unix.rs | 241 +- src/sinks/util/uri.rs | 10 +- src/sinks/vector/config.rs | 2 +- src/sinks/vector/mod.rs | 5 +- src/sinks/webhdfs/config.rs | 6 +- src/sinks/webhdfs/integration_tests.rs | 11 +- src/sinks/websocket/config.rs | 57 +- src/sinks/websocket/sink.rs | 290 +- src/sinks/websocket_server/buffering.rs | 282 + src/sinks/websocket_server/config.rs | 178 + src/sinks/websocket_server/mod.rs | 5 + src/sinks/websocket_server/sink.rs | 943 + src/source_sender/mod.rs | 161 +- src/sources/amqp.rs | 28 +- src/sources/apache_metrics/mod.rs | 6 +- src/sources/apache_metrics/parser.rs | 19 +- src/sources/aws_ecs_metrics/mod.rs | 9 +- src/sources/aws_ecs_metrics/parser.rs | 30 +- src/sources/aws_kinesis_firehose/errors.rs | 1 + src/sources/aws_kinesis_firehose/filters.rs | 61 +- src/sources/aws_kinesis_firehose/handlers.rs | 67 +- src/sources/aws_kinesis_firehose/mod.rs | 6 +- src/sources/aws_s3/mod.rs | 67 +- src/sources/aws_s3/sqs.rs | 341 +- src/sources/aws_sqs/config.rs | 6 +- src/sources/aws_sqs/integration_tests.rs | 6 +- src/sources/aws_sqs/source.rs | 4 +- .../datadog_agent/integration_tests.rs | 2 +- src/sources/datadog_agent/logs.rs | 66 +- src/sources/datadog_agent/metrics.rs | 14 +- src/sources/datadog_agent/mod.rs | 44 +- src/sources/datadog_agent/tests.rs | 94 +- src/sources/datadog_agent/traces.rs | 8 +- src/sources/demo_logs.rs | 53 +- src/sources/dnstap/mod.rs | 27 +- src/sources/dnstap/tcp.rs | 8 +- src/sources/dnstap/unix.rs | 1 - src/sources/docker_logs/mod.rs | 97 +- src/sources/docker_logs/tests.rs | 127 +- src/sources/eventstoredb_metrics/mod.rs | 2 +- src/sources/eventstoredb_metrics/types.rs | 59 +- src/sources/exec/mod.rs | 21 +- src/sources/exec/tests.rs | 6 +- src/sources/file.rs | 134 +- .../file_descriptors/file_descriptor.rs | 24 +- src/sources/file_descriptors/mod.rs | 6 +- src/sources/fluent/message.rs | 5 +- src/sources/fluent/mod.rs | 381 +- src/sources/gcp_pubsub.rs | 68 +- src/sources/heroku_logs.rs | 114 +- src/sources/host_metrics/cgroups.rs | 82 +- src/sources/host_metrics/disk.rs | 7 +- src/sources/host_metrics/filesystem.rs | 7 +- src/sources/host_metrics/mod.rs | 59 +- src/sources/host_metrics/process.rs | 84 + src/sources/host_metrics/tcp.rs | 415 + src/sources/http_client/client.rs | 263 +- 
src/sources/http_client/tests.rs | 225 +- src/sources/http_server.rs | 344 +- src/sources/internal_logs.rs | 27 +- src/sources/internal_metrics.rs | 56 +- src/sources/journald.rs | 39 +- src/sources/kafka.rs | 175 +- .../kubernetes_logs/k8s_paths_provider.rs | 4 +- src/sources/kubernetes_logs/lifecycle.rs | 21 +- src/sources/kubernetes_logs/mod.rs | 78 +- src/sources/kubernetes_logs/parser/cri.rs | 2 +- src/sources/kubernetes_logs/parser/docker.rs | 15 +- src/sources/kubernetes_logs/parser/mod.rs | 4 +- .../kubernetes_logs/partial_events_merger.rs | 123 +- src/sources/kubernetes_logs/path_helpers.rs | 43 +- .../kubernetes_logs/pod_metadata_annotator.rs | 70 +- src/sources/logstash.rs | 332 +- src/sources/mod.rs | 9 +- src/sources/mongodb_metrics/mod.rs | 8 +- src/sources/mongodb_metrics/types.rs | 10 - src/sources/mqtt/config.rs | 175 + src/sources/mqtt/integration_tests.rs | 117 + src/sources/mqtt/mod.rs | 5 + src/sources/mqtt/source.rs | 124 + src/sources/nats.rs | 112 +- src/sources/nginx_metrics/mod.rs | 8 +- src/sources/nginx_metrics/parser.rs | 11 +- src/sources/opentelemetry/grpc.rs | 76 +- src/sources/opentelemetry/http.rs | 216 +- .../opentelemetry/integration_tests.rs | 196 +- src/sources/opentelemetry/mod.rs | 77 +- src/sources/opentelemetry/reply.rs | 13 - src/sources/opentelemetry/status.rs | 1 + src/sources/opentelemetry/tests.rs | 1147 +- src/sources/postgresql_metrics.rs | 40 +- src/sources/prometheus/parser.rs | 49 +- src/sources/prometheus/pushgateway.rs | 24 +- src/sources/prometheus/remote_write.rs | 15 +- src/sources/prometheus/scrape.rs | 39 +- src/sources/pulsar.rs | 117 +- src/sources/redis/channel.rs | 5 +- src/sources/redis/mod.rs | 10 +- src/sources/socket/mod.rs | 397 +- src/sources/socket/tcp.rs | 30 +- src/sources/socket/udp.rs | 91 +- src/sources/socket/unix.rs | 28 +- src/sources/splunk_hec/mod.rs | 147 +- src/sources/static_metrics.rs | 305 + src/sources/statsd/mod.rs | 167 +- src/sources/statsd/parser.rs | 287 +- src/sources/statsd/unix.rs | 15 +- src/sources/syslog.rs | 46 +- src/sources/util/framestream.rs | 179 +- src/sources/util/grpc/decompression.rs | 6 +- src/sources/util/grpc/mod.rs | 33 +- src/sources/util/http/auth.rs | 85 - src/sources/util/http/encoding.rs | 7 +- src/sources/util/http/headers.rs | 165 + src/sources/util/http/method.rs | 4 + src/sources/util/http/mod.rs | 16 +- src/sources/util/http/prelude.rs | 71 +- src/sources/util/http/query.rs | 161 +- src/sources/util/http_client.rs | 35 +- src/sources/util/message_decoding.rs | 61 +- src/sources/util/mod.rs | 20 +- src/sources/util/net/mod.rs | 6 +- src/sources/util/unix_datagram.rs | 7 +- src/sources/util/unix_stream.rs | 19 +- src/sources/util/wrappers.rs | 2 +- src/sources/vector/mod.rs | 20 +- src/sources/websocket/config.rs | 267 + src/sources/websocket/mod.rs | 2 + src/sources/websocket/source.rs | 728 + src/tap/cmd.rs | 190 +- src/tap/mod.rs | 6 +- src/template.rs | 332 +- src/test_util/components.rs | 110 +- src/test_util/compression.rs | 5 + src/test_util/integration.rs | 28 + src/test_util/metrics.rs | 28 +- src/test_util/mock/mod.rs | 5 +- src/test_util/mock/sources/backpressure.rs | 6 +- src/test_util/mock/sources/basic.rs | 28 +- src/test_util/mock/sources/error.rs | 2 +- src/test_util/mock/sources/panic.rs | 2 +- src/test_util/mock/sources/tripwire.rs | 2 +- src/test_util/mock/transforms/basic.rs | 2 +- .../mock/transforms/error_definitions.rs | 2 +- src/test_util/mock/transforms/noop.rs | 2 +- src/test_util/mod.rs | 73 +- src/top/cmd.rs | 11 +- src/top/dashboard.rs | 32 +- 
src/top/metrics.rs | 192 +- src/top/mod.rs | 5 + src/top/state.rs | 13 +- src/topology/builder.rs | 168 +- src/topology/controller.rs | 39 +- src/topology/mod.rs | 33 +- src/topology/running.rs | 277 +- src/topology/schema.rs | 46 +- src/topology/task.rs | 1 + src/topology/test/backpressure.rs | 4 +- src/topology/test/end_to_end.rs | 4 +- src/topology/test/reload.rs | 31 + src/trace.rs | 2 +- src/transforms/aggregate.rs | 728 +- src/transforms/aws_ec2_metadata.rs | 65 +- src/transforms/dedupe/common.rs | 118 + src/transforms/dedupe/config.rs | 101 +- src/transforms/dedupe/mod.rs | 108 +- src/transforms/dedupe/timed_transform.rs | 69 + src/transforms/dedupe/transform.rs | 4 +- src/transforms/exclusive_route/config.rs | 192 + src/transforms/exclusive_route/mod.rs | 4 + src/transforms/exclusive_route/tests.rs | 97 + src/transforms/exclusive_route/transform.rs | 51 + src/transforms/filter.rs | 4 +- src/transforms/log_to_metric.rs | 458 +- src/transforms/lua/v1/mod.rs | 59 +- src/transforms/lua/v2/mod.rs | 22 +- src/transforms/metric_to_log.rs | 2 +- src/transforms/mod.rs | 19 +- src/transforms/reduce/config.rs | 242 + src/transforms/reduce/merge_strategy.rs | 171 +- src/transforms/reduce/mod.rs | 973 +- src/transforms/reduce/transform.rs | 1045 + src/transforms/remap.rs | 248 +- src/transforms/route.rs | 58 +- src/transforms/sample/config.rs | 125 +- src/transforms/sample/mod.rs | 4 +- src/transforms/sample/tests.rs | 341 + src/transforms/sample/transform.rs | 402 +- .../tag_cardinality_limit/config.rs | 39 +- src/transforms/tag_cardinality_limit/mod.rs | 88 +- .../tag_cardinality_limit/tag_value_set.rs | 2 +- src/transforms/tag_cardinality_limit/tests.rs | 251 +- src/transforms/throttle/config.rs | 100 + src/transforms/throttle/mod.rs | 3 + src/transforms/throttle/rate_limiter.rs | 57 + .../{throttle.rs => throttle/transform.rs} | 279 +- src/transforms/window/config.rs | 94 + src/transforms/window/mod.rs | 2 + src/transforms/window/transform.rs | 405 + src/unit_test.rs | 96 +- src/utilization.rs | 188 +- src/validate.rs | 31 +- testing/github-20228/config.toml | 46 + tests/behavior/config/secret.toml | 13 + tests/behavior/transforms/reduce.toml | 20 +- tests/behavior/transforms/remap.toml | 26 + tests/data/GeoIP2-Anonymous-IP-Test.mmdb | Bin 0 -> 2534 bytes tests/data/Makefile | 18 + tests/data/adaptive-concurrency-template.toml | 7 + .../defers-at-high-concurrency.toml | 16 +- .../drops-at-high-concurrency.toml | 2 +- .../certs/pulsar-chain.cert.pem | 98 + .../intermediate_server/certs/pulsar.cert.pem | 32 + .../ca/intermediate_server/csr/pulsar.csr.pem | 17 + tests/data/ca/intermediate_server/index.txt | 1 + .../data/ca/intermediate_server/index.txt.old | 1 + .../ca/intermediate_server/newcerts/1008.pem | 32 + .../ca/intermediate_server/private/kafka.pass | 1 + .../private/pulsar.key.pem | 27 + tests/data/ca/intermediate_server/serial | 2 +- tests/data/ca/intermediate_server/serial.old | 2 +- tests/data/custom-type.mmdb | Bin 0 -> 2614 bytes tests/data/dnstap/Dockerfile | 8 +- tests/data/e2e/datadog/logs/agent_only.yaml | 3 + tests/data/e2e/datadog/logs/agent_vector.yaml | 3 + .../opentelemetry/logs/collector-sink.yaml | 26 + .../opentelemetry/logs/collector-source.yaml | 28 + .../e2e/opentelemetry/logs/output/.gitignore | 1 + tests/data/e2e/opentelemetry/logs/vector.yaml | 82 + tests/data/enterprise/base.toml | 14 - tests/data/enterprise/missing_api_key.toml | 14 - tests/data/kafka_server_jaas.conf | 1 + tests/data/nats/nats-client.key | 52 +- tests/data/nats/nats-client.pem | 50 +- 
tests/data/nats/nats-server.key | 52 +- tests/data/nats/nats-server.pem | 48 +- tests/data/nats/rootCA.pem | 52 +- tests/data/opentelemetry/config.yaml | 6 + tests/data/protobuf/.gitignore | 2 + tests/data/protobuf/Makefile | 12 + tests/data/protobuf/README.md | 11 + tests/data/protobuf/serialize.py | 16 + tests/data/protobuf/test_proto.desc | 9 + tests/data/protobuf/test_proto.pb | 2 + tests/data/protobuf/test_proto.proto | 11 + .../secret-backends/directory-secrets/jkl | 1 + tests/data/secret-backends/file-secrets.json | 1 + tests/e2e/datadog/logs/mod.rs | 47 +- tests/e2e/datadog/metrics/mod.rs | 47 +- tests/e2e/datadog/metrics/series.rs | 10 +- tests/e2e/datadog/metrics/sketches.rs | 2 +- tests/e2e/datadog/mod.rs | 9 +- tests/e2e/mod.rs | 3 + tests/e2e/opentelemetry/logs/mod.rs | 205 + tests/e2e/opentelemetry/mod.rs | 1 + tests/integration/cli.rs | 13 +- tests/integration/shutdown.rs | 14 +- .../components/sources/datadog_agent.yaml | 47 + tilt/Dockerfile | 3 +- vdev/Cargo.toml | 38 +- vdev/README.md | 2 +- vdev/src/app.rs | 10 +- vdev/src/commands/check/licenses.rs | 3 +- vdev/src/commands/check/markdown.rs | 33 +- vdev/src/commands/compose_tests/ci_paths.rs | 3 - vdev/src/commands/compose_tests/show.rs | 8 +- vdev/src/commands/compose_tests/start.rs | 14 +- vdev/src/commands/compose_tests/stop.rs | 13 +- vdev/src/commands/compose_tests/test.rs | 37 +- vdev/src/commands/e2e/mod.rs | 4 +- vdev/src/commands/e2e/show.rs | 2 +- vdev/src/commands/e2e/start.rs | 7 +- vdev/src/commands/e2e/stop.rs | 8 +- vdev/src/commands/e2e/test.rs | 7 +- vdev/src/commands/info.rs | 11 +- vdev/src/commands/integration/build.rs | 14 + vdev/src/commands/integration/mod.rs | 5 +- vdev/src/commands/integration/show.rs | 2 +- vdev/src/commands/integration/start.rs | 7 +- vdev/src/commands/integration/stop.rs | 5 +- vdev/src/commands/integration/test.rs | 7 +- vdev/src/commands/meta/install_git_hooks.rs | 2 +- vdev/src/commands/release/homebrew.rs | 136 +- vdev/src/commands/release/prepare.rs | 538 +- vdev/src/features.rs | 22 +- vdev/src/git.rs | 100 +- vdev/src/platform.rs | 4 +- vdev/src/testing/build.rs | 65 + vdev/src/testing/config.rs | 51 +- vdev/src/testing/docker.rs | 44 + vdev/src/testing/integration.rs | 217 +- vdev/src/testing/mod.rs | 2 + vdev/src/testing/runner.rs | 122 +- vdev/src/testing/state.rs | 16 +- vdev/src/util.rs | 31 +- website/.env.example | 3 + website/.htmltest.yml | 2 + website/.nvmrc | 2 +- website/.prettierrc.json | 17 + website/Makefile | 15 +- website/README.md | 38 +- website/assets/js/app.js | 2 +- website/assets/js/search.tsx | 133 +- website/config.toml | 43 +- website/content/en/_index.md | 4 +- website/content/en/blog/graphql-api.md | 6 +- .../en/blog/highlights-february-2025.md | 48 + .../content/en/blog/kubernetes-integration.md | 2 +- website/content/en/blog/log-namespacing.md | 5 +- website/content/en/community/_index.md | 77 +- website/content/en/docs/_index.md | 2 +- website/content/en/docs/about/_index.md | 15 - .../en/docs/about/under-the-hood/_index.md | 15 - .../under-the-hood/architecture/_index.md | 21 - .../about/under-the-hood/networking/_index.md | 8 - .../about/what-is-observability-pipelines.md | 20 - .../en/docs/administration/management.md | 7 + .../en/docs/administration/monitoring.md | 86 +- .../{tuning => optimization}/_index.md | 6 +- .../{tuning => optimization}/pgo.md | 0 .../en/docs/administration/upgrading.md | 4 +- .../content/en/docs/architecture/_index.md | 21 + .../networking => architecture}/arc.md | 4 +- .../architecture/buffering-model.md | 
4 +- .../architecture/concurrency-model.md | 4 +- .../architecture/data-model/_index.md | 4 +- .../architecture/data-model/log.md | 0 .../architecture/data-model/metric.md | 2 +- .../end-to-end-acknowledgements.md | 6 +- .../guarantees.md | 12 +- .../architecture/pipeline-model.md | 4 +- .../architecture/runtime-model.md | 4 +- website/content/en/docs/example.md | 7 - .../_index.md} | 9 +- .../docs/{about => introduction}/concepts.md | 34 +- website/content/en/docs/reference/_index.md | 6 +- website/content/en/docs/reference/api.md | 3 +- .../en/docs/reference/configuration/_index.md | 199 +- .../{greptimedb.md => greptimedb_logs.md} | 6 +- .../configuration/sinks/greptimedb_metrics.md | 15 + .../reference/configuration/sinks/keep.md | 14 + .../configuration/sinks/opentelemetry.md | 14 + .../reference/configuration/sinks/postgres.md | 14 + .../configuration/sinks/websocket.md | 2 +- .../configuration/sinks/websocket_server.md | 14 + .../reference/configuration/sources/mqtt.md | 14 + .../configuration/sources/static_metrics.md | 14 + .../configuration/sources/websocket.md | 14 + .../configuration/template-syntax.md | 4 +- .../transforms/exclusive_route.md | 14 + .../configuration/transforms/window.md | 14 + .../reference/configuration/unit-tests.md | 4 +- .../docs/reference/environment_variables.md | 99 + website/content/en/docs/reference/glossary.md | 1 + .../content/en/docs/reference/vrl/_index.md | 90 +- .../en/docs/setup/deployment/_index.md | 2 +- .../en/docs/setup/going-to-prod/_index.md | 2 +- .../en/docs/setup/going-to-prod/arch/agent.md | 2 +- .../setup/going-to-prod/arch/aggregator.md | 2 +- .../docs/setup/going-to-prod/architecting.md | 8 +- .../setup/going-to-prod/high-availability.md | 6 +- .../en/docs/setup/going-to-prod/sizing.md | 2 +- .../en/docs/setup/installation/_index.md | 1 + .../setup/installation/manual/from-source.md | 2 +- .../installation/operating-systems/nixos.md | 119 +- .../installation/package-managers/homebrew.md | 2 +- .../installation/package-managers/pacman.md | 4 +- .../setup/installation/platforms/docker.md | 11 +- .../installation/platforms/kubernetes.md | 2 +- website/content/en/guides/_index.md | 1 + website/content/en/guides/advanced/_index.md | 2 +- .../advanced/cloudwatch-logs-firehose.md | 2 +- .../advanced/custom-aggregations-with-lua.md | 4 +- .../advanced/merge-multiline-logs-with-lua.md | 4 +- .../advanced/parsing-csv-logs-with-lua.md | 28 +- website/content/en/guides/developer/_index.md | 6 + .../guides/developer/config-autocompletion.md | 57 + .../content/en/guides/developer/debugging.md | 989 + .../en/guides/getting-started/_index.md | 6 + .../guides/getting-started/getting-started.md | 63 +- .../transformation.md | 19 +- website/content/en/guides/level-up/_index.md | 2 +- .../guides/level-up/csv-enrichment-guide.md | 4 +- .../en/guides/level-up/log_namespace.md | 191 + .../level-up/managing-complex-configs.md | 4 +- .../en/guides/level-up/managing-schemas.md | 6 +- .../en/guides/level-up/vector-tap-guide.md | 2 +- .../2020-02-14-global-log-schema.md | 2 +- .../2020-02-24-log-data-model-changes.md | 2 +- .../2020-04-07-vector-to-vector-metrics.md | 2 +- .../en/highlights/2020-12-23-vector-top.md | 2 +- .../2021-01-10-kafka-sink-metrics.md | 2 +- .../2021-10-08-0-17-upgrade-guide.md | 2 +- .../2022-03-22-0-21-0-upgrade-guide.md | 6 +- .../2022-08-16-0-24-0-upgrade-guide.md | 2 +- .../2022-11-07-0-26-0-upgrade-guide.md | 2 +- .../2023-03-26-0-37-0-upgrade-guide.md | 59 + .../2024-05-07-0-38-0-upgrade-guide.md | 64 + 
.../2024-06-17-0-39-0-upgrade-guide.md | 42 + .../2024-07-29-0-40-0-upgrade-guide.md | 166 + .../highlights/2024-11-07-exclusive_route.md | 44 + .../2025-01-13-0-44-0-upgrade-guide.md | 42 + .../2025-02-24-0-45-0-upgrade-guide.md | 44 + .../2025-02-24-memory_enrichment_table.md | 113 + website/content/en/releases/0.37.0.md | 4 + website/content/en/releases/0.37.1.md | 4 + website/content/en/releases/0.38.0.md | 4 + website/content/en/releases/0.39.0.md | 4 + website/content/en/releases/0.40.0.md | 4 + website/content/en/releases/0.40.1.md | 4 + website/content/en/releases/0.40.2.md | 4 + website/content/en/releases/0.41.0.md | 4 + website/content/en/releases/0.41.1.md | 4 + website/content/en/releases/0.42.0.md | 4 + website/content/en/releases/0.43.0.md | 4 + website/content/en/releases/0.43.1.md | 4 + website/content/en/releases/0.44.0.md | 4 + website/content/en/releases/0.45.0.md | 4 + website/content/en/releases/0.46.0.md | 4 + website/content/en/releases/0.46.1.md | 4 + website/content/en/releases/0.47.0.md | 4 + website/content/en/releases/0.48.0.md | 4 + website/cue/reference.cue | 23 +- .../reference/administration/downloads.cue | 7 + .../administration/interfaces/kubectl.cue | 2 +- .../reference/administration/management.cue | 2 +- website/cue/reference/api.cue | 43 +- website/cue/reference/authors.cue | 49 - website/cue/reference/cli.cue | 69 +- website/cue/reference/components.cue | 143 +- .../cue/reference/components/base/sources.cue | 61 - .../reference/components/base/transforms.cue | 17 - .../components/{base => generated}/sinks.cue | 49 +- .../components/generated/sources.cue | 91 + .../components/generated/transforms.cue | 47 + website/cue/reference/components/kafka.cue | 2 +- website/cue/reference/components/sinks.cue | 22 +- .../cue/reference/components/sinks/amqp.cue | 2 +- .../reference/components/sinks/appsignal.cue | 2 +- .../components/sinks/aws_cloudwatch.cue | 3 +- .../components/sinks/aws_cloudwatch_logs.cue | 2 +- .../sinks/aws_cloudwatch_metrics.cue | 2 +- .../components/sinks/aws_kinesis_firehose.cue | 2 +- .../components/sinks/aws_kinesis_streams.cue | 2 +- .../cue/reference/components/sinks/aws_s3.cue | 26 +- .../reference/components/sinks/aws_sns.cue | 2 +- .../reference/components/sinks/aws_sqs.cue | 2 +- .../cue/reference/components/sinks/axiom.cue | 11 +- .../reference/components/sinks/azure_blob.cue | 2 +- .../components/sinks/azure_monitor_logs.cue | 2 +- .../sinks/base/unit_test_stream.cue | 3 - .../reference/components/sinks/blackhole.cue | 2 +- .../reference/components/sinks/clickhouse.cue | 2 +- .../reference/components/sinks/console.cue | 2 +- .../reference/components/sinks/databend.cue | 6 +- .../components/sinks/datadog_events.cue | 2 +- .../components/sinks/datadog_logs.cue | 2 +- .../components/sinks/datadog_metrics.cue | 2 +- .../components/sinks/datadog_traces.cue | 2 +- .../components/sinks/elasticsearch.cue | 36 +- .../cue/reference/components/sinks/file.cue | 19 +- .../sinks/gcp_chronicle_unstructured.cue | 5 +- .../components/sinks/gcp_cloud_storage.cue | 12 +- .../reference/components/sinks/gcp_pubsub.cue | 2 +- .../components/sinks/gcp_stackdriver_logs.cue | 2 +- .../sinks/gcp_stackdriver_metrics.cue | 2 +- .../sinks/{base => generated}/amqp.cue | 191 +- .../sinks/{base => generated}/appsignal.cue | 49 +- .../aws_cloudwatch_logs.cue | 234 +- .../aws_cloudwatch_metrics.cue | 67 +- .../aws_kinesis_firehose.cue | 209 +- .../aws_kinesis_streams.cue | 209 +- .../sinks/{base => generated}/aws_s3.cue | 277 +- .../sinks/{base => 
generated}/aws_sns.cue | 207 +- .../sinks/{base => generated}/aws_sqs.cue | 207 +- .../sinks/{base => generated}/axiom.cue | 107 +- .../sinks/{base => generated}/azure_blob.cue | 215 +- .../azure_monitor_logs.cue | 47 +- .../sinks/{base => generated}/blackhole.cue | 10 +- .../components/sinks/generated/clickhouse.cue | 701 + .../sinks/{base => generated}/console.cue | 181 +- .../sinks/{base => generated}/databend.cue | 259 +- .../{base => generated}/datadog_events.cue | 47 +- .../{base => generated}/datadog_logs.cue | 61 +- .../{base => generated}/datadog_metrics.cue | 49 +- .../{base => generated}/datadog_traces.cue | 49 +- .../{base => generated}/elasticsearch.cue | 165 +- .../sinks/{base => generated}/file.cue | 181 +- .../gcp_chronicle_unstructured.cue | 256 +- .../{base => generated}/gcp_cloud_storage.cue | 229 +- .../sinks/{base => generated}/gcp_pubsub.cue | 189 +- .../gcp_stackdriver_logs.cue | 73 +- .../gcp_stackdriver_metrics.cue | 47 +- .../sinks/{base => generated}/greptimedb.cue | 83 +- .../greptimedb_logs.cue} | 214 +- .../sinks/generated/greptimedb_metrics.cue | 424 + .../sinks/{base => generated}/honeycomb.cue | 64 +- .../sinks/{base => generated}/http.cue | 365 +- .../sinks/{base => generated}/humio_logs.cue | 196 +- .../{base => generated}/humio_metrics.cue | 52 +- .../{base => generated}/influxdb_logs.cue | 47 +- .../{base => generated}/influxdb_metrics.cue | 47 +- .../sinks/{base => generated}/kafka.cue | 203 +- .../components/sinks/generated/keep.cue | 289 + .../sinks/{base => generated}/logdna.cue | 26 +- .../sinks/{base => generated}/loki.cue | 354 +- .../sinks/{base => generated}/mezmo.cue | 26 +- .../sinks/{base => generated}/mqtt.cue | 182 +- .../sinks/{base => generated}/nats.cue | 202 +- .../sinks/{base => generated}/new_relic.cue | 26 +- .../sinks/generated/opentelemetry.cue | 1046 + .../sinks/{base => generated}/papertrail.cue | 175 +- .../components/sinks/generated/postgres.cue | 286 + .../prometheus_exporter.cue | 171 +- .../prometheus_remote_write.cue | 84 +- .../sinks/{base => generated}/pulsar.cue | 221 +- .../sinks/{base => generated}/redis.cue | 273 +- .../{base => generated}/sematext_logs.cue | 26 +- .../{base => generated}/sematext_metrics.cue | 26 +- .../sinks/{base => generated}/socket.cue | 219 +- .../{base => generated}/splunk_hec_logs.cue | 200 +- .../splunk_hec_metrics.cue | 45 +- .../sinks/{base => generated}/statsd.cue | 35 +- .../sinks/{base => generated}/unit_test.cue | 2 +- .../sinks/generated/unit_test_stream.cue | 3 + .../sinks/{base => generated}/vector.cue | 49 +- .../sinks/{base => generated}/webhdfs.cue | 183 +- .../sinks/{base => generated}/websocket.cue | 335 +- .../sinks/generated/websocket_server.cue | 951 + .../components/sinks/greptimedb_logs.cue | 90 + ...{greptimedb.cue => greptimedb_metrics.cue} | 11 +- .../reference/components/sinks/honeycomb.cue | 9 +- .../cue/reference/components/sinks/http.cue | 2 +- .../reference/components/sinks/humio_logs.cue | 2 +- .../components/sinks/humio_metrics.cue | 2 +- .../components/sinks/influxdb_logs.cue | 2 +- .../components/sinks/influxdb_metrics.cue | 4 +- .../cue/reference/components/sinks/kafka.cue | 2 +- .../cue/reference/components/sinks/keep.cue | 95 + .../cue/reference/components/sinks/loki.cue | 2 +- .../cue/reference/components/sinks/mezmo.cue | 2 +- .../cue/reference/components/sinks/mqtt.cue | 2 +- .../cue/reference/components/sinks/nats.cue | 2 +- .../reference/components/sinks/new_relic.cue | 2 +- .../components/sinks/opentelemetry.cue | 236 + 
.../reference/components/sinks/papertrail.cue | 2 +- .../reference/components/sinks/postgres.cue | 223 + .../components/sinks/prometheus_exporter.cue | 25 +- .../sinks/prometheus_remote_write.cue | 2 +- .../cue/reference/components/sinks/pulsar.cue | 15 +- .../cue/reference/components/sinks/redis.cue | 15 +- .../components/sinks/sematext_logs.cue | 2 +- .../components/sinks/sematext_metrics.cue | 2 +- .../cue/reference/components/sinks/socket.cue | 15 +- .../components/sinks/splunk_hec_logs.cue | 2 +- .../components/sinks/splunk_hec_metrics.cue | 2 +- .../cue/reference/components/sinks/statsd.cue | 2 +- .../cue/reference/components/sinks/vector.cue | 5 +- .../reference/components/sinks/webhdfs.cue | 2 +- .../reference/components/sinks/websocket.cue | 2 +- .../components/sinks/websocket_server.cue | 242 + website/cue/reference/components/sources.cue | 2 +- .../cue/reference/components/sources/amqp.cue | 62 +- .../components/sources/apache_metrics.cue | 2 +- .../components/sources/aws_ecs_metrics.cue | 2 +- .../sources/aws_kinesis_firehose.cue | 68 +- .../reference/components/sources/aws_s3.cue | 80 +- .../reference/components/sources/aws_sqs.cue | 46 +- .../components/sources/base/nginx_metrics.cue | 165 - .../components/sources/base/unit_test.cue | 3 - .../sources/base/unit_test_stream.cue | 3 - .../components/sources/datadog_agent.cue | 16 +- .../components/sources/demo_logs.cue | 2 +- .../reference/components/sources/dnstap.cue | 3 +- .../components/sources/docker_logs.cue | 2 +- .../sources/eventstoredb_metrics.cue | 2 +- .../cue/reference/components/sources/exec.cue | 76 +- .../cue/reference/components/sources/file.cue | 6 +- .../components/sources/file_descriptor.cue | 22 +- .../reference/components/sources/fluent.cue | 2 +- .../components/sources/gcp_pubsub.cue | 2 +- .../sources/{base => generated}/amqp.cue | 181 +- .../{base => generated}/apache_metrics.cue | 2 +- .../{base => generated}/aws_ecs_metrics.cue | 2 +- .../aws_kinesis_firehose.cue | 183 +- .../sources/{base => generated}/aws_s3.cue | 310 +- .../sources/{base => generated}/aws_sqs.cue | 201 +- .../{base => generated}/datadog_agent.cue | 185 +- .../sources/{base => generated}/demo_logs.cue | 158 +- .../sources/{base => generated}/dnstap.cue | 33 +- .../{base => generated}/docker_logs.cue | 4 +- .../eventstoredb_metrics.cue | 2 +- .../sources/{base => generated}/exec.cue | 158 +- .../sources/{base => generated}/file.cue | 14 +- .../{base => generated}/file_descriptor.cue | 158 +- .../sources/{base => generated}/fluent.cue | 86 +- .../{base => generated}/gcp_pubsub.cue | 181 +- .../{base => generated}/heroku_logs.cue | 234 +- .../{base => generated}/host_metrics.cue | 45 +- .../sources/{base => generated}/http.cue | 257 +- .../{base => generated}/http_client.cue | 360 +- .../{base => generated}/http_server.cue | 257 +- .../{base => generated}/internal_logs.cue | 4 +- .../{base => generated}/internal_metrics.cue | 4 +- .../sources/{base => generated}/journald.cue | 4 +- .../sources/{base => generated}/kafka.cue | 183 +- .../{base => generated}/kubernetes_logs.cue | 17 +- .../sources/{base => generated}/logstash.cue | 32 +- .../{base => generated}/mongodb_metrics.cue | 2 +- .../components/sources/generated/mqtt.cue | 591 + .../sources/{base => generated}/nats.cue | 185 +- .../sources/generated/nginx_metrics.cue | 304 + .../{base => generated}/opentelemetry.cue | 67 +- .../postgresql_metrics.cue | 2 +- .../prometheus_pushgateway.cue | 72 +- .../prometheus_remote_write.cue | 72 +- .../{base => generated}/prometheus_scrape.cue | 
177 +- .../sources/{base => generated}/pulsar.cue | 189 +- .../sources/{base => generated}/redis.cue | 158 +- .../sources/{base => generated}/socket.cue | 212 +- .../{base => generated}/splunk_hec.cue | 25 +- .../sources/generated/static_metrics.cue | 270 + .../sources/{base => generated}/statsd.cue | 52 +- .../sources/{base => generated}/stdin.cue | 158 +- .../sources/{base => generated}/syslog.cue | 31 +- .../sources/generated/unit_test.cue | 3 + .../sources/generated/unit_test_stream.cue | 3 + .../sources/{base => generated}/vector.cue | 27 +- .../sources/generated/websocket.cue | 812 + .../components/sources/heroku_logs.cue | 3 +- .../components/sources/host_metrics.cue | 70 +- .../components/sources/http_client.cue | 24 +- .../components/sources/http_server.cue | 93 +- .../components/sources/internal_logs.cue | 2 +- .../components/sources/internal_metrics.cue | 101 +- .../reference/components/sources/journald.cue | 2 +- .../reference/components/sources/kafka.cue | 82 +- .../components/sources/kubernetes_logs.cue | 15 +- .../reference/components/sources/logstash.cue | 2 +- .../components/sources/mongodb_metrics.cue | 2 +- .../cue/reference/components/sources/mqtt.cue | 107 + .../cue/reference/components/sources/nats.cue | 50 +- .../components/sources/nginx_metrics.cue | 2 +- .../components/sources/opentelemetry.cue | 85 +- .../components/sources/postgresql_metrics.cue | 2 +- .../sources/prometheus_pushgateway.cue | 3 +- .../sources/prometheus_remote_write.cue | 3 +- .../components/sources/prometheus_scrape.cue | 18 +- .../reference/components/sources/pulsar.cue | 72 +- .../reference/components/sources/redis.cue | 48 +- .../reference/components/sources/socket.cue | 63 +- .../components/sources/splunk_hec.cue | 2 +- .../components/sources/static_metrics.cue | 85 + .../reference/components/sources/statsd.cue | 11 +- .../reference/components/sources/stdin.cue | 32 +- .../reference/components/sources/syslog.cue | 3 +- .../reference/components/sources/vector.cue | 5 +- .../components/sources/websocket.cue | 121 + .../cue/reference/components/transforms.cue | 2 +- .../components/transforms/aggregate.cue | 8 +- .../transforms/aws_ec2_metadata.cue | 2 +- .../components/transforms/base/aggregate.cue | 11 - .../components/transforms/base/sample.cue | 38 - .../transforms/base/tag_cardinality_limit.cue | 54 - .../components/transforms/dedupe.cue | 8 +- .../components/transforms/exclusive_route.cue | 113 + .../components/transforms/filter.cue | 14 +- .../transforms/generated/aggregate.cue | 35 + .../{base => generated}/aws_ec2_metadata.cue | 4 +- .../transforms/{base => generated}/dedupe.cue | 18 +- .../transforms/generated/exclusive_route.cue | 41 + .../transforms/{base => generated}/filter.cue | 2 +- .../{base => generated}/log_to_metric.cue | 31 +- .../transforms/{base => generated}/lua.cue | 4 +- .../{base => generated}/metric_to_log.cue | 4 +- .../transforms/{base => generated}/reduce.cue | 18 +- .../transforms/{base => generated}/remap.cue | 15 +- .../transforms/{base => generated}/route.cue | 32 +- .../transforms/generated/sample.cue | 74 + .../generated/tag_cardinality_limit.cue | 119 + .../{base => generated}/throttle.cue | 2 +- .../transforms/generated/window.cue | 35 + .../components/transforms/log_to_metric.cue | 18 +- .../reference/components/transforms/lua.cue | 12 +- .../components/transforms/metric_to_log.cue | 8 +- .../components/transforms/reduce.cue | 32 +- .../reference/components/transforms/remap.cue | 30 +- .../reference/components/transforms/route.cue | 90 +- 
.../components/transforms/sample.cue | 11 +- .../transforms/tag_cardinality_limit.cue | 20 +- .../components/transforms/throttle.cue | 8 +- .../components/transforms/window.cue | 150 + website/cue/reference/configuration.cue | 758 +- website/cue/reference/data_model/schema.cue | 2 +- website/cue/reference/generated/api.cue | 44 + .../cue/reference/generated/configuration.cue | 926 + website/cue/reference/releases.cue | 1 + website/cue/reference/releases/0.10.0.cue | 624 +- website/cue/reference/releases/0.11.0.cue | 678 +- website/cue/reference/releases/0.11.1.cue | 18 +- website/cue/reference/releases/0.12.0.cue | 426 +- website/cue/reference/releases/0.12.1.cue | 14 +- website/cue/reference/releases/0.12.2.cue | 16 +- website/cue/reference/releases/0.13.0.cue | 112 +- website/cue/reference/releases/0.14.0.cue | 410 +- website/cue/reference/releases/0.15.0.cue | 520 +- website/cue/reference/releases/0.15.1.cue | 28 +- website/cue/reference/releases/0.16.0.cue | 452 +- website/cue/reference/releases/0.16.1.cue | 6 +- website/cue/reference/releases/0.17.0.cue | 526 +- website/cue/reference/releases/0.17.1.cue | 24 +- website/cue/reference/releases/0.17.2.cue | 12 +- website/cue/reference/releases/0.17.3.cue | 8 +- website/cue/reference/releases/0.18.0.cue | 406 +- website/cue/reference/releases/0.18.1.cue | 14 +- website/cue/reference/releases/0.19.0.cue | 446 +- website/cue/reference/releases/0.19.1.cue | 20 +- website/cue/reference/releases/0.19.2.cue | 16 +- website/cue/reference/releases/0.20.0.cue | 638 +- website/cue/reference/releases/0.20.1.cue | 50 +- website/cue/reference/releases/0.21.0.cue | 838 +- website/cue/reference/releases/0.21.1.cue | 46 +- website/cue/reference/releases/0.21.2.cue | 24 +- website/cue/reference/releases/0.22.0.cue | 604 +- website/cue/reference/releases/0.22.1.cue | 28 +- website/cue/reference/releases/0.22.2.cue | 22 +- website/cue/reference/releases/0.22.3.cue | 10 +- website/cue/reference/releases/0.23.0.cue | 510 +- website/cue/reference/releases/0.23.3.cue | 44 +- website/cue/reference/releases/0.24.0.cue | 556 +- website/cue/reference/releases/0.24.1.cue | 32 +- website/cue/reference/releases/0.24.2.cue | 36 +- website/cue/reference/releases/0.25.0.cue | 714 +- website/cue/reference/releases/0.25.1.cue | 16 +- website/cue/reference/releases/0.25.2.cue | 14 +- website/cue/reference/releases/0.26.0.cue | 492 +- website/cue/reference/releases/0.27.0.cue | 508 +- website/cue/reference/releases/0.27.1.cue | 14 +- website/cue/reference/releases/0.28.0.cue | 766 +- website/cue/reference/releases/0.28.1.cue | 8 +- website/cue/reference/releases/0.28.2.cue | 26 +- website/cue/reference/releases/0.29.0.cue | 538 +- website/cue/reference/releases/0.29.1.cue | 8 +- website/cue/reference/releases/0.30.0.cue | 313 +- website/cue/reference/releases/0.31.0.cue | 372 +- website/cue/reference/releases/0.32.0.cue | 414 +- website/cue/reference/releases/0.32.1.cue | 20 +- website/cue/reference/releases/0.32.2.cue | 14 +- website/cue/reference/releases/0.33.0.cue | 470 +- website/cue/reference/releases/0.33.1.cue | 32 +- website/cue/reference/releases/0.34.0.cue | 402 +- website/cue/reference/releases/0.34.1.cue | 22 +- website/cue/reference/releases/0.34.2.cue | 14 +- website/cue/reference/releases/0.35.0.cue | 372 +- website/cue/reference/releases/0.35.1.cue | 8 +- website/cue/reference/releases/0.36.0.cue | 370 +- website/cue/reference/releases/0.36.1.cue | 20 +- website/cue/reference/releases/0.37.0.cue | 390 + website/cue/reference/releases/0.37.1.cue | 42 + 
website/cue/reference/releases/0.38.0.cue | 331 + website/cue/reference/releases/0.39.0.cue | 264 + website/cue/reference/releases/0.4.0.cue | 619 +- website/cue/reference/releases/0.40.0.cue | 403 + website/cue/reference/releases/0.40.1.cue | 36 + website/cue/reference/releases/0.40.2.cue | 27 + website/cue/reference/releases/0.41.0.cue | 364 + website/cue/reference/releases/0.41.1.cue | 29 + website/cue/reference/releases/0.42.0.cue | 379 + website/cue/reference/releases/0.43.0.cue | 463 + website/cue/reference/releases/0.43.1.cue | 49 + website/cue/reference/releases/0.44.0.cue | 308 + website/cue/reference/releases/0.45.0.cue | 364 + website/cue/reference/releases/0.46.0.cue | 368 + website/cue/reference/releases/0.46.1.cue | 21 + website/cue/reference/releases/0.47.0.cue | 419 + website/cue/reference/releases/0.48.0.cue | 294 + website/cue/reference/releases/0.5.0.cue | 129 +- website/cue/reference/releases/0.6.0.cue | 495 +- website/cue/reference/releases/0.7.0.cue | 331 +- website/cue/reference/releases/0.7.1.cue | 27 +- website/cue/reference/releases/0.7.2.cue | 27 +- website/cue/reference/releases/0.8.0.cue | 429 +- website/cue/reference/releases/0.8.1.cue | 3 +- website/cue/reference/releases/0.8.2.cue | 11 +- website/cue/reference/releases/0.9.0.cue | 426 +- website/cue/reference/releases/0.9.1.cue | 33 +- website/cue/reference/releases/0.9.2.cue | 3 +- .../cue/reference/remap/concepts/metrics.cue | 2 +- .../remap/errors/305_divide_by_zero_error.cue | 2 +- .../cue/reference/remap/expressions/path.cue | 24 +- website/cue/reference/remap/functions.cue | 20 +- .../reference/remap/functions/camelcase.cue | 43 + .../remap/functions/community_id.cue | 4 +- website/cue/reference/remap/functions/crc.cue | 164 + .../remap/functions/decode_charset.cue | 52 + .../reference/remap/functions/decode_lz4.cue | 59 + .../remap/functions/decode_punycode.cue | 2 +- .../cue/reference/remap/functions/decrypt.cue | 17 +- .../remap/functions/encode_charset.cue | 52 + .../reference/remap/functions/encode_lz4.cue | 39 + .../remap/functions/encode_proto.cue | 52 + .../remap/functions/encode_punycode.cue | 2 +- .../cue/reference/remap/functions/encrypt.cue | 2 + .../find_enrichment_table_records.cue | 18 +- .../cue/reference/remap/functions/flatten.cue | 2 +- .../functions/get_enrichment_table_record.cue | 14 + .../reference/remap/functions/get_secret.cue | 3 +- .../reference/remap/functions/haversine.cue | 72 + .../remap/functions/ip_cidr_contains.cue | 9 +- .../reference/remap/functions/is_nullish.cue | 11 +- .../reference/remap/functions/kebabcase.cue | 43 + .../remap/functions/match_datadog_query.cue | 4 +- .../remap/functions/object_from_array.cue | 54 + .../remap/functions/parse_aws_alb_log.cue | 1 + .../reference/remap/functions/parse_bytes.cue | 73 + .../reference/remap/functions/parse_cbor.cue | 36 + .../remap/functions/parse_dnstap.cue | 142 + .../remap/functions/parse_duration.cue | 7 + .../reference/remap/functions/parse_etld.cue | 22 + .../reference/remap/functions/parse_float.cue | 4 +- .../reference/remap/functions/parse_groks.cue | 25 + .../remap/functions/parse_influxdb.cue | 109 + .../reference/remap/functions/parse_json.cue | 11 + .../reference/remap/functions/parse_klog.cue | 7 +- .../functions/parse_linux_authorization.cue | 4 +- .../remap/functions/parse_nginx_log.cue | 27 +- .../reference/remap/functions/parse_proto.cue | 65 + .../remap/functions/parse_regex_all.cue | 3 +- .../remap/functions/parse_timestamp.cue | 18 +- .../reference/remap/functions/pascalcase.cue | 43 + 
.../remap/functions/remove_secret.cue | 3 +- .../cue/reference/remap/functions/replace.cue | 13 +- .../remap/functions/screamingsnakecase.cue | 43 + .../reference/remap/functions/set_secret.cue | 3 +- .../remap/functions/set_semantic_meaning.cue | 3 +- .../remap/functions/shannon_entropy.cue | 63 + .../cue/reference/remap/functions/sieve.cue | 65 + .../reference/remap/functions/snakecase.cue | 43 + .../reference/remap/functions/to_float.cue | 1 - .../cue/reference/remap/functions/to_int.cue | 3 +- .../functions/to_syslog_facility_code.cue | 32 + .../remap/functions/to_unix_timestamp.cue | 4 +- .../reference/remap/functions/unflatten.cue | 111 + .../remap/functions/uuid_from_friendly_id.cue | 32 + .../cue/reference/remap/functions/uuid_v7.cue | 44 + .../remap/functions/validate_json_schema.cue | 79 + website/cue/reference/remap/functions/zip.cue | 55 + .../reference/remap/syntax/expressions.cue | 22 + website/cue/reference/services/keep.cue | 10 + website/cue/reference/services/postgres.cue | 10 + .../reference/services/websocket_client.cue | 8 + website/cue/reference/team.cue | 162 - website/cue/reference/urls.cue | 47 +- website/cue/reference/versions.cue | 18 + website/data/redirects.yaml | 2 + website/layouts/_default/blog-post-card.html | 4 +- website/layouts/docs/component.html | 20 +- website/layouts/guides/li.html | 4 +- website/layouts/guides/list.html | 2 +- website/layouts/guides/section.html | 4 +- website/layouts/highlights/li.html | 45 +- website/layouts/index.redirects | 2 +- .../layouts/partials/author-with-avatar.html | 33 + website/layouts/partials/badge.html | 20 +- website/layouts/partials/blog/authors.html | 22 - website/layouts/partials/blog/content.html | 4 +- .../partials/components/example-configs.html | 4 +- website/layouts/partials/content.html | 6 +- website/layouts/partials/css.html | 56 +- website/layouts/partials/data.html | 3626 ++-- .../partials/docs/component-under-hero.html | 37 +- .../partials/download/download-matrix.html | 6 +- website/layouts/partials/heading.html | 9 +- .../layouts/partials/highlights/authors.html | 22 - .../layouts/partials/javascript/search.html | 15 +- website/layouts/partials/logs_output.html | 82 + website/layouts/partials/page-actions.html | 20 - website/layouts/partials/telemetry_input.html | 59 + .../layouts/partials/telemetry_output.html | 49 + website/layouts/releases/single.html | 108 +- .../shortcodes/internal-metrics-list.html | 38 - website/layouts/shortcodes/jump.html | 14 +- website/package.json | 21 +- website/scripts/create-config-examples.js | 40 +- website/scripts/cue.sh | 4 - .../{algolia-index.ts => typesense-index.ts} | 39 +- website/scripts/typesense-sync.ts | 19 + .../gifs/guides/config-autocomplete.gif | Bin 0 -> 326623 bytes website/static/img/exclusive_route.svg | 1 + ...dd-metrics-vector-errors-visualization.png | Bin 0 -> 222690 bytes website/static/img/guides/debugging-meme.png | Bin 0 -> 2318961 bytes website/static/img/guides/vector-tap.png | Bin 0 -> 56287 bytes website/static/img/guides/vector-top.png | Bin 0 -> 61285 bytes website/static/img/sliding-window.svg | 418 + website/typesense.config.json | 132 + website/yarn.lock | 4279 ++--- workload-checks/README.md | 9 - .../cases/http_text_to_http_json/README.md | 5 - .../http_text_to_http_json/experiment.yaml | 22 - .../http_text_to_http_json/lading/lading.yaml | 16 - .../http_text_to_http_json/vector/vector.yaml | 16 - workload-checks/typical/machine.yaml | 7 - 6051 files changed, 121443 insertions(+), 45533 deletions(-) create mode 100644 
[file mode changes omitted: create/delete/rename entries covering .github workflows, changelog.d fragments, and the lib/codecs/tests/data/native_encoding/json/pre-v41/*.json fixtures, among others]
lib/codecs/tests/data/native_encoding/json/pre-v41/0214.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0215.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0216.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0217.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0218.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0219.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0220.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0221.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0222.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0223.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0224.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0225.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0226.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0227.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0228.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0229.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0230.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0231.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0232.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0233.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0234.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0235.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0236.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0237.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0238.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0239.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0240.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0241.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0242.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0243.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0244.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0245.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0246.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0247.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0248.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0249.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0250.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0251.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0252.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0253.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0254.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0255.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0256.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0257.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0258.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0259.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0260.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0261.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0262.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0263.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0264.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0265.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0266.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0267.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0268.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0269.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0270.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0271.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0272.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0273.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0274.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0275.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0276.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0277.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0278.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0279.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0280.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0281.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0282.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0283.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0284.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0285.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0286.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0287.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0288.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0289.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0290.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0291.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0292.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0293.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0294.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0295.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0296.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0297.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0298.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0299.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0300.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0301.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0302.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0303.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0304.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0305.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0306.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0307.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0308.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0309.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0310.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0311.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0312.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0313.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0314.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0315.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0316.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0317.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0318.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0319.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0320.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0321.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0322.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0323.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0324.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0325.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0326.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0327.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0328.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0329.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0330.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0331.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0332.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0333.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0334.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0335.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0336.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0337.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0338.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0339.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0340.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0341.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0342.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0343.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0344.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0345.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0346.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0347.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0348.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0349.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0350.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0351.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0352.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0353.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0354.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0355.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0356.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0357.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0358.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0359.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0360.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0361.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0362.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0363.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0364.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0365.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0366.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0367.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0368.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0369.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0370.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0371.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0372.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0373.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0374.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0375.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0376.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0377.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0378.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0379.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0380.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0381.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0382.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0383.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0384.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0385.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0386.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0387.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0388.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0389.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0390.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0391.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0392.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0393.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0394.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0395.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0396.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0397.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0398.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0399.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0400.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0401.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0402.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0403.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0404.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0405.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0406.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0407.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0408.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0409.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0410.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0411.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0412.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0413.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0414.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0415.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0416.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0417.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0418.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0419.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0420.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0421.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0422.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0423.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0424.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0425.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0426.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0427.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0428.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0429.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0430.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0431.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0432.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0433.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0434.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0435.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0436.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0437.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0438.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0439.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0440.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0441.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0442.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0443.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0444.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0445.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0446.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0447.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0448.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0449.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0450.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0451.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0452.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0453.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0454.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0455.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0456.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0457.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0458.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0459.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0460.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0461.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0462.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0463.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0464.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0465.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0466.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0467.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0468.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0469.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0470.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0471.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0472.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0473.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0474.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0475.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0476.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0477.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0478.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0479.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0480.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0481.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0482.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0483.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0484.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0485.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0486.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0487.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0488.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0489.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0490.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0491.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0492.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0493.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0494.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0495.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0496.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0497.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0498.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0499.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0500.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0501.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0502.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0503.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0504.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0505.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0506.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0507.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0508.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0509.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0510.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0511.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0512.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0513.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0514.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0515.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0516.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0517.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0518.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0519.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0520.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0521.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0522.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0523.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0524.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0525.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0526.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0527.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0528.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0529.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0530.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0531.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0532.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0533.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0534.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0535.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0536.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0537.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0538.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0539.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0540.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0541.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0542.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0543.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0544.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0545.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0546.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0547.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0548.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0549.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0550.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0551.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0552.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0553.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0554.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0555.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0556.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0557.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0558.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0559.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0560.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0561.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0562.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0563.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0564.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0565.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0566.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0567.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0568.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0569.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0570.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0571.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0572.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0573.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0574.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0575.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0576.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0577.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0578.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0579.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0580.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0581.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0582.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0583.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0584.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0585.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0586.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0587.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0588.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0589.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0590.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0591.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0592.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0593.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0594.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0595.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0596.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0597.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0598.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0599.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0600.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0601.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0602.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0603.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0604.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0605.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0606.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0607.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0608.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0609.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0610.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0611.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0612.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0613.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0614.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0615.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0616.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0617.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0618.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0619.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0620.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0621.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0622.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0623.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0624.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0625.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0626.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0627.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0628.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0629.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0630.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0631.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0632.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0633.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0634.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0635.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0636.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0637.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0638.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0639.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0640.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0641.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0642.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0643.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0644.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0645.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0646.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0647.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0648.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0649.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0650.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0651.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0652.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0653.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0654.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0655.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0656.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0657.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0658.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0659.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0660.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0661.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0662.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0663.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0664.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0665.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0666.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0667.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0668.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0669.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0670.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0671.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0672.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0673.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0674.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0675.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0676.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0677.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0678.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0679.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0680.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0681.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0682.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0683.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0684.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0685.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0686.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0687.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0688.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0689.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0690.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0691.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0692.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0693.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0694.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0695.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0696.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0697.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0698.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0699.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0700.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0701.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0702.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0703.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0704.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0705.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0706.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0707.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0708.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0709.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0710.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0711.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0712.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0713.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0714.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0715.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0716.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0717.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0718.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0719.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0720.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0721.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0722.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0723.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0724.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0725.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0726.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0727.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0728.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0729.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0730.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0731.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0732.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0733.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0734.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0735.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0736.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0737.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0738.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0739.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0740.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0741.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0742.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0743.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0744.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0745.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0746.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0747.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0748.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0749.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0750.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0751.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0752.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0753.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0754.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0755.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0756.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0757.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0758.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0759.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0760.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0761.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0762.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0763.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0764.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0765.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0766.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0767.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0768.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0769.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0770.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0771.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0772.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0773.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0774.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0775.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0776.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0777.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0778.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0779.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0780.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0781.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0782.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0783.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0784.json create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0785.json create mode 100644 
lib/codecs/tests/data/native_encoding/json/pre-v41/0786.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0787.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0788.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0789.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0790.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0791.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0792.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0793.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0794.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0795.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0796.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0797.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0798.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0799.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0800.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0801.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0802.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0803.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0804.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0805.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0806.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0807.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0808.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0809.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0810.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0811.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0812.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0813.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0814.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0815.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0816.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0817.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0818.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0819.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0820.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0821.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0822.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0823.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0824.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0825.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0826.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0827.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0828.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0829.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0830.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0831.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0832.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0833.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0834.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0835.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0836.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0837.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0838.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0839.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0840.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0841.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0842.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0843.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0844.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0845.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0846.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0847.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0848.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0849.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0850.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0851.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0852.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0853.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0854.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0855.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0856.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0857.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0858.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0859.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0860.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0861.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0862.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0863.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0864.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0865.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0866.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0867.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0868.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0869.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0870.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0871.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0872.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0873.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0874.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0875.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0876.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0877.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0878.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0879.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0880.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0881.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0882.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0883.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0884.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0885.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0886.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0887.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0888.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0889.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0890.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0891.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0892.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0893.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0894.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0895.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0896.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0897.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0898.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0899.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0900.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0901.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0902.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0903.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0904.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0905.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0906.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0907.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0908.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0909.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0910.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0911.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0912.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0913.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0914.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0915.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0916.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0917.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0918.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0919.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0920.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0921.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0922.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0923.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0924.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0925.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0926.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0927.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0928.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0929.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0930.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0931.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0932.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0933.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0934.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0935.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0936.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0937.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0938.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0939.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0940.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0941.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0942.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0943.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0944.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0945.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0946.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0947.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0948.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0949.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0950.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0951.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0952.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0953.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0954.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0955.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0956.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0957.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0958.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0959.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0960.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0961.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0962.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0963.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0964.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0965.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0966.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0967.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0968.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0969.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0970.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0971.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0972.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0973.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0974.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0975.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0976.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0977.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0978.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0979.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0980.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0981.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0982.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0983.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0984.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0985.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0986.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0987.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0988.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0989.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0990.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0991.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0992.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0993.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0994.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0995.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0996.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0997.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0998.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/0999.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1000.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1001.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1002.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1003.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1004.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1005.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1006.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1007.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1008.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1009.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1010.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1011.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1012.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1013.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1014.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1015.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1016.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1017.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1018.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1019.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1020.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1021.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1022.json
create mode 100644 lib/codecs/tests/data/native_encoding/json/pre-v41/1023.json
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0000.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0001.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0002.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0003.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0004.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0005.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0006.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0007.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0008.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0009.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0010.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0011.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0012.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0013.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0014.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0015.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0016.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0017.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0018.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0019.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0020.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0021.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0022.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0023.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0024.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0025.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0026.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0027.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0028.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0029.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0030.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0031.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0032.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0033.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0034.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0035.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0036.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0037.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0038.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0039.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0040.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0041.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0042.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0043.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0044.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0045.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0046.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0047.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0048.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0049.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0050.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0051.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0052.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0053.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0054.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0055.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0056.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0057.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0058.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0059.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0060.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0061.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0062.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0063.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0064.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0065.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0066.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0067.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0068.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0069.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0070.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0071.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0072.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0073.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0074.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0075.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0076.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0077.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0078.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0079.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0080.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0081.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0082.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0083.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0084.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0085.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0086.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0087.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0088.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0089.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0090.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0091.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0092.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0093.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0094.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0095.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0096.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0097.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0098.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0099.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0100.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0101.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0102.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0103.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0104.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0105.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0106.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0107.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0108.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0109.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0110.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0111.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0112.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0113.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0114.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0115.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0116.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0117.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0118.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0119.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0120.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0121.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0122.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0123.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0124.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0125.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0126.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0127.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0128.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0129.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0130.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0131.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0132.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0133.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0134.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0135.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0136.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0137.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0138.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0139.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0140.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0141.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0142.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0143.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0144.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0145.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0146.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0147.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0148.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0149.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0150.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0151.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0152.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0153.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0154.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0155.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0156.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0157.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0158.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0159.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0160.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0161.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0162.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0163.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0164.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0165.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0166.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0167.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0168.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0169.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0170.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0171.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0172.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0173.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0174.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0175.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0176.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0177.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0178.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0179.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0180.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0181.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0182.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0183.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0184.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0185.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0186.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0187.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0188.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0189.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0190.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0191.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0192.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0193.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0194.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0195.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0196.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0197.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0198.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0199.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0200.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0201.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0202.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0203.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0204.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0205.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0206.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0207.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0208.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0209.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0210.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0211.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0212.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0213.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0214.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0215.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0216.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0217.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0218.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0219.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0220.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0221.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0222.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0223.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0224.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0225.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0226.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0227.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0228.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0229.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0230.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0231.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0232.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0233.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0234.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0235.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0236.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0237.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0238.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0239.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0240.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0241.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0242.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0243.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0244.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0245.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0246.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0247.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0248.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0249.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0250.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0251.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0252.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0253.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0254.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0255.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0256.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0257.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0258.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0259.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0260.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0261.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0262.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0263.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0264.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0265.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0266.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0267.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0268.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0269.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0270.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0271.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0272.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0273.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0274.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0275.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0276.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0277.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0278.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0279.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0280.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0281.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0282.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0283.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0284.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0285.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0286.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0287.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0288.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0289.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0290.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0291.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0292.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0293.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0294.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0295.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0296.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0297.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0298.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0299.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0300.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0301.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0302.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0303.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0304.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0305.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0306.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0307.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0308.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0309.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0310.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0311.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0312.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0313.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0314.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0315.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0316.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0317.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0318.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0319.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0320.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0321.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0322.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0323.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0324.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0325.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0326.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0327.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0328.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0329.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0330.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0331.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0332.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0333.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0334.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0335.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0336.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0337.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0338.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0339.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0340.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0341.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0342.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0343.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0344.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0345.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0346.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0347.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0348.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0349.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0350.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0351.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0352.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0353.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0354.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0355.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0356.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0357.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0358.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0359.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0360.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0361.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0362.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0363.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0364.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0365.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0366.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0367.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0368.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0369.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0370.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0371.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0372.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0373.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0374.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0375.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0376.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0377.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0378.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0379.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0380.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0381.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0382.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0383.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0384.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0385.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0386.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0387.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0388.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0389.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0390.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0391.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0392.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0393.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0394.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0395.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0396.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0397.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0398.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0399.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0400.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0401.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0402.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0403.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0404.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0405.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0406.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0407.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0408.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0409.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0410.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0411.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0412.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0413.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0414.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0415.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0416.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0417.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0418.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0419.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0420.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0421.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0422.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0423.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0424.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0425.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0426.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0427.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0428.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0429.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0430.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0431.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0432.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0433.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0434.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0435.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0436.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0437.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0438.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0439.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0440.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0441.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0442.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0443.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0444.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0445.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0446.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0447.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0448.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0449.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0450.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0451.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0452.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0453.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0454.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0455.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0456.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0457.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0458.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0459.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0460.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0461.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0462.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0463.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0464.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0465.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0466.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0467.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0468.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0469.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0470.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0471.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0472.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0473.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0474.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0475.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0476.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0477.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0478.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0479.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0480.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0481.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0482.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0483.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0484.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0485.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0486.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0487.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0488.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0489.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0490.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0491.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0492.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0493.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0494.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0495.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0496.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0497.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0498.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0499.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0500.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0501.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0502.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0503.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0504.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0505.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0506.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0507.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0508.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0509.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0510.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0511.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0512.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0513.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0514.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0515.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0516.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0517.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0518.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0519.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0520.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0521.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0522.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0523.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0524.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0525.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0526.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0527.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0528.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0529.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0530.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0531.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0532.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0533.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0534.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0535.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0536.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0537.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0538.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0539.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0540.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0541.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0542.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0543.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0544.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0545.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0546.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0547.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0548.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0549.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0550.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0551.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0552.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0553.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0554.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0555.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0556.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0557.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0558.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0559.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0560.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0561.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0562.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0563.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0564.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0565.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0566.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0567.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0568.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0569.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0570.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0571.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0572.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0573.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0574.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0575.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0576.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0577.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0578.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0579.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0580.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0581.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0582.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0583.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0584.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0585.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0586.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0587.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0588.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0589.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0590.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0591.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0592.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0593.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0594.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0595.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0596.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0597.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0598.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0599.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0600.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0601.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0602.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0603.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0604.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0605.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0606.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0607.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0608.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0609.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0610.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0611.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0612.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0613.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0614.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0615.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0616.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0617.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0618.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0619.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0620.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0621.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0622.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0623.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0624.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0625.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0626.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0627.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0628.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0629.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0630.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0631.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0632.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0633.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0634.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0635.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0636.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0637.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0638.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0639.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0640.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0641.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0642.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0643.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0644.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0645.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0646.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0647.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0648.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0649.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0650.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0651.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0652.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0653.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0654.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0655.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0656.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0657.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0658.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0659.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0660.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0661.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0662.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0663.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0664.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0665.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0666.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0667.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0668.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0669.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0670.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0671.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0672.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0673.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0674.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0675.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0676.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0677.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0678.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0679.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0680.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0681.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0682.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0683.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0684.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0685.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0686.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0687.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0688.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0689.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0690.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0691.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0692.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0693.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0694.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0695.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0696.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0697.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0698.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0699.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0700.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0701.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0702.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0703.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0704.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0705.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0706.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0707.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0708.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0709.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0710.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0711.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0712.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0713.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0714.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0715.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0716.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0717.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0718.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0719.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0720.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0721.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0722.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0723.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0724.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0725.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0726.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0727.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0728.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0729.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0730.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0731.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0732.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0733.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0734.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0735.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0736.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0737.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0738.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0739.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0740.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0741.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0742.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0743.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0744.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0745.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0746.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0747.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0748.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0749.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0750.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0751.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0752.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0753.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0754.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0755.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0756.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0757.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0758.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0759.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0760.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0761.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0762.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0763.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0764.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0765.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0766.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0767.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0768.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0769.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0770.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0771.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0772.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0773.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0774.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0775.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0776.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0777.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0778.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0779.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0780.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0781.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0782.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0783.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0784.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0785.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0786.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0787.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0788.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0789.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0790.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0791.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0792.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0793.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0794.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0795.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0796.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0797.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0798.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0799.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0800.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0801.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0802.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0803.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0804.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0805.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0806.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0807.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0808.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0809.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0810.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0811.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0812.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0813.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0814.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0815.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0816.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0817.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0818.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0819.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0820.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0821.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0822.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0823.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0824.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0825.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0826.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0827.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0828.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0829.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0830.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0831.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0832.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0833.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0834.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0835.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0836.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0837.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0838.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0839.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0840.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0841.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0842.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0843.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0844.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0845.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0846.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0847.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0848.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0849.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0850.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0851.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0852.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0853.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0854.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0855.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0856.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0857.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0858.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0859.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0860.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0861.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0862.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0863.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0864.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0865.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0866.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0867.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0868.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0869.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0870.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0871.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0872.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0873.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0874.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0875.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0876.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0877.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0878.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0879.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0880.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0881.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0882.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0883.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0884.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0885.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0886.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0887.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0888.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0889.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0890.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0891.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0892.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0893.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0894.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0895.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0896.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0897.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0898.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0899.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0900.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0901.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0902.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0903.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0904.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0905.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0906.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0907.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0908.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0909.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0910.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0911.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0912.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0913.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0914.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0915.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0916.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0917.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0918.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0919.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0920.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0921.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0922.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0923.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0924.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0925.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0926.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0927.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0928.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0929.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0930.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0931.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0932.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0933.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0934.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0935.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0936.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0937.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0938.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0939.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0940.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0941.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0942.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0943.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0944.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0945.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0946.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0947.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0948.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0949.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0950.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0951.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0952.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0953.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0954.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0955.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0956.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0957.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0958.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0959.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0960.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0961.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0962.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0963.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0964.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0965.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0966.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0967.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0968.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0969.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0970.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0971.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0972.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0973.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0974.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0975.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0976.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0977.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0978.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0979.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0980.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0981.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0982.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0983.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0984.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0985.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0986.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0987.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0988.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0989.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0990.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0991.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0992.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0993.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0994.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0995.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0996.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0997.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0998.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/0999.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1000.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1001.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1002.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1003.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1004.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1005.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1006.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1007.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1008.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1009.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1010.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1011.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1012.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1013.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1014.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1015.pb create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1016.pb 
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1017.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1018.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1019.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1020.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1021.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1022.pb
create mode 100644 lib/codecs/tests/data/native_encoding/proto/pre-v41/1023.pb
delete mode 100644 lib/codecs/tests/data/protobuf/protos/test.desc
delete mode 100644 lib/codecs/tests/data/protobuf/protos/test.proto
create mode 100644 lib/dnstap-parser/Cargo.toml
create mode 100644 lib/dnstap-parser/build.rs
create mode 100644 lib/dnstap-parser/proto/dnstap.proto
create mode 100644 lib/dnstap-parser/src/internal_events.rs
create mode 100644 lib/dnstap-parser/src/lib.rs
rename {src/sources/dnstap => lib/dnstap-parser/src}/parser.rs (90%)
rename {src/sources/dnstap => lib/dnstap-parser/src}/schema.rs (98%)
create mode 100644 lib/dnstap-parser/src/vrl_functions/mod.rs
create mode 100644 lib/dnstap-parser/src/vrl_functions/parse_dnstap.rs
create mode 100644 lib/loki-logproto/proto/push.proto
create mode 100644 lib/opentelemetry-proto/src/common.rs
rename lib/opentelemetry-proto/src/{convert.rs => logs.rs} (72%)
create mode 100644 lib/opentelemetry-proto/src/metrics.rs
create mode 100644 lib/opentelemetry-proto/src/spans.rs
create mode 100644 lib/vector-buffers/src/cast_utils.rs
create mode 100644 lib/vector-common/src/constants.rs
rename {src/config => lib/vector-common/src}/id.rs (94%)
create mode 100644 lib/vector-core/src/config/metrics_expiration.rs
create mode 100644 lib/vector-core/src/metrics/metric_matcher.rs
create mode 100644 lib/vector-tap/Cargo.toml
create mode 100644 lib/vector-tap/src/controller.rs
create mode 100644 lib/vector-tap/src/lib.rs
rename {src/api/schema/events => lib/vector-tap/src}/notification.rs (52%)
create mode 100644 lib/vector-tap/src/topology.rs
create mode 100644 lib/vector-vrl/tests/resources/json-schema_definition.json
create mode 100644 lib/vector-vrl/tests/resources/protobuf_descriptor_set.desc
create mode 100644 lib/vector-vrl/tests/resources/public_suffix_list.dat
create mode 100644 lib/vector-vrl/web-playground/public/assets/dd_icon_rgb.svg
delete mode 100644 proto/buf.yaml
delete mode 100644 proto/dnstap.proto
rename proto/{ => third-party}/google/LICENSE-Apache-2.0.txt (100%)
rename proto/{ => third-party}/google/README (100%)
rename proto/{ => third-party}/google/api/annotations.proto (100%)
rename proto/{ => third-party}/google/api/client.proto (100%)
rename proto/{ => third-party}/google/api/field_behavior.proto (100%)
rename proto/{ => third-party}/google/api/http.proto (100%)
rename proto/{ => third-party}/google/api/resource.proto (100%)
rename proto/{ => third-party}/google/protobuf/LICENSE (100%)
rename proto/{ => third-party}/google/protobuf/any.proto (100%)
rename proto/{ => third-party}/google/protobuf/descriptor.proto (100%)
rename proto/{ => third-party}/google/protobuf/empty.proto (100%)
rename proto/{ => third-party}/google/protobuf/timestamp.proto (100%)
rename proto/{ => third-party}/google/pubsub/v1/pubsub.proto (100%)
rename proto/{ => third-party}/google/rpc/status.proto (100%)
rename {lib/vector-core/proto => proto/vector}/buf.yaml (100%)
rename proto/{ => vector}/dd_metric.proto (100%)
rename proto/{ => vector}/dd_trace.proto (100%)
rename proto/{ => vector}/ddsketch_full.proto (100%)
rename proto/{ => vector}/vector.proto (100%)
create mode 100644 regression/cases/datadog_agent_remap_blackhole/experiment.yaml
create mode 100644 regression/cases/datadog_agent_remap_blackhole_acks/experiment.yaml
create mode 100644 regression/cases/datadog_agent_remap_datadog_logs/experiment.yaml
create mode 100644 regression/cases/datadog_agent_remap_datadog_logs_acks/experiment.yaml
delete mode 100644 regression/cases/enterprise_http_to_http/data/.gitkeep
delete mode 100644 regression/cases/enterprise_http_to_http/lading/lading.yaml
delete mode 100644 regression/cases/enterprise_http_to_http/vector/vector.yaml
create mode 100644 regression/cases/fluent_elasticsearch/experiment.yaml
create mode 100644 regression/cases/http_elasticsearch/experiment.yaml
create mode 100644 regression/cases/http_text_to_http_json/experiment.yaml
create mode 100644 regression/cases/http_to_http_acks/experiment.yaml
create mode 100644 regression/cases/http_to_http_json/experiment.yaml
create mode 100644 regression/cases/http_to_http_noack/experiment.yaml
create mode 100644 regression/cases/http_to_s3/experiment.yaml
create mode 100644 regression/cases/otlp_grpc_to_blackhole/experiment.yaml
create mode 100644 regression/cases/socket_to_socket_blackhole/experiment.yaml
create mode 100644 regression/cases/splunk_hec_indexer_ack_blackhole/experiment.yaml
create mode 100644 regression/cases/splunk_hec_route_s3/experiment.yaml
create mode 100644 regression/cases/splunk_hec_to_splunk_hec_logs_acks/experiment.yaml
create mode 100644 regression/cases/splunk_hec_to_splunk_hec_logs_noack/experiment.yaml
create mode 100644 regression/cases/syslog_humio_logs/experiment.yaml
create mode 100644 regression/cases/syslog_log2metric_humio_metrics/experiment.yaml
create mode 100644 regression/cases/syslog_log2metric_splunk_hec_metrics/experiment.yaml
create mode 100644 regression/cases/syslog_loki/experiment.yaml
create mode 100644 regression/cases/syslog_splunk_hec_logs/experiment.yaml
create mode 100644 regression/config.yaml
delete mode 100644 regression/metadata.source
create mode 100755 scripts/ci-free-disk-space.sh
delete mode 100755 scripts/ci-int-e2e-test.sh
delete mode 100755 scripts/cross/bootstrap-centos.sh
delete mode 100755 scripts/cross/entrypoint-centos.sh
create mode 100644 scripts/e2e/opentelemetry-logs/README.md
create mode 100644 scripts/e2e/opentelemetry-logs/compose.yaml
create mode 100644 scripts/e2e/opentelemetry-logs/generator/Dockerfile
create mode 100755 scripts/e2e/opentelemetry-logs/generator/logs_generator.py
create mode 100644 scripts/e2e/opentelemetry-logs/generator/requirements.txt
create mode 100644 scripts/e2e/opentelemetry-logs/test.yaml
delete mode 100644 scripts/ensure-wasm-pack-installed.sh
create mode 100755 scripts/environment/binstall.sh
rename scripts/environment/{bootstrap-macos-10.sh => bootstrap-macos.sh} (57%)
rename scripts/environment/{bootstrap-ubuntu-20.04.sh => bootstrap-ubuntu-24.04.sh} (70%)
rename scripts/environment/{bootstrap-windows-2019.ps1 => bootstrap-windows-2022.ps1} (73%)
create mode 100755 scripts/int-e2e-test.sh
delete mode 100644 scripts/util/commit.rb
delete mode 100644 scripts/util/conventional_commit.rb
delete mode 100644 scripts/util/git_log_commit.rb
delete mode 100644 scripts/util/printer.rb
delete mode 100644 scripts/util/release.rb
delete mode 100644 scripts/util/version.rb
delete mode 100644 src/api/tap.rs
create mode 100644 src/aws/timeout.rs
create mode 100644 src/common/backoff.rs
create mode 100644 src/common/expansion.rs
rename src/{sources/util => common}/http/error.rs (83%)
create mode 100644 src/common/http/mod.rs
create mode 100644 src/common/http/server_auth.rs
create mode 100644 src/common/mqtt.rs
create mode 100644 src/common/websocket.rs
create mode 100644 src/config/dot_graph.rs
delete mode 100644 src/config/enterprise.rs
create mode 100644 src/enrichment_tables/memory/config.rs
create mode 100644 src/enrichment_tables/memory/internal_events.rs
create mode 100644 src/enrichment_tables/memory/mod.rs
create mode 100644 src/enrichment_tables/memory/source.rs
create mode 100644 src/enrichment_tables/memory/table.rs
create mode 100644 src/enrichment_tables/mmdb.rs
create mode 100644 src/internal_events/datadog_agent.rs
delete mode 100644 src/internal_events/internal_metrics.rs
create mode 100644 src/internal_events/websocket_server.rs
create mode 100644 src/internal_events/window.rs
create mode 100644 src/secrets/aws_secrets_manager.rs
create mode 100644 src/secrets/directory.rs
create mode 100644 src/secrets/file.rs
create mode 100644 src/sinks/amqp/channel.rs
delete mode 100644 src/sinks/databend/api.rs
delete mode 100644 src/sinks/databend/error.rs
rename src/sinks/{gcp => gcp_chronicle}/chronicle_unstructured.rs (73%)
create mode 100644 src/sinks/gcp_chronicle/compression.rs
create mode 100644 src/sinks/gcp_chronicle/mod.rs
create mode 100644 src/sinks/gcp_chronicle/partitioner.rs
create mode 100644 src/sinks/gcp_chronicle/sink.rs
delete mode 100644 src/sinks/greptimedb/integration_tests.rs
create mode 100644 src/sinks/greptimedb/logs/config.rs
create mode 100644 src/sinks/greptimedb/logs/http_request_builder.rs
create mode 100644 src/sinks/greptimedb/logs/integration_tests.rs
create mode 100644 src/sinks/greptimedb/logs/mod.rs
create mode 100644 src/sinks/greptimedb/logs/sink.rs
rename src/sinks/greptimedb/{ => metrics}/batch.rs (87%)
create mode 100644 src/sinks/greptimedb/metrics/config.rs
create mode 100644 src/sinks/greptimedb/metrics/integration_tests.rs
create mode 100644 src/sinks/greptimedb/metrics/mod.rs
create mode 100644 src/sinks/greptimedb/metrics/request.rs
rename src/sinks/greptimedb/{ => metrics}/request_builder.rs (71%)
create mode 100644 src/sinks/greptimedb/metrics/service.rs
rename src/sinks/greptimedb/{ => metrics}/sink.rs (62%)
delete mode 100644 src/sinks/greptimedb/service.rs
create mode 100644 src/sinks/keep/config.rs
create mode 100644 src/sinks/keep/encoder.rs
create mode 100644 src/sinks/keep/mod.rs
create mode 100644 src/sinks/keep/request_builder.rs
create mode 100644 src/sinks/keep/service.rs
create mode 100644 src/sinks/keep/sink.rs
create mode 100644 src/sinks/opentelemetry/mod.rs
create mode 100644 src/sinks/postgres/config.rs
create mode 100644 src/sinks/postgres/integration_tests.rs
create mode 100644 src/sinks/postgres/mod.rs
create mode 100644 src/sinks/postgres/service.rs
create mode 100644 src/sinks/postgres/sink.rs
create mode 100644 src/sinks/util/datagram.rs
create mode 100644 src/sinks/websocket_server/buffering.rs
create mode 100644 src/sinks/websocket_server/config.rs
create mode 100644 src/sinks/websocket_server/mod.rs
create mode 100644 src/sinks/websocket_server/sink.rs
create mode 100644 src/sources/host_metrics/process.rs
create mode 100644 src/sources/host_metrics/tcp.rs
create mode 100644 src/sources/mqtt/config.rs
create mode 100644 src/sources/mqtt/integration_tests.rs
create mode 100644 src/sources/mqtt/mod.rs
create mode 100644 src/sources/mqtt/source.rs
create mode 100644 src/sources/static_metrics.rs
delete mode 100644 src/sources/util/http/auth.rs
create mode 100644 src/sources/util/http/headers.rs
create mode 100644 src/sources/websocket/config.rs
create mode 100644 src/sources/websocket/mod.rs
create mode 100644 src/sources/websocket/source.rs
create mode 100644 src/test_util/compression.rs
create mode 100644 src/test_util/integration.rs
create mode 100644 src/transforms/dedupe/common.rs
create mode 100644 src/transforms/dedupe/timed_transform.rs
create mode 100644 src/transforms/exclusive_route/config.rs
create mode 100644 src/transforms/exclusive_route/mod.rs
create mode 100644 src/transforms/exclusive_route/tests.rs
create mode 100644 src/transforms/exclusive_route/transform.rs
create mode 100644 src/transforms/reduce/config.rs
create mode 100644 src/transforms/reduce/transform.rs
create mode 100644 src/transforms/sample/tests.rs
create mode 100644 src/transforms/throttle/config.rs
create mode 100644 src/transforms/throttle/mod.rs
create mode 100644 src/transforms/throttle/rate_limiter.rs
rename src/transforms/{throttle.rs => throttle/transform.rs} (55%)
create mode 100644 src/transforms/window/config.rs
create mode 100644 src/transforms/window/mod.rs
create mode 100644 src/transforms/window/transform.rs
create mode 100644 testing/github-20228/config.toml
create mode 100644 tests/data/GeoIP2-Anonymous-IP-Test.mmdb
create mode 100644 tests/data/ca/intermediate_server/certs/pulsar-chain.cert.pem
create mode 100644 tests/data/ca/intermediate_server/certs/pulsar.cert.pem
create mode 100644 tests/data/ca/intermediate_server/csr/pulsar.csr.pem
create mode 100644 tests/data/ca/intermediate_server/newcerts/1008.pem
create mode 100644 tests/data/ca/intermediate_server/private/kafka.pass
create mode 100644 tests/data/ca/intermediate_server/private/pulsar.key.pem
create mode 100644 tests/data/custom-type.mmdb
create mode 100644 tests/data/e2e/opentelemetry/logs/collector-sink.yaml
create mode 100644 tests/data/e2e/opentelemetry/logs/collector-source.yaml
create mode 100644 tests/data/e2e/opentelemetry/logs/output/.gitignore
create mode 100644 tests/data/e2e/opentelemetry/logs/vector.yaml
delete mode 100644 tests/data/enterprise/base.toml
delete mode 100644 tests/data/enterprise/missing_api_key.toml
create mode 100644 tests/data/protobuf/.gitignore
create mode 100644 tests/data/protobuf/Makefile
create mode 100644 tests/data/protobuf/README.md
create mode 100644 tests/data/protobuf/serialize.py
create mode 100644 tests/data/protobuf/test_proto.desc
create mode 100644 tests/data/protobuf/test_proto.pb
create mode 100644 tests/data/protobuf/test_proto.proto
create mode 100644 tests/data/secret-backends/directory-secrets/jkl
create mode 100644 tests/data/secret-backends/file-secrets.json
create mode 100644 tests/e2e/opentelemetry/logs/mod.rs
create mode 100644 tests/e2e/opentelemetry/mod.rs
create mode 100644 tests/validation/components/sources/datadog_agent.yaml
create mode 100644 vdev/src/commands/integration/build.rs
create mode 100644 vdev/src/testing/build.rs
create mode 100644 vdev/src/testing/docker.rs
create mode 100644 website/.env.example
create mode 100644 website/.prettierrc.json
create mode 100644 website/content/en/blog/highlights-february-2025.md
delete mode 100644 website/content/en/docs/about/_index.md
delete mode 100644 website/content/en/docs/about/under-the-hood/_index.md
delete mode 100644 website/content/en/docs/about/under-the-hood/architecture/_index.md
delete mode 100644 website/content/en/docs/about/under-the-hood/networking/_index.md
delete mode 100644 website/content/en/docs/about/what-is-observability-pipelines.md
rename website/content/en/docs/administration/{tuning => optimization}/_index.md (79%)
rename website/content/en/docs/administration/{tuning => optimization}/pgo.md (100%)
create mode 100644 website/content/en/docs/architecture/_index.md
rename website/content/en/docs/{about/under-the-hood/networking => architecture}/arc.md (95%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/buffering-model.md (99%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/concurrency-model.md (94%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/data-model/_index.md (89%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/data-model/log.md (100%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/data-model/metric.md (79%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/end-to-end-acknowledgements.md (95%)
rename website/content/en/docs/{about/under-the-hood => architecture}/guarantees.md (95%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/pipeline-model.md (95%)
rename website/content/en/docs/{about/under-the-hood => }/architecture/runtime-model.md (94%)
delete mode 100644 website/content/en/docs/example.md
rename website/content/en/docs/{about/what-is-vector.md => introduction/_index.md} (92%)
rename website/content/en/docs/{about => introduction}/concepts.md (82%)
rename website/content/en/docs/reference/configuration/sinks/{greptimedb.md => greptimedb_logs.md} (67%)
create mode 100644 website/content/en/docs/reference/configuration/sinks/greptimedb_metrics.md
create mode 100644 website/content/en/docs/reference/configuration/sinks/keep.md
create mode 100644 website/content/en/docs/reference/configuration/sinks/opentelemetry.md
create mode 100644 website/content/en/docs/reference/configuration/sinks/postgres.md
create mode 100644 website/content/en/docs/reference/configuration/sinks/websocket_server.md
create mode 100644 website/content/en/docs/reference/configuration/sources/mqtt.md
create mode 100644 website/content/en/docs/reference/configuration/sources/static_metrics.md
create mode 100644 website/content/en/docs/reference/configuration/sources/websocket.md
create mode 100644 website/content/en/docs/reference/configuration/transforms/exclusive_route.md
create mode 100644 website/content/en/docs/reference/configuration/transforms/window.md
create mode 100644 website/content/en/docs/reference/environment_variables.md
create mode 100644 website/content/en/guides/developer/_index.md
create mode 100644 website/content/en/guides/developer/config-autocompletion.md
create mode 100644 website/content/en/guides/developer/debugging.md
create mode 100644 website/content/en/guides/getting-started/_index.md
rename website/content/en/guides/{level-up => getting-started}/transformation.md (89%)
create mode 100644 website/content/en/guides/level-up/log_namespace.md
create mode 100644 website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md
create mode 100644 website/content/en/highlights/2024-05-07-0-38-0-upgrade-guide.md
create mode 100644 website/content/en/highlights/2024-06-17-0-39-0-upgrade-guide.md
create mode 100644 website/content/en/highlights/2024-07-29-0-40-0-upgrade-guide.md
create mode 100644 website/content/en/highlights/2024-11-07-exclusive_route.md
create mode 100644 website/content/en/highlights/2025-01-13-0-44-0-upgrade-guide.md
create mode 100644 website/content/en/highlights/2025-02-24-0-45-0-upgrade-guide.md
create mode 100644 website/content/en/highlights/2025-02-24-memory_enrichment_table.md
create mode 100644 website/content/en/releases/0.37.0.md
create mode 100644 website/content/en/releases/0.37.1.md
create mode 100644 website/content/en/releases/0.38.0.md
create mode 100644 website/content/en/releases/0.39.0.md
create mode 100644 website/content/en/releases/0.40.0.md
create mode 100644 website/content/en/releases/0.40.1.md
create mode 100644 website/content/en/releases/0.40.2.md
create mode 100644 website/content/en/releases/0.41.0.md
create mode 100644 website/content/en/releases/0.41.1.md
create mode 100644 website/content/en/releases/0.42.0.md
create mode 100644 website/content/en/releases/0.43.0.md
create mode 100644 website/content/en/releases/0.43.1.md
create mode 100644 website/content/en/releases/0.44.0.md
create mode 100644 website/content/en/releases/0.45.0.md
create mode 100644 website/content/en/releases/0.46.0.md
create mode 100644 website/content/en/releases/0.46.1.md
create mode 100644 website/content/en/releases/0.47.0.md
create mode 100644 website/content/en/releases/0.48.0.md
delete mode 100644 website/cue/reference/authors.cue
delete mode 100644 website/cue/reference/components/base/sources.cue
delete mode 100644 website/cue/reference/components/base/transforms.cue
rename website/cue/reference/components/{base => generated}/sinks.cue (81%)
create mode 100644 website/cue/reference/components/generated/sources.cue
create mode 100644 website/cue/reference/components/generated/transforms.cue
delete mode 100644 website/cue/reference/components/sinks/base/unit_test_stream.cue
rename website/cue/reference/components/sinks/{base => generated}/amqp.cue (65%)
rename website/cue/reference/components/sinks/{base => generated}/appsignal.cue (88%)
rename website/cue/reference/components/sinks/{base => generated}/aws_cloudwatch_logs.cue (74%)
rename website/cue/reference/components/sinks/{base => generated}/aws_cloudwatch_metrics.cue (85%)
rename website/cue/reference/components/sinks/{base => generated}/aws_kinesis_firehose.cue (75%)
rename website/cue/reference/components/sinks/{base => generated}/aws_kinesis_streams.cue (75%)
rename website/cue/reference/components/sinks/{base => generated}/aws_s3.cue (78%)
rename website/cue/reference/components/sinks/{base => generated}/aws_sns.cue (74%)
rename website/cue/reference/components/sinks/{base => generated}/aws_sqs.cue (74%)
rename website/cue/reference/components/sinks/{base => generated}/axiom.cue (79%)
rename website/cue/reference/components/sinks/{base => generated}/azure_blob.cue (72%)
rename website/cue/reference/components/sinks/{base => generated}/azure_monitor_logs.cue (89%)
rename website/cue/reference/components/sinks/{base => generated}/blackhole.cue (74%)
create mode 100644 website/cue/reference/components/sinks/generated/clickhouse.cue
rename website/cue/reference/components/sinks/{base => generated}/console.cue (61%)
rename website/cue/reference/components/sinks/{base => generated}/databend.cue (66%)
rename website/cue/reference/components/sinks/{base => generated}/datadog_events.cue (87%)
rename website/cue/reference/components/sinks/{base => generated}/datadog_logs.cue (85%)
rename website/cue/reference/components/sinks/{base => generated}/datadog_metrics.cue (87%)
rename website/cue/reference/components/sinks/{base => generated}/datadog_traces.cue (88%)
rename website/cue/reference/components/sinks/{base => generated}/elasticsearch.cue (82%)
rename website/cue/reference/components/sinks/{base => generated}/file.cue (64%)
rename website/cue/reference/components/sinks/{base => generated}/gcp_chronicle_unstructured.cue (70%)
rename website/cue/reference/components/sinks/{base => generated}/gcp_cloud_storage.cue (77%)
rename website/cue/reference/components/sinks/{base => generated}/gcp_pubsub.cue (75%)
rename website/cue/reference/components/sinks/{base => generated}/gcp_stackdriver_logs.cue (87%)
rename website/cue/reference/components/sinks/{base => generated}/gcp_stackdriver_metrics.cue (88%)
rename website/cue/reference/components/sinks/{base => generated}/greptimedb.cue (80%)
rename website/cue/reference/components/sinks/{base/clickhouse.cue => generated/greptimedb_logs.cue} (76%)
create mode 100644 website/cue/reference/components/sinks/generated/greptimedb_metrics.cue
rename website/cue/reference/components/sinks/{base => generated}/honeycomb.cue (83%)
rename website/cue/reference/components/sinks/{base => generated}/http.cue (65%)
rename website/cue/reference/components/sinks/{base => generated}/humio_logs.cue (75%)
rename website/cue/reference/components/sinks/{base => generated}/humio_metrics.cue (89%)
rename website/cue/reference/components/sinks/{base => generated}/influxdb_logs.cue (90%)
rename website/cue/reference/components/sinks/{base => generated}/influxdb_metrics.cue (88%)
rename website/cue/reference/components/sinks/{base => generated}/kafka.cue (70%)
create mode 100644 website/cue/reference/components/sinks/generated/keep.cue
rename website/cue/reference/components/sinks/{base => generated}/logdna.cue (91%)
rename website/cue/reference/components/sinks/{base => generated}/loki.cue (65%)
rename website/cue/reference/components/sinks/{base => generated}/mezmo.cue (91%)
rename website/cue/reference/components/sinks/{base => generated}/mqtt.cue (66%)
rename website/cue/reference/components/sinks/{base => generated}/nats.cue (74%)
rename website/cue/reference/components/sinks/{base => generated}/new_relic.cue (91%)
create mode 100644 website/cue/reference/components/sinks/generated/opentelemetry.cue
rename website/cue/reference/components/sinks/{base => generated}/papertrail.cue (66%)
create mode 100644 website/cue/reference/components/sinks/generated/postgres.cue
rename website/cue/reference/components/sinks/{base => generated}/prometheus_exporter.cue (57%)
rename website/cue/reference/components/sinks/{base => generated}/prometheus_remote_write.cue (84%)
rename website/cue/reference/components/sinks/{base => generated}/pulsar.cue (63%)
rename website/cue/reference/components/sinks/{base => generated}/redis.cue (66%)
rename website/cue/reference/components/sinks/{base => generated}/sematext_logs.cue (90%)
rename website/cue/reference/components/sinks/{base => generated}/sematext_metrics.cue (90%)
rename website/cue/reference/components/sinks/{base => generated}/socket.cue (64%)
rename website/cue/reference/components/sinks/{base => generated}/splunk_hec_logs.cue (77%)
rename website/cue/reference/components/sinks/{base => generated}/splunk_hec_metrics.cue (90%)
rename website/cue/reference/components/sinks/{base => generated}/statsd.cue (84%)
rename website/cue/reference/components/sinks/{base => generated}/unit_test.cue (84%)
create mode 100644 website/cue/reference/components/sinks/generated/unit_test_stream.cue
rename website/cue/reference/components/sinks/{base => generated}/vector.cue (86%)
rename website/cue/reference/components/sinks/{base => generated}/webhdfs.cue (66%)
rename website/cue/reference/components/sinks/{base => generated}/websocket.cue (55%)
create mode 100644 website/cue/reference/components/sinks/generated/websocket_server.cue
create mode 100644 website/cue/reference/components/sinks/greptimedb_logs.cue
rename website/cue/reference/components/sinks/{greptimedb.cue => greptimedb_metrics.cue} (87%)
create mode 100644 website/cue/reference/components/sinks/keep.cue
create mode 100644 website/cue/reference/components/sinks/opentelemetry.cue
create mode 100644 website/cue/reference/components/sinks/postgres.cue
create mode 100644 website/cue/reference/components/sinks/websocket_server.cue
delete mode 100644 website/cue/reference/components/sources/base/nginx_metrics.cue
delete mode 100644 website/cue/reference/components/sources/base/unit_test.cue
delete mode 100644 website/cue/reference/components/sources/base/unit_test_stream.cue
rename website/cue/reference/components/sources/{base => generated}/amqp.cue (69%)
rename website/cue/reference/components/sources/{base => generated}/apache_metrics.cue (89%)
rename website/cue/reference/components/sources/{base => generated}/aws_ecs_metrics.cue (97%)
rename website/cue/reference/components/sources/{base => generated}/aws_kinesis_firehose.cue (73%)
rename website/cue/reference/components/sources/{base => generated}/aws_s3.cue (70%)
rename website/cue/reference/components/sources/{base => generated}/aws_sqs.cue (72%)
rename website/cue/reference/components/sources/{base => generated}/datadog_agent.cue (71%)
rename website/cue/reference/components/sources/{base => generated}/demo_logs.cue (68%)
rename website/cue/reference/components/sources/{base => generated}/dnstap.cue (88%)
rename website/cue/reference/components/sources/{base => generated}/docker_logs.cue (98%)
rename website/cue/reference/components/sources/{base => generated}/eventstoredb_metrics.cue (89%)
rename website/cue/reference/components/sources/{base => generated}/exec.cue (69%)
rename website/cue/reference/components/sources/{base => generated}/file.cue (96%)
rename website/cue/reference/components/sources/{base => generated}/file_descriptor.cue (65%)
rename website/cue/reference/components/sources/{base => generated}/fluent.cue (63%)
rename website/cue/reference/components/sources/{base => generated}/gcp_pubsub.cue (73%)
rename website/cue/reference/components/sources/{base => generated}/heroku_logs.cue (66%)
rename website/cue/reference/components/sources/{base => generated}/host_metrics.cue (86%)
rename website/cue/reference/components/sources/{base => generated}/http.cue (67%)
rename website/cue/reference/components/sources/{base => generated}/http_client.cue (56%)
rename website/cue/reference/components/sources/{base => generated}/http_server.cue (66%)
rename website/cue/reference/components/sources/{base => generated}/internal_logs.cue (87%)
rename website/cue/reference/components/sources/{base => generated}/internal_metrics.cue (92%)
rename website/cue/reference/components/sources/{base => generated}/journald.cue (97%)
rename website/cue/reference/components/sources/{base => generated}/kafka.cue (75%)
rename website/cue/reference/components/sources/{base => generated}/kubernetes_logs.cue (93%)
rename website/cue/reference/components/sources/{base => generated}/logstash.cue (80%)
rename website/cue/reference/components/sources/{base => generated}/mongodb_metrics.cue (92%)
create mode 100644 website/cue/reference/components/sources/generated/mqtt.cue
rename website/cue/reference/components/sources/{base => generated}/nats.cue (72%)
create mode 100644 website/cue/reference/components/sources/generated/nginx_metrics.cue
rename website/cue/reference/components/sources/{base => generated}/opentelemetry.cue (79%)
rename website/cue/reference/components/sources/{base => generated}/postgresql_metrics.cue (96%)
rename website/cue/reference/components/sources/{base => generated}/prometheus_pushgateway.cue (70%)
rename website/cue/reference/components/sources/{base => generated}/prometheus_remote_write.cue (69%)
rename website/cue/reference/components/sources/{base => generated}/prometheus_scrape.cue (52%)
rename website/cue/reference/components/sources/{base => generated}/pulsar.cue (68%)
rename website/cue/reference/components/sources/{base => generated}/redis.cue (67%)
rename website/cue/reference/components/sources/{base => generated}/socket.cue (69%)
rename website/cue/reference/components/sources/{base => generated}/splunk_hec.cue (89%)
create mode 100644 website/cue/reference/components/sources/generated/static_metrics.cue
rename website/cue/reference/components/sources/{base => generated}/statsd.cue (72%)
rename website/cue/reference/components/sources/{base => generated}/stdin.cue (65%)
rename website/cue/reference/components/sources/{base => generated}/syslog.cue (84%)
create mode 100644 website/cue/reference/components/sources/generated/unit_test.cue
create mode 100644 website/cue/reference/components/sources/generated/unit_test_stream.cue
rename website/cue/reference/components/sources/{base => generated}/vector.cue (81%)
create mode 100644 website/cue/reference/components/sources/generated/websocket.cue
create mode 100644 website/cue/reference/components/sources/mqtt.cue
create mode 100644 website/cue/reference/components/sources/static_metrics.cue
create mode 100644 website/cue/reference/components/sources/websocket.cue
delete mode 100644 website/cue/reference/components/transforms/base/aggregate.cue
delete mode 100644 website/cue/reference/components/transforms/base/sample.cue
delete mode 100644 website/cue/reference/components/transforms/base/tag_cardinality_limit.cue
create mode 100644 website/cue/reference/components/transforms/exclusive_route.cue
create mode 100644 website/cue/reference/components/transforms/generated/aggregate.cue
rename website/cue/reference/components/transforms/{base => generated}/aws_ec2_metadata.cue (95%)
rename website/cue/reference/components/transforms/{base => generated}/dedupe.cue (69%)
create mode 100644 website/cue/reference/components/transforms/generated/exclusive_route.cue
rename website/cue/reference/components/transforms/{base => generated}/filter.cue (76%)
rename website/cue/reference/components/transforms/{base => generated}/log_to_metric.cue (82%)
rename website/cue/reference/components/transforms/{base => generated}/lua.cue (97%)
rename website/cue/reference/components/transforms/{base => generated}/metric_to_log.cue (92%)
rename website/cue/reference/components/transforms/{base => generated}/reduce.cue (80%)
rename website/cue/reference/components/transforms/{base => generated}/remap.cue (89%)
rename website/cue/reference/components/transforms/{base => generated}/route.cue (62%)
create mode 100644 website/cue/reference/components/transforms/generated/sample.cue
create mode 100644 website/cue/reference/components/transforms/generated/tag_cardinality_limit.cue
rename website/cue/reference/components/transforms/{base => generated}/throttle.cue (96%)
create mode 100644 website/cue/reference/components/transforms/generated/window.cue
create mode 100644 website/cue/reference/components/transforms/window.cue
create mode 100644 website/cue/reference/generated/api.cue
create mode 100644 website/cue/reference/generated/configuration.cue
create mode 100644 website/cue/reference/releases/0.37.0.cue
create mode 100644 website/cue/reference/releases/0.37.1.cue
create mode 100644 website/cue/reference/releases/0.38.0.cue
create mode 100644 website/cue/reference/releases/0.39.0.cue
create mode 100644 website/cue/reference/releases/0.40.0.cue
create mode 100644 website/cue/reference/releases/0.40.1.cue
create mode 100644 website/cue/reference/releases/0.40.2.cue
create mode 100644 website/cue/reference/releases/0.41.0.cue
create mode 100644 website/cue/reference/releases/0.41.1.cue
create mode 100644 website/cue/reference/releases/0.42.0.cue
create mode 100644 website/cue/reference/releases/0.43.0.cue
create mode 100644 website/cue/reference/releases/0.43.1.cue
create mode 100644 website/cue/reference/releases/0.44.0.cue
create mode 100644 website/cue/reference/releases/0.45.0.cue
create mode 100644 website/cue/reference/releases/0.46.0.cue
create mode 100644 website/cue/reference/releases/0.46.1.cue
create mode 100644 website/cue/reference/releases/0.47.0.cue
create mode 100644 website/cue/reference/releases/0.48.0.cue
create mode 100644 website/cue/reference/remap/functions/camelcase.cue
create mode 100644 website/cue/reference/remap/functions/crc.cue
create mode 100644 website/cue/reference/remap/functions/decode_charset.cue
create mode 100644 website/cue/reference/remap/functions/decode_lz4.cue
create mode 100644 website/cue/reference/remap/functions/encode_charset.cue
create mode 100644 website/cue/reference/remap/functions/encode_lz4.cue
create mode 100644 website/cue/reference/remap/functions/encode_proto.cue
create mode 100644 website/cue/reference/remap/functions/haversine.cue
create mode 100644 website/cue/reference/remap/functions/kebabcase.cue
create mode 100644 website/cue/reference/remap/functions/object_from_array.cue
create mode 100644 website/cue/reference/remap/functions/parse_bytes.cue
create mode 100644 website/cue/reference/remap/functions/parse_cbor.cue
create mode 100644 website/cue/reference/remap/functions/parse_dnstap.cue
create mode 100644 website/cue/reference/remap/functions/parse_influxdb.cue
create mode 100644 website/cue/reference/remap/functions/parse_proto.cue
create mode 100644 website/cue/reference/remap/functions/pascalcase.cue
create mode 100644 website/cue/reference/remap/functions/screamingsnakecase.cue
create mode 100644 website/cue/reference/remap/functions/shannon_entropy.cue
create mode 100644 website/cue/reference/remap/functions/sieve.cue
create mode 100644 website/cue/reference/remap/functions/snakecase.cue
create mode 100644 website/cue/reference/remap/functions/to_syslog_facility_code.cue
create mode 100644 website/cue/reference/remap/functions/unflatten.cue
create mode 100644 website/cue/reference/remap/functions/uuid_from_friendly_id.cue
create mode 100644 website/cue/reference/remap/functions/uuid_v7.cue
create mode 100644 website/cue/reference/remap/functions/validate_json_schema.cue
create mode 100644 website/cue/reference/remap/functions/zip.cue
create mode 100644 website/cue/reference/remap/syntax/expressions.cue
create mode 100644 website/cue/reference/services/keep.cue
create mode 100644 website/cue/reference/services/postgres.cue
create mode 100644 website/cue/reference/services/websocket_client.cue
delete mode 100644 website/cue/reference/team.cue
create mode 100644 
 create mode 100644 website/layouts/partials/author-with-avatar.html
 delete mode 100644 website/layouts/partials/blog/authors.html
 delete mode 100644 website/layouts/partials/highlights/authors.html
 create mode 100644 website/layouts/partials/logs_output.html
 delete mode 100644 website/layouts/partials/page-actions.html
 create mode 100644 website/layouts/partials/telemetry_input.html
 create mode 100644 website/layouts/partials/telemetry_output.html
 delete mode 100644 website/layouts/shortcodes/internal-metrics-list.html
 rename website/scripts/{algolia-index.ts => typesense-index.ts} (86%)
 create mode 100644 website/scripts/typesense-sync.ts
 create mode 100644 website/static/gifs/guides/config-autocomplete.gif
 create mode 100644 website/static/img/exclusive_route.svg
 create mode 100644 website/static/img/guides/dd-metrics-vector-errors-visualization.png
 create mode 100644 website/static/img/guides/debugging-meme.png
 create mode 100644 website/static/img/guides/vector-tap.png
 create mode 100644 website/static/img/guides/vector-top.png
 create mode 100644 website/static/img/sliding-window.svg
 create mode 100644 website/typesense.config.json
 delete mode 100644 workload-checks/README.md
 delete mode 100644 workload-checks/typical/cases/http_text_to_http_json/README.md
 delete mode 100644 workload-checks/typical/cases/http_text_to_http_json/experiment.yaml
 delete mode 100644 workload-checks/typical/cases/http_text_to_http_json/lading/lading.yaml
 delete mode 100644 workload-checks/typical/cases/http_text_to_http_json/vector/vector.yaml
 delete mode 100644 workload-checks/typical/machine.yaml

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index c42b902aa2c0a..9b7fa0951b6f1 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,100 +1,15 @@
-docs/ @vectordotdev/ux-team
-lib/dnsmsg-parser/ @vectordotdev/integrations-team
-lib/file-source/ @vectordotdev/integrations-team
-lib/k8s-e2e-tests/ @vectordotdev/integrations-team
-lib/k8s-test-framework/ @vectordotdev/integrations-team
-lib/opentelemetry-proto/ @vectordotdev/integrations-team
-lib/vector-common/ @vectordotdev/core-team
-lib/vector-config/ @vectordotdev/core-team
-lib/vector-config-common/ @vectordotdev/core-team
-lib/vector-config-macros/ @vectordotdev/core-team
-lib/vector-core/ @vectordotdev/core-team
-lib/vector-vrl-functions/ @vectordotdev/processing-team
-lib/vrl/ @vectordotdev/processing-team
-src/config/ @vectordotdev/core-team
-src/internal_telemetry/ @vectordotdev/core-team
-src/sinks/ @vectordotdev/integrations-team
-src/sinks/amqp/ @StephenWakely @vectordotdev/integrations-team
-src/sinks/appsignal/ @neuronull @vectordotdev/integrations-team
-src/sinks/aws_cloudwatch_logs/ @vectordotdev/integrations-team
-src/sinks/aws_cloudwatch_metrics/ @vectordotdev/integrations-team
-src/sinks/aws_kinesis/ @vectordotdev/integrations-team # sink_aws_kinesis_firehose,sink_aws_kinesis_stream
-src/sinks/aws_s3/ @vectordotdev/integrations-team
-src/sinks/aws_sqs/ @vectordotdev/integrations-team
-src/sinks/axiom.rs @vectordotdev/integrations-team
-src/sinks/azure_blob/ @dsmith3197 @vectordotdev/integrations-team
-src/sinks/azure_monitor_logs.rs @dsmith3197 @vectordotdev/integrations-team
-src/sinks/blackhole/ @dsmith3197 @vectordotdev/integrations-team
-src/sinks/clickhouse/ @dsmith3197 @vectordotdev/integrations-team
-src/sinks/console/ @dsmith3197 @vectordotdev/integrations-team
-src/sinks/databend/ @vectordotdev/integrations-team
-src/sinks/datadog_events/ @neuronull @vectordotdev/integrations-team
-src/sinks/datadog_logs/ @neuronull @vectordotdev/integrations-team
-src/sinks/datadog_metrics/ @neuronull @vectordotdev/integrations-team
-src/sinks/datadog_traces/ @neuronull @vectordotdev/integrations-team
-src/sinks/elasticsearch/ @vectordotdev/integrations-team
-src/sinks/file/ @vectordotdev/integrations-team
-src/sinks/gcp/ @StephenWakely @vectordotdev/integrations-team # sink_gcp_chronicle_unstructured,sink_gcp_cloud_storage,sink_gcp_pubsub,sink_gcp_stackdriver_logs,sink_gcp_stackdriver_metrics
-src/sinks/honeycomb.rs @vectordotdev/integrations-team
-src/sinks/http.rs @neuronull @vectordotdev/integrations-team
-src/sinks/humio/ @StephenWakely @vectordotdev/integrations-team # sink_humio_logs,sink_humio_metrics
-src/sinks/influxdb/ @dsmith3197 @vectordotdev/integrations-team # sink_influxdb_logs,sink_influxdb_metrics
-src/sinks/kafka/ @dsmith3197 @vectordotdev/integrations-team
-src/sinks/logdna.rs @neuronull @vectordotdev/integrations-team
-src/sinks/loki/ @vectordotdev/integrations-team
-src/sinks/nats.rs @StephenWakely @vectordotdev/integrations-team
-src/sinks/new_relic/ @dsmith3197 @vectordotdev/integrations-team # sink_newrelix,sink_newrelic_logs
-src/sinks/papertrail.rs @StephenWakely @vectordotdev/integrations-team
-src/sinks/prometheus/ @StephenWakely @vectordotdev/integrations-team # sink_prometheus_exporter,sink_prometheus_remote_write
-src/sinks/pulsar.rs @dsmith3197 @vectordotdev/integrations-team
-src/sinks/redis.rs @StephenWakely @vectordotdev/integrations-team
-src/sinks/sematext/ @vectordotdev/integrations-team # sink_sematext_logs,sink_sematext_metrics
-src/sinks/socket.rs @neuronull @vectordotdev/integrations-team
-src/sinks/splunk_hec/ @StephenWakely @vectordotdev/integrations-team # sink_splunk_hec_logs,sink_splunk_hec_metrics
-src/sinks/statsd.rs @neuronull @vectordotdev/integrations-team
-src/sinks/vector/ @neuronull @vectordotdev/integrations-team
-src/sinks/websocket/ @neuronull @vectordotdev/integrations-team
-src/source_sender/ @vectordotdev/core-team
-src/sources/ @vectordotdev/integrations-team
-src/sources/amqp.rs @StephenWakely @vectordotdev/integrations-team
-src/sources/apache_metrics/ @dsmith3197 @vectordotdev/integrations-team
-src/sources/aws_ecs_metrics/ @vectordotdev/integrations-team
-src/sources/aws_kinesis_firehose/ @vectordotdev/integrations-team
-src/sources/aws_s3/ @vectordotdev/integrations-team
-src/sources/aws_sqs/ @vectordotdev/integrations-team
-src/sources/datadog_agent/ @neuronull @vectordotdev/integrations-team
-src/sources/demo_logs.rs @StephenWakely @vectordotdev/integrations-team
-src/sources/dnstap/ @StephenWakely @vectordotdev/integrations-team
-src/sources/docker_logs/ @vectordotdev/integrations-team
-src/sources/eventstoredb_metrics/ @dsmith3197 @vectordotdev/integrations-team
-src/sources/exec/ @dsmith3197 @vectordotdev/integrations-team
-src/sources/file.rs @vectordotdev/integrations-team
-src/sources/file_descriptors/ @dsmith3197 @vectordotdev/integrations-team # source_file_descriptor,source_stdin
-src/sources/fluent/ @neuronull @vectordotdev/integrations-team
-src/sources/gcp_pubsub.rs @StephenWakely @vectordotdev/integrations-team
-src/sources/heroku_logs.rs @vectordotdev/integrations-team
-src/sources/host_metrics/ @dsmith3197 @vectordotdev/integrations-team
-src/sources/http_client/ @neuronull @vectordotdev/integrations-team
-src/sources/http_server.rs @neuronull @vectordotdev/integrations-team
-src/sources/internal_logs.rs @neuronull @vectordotdev/integrations-team
-src/sources/internal_metrics.rs @neuronull @vectordotdev/integrations-team
-src/sources/journald.rs @vectordotdev/integrations-team
-src/sources/kafka.rs @dsmith3197 @vectordotdev/integrations-team
-src/sources/kubernetes_logs/ @vectordotdev/integrations-team
-src/sources/logstash.rs @neuronull @vectordotdev/integrations-team
-src/sources/mongodb_metrics/ @dsmith3197 @vectordotdev/integrations-team
-src/sources/nats.rs @StephenWakely @vectordotdev/integrations-team
-src/sources/nginx_metrics/ @dsmith3197 @vectordotdev/integrations-team
-src/sources/opentelemetry/ @vectordotdev/integrations-team
-src/sources/postgresql_metrics.rs @dsmith3197 @vectordotdev/integrations-team
-src/sources/prometheus/ @StephenWakely @vectordotdev/integrations-team # source_prometheus_remote_write,source_prometheus_scrape
-src/sources/redis/ @StephenWakely @vectordotdev/integrations-team
-src/sources/socket/ @neuronull @vectordotdev/integrations-team
-src/sources/splunk_hec/ @StephenWakely @vectordotdev/integrations-team
-src/sources/statsd/ @neuronull @vectordotdev/integrations-team
-src/sources/syslog.rs @StephenWakely @vectordotdev/integrations-team
-src/sources/vector/ @neuronull @vectordotdev/integrations-team
-src/test_util/ @vectordotdev/core-team
-src/topology/ @vectordotdev/core-team
-src/transforms/ @vectordotdev/processing-team
-website/ @vectordotdev/ux-team @vectordotdev/documentation
+* @vectordotdev/vector
+
+.github/workflows/regression.yml @vectordotdev/vector @vectordotdev/single-machine-performance
+regression/config.yaml @vectordotdev/vector @vectordotdev/single-machine-performance
+
+docs/ @vectordotdev/vector @vectordotdev/ux-team @vectordotdev/documentation
+website/ @vectordotdev/vector @vectordotdev/ux-team
+website/content @vectordotdev/vector @vectordotdev/documentation
+website/cue/reference @vectordotdev/vector @vectordotdev/documentation
+
+website/js @vectordotdev/vector @vectordotdev/vector-website
+website/layouts @vectordotdev/vector @vectordotdev/vector-website
+website/scripts @vectordotdev/vector @vectordotdev/vector-website
+website/data @vectordotdev/vector @vectordotdev/vector-website
+website/* @vectordotdev/vector @vectordotdev/vector-website
diff --git a/.github/DISCUSSION_TEMPLATE/q-a.yml b/.github/DISCUSSION_TEMPLATE/q-a.yml
new file mode 100644
index 0000000000000..23cb1f8467745
--- /dev/null
+++ b/.github/DISCUSSION_TEMPLATE/q-a.yml
@@ -0,0 +1,33 @@
+title: "Q&A"
+labels: [ q-a ]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Please fill out the following fields to help us assist you effectively.
+
+  - type: textarea
+    id: question
+    attributes:
+      label: Question
+      description: What are you trying to do? What issue are you encountering?
+
+  - type: textarea
+    id: config
+    attributes:
+      label: Vector Config
+      description: Your Vector configuration (please redact sensitive data)
+      placeholder: |
+        ```yaml
+        # your config
+        ```
+
+  - type: textarea
+    id: logs
+    attributes:
+      label: Vector Logs
+      description: Paste any relevant Vector logs or error messages.
+      placeholder: |
+        ```sh
+        Jul 10 14:32:02 vector[1234]: ERROR ...
+        ```
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
index e1b887b4b31a9..7a9ef223e45b1 100644
--- a/.github/ISSUE_TEMPLATE/bug.yml
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -1,5 +1,6 @@
 name: Bug
 description: 🐛 Let us know about an unexpected error, a crash, or an incorrect behavior.
+type: 'Bug'
 labels:
 - 'type: bug'
 body:
diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml
index 3675a2173a962..ed43f6891b3ce 100644
--- a/.github/ISSUE_TEMPLATE/feature.yml
+++ b/.github/ISSUE_TEMPLATE/feature.yml
@@ -1,4 +1,5 @@
 name: Feature
+type: 'Feature'
 description: 🚀 Suggest a new feature.
 labels:
 - 'type: feature'
diff --git a/.github/ISSUE_TEMPLATE/minor-release.md b/.github/ISSUE_TEMPLATE/minor-release.md
index 8c82e25067c71..5d34d0c63143f 100644
--- a/.github/ISSUE_TEMPLATE/minor-release.md
+++ b/.github/ISSUE_TEMPLATE/minor-release.md
@@ -5,50 +5,96 @@ title: "Vector [version] release"
 labels: "domain: releasing"
 ---
 
-The week before the release:
-- [ ] Check for any outstanding deprecation actions in [DEPRECATIONS.md](docs/DEPRECATIONS.md) and
-      take them (or have someone help you take them)
+# Setup and Automation
+
+Note: the preparation steps are now automated. First, alter or create `release.env`:
+
+```shell
+export NEW_VECTOR_VERSION= # replace this with the actual new version
+export MINOR_VERSION=$(echo "${NEW_VECTOR_VERSION}" | cut -d. -f2)
+export PREP_BRANCH=prepare-v-0-"${MINOR_VERSION}"-"${NEW_VECTOR_VERSION}"-website
+export RELEASE_BRANCH=v0."${MINOR_VERSION}"
+export NEW_VRL_VERSION= # replace this with the actual new VRL version
+```
+
+and then source it by running `source ./release.env`.
+
+# The week before the release
+
+## 1. Manual Steps
+
+- [ ] Cut a new release of [VRL](https://github.com/vectordotdev/vrl) if needed
+  - VRL release steps: https://github.com/vectordotdev/vrl/blob/main/release/README.md
+
+## 2. Automated Steps
+
+Run the following:
+
+```shell
+cargo vdev release prepare --version "${NEW_VECTOR_VERSION}" --vrl-version "${NEW_VRL_VERSION}"
+```
+
+Automated steps include:
 - [ ] Create a new release branch from master to freeze commits
-  - `git fetch && git checkout origin/master && git checkout -b v0. && git push -u`
+  - `git fetch && git checkout origin/master && git checkout -b "${RELEASE_BRANCH}" && git push -u`
 - [ ] Create a new release preparation branch from `master`
-  - `git checkout -b prepare-v0. && git push -u`
-- [ ] Check if there is a newer version of Alpine or Debian available to update the release images
-      in `distribution/docker/`. Update if so.
+  - `git checkout -b "${PREP_BRANCH}" && git push -u`
+- [ ] Pin VRL to the latest released version rather than `main`
+- [ ] Check if there is a newer version of [Alpine](https://alpinelinux.org/releases/) or
+      [Debian](https://www.debian.org/releases/) available to update the release images in
+      `distribution/docker/`. Update if so.
 - [ ] Run `cargo vdev build release-cue` to generate a new cue file for the release
-  - [ ] Add description key to the generated cue file with a description of the release (see
-        previous releases for examples).
-  - [ ] Ensure any breaking changes are highlighted in the release upgrade guide
-  - [ ] Ensure any deprecations are highlighted in the release upgrade guide
+  - [ ] Copy VRL changelogs from the VRL version in the last Vector release as a new changelog entry
+        ([example](https://github.com/vectordotdev/vector/blob/9c67bba358195f5018febca2f228dfcb2be794b5/website/cue/reference/releases/0.41.0.cue#L33-L64))
 - [ ] Update version number in `website/cue/reference/administration/interfaces/kubectl.cue`
 - [ ] Update version number in `distribution/install.sh`
 - [ ] Add new version to `website/cue/reference/versions.cue`
 - [ ] Create new release md file by copying an existing one in `./website/content/en/releases/` and
       updating version number
 - [ ] Commit these changes
-- [ ] Open PR against the release branch (`v0.`) for review
-- [ ] PR approval
+- [ ] Open PR against the release branch (`"${RELEASE_BRANCH}"`) for review
 
-On the day of release:
+## 3. Manual Steps
+
+- [ ] Edit `website/cue/reference/releases/"${NEW_VECTOR_VERSION}".cue`
+  - [ ] Add description key to the generated cue file with a description of the release (see
+        previous releases for examples).
+  - [ ] Ensure any breaking changes are highlighted in the release upgrade guide
+  - [ ] Ensure any deprecations are highlighted in the release upgrade guide
+  - [ ] Review generated changelog entries to ensure they are understandable to end-users
+- [ ] Check for any outstanding deprecation actions in [DEPRECATIONS.md](https://github.com/vectordotdev/vector/blob/master/docs/DEPRECATIONS.md) and
+      take them (or have someone help you take them)
+- [ ] PR review & approval
+
+# On the day of release
 
 - [ ] Rebase the release preparation branch on the release branch
 - [ ] Squash the release preparation commits (but not the cherry-picked commits!) to a single
-      commit. This makes it easier to cherry-pick to master after the release. 
+      commit. This makes it easier to cherry-pick to master after the release.
 - [ ] Ensure release date in cue matches current date.
 - [ ] Merge release preparation branch into the release branch
-  - `git co v0. && git merge --ff-only prepare-v0.`
+  - `git switch "${RELEASE_BRANCH}" && git merge --ff-only "${PREP_BRANCH}"`
 - [ ] Tag new release
-  - [ ] `git tag v0..0 -a -m v0..0`
-  - [ ] `git push origin v0..0`
+  - [ ] `git tag v"${NEW_VECTOR_VERSION}" -a -m v"${NEW_VECTOR_VERSION}"`
+  - [ ] `git push origin v"${NEW_VECTOR_VERSION}"`
 - [ ] Wait for release workflow to complete
-  - Discoverable via [https://github.com/timberio/vector/actions/workflows/release.yml](https://github.com/timberio/vector/actions/workflows/release.yml)
+  - Discoverable via [release.yml](https://github.com/vectordotdev/vector/actions/workflows/release.yml)
+- [ ] Reset the `website` branch to the `HEAD` of the release branch to update https://vector.dev
+  - [ ] `git switch website && git reset --hard origin/"${RELEASE_BRANCH}" && git push`
+  - [ ] Confirm that the release changelog was published to https://vector.dev/releases/
+    - The deployment is done by Amplify. You can see
+      the [deployment logs here](https://dd-corpsite.datadoghq.com/logs?query=service%3Awebsites-vector%20branch%3Awebsite&agg_m=count&agg_m_source=base&agg_t=count&cols=host%2Cservice&fromUser=true&messageDisplay=inline&refresh_mode=sliding&storage=hot&stream_sort=time%2Casc&viz=stream).
 - [ ] Release Linux packages. See [`vector-release` usage](https://github.com/DataDog/vector-release#usage).
+  - Note: the pipeline inputs are the version number `v"${NEW_VECTOR_VERSION}"` and a personal GitHub token.
+  - [ ] Manually trigger the `trigger-package-release-pipeline-prod-stable` job.
 - [ ] Release updated Helm chart. See [releasing Helm chart](https://github.com/vectordotdev/helm-charts#releasing).
 - [ ] Once Helm chart is released, updated Vector manifests
   - Run `cargo vdev build manifests` and open a PR with changes
-- [ ] Add docker images to [https://github.com/DataDog/images](https://github.com/DataDog/images/tree/master/vector) to have them available internally.
-- [ ] Cherry-pick any release commits from the release branch that are not on `master`, to `master`
-- [ ] Bump the release number in the `Cargo.toml` on master to the next major release
-- [ ] Reset the `website` branch to the `HEAD` of the release branch to update https://vector.dev
-  - [ ] `git checkout website && git reset --hard origin/v0. && git push`
+- [ ] Add docker images to [https://github.com/DataDog/images](https://github.com/DataDog/images/tree/master/vector) to have them available internally. ([Example PR](https://github.com/DataDog/images/pull/7104))
+- [ ] Create a new PR with a title starting with `chore(releasing):`
+  - [ ] Cherry-pick any release commits from the release branch that are not on `master`, to `master`
+  - [ ] Bump the release number in the `Cargo.toml` on master to the next minor release.
+    - [ ] Also, update `Cargo.lock` with: `cargo update -p vector`
+  - [ ] If there is a VRL version update, revert it and make it track the git `main` branch, and then run `cargo update -p vrl`.
 - [ ] Kick-off post-mortems for any regressions resolved by the release
diff --git a/.github/ISSUE_TEMPLATE/patch-release.md b/.github/ISSUE_TEMPLATE/patch-release.md
index 2beb8531dbd9a..afc5d8d826423 100644
--- a/.github/ISSUE_TEMPLATE/patch-release.md
+++ b/.github/ISSUE_TEMPLATE/patch-release.md
@@ -5,43 +5,56 @@ title: "Vector [version] release"
 labels: "domain: releasing"
 ---
 
-Before the release:
+# Setup environment
+
+```shell
+export CURRENT_MINOR_VERSION= # e.g. 47
+export CURRENT_PATCH_VERSION= # e.g. 0
+export NEW_PATCH_VERSION= # e.g. 1
+export RELEASE_BRANCH=v0."${CURRENT_MINOR_VERSION}"
+export CURRENT_VERSION="${RELEASE_BRANCH}"."${CURRENT_PATCH_VERSION}"
+export NEW_VERSION="${RELEASE_BRANCH}"."${NEW_PATCH_VERSION}"
+export PREP_BRANCH=prepare-v-0-"${CURRENT_MINOR_VERSION}"-"${NEW_PATCH_VERSION}"-website
+```
+
+# Before the release
 
 - [ ] Create a new release preparation branch from the current release branch
-  - `git fetch && git checkout v0. && git checkout -b prepare-v0.`
+  - `git fetch --all && git checkout "${RELEASE_BRANCH}" && git checkout -b "${PREP_BRANCH}"`
 - [ ] Cherry-pick in all commits to be released from the associated release milestone
   - If any merge conflicts occur, attempt to solve them and if needed enlist the aid of those
     familiar with the conflicting commits.
+- [ ] Bump the release number in the `Cargo.toml` to the current version number
 - [ ] Run `cargo vdev build release-cue` to generate a new cue file for the release
-- [ ] Add `changelog` key to generated cue file
 - [ ] Add description key to the generated cue file with a description of the release (see
       previous releases for examples).
 - [ ] Update version number in `distribution/install.sh`
 - [ ] Add new version to `website/cue/reference/versions.cue`
-- [ ] Create new release md file by copying an existing one in `./website/content/en/releases/` and
-      updating version number
-- [ ] Bump the release number in the `Cargo.toml` to the current version number
+- [ ] Create new release md file by copying an existing one in `./website/content/en/releases/`.
+  - Update the version number to `"${NEW_VERSION}"` and increase the `weight` by 1.
 - [ ] Run `cargo check` to regenerate `Cargo.lock` file
 - [ ] Commit these changes
-- [ ] Open PR against the release branch (`v0.`) for review
+- [ ] Open PR against the release branch (`"${RELEASE_BRANCH}"`) for review
 - [ ] PR approval
 
-On the day of release:
+# On the day of release
 
 - [ ] Ensure release date in cue matches current date.
 - [ ] Rebase the release preparation branch on the release branch
   - Squash the release preparation commits (but not the cherry-picked commits!) to a single
     commit. This makes it easier to cherry-pick to master after the release.
-  - `git checkout prepare-v0. && git rebase -i v0.`
+  - `git fetch --all && git checkout "${PREP_BRANCH}" && git rebase -i "${RELEASE_BRANCH}"`
 - [ ] Merge release preparation branch into the release branch
-  - `git co v0. && git merge --ff-only prepare-v0..`
+  - `git checkout "${RELEASE_BRANCH}" && git merge --ff-only "${PREP_BRANCH}"`
 - [ ] Tag new release
-  - [ ] `git tag v0.. -a -m v0..`
-  - [ ] `git push origin v0..`
+  - [ ] `git tag "${NEW_VERSION}" -a -m "${NEW_VERSION}"`
+  - [ ] `git push origin "${NEW_VERSION}"`
 - [ ] Wait for release workflow to complete
   - Discoverable via [https://github.com/timberio/vector/actions/workflows/release.yml](https://github.com/timberio/vector/actions/workflows/release.yml)
 - [ ] Release Linux packages. See [`vector-release` usage](https://github.com/DataDog/vector-release#usage).
+  - Note: the pipeline inputs are the version number `"${NEW_VERSION}"` and a personal GitHub token.
+  - [ ] Manually trigger the `trigger-package-release-pipeline-prod-stable` job.
 - [ ] Push the release branch to update the remote (This should close the preparation branch PR).
-  - `git checkout v0. && git push`
+  - `git checkout "${RELEASE_BRANCH}" && git push`
 - [ ] Release updated Helm chart. See [releasing Helm chart](https://github.com/vectordotdev/helm-charts#releasing).
 - [ ] Once Helm chart is released, updated Vector manifests
   - Run `cargo vdev build manifests` and open a PR with changes
@@ -49,5 +62,5 @@ On the day of release:
   - Follow the [instructions at the top of the mirror.yaml file](https://github.com/DataDog/images/blob/fbf12868e90d52e513ebca0389610dea8a3c7e1a/mirror.yaml#L33-L49).
 - [ ] Cherry-pick any release commits from the release branch that are not on `master`, to `master`
 - [ ] Reset the `website` branch to the `HEAD` of the release branch to update https://vector.dev
-  - [ ] `git checkout website && git reset --hard origin/v0.. && git push`
+  - [ ] `git checkout website && git reset --hard origin/"${RELEASE_BRANCH}" && git push`
 - [ ] Kick-off post-mortems for any regressions resolved by the release
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 0d67c6c3b69b8..76781a822e55d 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,11 +1,61 @@
+## Summary
+
+
+## Vector configuration
+
+
+## How did you test this PR?
+
+
+## Change Type
+- [ ] Bug fix
+- [ ] New feature
+- [ ] Non-functional (chore, refactoring, docs)
+- [ ] Performance
+
+## Is this a breaking change?
+- [ ] Yes
+- [ ] No
+
+## Does this PR include user facing changes?
+
+- [ ] Yes. Please add a changelog fragment based on our [guidelines](https://github.com/vectordotdev/vector/blob/master/changelog.d/README.md).
+- [ ] No. A maintainer will apply the `no-changelog` label to this PR.
+
+## References
+
+
+
+## Notes
+- Please read our [Vector contributor resources](https://github.com/vectordotdev/vector/tree/master/docs#getting-started).
+- Do not hesitate to use `@vectordotdev/vector` to reach out to us regarding this PR.
+- Some CI checks run only after we manually approve them.
+  - We recommend adding a `pre-push` hook, please see [this template](https://github.com/vectordotdev/vector/blob/master/CONTRIBUTING.md#Pre-push).
+  - Alternatively, we recommend running the following locally before pushing to the remote branch:
+    - `cargo fmt --all`
+    - `cargo clippy --workspace --all-targets -- -D warnings`
+    - `cargo nextest run --workspace` (alternatively, you can run `cargo test --all`)
+- After a review is requested, please avoid force pushes to help us review incrementally.
+  - Feel free to push as many commits as you want. They will be squashed into one before merging.
+  - For example, you can run `git merge origin/master` and `git push`.
+- If this PR introduces changes to Vector dependencies (modifies `Cargo.lock`), please
+  run `cargo vdev build licenses` to regenerate the [license inventory](https://github.com/vectordotdev/vector/blob/master/LICENSE-3rdparty.csv) and commit the changes (if any). More details [here](https://crates.io/crates/dd-rust-license-tool).
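A minimal sketch of such a `pre-push` hook, mirroring the local checks listed in the template above (hypothetical illustration only; it assumes the script is saved as `.git/hooks/pre-push` and marked executable, and the template linked in CONTRIBUTING.md remains the canonical version):

```shell
#!/usr/bin/env bash
# Hypothetical pre-push hook running the checks recommended in the PR template.
# See CONTRIBUTING.md#Pre-push for the canonical template.
set -euo pipefail

cargo fmt --all -- --check
cargo clippy --workspace --all-targets -- -D warnings
cargo nextest run --workspace
```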
+ + + + + + diff --git a/lib/vector-vrl/web-playground/public/index.css b/lib/vector-vrl/web-playground/public/index.css index 296b067889097..364ac714fe491 100644 --- a/lib/vector-vrl/web-playground/public/index.css +++ b/lib/vector-vrl/web-playground/public/index.css @@ -1,329 +1,422 @@ -body { - margin-right: 2vw; - margin-left: 2vw; - font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, - Helvetica Neue, Arial, Noto Sans, sans-serif, Apple Color Emoji, Segoe UI Emoji, - Segoe UI Symbol, Noto Color Emoji; +:root { + --datadog-purple-lightest: #f8f6ff; + --datadog-purple-light: #e1d8ff; + --datadog-purple: #632ca6; + --datadog-purple-vibrant: #7b40f1; + --datadog-purple-dark: #451b78; + --datadog-gray: #252525; + --datadog-gray-light: #b6b6b6; + --datadog-gray-lighter: #eaeaea; + --datadog-background: #fafbfc; + --light-grey: #f5f5f5; } -table { - width: 100%; - border-spacing: 10px; +body { + margin: 0; + padding: 0; + font-family: "Open Sans", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, + Cantarell, "Helvetica Neue", sans-serif; + background-color: var(--datadog-background); + color: var(--datadog-gray); } -td { - text-align: left; - font-weight: bold; +.top-bar-wrapper { + border-bottom: 1px solid var(--datadog-purple-light); + padding: 10px 0; } - .headers-grid { display: grid; - grid-template-columns: 3fr 2fr 7fr; - grid-template-rows: 90px; - gap: 10px; - width: 100%; - height: 100%; - margin: auto; + grid-template-columns: auto 1fr auto; + gap: 20px; + padding: 0 15px; + max-width: 1600px; + margin: 0 auto; + align-items: stretch; } .headers-grid-item { - background-color: #dfd8ec; - padding: 5px 10px; - border-radius: 4px; - border: none; - display: grid; - align-items: center; + background-color: var(--datadog-purple-lightest); + padding: 16px; + border-radius: 12px; + border: 1px solid var(--datadog-purple-light); + display: flex; + flex-direction: column; justify-content: center; - height: 100%; +} + +.title-section { + flex-direction: row; + align-items: center; + gap: 8px; +} + +#logo { + width: 20px; + height: 20px; +} + +.headers-grid-item h2 { + font-size: 22px; + font-weight: 600; + color: var(--datadog-purple); + margin: 0; } #description-cell { - grid-column: 3; - display: grid; + grid-column: 2; +} + +#description-cell p { + font-size: 13px; + line-height: 1.4; + margin: 0; + color: var(--datadog-gray); +} + +#description-cell p, +.version-info td { + color: var(--datadog-gray); +} + +.version-info { + justify-content: center; +} + +.version-info table { + width: 100%; + border-spacing: 0; +} + +.version-info tr { + display: flex; + justify-content: space-between; + align-items: baseline; +} + +.version-info td { + padding: 2px 0; + font-weight: normal; + font-size: 12px; + color: var(--datadog-gray); +} + +.version-info td:first-child { + font-weight: 600; +} + +#description-cell a:hover, +.version-info a:hover { + text-decoration: underline; } div#App { - padding-top: 5px; display: grid; - width: 100%; - height: 100%; - grid-template-columns: repeat(2, 1fr); - grid-template-rows: 1fr 18fr; + width: calc(100% - 40px); + max-width: 1600px; + height: calc(100vh - 240px); + grid-template-columns: minmax(45%, 1fr) minmax(0, 1fr); + grid-template-rows: auto 1fr; grid-gap: 1rem; - resize: both; - overflow: hidden; + padding: 20px 20px; + margin: 0 auto; } div#toolbar-section { - padding-top: 30px; - display: grid; + padding: 10px 0; + display: flex; + flex-wrap: wrap; grid-row: 1; - grid-column: 1 / span 2; - 
grid-template-columns: 2fr 2fr 6fr; - grid-gap: 1rem; + grid-column: 1 / -1; + gap: 1rem; align-items: center; } - #toolbar-section #run-code-btn { +#toolbar-section #run-code-btn, +#toolbar-section #share-code-btn { + height: 36px; + flex: 0 1 auto; + min-width: 120px; +} + +div#input-section, +div#output-section { + border: 1px solid var(--datadog-gray-lighter); + border-radius: 4px; +} + +div#input-section { + display: grid; + grid-column: 1; + grid-row: 2; +} + +div#output-section { + display: grid; + grid-column: 2; + grid-row: 2; + grid-template-rows: 1fr 1fr; + gap: 1rem; +} + +#input-section #cell, +#output-section #event-cell, +#output-section #output-cell { + display: grid; + grid-template-rows: auto 1fr; +} + +.cell-title { + font-weight: 600; + font-size: 14px; + color: var(--datadog-gray); + margin: 0; + padding: 8px 12px; + background-color: var(--datadog-gray-lighter); + border-bottom: 1px solid var(--datadog-gray-light); +} + +/* BUTTONS */ +.btn { + display: inline-block; + outline: 0; + border: none; + cursor: pointer; + border-radius: 4px; + font-size: 14px; + height: 36px; + padding: 0 16px; + font-weight: 500; + transition: all 0.2s ease; +} + +.btn:active { + transform: translateY(1px); +} +.btn-primary { + background-color: var(--datadog-purple); + color: var(--datadog-background); +} + +.btn-primary:hover { + background-color: var(--datadog-purple-dark); +} + +.btn-secondary { + background-color: var(--datadog-gray-lighter); + color: var(--datadog-gray); + border: 1px solid var(--datadog-gray-light); +} + +.btn-secondary:hover { + background-color: var(--datadog-gray-light); +} + +/* Media Queries for Responsiveness */ +@media only screen and (min-width: 1601px) { + .headers-grid, + div#App { + max-width: 1800px; + } + + .headers-grid { + padding: 20px 30px; + } + + div#App { + padding: 30px 30px; + } + + .headers-grid-item h2 { + font-size: 24px; + } + + #description-cell p { + font-size: 16px; + } +} + +@media only screen and (min-width: 1201px) and (max-width: 1600px) { + .headers-grid, + div#App { + max-width: 1400px; + } + + .headers-grid { + padding: 18px 25px; + } + + div#App { + padding: 25px 25px; + } + + .headers-grid-item h2 { + font-size: 22px; + } + + #description-cell p { + font-size: 15px; + } +} + +@media only screen and (max-width: 1200px) { + .headers-grid, + div#App { + max-width: 1000px; + } +} + +@media only screen and (max-width: 1024px) { + .headers-grid { + grid-template-columns: 1fr; + gap: 10px; + } + + .headers-grid-item { + width: 100%; + box-sizing: border-box; + } + + #description-cell { grid-column: 1; - height: 40px; } - #toolbar-section #share-code-btn { - grid-column: 2; - height: 40px; + div#App { + grid-template-columns: 1fr; + height: auto; + min-height: calc(100vh - 180px); } - /* input pane */ div#input-section { - display: grid; grid-column: 1; grid-row: 2; - overflow: hidden; + height: 40vh; } - #input-section #cell { - display: grid; - grid-template-rows: 4% 96%; - overflow: hidden; + div#output-section { + grid-column: 1; + grid-row: 3; + grid-template-rows: auto auto; + grid-template-columns: 1fr; + height: auto; } - #input-section #cell #input-cell-title { - height: 100%; - grid-row: 1; - font-weight: bold; + #output-section #event-cell, + #output-section #output-cell { + height: 30vh; } +} - #input-section #cell #container-program { - display: grid; - height: 100%; +@media only screen and (max-width: 768px) { + .headers-grid, + div#App { + width: calc(100% - 20px); + padding-left: 10px; + padding-right: 10px; } - 
div#output-section { - display: grid; - grid-column: 2; - grid-row: 2; - grid-template-rows: 30% 60%; + .headers-grid { + grid-template-columns: 1fr; + gap: 10px; } - /* event pane */ - #output-section #event-cell { - display: grid; - /* this is double of input-cell-title row - because we are working with half the space */ - grid-template-rows: 12% 88%; - grid-row: 1; - height: 100%; + .headers-grid-item h2 { + font-size: 18px; } - #output-section #event-cell #event-cell-title { - display: grid; - grid-row: 1; - font-weight: bold; + #toolbar-section #run-code-btn, + #toolbar-section #share-code-btn { + flex: 1 1 auto; } +} - #output-section #event-cell #container-event { - display: grid; - grid-row: 2; - height: 100%; +@media only screen and (max-width: 480px) { + .headers-grid, + div#App { + width: calc(100% - 16px); + padding-left: 8px; + padding-right: 8px; } - /* output pane */ - #output-section #output-cell { - display: grid; - grid-template-rows: 6% 94%; - grid-row: 2; - height: 100%; + .headers-grid-item h2 { + font-size: 16px; } - #output-section #output-cell #output-cell-title { - display: grid; - grid-row: 1; - font-weight: bold; - height: 50px; + #description-cell p { + font-size: 12px; } - #output-section #output-cell #container-output { - display: grid; - grid-row: 2; - height: 100%; + div#toolbar-section { + padding: 10px 0; } -/* BUTTONS */ -.btn { - display: inline-block; - outline: 0; - border: none; - cursor: pointer; - border-radius: 8px; - font-size: 13px; - height: 30px; - padding: 0 20px; + #toolbar-section #run-code-btn, + #toolbar-section #share-code-btn { + width: 100%; + margin-bottom: 8px; + } } -.btn:active { - box-shadow: 0 4px #666; - transform: translateY(2px); +@media only screen and (max-width: 320px) { + .headers-grid, + div#App { + width: calc(100% - 10px); + padding-left: 12px; + padding-right: 12px; + } + + .headers-grid-item h2 { + font-size: 14px; + } + + #description-cell p { + font-size: 11px; + } } -.btn-primary { - background-color: #9147ff; - color: white; +#toolbar-section { + display: flex; + align-items: center; + gap: 8px; } -.btn-primary:hover { - background-color: #6a1ae1; +.timezone-container { + margin-left: auto; + display: flex; + align-items: center; } -.btn-secondary { - background-color: #0000001a; - color: #000000; +#timezone-label { + margin-right: 8px; + font-weight: bold; + font-family: "Open Sans", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, + Cantarell, "Helvetica Neue", sans-serif; + font-size: 14px; } -.btn-secondary:hover { - background-color: #c0bdbd; -} -/* END OF BUTTONS */ - -/* Portrait and Landscape */ -@media only screen - and (min-width: 200px) - and (max-width: 1000px) { - div#App { - display: grid; - width: 100%; - height: 100%; - /* the app will have multiple rows - stacking each section on top of each other */ - grid-template-rows: 20vh 10vh 50vh; - grid-template-columns: 100%; - } - - div#toolbar-section { - display: grid; - grid-row: 1; - grid-column: 1; - grid-template-columns: 100%; - grid-template-rows: repeat(2, 1fr); - } - - #toolbar-section #run-code-btn { - display: grid; - grid-row: 1; - grid-column: 1; - } - - #toolbar-section #share-code-btn { - display: grid; - grid-row: 2; - grid-column: 1; - } - - /* input pane */ - div#input-section { - display: grid; - grid-column: 1; - grid-row: 2; - overflow: hidden; - } - - #input-section #cell { - display: grid; - grid-column: 1; - grid-template-rows: 23% 77%; - overflow: hidden; - } - - #input-section #cell #input-cell-title { - 
height: 100%; - grid-column: 1; - grid-row: 1; - } - - #input-section #cell #container-program { - display: grid; - height: 100%; - } - - div#output-section { - display: grid; - grid-row: 4 / 5; - grid-column: 1; - grid-template-rows: 50% 50%; - overflow: hidden; - } - - /* event pane */ - #output-section #event-cell { - display: grid; - grid-template-rows: 20% 80%; - grid-row: 1; - height: 100%; - } - - #output-section #event-cell #event-cell-title { - display: grid; - grid-row: 1; - } - - #output-section #event-cell #container-event { - display: grid; - height: 100%; - } - - /* output pane */ - #output-section #output-cell { - display: grid; - grid-template-rows: 20% 80%; - grid-row: 2; - height: 100%; - } - - #output-section #output-cell #output-cell-title { - display: grid; - grid-row: 1; - } - - #output-section #output-cell #container-output { - display: grid; - grid-row: 2; - height: 100%; - } - - .btn-primary { - display: inline-block; - outline: 0; - border: none; - cursor: pointer; - border-radius: 4px; - font-size: 13px; - height: 30px; - background-color: #9147ff; - color: white; - padding: 0 20px; - align-items: center; - } - - .btn-primary:hover { - background-color: #772ce8; - } - - .btn-secondary { - display: inline-block; - outline: 0; - border: none; - cursor: pointer; - border-radius: 4px; - font-size: 13px; - height: 30px; - background-color: #0000001a; - color: #000000; - padding: 0 20px; - align-items: center; - } - - .btn-secondary:hover { - background-color: #dcdcdc; - } +#timezone-input { + padding: 0px 10px; + border: 1px solid #ccc; + border-radius: 4px; + background-color: #f8f9fa; + border-width: 1px; + border-style: solid; + font-size: 14px; + font-family: "Open Sans", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, + Cantarell, "Helvetica Neue", sans-serif; + line-height: 1.5; + color: #212529; + height: 36px; + box-sizing: border-box; +} + +#output-cell-title .cell-title { + display: flex; + justify-content: space-between; + align-items: center; +} + +#elapsed-time { + font-weight: normal; + font-size: 12px; } diff --git a/lib/vector-vrl/web-playground/public/index.html b/lib/vector-vrl/web-playground/public/index.html index 8fd0d464d88b1..7064dc6f4cd2e 100644 --- a/lib/vector-vrl/web-playground/public/index.html +++ b/lib/vector-vrl/web-playground/public/index.html @@ -2,7 +2,6 @@ - VRL playground @@ -22,44 +21,66 @@ -
-
-

VRL Playground

-
-
- - - - - - - - - -
Vector Version:
VRL Version:
-
-
-

+

+
+
+ +

VRL Playground

+
+
+

Vector Remap Language (VRL) is an expression-oriented language designed for transforming observability data. This playground lets you write a program, run it against an event or events, share it, and see how the events are transformed.

+
+
+ + + + + + + + + +
Vector Version:
VRL Version:
+
- -
-
- +
+ + + + + + + + + + + + + + + +
+

Program

-
@@ -74,7 +95,10 @@

VRL Playground

-

Output

+

+ Output + +

diff --git a/lib/vector-vrl/web-playground/public/index.js b/lib/vector-vrl/web-playground/public/index.js
index aaf0c87007253..1f08115e8bb6a 100644
--- a/lib/vector-vrl/web-playground/public/index.js
+++ b/lib/vector-vrl/web-playground/public/index.js
@@ -39,7 +39,6 @@ export class VrlWebPlayground {
   constructor() {
     let temp = init().then(() => {
       this.run_vrl = run_vrl;
-
       this.vector_version = vector_version();
       this.vector_link = vector_link();
@@ -210,10 +209,18 @@
       return input;
     }
-    let res = this.run_vrl(input);
+    const tz_input = document.getElementById('timezone-input');
+    // Fall back to "Default" when no timezone is set, which makes run_vrl use the default timezone.
+    let timezone = tz_input.value ? tz_input.value : (tz_input.value = "Default");
+    let res = this.run_vrl(input, timezone);
     console.log("[DEBUG::handleRunCode()] Printing out res: ", res);
     if (res.result) {
       this.outputEditor.setValue(JSON.stringify(res.result, null, "\t"));
+      if (res.elapsed_time !== null) {
+        document.getElementById("elapsed-time").textContent = `Duration: ${res.elapsed_time} milliseconds`;
+      } else {
+        document.getElementById("elapsed-time").textContent = "";
+      }
     } else if (res.msg) {
       // disable json linting for error msgs
       // since error msgs are not valid json
diff --git a/lib/vector-vrl/web-playground/src/lib.rs b/lib/vector-vrl/web-playground/src/lib.rs
index a434050aefa3c..14c983762e917 100644
--- a/lib/vector-vrl/web-playground/src/lib.rs
+++ b/lib/vector-vrl/web-playground/src/lib.rs
@@ -29,17 +29,22 @@ impl Input {
     }
 }
 
-// The module returns the result of the last expression and the event that results from the
-// applied program
+// The module returns the result of the last expression, the resulting event,
+// and the execution time.
 #[derive(Deserialize, Serialize)]
 pub struct VrlCompileResult {
     pub output: Value,
     pub result: Value,
+    pub elapsed_time: Option<f64>,
 }
 
 impl VrlCompileResult {
-    fn new(output: Value, result: Value) -> Self {
-        Self { output, result }
+    fn new(output: Value, result: Value, elapsed_time: Option<f64>) -> Self {
+        Self {
+            output,
+            result,
+            elapsed_time,
+        }
     }
 }
@@ -76,7 +81,10 @@ impl VrlDiagnosticResult {
     }
 }
 
-fn compile(mut input: Input) -> Result<VrlCompileResult, VrlDiagnosticResult> {
+fn compile(
+    mut input: Input,
+    tz_str: Option<String>,
+) -> Result<VrlCompileResult, VrlDiagnosticResult> {
     let mut functions = vrl::stdlib::all();
     functions.extend(vector_vrl_functions::all());
     functions.extend(enrichment::vrl_functions());
@@ -85,7 +93,24 @@ fn compile(mut input: Input) -> Result<VrlCompileResult, VrlDiagnosticResult> {
     let state = TypeState::default();
     let mut runtime = Runtime::default();
     let config = CompileConfig::default();
-    let timezone = TimeZone::default();
+
+    let timezone = match tz_str.as_deref() {
+        // An empty or "Default" timezone string falls back to the default timezone.
+        None | Some("") | Some("Default") => TimeZone::default(),
+        Some(other) => match other.parse() {
+            Ok(tz) => TimeZone::Named(tz),
+            Err(_) => {
+                // Return an error when timezone parsing fails, instead of
+                // silently falling back to the default timezone.
+                let error_message = format!("Invalid timezone identifier: '{other}'");
+                return Err(VrlDiagnosticResult {
+                    list: vec![error_message.clone()],
+                    msg: error_message.clone(),
+                    msg_colorized: error_message,
+                });
+            }
+        },
+    };
 
     let mut target_value = TargetValue {
         value: event.clone(),
@@ -98,18 +123,32 @@ fn compile(mut input: Input) -> Result<VrlCompileResult, VrlDiagnosticResult> {
         Err(diagnostics) => return Err(VrlDiagnosticResult::new(&input.program, diagnostics)),
     };
 
-    match runtime.resolve(&mut target_value, &program.program, &timezone) {
-        Ok(result) => Ok(VrlCompileResult::new(result, target_value.value)),
+    let (result, elapsed_time) =
+        if let Some(performance) = web_sys::window().and_then(|w| w.performance()) {
+            let start_time = performance.now();
+            let result = runtime.resolve(&mut target_value, &program.program, &timezone);
+            let end_time = performance.now();
+            (result, Some(end_time - start_time))
+        } else {
+            // If the performance API is not available, run the program without timing.
+            let result = runtime.resolve(&mut target_value, &program.program, &timezone);
+            (result, None)
+        };
+
+    match result {
+        // The final event is in `target_value.value`.
+        // The value of the last expression is in `res`.
+        Ok(res) => Ok(VrlCompileResult::new(res, target_value.value, elapsed_time)),
         Err(err) => Err(VrlDiagnosticResult::new_runtime_error(&input.program, err)),
     }
 }
 
 // The user-facing function
 #[wasm_bindgen]
-pub fn run_vrl(incoming: &JsValue) -> JsValue {
+pub fn run_vrl(incoming: &JsValue, tz_str: &str) -> JsValue {
     let input: Input = incoming.into_serde().unwrap();
-    match compile(input) {
+    match compile(input, Some(tz_str.to_string())) {
         Ok(res) => JsValue::from_serde(&res).unwrap(),
         Err(err) => JsValue::from_serde(&err).unwrap(),
     }
@@ -129,6 +168,7 @@ pub fn vector_link() -> String {
 pub fn vrl_version() -> String {
     built_info::VRL_VERSION.to_string()
 }
+
 #[wasm_bindgen]
 pub fn vrl_link() -> String {
     built_info::VRL_LINK.to_string()
diff --git a/license-tool.toml b/license-tool.toml
index 6a91a44f04184..70e0ed9a741e5 100644
--- a/license-tool.toml
+++ b/license-tool.toml
@@ -1,12 +1,17 @@
 [overrides]
 "backon" = { origin = "https://github.com/Xuanwo/backon" }
 "bollard-stubs" = { origin = "https://github.com/fussybeaver/bollard" }
+"crunchy" = { origin = "https://github.com/eira-fransham/crunchy" }
 "openssl-macros" = { origin = "https://github.com/sfackler/rust-openssl" }
 "serde_nanos" = { origin = "https://github.com/caspervonb/serde_nanos" }
 
+# rust-license-tool can't find the license for jsonpath-rust 0.5.1
+"jsonpath-rust" = { license = "MIT", origin = "https://github.com/besok/jsonpath-rust" }
+
 # `ring` has a custom license that is mostly "ISC-style" but parts of it also fall under OpenSSL licensing.
 "ring-0.16.20" = { license = "ISC AND Custom" }
 "ring-0.17.5" = { license = "ISC AND Custom" }
+"ring-0.17.8" = { license = "ISC AND Custom" }
 
 # `rustls-webpki` doesn't specify their license in the metadata, but the file contains the ISC terms.
"rustls-webpki-0.100.1" = { license = "ISC" } diff --git a/proto/buf.yaml b/proto/buf.yaml deleted file mode 100644 index 1a5194568a90a..0000000000000 --- a/proto/buf.yaml +++ /dev/null @@ -1,7 +0,0 @@ -version: v1 -breaking: - use: - - FILE -lint: - use: - - DEFAULT diff --git a/proto/dnstap.proto b/proto/dnstap.proto deleted file mode 100644 index aa7229b9b505b..0000000000000 --- a/proto/dnstap.proto +++ /dev/null @@ -1,330 +0,0 @@ -// dnstap: flexible, structured event replication format for DNS software -// -// This file contains the protobuf schemas for the "dnstap" structured event -// replication format for DNS software. - -// Written in 2013-2014 by Farsight Security, Inc. -// -// To the extent possible under law, the author(s) have dedicated all -// copyright and related and neighboring rights to this file to the public -// domain worldwide. This file is distributed without any warranty. -// -// You should have received a copy of the CC0 Public Domain Dedication along -// with this file. If not, see: -// -// . - -syntax = "proto2"; -package dnstap; - -// "Dnstap": this is the top-level dnstap type, which is a "union" type that -// contains other kinds of dnstap payloads, although currently only one type -// of dnstap payload is defined. -// See: https://developers.google.com/protocol-buffers/docs/techniques#union -message Dnstap { - // DNS server identity. - // If enabled, this is the identity string of the DNS server which generated - // this message. Typically this would be the same string as returned by an - // "NSID" (RFC 5001) query. - optional bytes identity = 1; - - // DNS server version. - // If enabled, this is the version string of the DNS server which generated - // this message. Typically this would be the same string as returned by a - // "version.bind" query. - optional bytes version = 2; - - // Extra data for this payload. - // This field can be used for adding an arbitrary byte-string annotation to - // the payload. No encoding or interpretation is applied or enforced. - optional bytes extra = 3; - - // Identifies which field below is filled in. - enum Type { - MESSAGE = 1; - } - required Type type = 15; - - // One of the following will be filled in. - optional Message message = 14; -} - -// SocketFamily: the network protocol family of a socket. This specifies how -// to interpret "network address" fields. -enum SocketFamily { - INET = 1; // IPv4 (RFC 791) - INET6 = 2; // IPv6 (RFC 2460) -} - -// SocketProtocol: the protocol used to transport a DNS message. -enum SocketProtocol { - UDP = 1; // DNS over UDP transport (RFC 1035 section 4.2.1) - TCP = 2; // DNS over TCP transport (RFC 1035 section 4.2.2) - DOT = 3; // DNS over TLS (RFC 7858) - DOH = 4; // DNS over HTTPS (RFC 8484) - DNSCryptUDP = 5; // DNSCrypt over UDP (https://dnscrypt.info/protocol) - DNSCryptTCP = 6; // DNSCrypt over TCP (https://dnscrypt.info/protocol) -} - -// Policy: information about any name server operator policy -// applied to the processing of a DNS message. -message Policy { - - // Match: what aspect of the message or message exchange - // triggered the application of the Policy. - enum Match { - QNAME = 1; // Name in question section of query - CLIENT_IP = 2; // Client IP address - RESPONSE_IP = 3; // Address in A/AAAA RRSet - NS_NAME = 4; // Authoritative name server, by name - NS_IP = 5; // Authoritative name server, by IP address - } - - // The Action taken to implement the Policy. 
- enum Action { - NXDOMAIN = 1; // Respond with NXDOMAIN - NODATA = 2; // Respond with empty answer section - PASS = 3; // Do not alter the response (passthrough) - DROP = 4; // Do not respond. - TRUNCATE = 5; // Truncate UDP response, forcing TCP retry - LOCAL_DATA = 6; // Respond with local data from policy - } - - // type: the type of policy applied, e.g. "RPZ" for a - // policy from a Response Policy Zone. - optional string type = 1; - - // rule: the rule matched by the message. - // - // In a RPZ context, this is the owner name of the rule in - // the Response Policy Zone in wire format. - optional bytes rule = 2; - - // action: the policy action taken in response to the - // rule match. - optional Action action = 3; - - // match: the feature of the message exchange which matched the rule. - optional Match match = 4; - - // The matched value. Format depends on the matched feature . - optional bytes value = 5; -} - -// Message: a wire-format (RFC 1035 section 4) DNS message and associated -// metadata. Applications generating "Message" payloads should follow -// certain requirements based on the MessageType, see below. -message Message { - - // There are eight types of "Message" defined that correspond to the - // four arrows in the following diagram, slightly modified from RFC 1035 - // section 2: - - // +---------+ +----------+ +--------+ - // | | query | | query | | - // | Stub |-SQ--------CQ->| Recursive|-RQ----AQ->| Auth. | - // | Resolver| | Server | | Name | - // | |<-SR--------CR-| |<-RR----AR-| Server | - // +---------+ response | | response | | - // +----------+ +--------+ - - // Each arrow has two Type values each, one for each "end" of each arrow, - // because these are considered to be distinct events. Each end of each - // arrow on the diagram above has been marked with a two-letter Type - // mnemonic. Clockwise from upper left, these mnemonic values are: - // - // SQ: STUB_QUERY - // CQ: CLIENT_QUERY - // RQ: RESOLVER_QUERY - // AQ: AUTH_QUERY - // AR: AUTH_RESPONSE - // RR: RESOLVER_RESPONSE - // CR: CLIENT_RESPONSE - // SR: STUB_RESPONSE - - // Two additional types of "Message" have been defined for the - // "forwarding" case where an upstream DNS server is responsible for - // further recursion. These are not shown on the diagram above, but have - // the following mnemonic values: - - // FQ: FORWARDER_QUERY - // FR: FORWARDER_RESPONSE - - // The "Message" Type values are defined below. - - enum Type { - // AUTH_QUERY is a DNS query message received from a resolver by an - // authoritative name server, from the perspective of the authoritative - // name server. - AUTH_QUERY = 1; - - // AUTH_RESPONSE is a DNS response message sent from an authoritative - // name server to a resolver, from the perspective of the authoritative - // name server. - AUTH_RESPONSE = 2; - - // RESOLVER_QUERY is a DNS query message sent from a resolver to an - // authoritative name server, from the perspective of the resolver. - // Resolvers typically clear the RD (recursion desired) bit when - // sending queries. - RESOLVER_QUERY = 3; - - // RESOLVER_RESPONSE is a DNS response message received from an - // authoritative name server by a resolver, from the perspective of - // the resolver. - RESOLVER_RESPONSE = 4; - - // CLIENT_QUERY is a DNS query message sent from a client to a DNS - // server which is expected to perform further recursion, from the - // perspective of the DNS server. 
The client may be a stub resolver or - // forwarder or some other type of software which typically sets the RD - // (recursion desired) bit when querying the DNS server. The DNS server - // may be a simple forwarding proxy or it may be a full recursive - // resolver. - CLIENT_QUERY = 5; - - // CLIENT_RESPONSE is a DNS response message sent from a DNS server to - // a client, from the perspective of the DNS server. The DNS server - // typically sets the RA (recursion available) bit when responding. - CLIENT_RESPONSE = 6; - - // FORWARDER_QUERY is a DNS query message sent from a downstream DNS - // server to an upstream DNS server which is expected to perform - // further recursion, from the perspective of the downstream DNS - // server. - FORWARDER_QUERY = 7; - - // FORWARDER_RESPONSE is a DNS response message sent from an upstream - // DNS server performing recursion to a downstream DNS server, from the - // perspective of the downstream DNS server. - FORWARDER_RESPONSE = 8; - - // STUB_QUERY is a DNS query message sent from a stub resolver to a DNS - // server, from the perspective of the stub resolver. - STUB_QUERY = 9; - - // STUB_RESPONSE is a DNS response message sent from a DNS server to a - // stub resolver, from the perspective of the stub resolver. - STUB_RESPONSE = 10; - - // TOOL_QUERY is a DNS query message sent from a DNS software tool to a - // DNS server, from the perspective of the tool. - TOOL_QUERY = 11; - - // TOOL_RESPONSE is a DNS response message received by a DNS software - // tool from a DNS server, from the perspective of the tool. - TOOL_RESPONSE = 12; - - // UPDATE_QUERY is a Dynamic DNS Update request (RFC 2136) received - // by an authoritative name server, from the perspective of the - // authoritative name server. - UPDATE_QUERY = 13; - - // UPDATE_RESPONSE is a Dynamic DNS Update response (RFC 2136) sent - // from an authoritative name server, from the perspective of the - // authoritative name server. - UPDATE_RESPONSE = 14; - } - - // One of the Type values described above. - required Type type = 1; - - // One of the SocketFamily values described above. - optional SocketFamily socket_family = 2; - - // One of the SocketProtocol values described above. - optional SocketProtocol socket_protocol = 3; - - // The network address of the message initiator. - // For SocketFamily INET, this field is 4 octets (IPv4 address). - // For SocketFamily INET6, this field is 16 octets (IPv6 address). - optional bytes query_address = 4; - - // The network address of the message responder. - // For SocketFamily INET, this field is 4 octets (IPv4 address). - // For SocketFamily INET6, this field is 16 octets (IPv6 address). - optional bytes response_address = 5; - - // The transport port of the message initiator. - // This is a 16-bit UDP or TCP port number, depending on SocketProtocol. - optional uint32 query_port = 6; - - // The transport port of the message responder. - // This is a 16-bit UDP or TCP port number, depending on SocketProtocol. - optional uint32 response_port = 7; - - // The time at which the DNS query message was sent or received, depending - // on whether this is an AUTH_QUERY, RESOLVER_QUERY, or CLIENT_QUERY. - // This is the number of seconds since the UNIX epoch. - optional uint64 query_time_sec = 8; - - // The time at which the DNS query message was sent or received. - // This is the seconds fraction, expressed as a count of nanoseconds. - optional fixed32 query_time_nsec = 9; - - // The initiator's original wire-format DNS query message, verbatim. 
- optional bytes query_message = 10; - - // The "zone" or "bailiwick" pertaining to the DNS query message. - // This is a wire-format DNS domain name. - optional bytes query_zone = 11; - - // The time at which the DNS response message was sent or received, - // depending on whether this is an AUTH_RESPONSE, RESOLVER_RESPONSE, or - // CLIENT_RESPONSE. - // This is the number of seconds since the UNIX epoch. - optional uint64 response_time_sec = 12; - - // The time at which the DNS response message was sent or received. - // This is the seconds fraction, expressed as a count of nanoseconds. - optional fixed32 response_time_nsec = 13; - - // The responder's original wire-format DNS response message, verbatim. - optional bytes response_message = 14; - - // Operator policy applied to the processing of this message, if any. - optional Policy policy = 15; -} - -// All fields except for 'type' in the Message schema are optional. -// It is recommended that at least the following fields be filled in for -// particular types of Messages. - -// AUTH_QUERY: -// socket_family, socket_protocol -// query_address, query_port -// query_message -// query_time_sec, query_time_nsec - -// AUTH_RESPONSE: -// socket_family, socket_protocol -// query_address, query_port -// query_time_sec, query_time_nsec -// response_message -// response_time_sec, response_time_nsec - -// RESOLVER_QUERY: -// socket_family, socket_protocol -// query_message -// query_time_sec, query_time_nsec -// query_zone -// response_address, response_port - -// RESOLVER_RESPONSE: -// socket_family, socket_protocol -// query_time_sec, query_time_nsec -// query_zone -// response_address, response_port -// response_message -// response_time_sec, response_time_nsec - -// CLIENT_QUERY: -// socket_family, socket_protocol -// query_message -// query_time_sec, query_time_nsec - -// CLIENT_RESPONSE: -// socket_family, socket_protocol -// query_time_sec, query_time_nsec -// response_message -// response_time_sec, response_time_nsec diff --git a/proto/google/LICENSE-Apache-2.0.txt b/proto/third-party/google/LICENSE-Apache-2.0.txt similarity index 100% rename from proto/google/LICENSE-Apache-2.0.txt rename to proto/third-party/google/LICENSE-Apache-2.0.txt diff --git a/proto/google/README b/proto/third-party/google/README similarity index 100% rename from proto/google/README rename to proto/third-party/google/README diff --git a/proto/google/api/annotations.proto b/proto/third-party/google/api/annotations.proto similarity index 100% rename from proto/google/api/annotations.proto rename to proto/third-party/google/api/annotations.proto diff --git a/proto/google/api/client.proto b/proto/third-party/google/api/client.proto similarity index 100% rename from proto/google/api/client.proto rename to proto/third-party/google/api/client.proto diff --git a/proto/google/api/field_behavior.proto b/proto/third-party/google/api/field_behavior.proto similarity index 100% rename from proto/google/api/field_behavior.proto rename to proto/third-party/google/api/field_behavior.proto diff --git a/proto/google/api/http.proto b/proto/third-party/google/api/http.proto similarity index 100% rename from proto/google/api/http.proto rename to proto/third-party/google/api/http.proto diff --git a/proto/google/api/resource.proto b/proto/third-party/google/api/resource.proto similarity index 100% rename from proto/google/api/resource.proto rename to proto/third-party/google/api/resource.proto diff --git a/proto/google/protobuf/LICENSE b/proto/third-party/google/protobuf/LICENSE 
similarity index 100% rename from proto/google/protobuf/LICENSE rename to proto/third-party/google/protobuf/LICENSE diff --git a/proto/google/protobuf/any.proto b/proto/third-party/google/protobuf/any.proto similarity index 100% rename from proto/google/protobuf/any.proto rename to proto/third-party/google/protobuf/any.proto diff --git a/proto/google/protobuf/descriptor.proto b/proto/third-party/google/protobuf/descriptor.proto similarity index 100% rename from proto/google/protobuf/descriptor.proto rename to proto/third-party/google/protobuf/descriptor.proto diff --git a/proto/google/protobuf/empty.proto b/proto/third-party/google/protobuf/empty.proto similarity index 100% rename from proto/google/protobuf/empty.proto rename to proto/third-party/google/protobuf/empty.proto diff --git a/proto/google/protobuf/timestamp.proto b/proto/third-party/google/protobuf/timestamp.proto similarity index 100% rename from proto/google/protobuf/timestamp.proto rename to proto/third-party/google/protobuf/timestamp.proto diff --git a/proto/google/pubsub/v1/pubsub.proto b/proto/third-party/google/pubsub/v1/pubsub.proto similarity index 100% rename from proto/google/pubsub/v1/pubsub.proto rename to proto/third-party/google/pubsub/v1/pubsub.proto diff --git a/proto/google/rpc/status.proto b/proto/third-party/google/rpc/status.proto similarity index 100% rename from proto/google/rpc/status.proto rename to proto/third-party/google/rpc/status.proto diff --git a/lib/vector-core/proto/buf.yaml b/proto/vector/buf.yaml similarity index 100% rename from lib/vector-core/proto/buf.yaml rename to proto/vector/buf.yaml diff --git a/proto/dd_metric.proto b/proto/vector/dd_metric.proto similarity index 100% rename from proto/dd_metric.proto rename to proto/vector/dd_metric.proto diff --git a/proto/dd_trace.proto b/proto/vector/dd_trace.proto similarity index 100% rename from proto/dd_trace.proto rename to proto/vector/dd_trace.proto diff --git a/proto/ddsketch_full.proto b/proto/vector/ddsketch_full.proto similarity index 100% rename from proto/ddsketch_full.proto rename to proto/vector/ddsketch_full.proto diff --git a/proto/vector.proto b/proto/vector/vector.proto similarity index 100% rename from proto/vector.proto rename to proto/vector/vector.proto diff --git a/regression/Dockerfile b/regression/Dockerfile index b1b1ca1b5cc1c..4203d280d40c5 100644 --- a/regression/Dockerfile +++ b/regression/Dockerfile @@ -1,12 +1,7 @@ -# -# LADING -# -FROM ghcr.io/datadog/lading@sha256:15d003762f0015e99f2a9772fd8d7ac31165e4af2f645c404499ee48146202c8 as lading - # # VECTOR BUILDER # -FROM ghcr.io/vectordotdev/vector/soak-builder@sha256:c51a7091de2caebaa690e17f37dbfed4d4059dcdf5114a5596e8ca9b5ef494f3 as builder +FROM ghcr.io/vectordotdev/vector/soak-builder@sha256:c51a7091de2caebaa690e17f37dbfed4d4059dcdf5114a5596e8ca9b5ef494f3 AS builder WORKDIR /vector COPY . . 
RUN bash scripts/environment/install-protoc.sh @@ -21,12 +16,10 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry \ # FROM docker.io/debian:bookworm-slim@sha256:01bd742e2c269abf94e2fefb47b08b5b61c9a880b993417d23a1d0bd9fa60dc4 RUN apt-get update && apt-get dist-upgrade -y && apt-get -y --no-install-recommends install zlib1g ca-certificates && rm -rf /var/lib/apt/lists/* -COPY --from=lading /usr/bin/lading /usr/local/bin/lading -COPY --from=builder /vector/vector /usr/local/bin/vector +COPY --from=builder /vector/vector /usr/bin/vector RUN mkdir --parents --mode=0777 /var/lib/vector # Smoke test -RUN ["/usr/local/bin/lading", "--help"] -RUN ["/usr/local/bin/vector", "--version"] +RUN ["/usr/bin/vector", "--version"] -ENTRYPOINT ["/usr/local/bin/lading"] +ENTRYPOINT ["/usr/bin/vector"] diff --git a/regression/cases/datadog_agent_remap_blackhole/experiment.yaml b/regression/cases/datadog_agent_remap_blackhole/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/datadog_agent_remap_blackhole/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/datadog_agent_remap_blackhole/lading/lading.yaml b/regression/cases/datadog_agent_remap_blackhole/lading/lading.yaml index 28d82defced27..b870f2e2675bb 100644 --- a/regression/cases/datadog_agent_remap_blackhole/lading/lading.yaml +++ b/regression/cases/datadog_agent_remap_blackhole/lading/lading.yaml @@ -11,3 +11,6 @@ generator: post: variant: "datadog_log" maximum_prebuild_cache_size_bytes: "256 Mb" +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/datadog_agent_remap_blackhole_acks/experiment.yaml b/regression/cases/datadog_agent_remap_blackhole_acks/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/datadog_agent_remap_blackhole_acks/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/datadog_agent_remap_blackhole_acks/lading/lading.yaml b/regression/cases/datadog_agent_remap_blackhole_acks/lading/lading.yaml index 28d82defced27..b870f2e2675bb 100644 --- a/regression/cases/datadog_agent_remap_blackhole_acks/lading/lading.yaml +++ b/regression/cases/datadog_agent_remap_blackhole_acks/lading/lading.yaml @@ -11,3 +11,6 @@ generator: post: variant: "datadog_log" maximum_prebuild_cache_size_bytes: "256 Mb" +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/datadog_agent_remap_datadog_logs/experiment.yaml b/regression/cases/datadog_agent_remap_datadog_logs/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/datadog_agent_remap_datadog_logs/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/datadog_agent_remap_datadog_logs/lading/lading.yaml b/regression/cases/datadog_agent_remap_datadog_logs/lading/lading.yaml index 0f67c2c14f151..dedc4de0b9819 100644 --- 
a/regression/cases/datadog_agent_remap_datadog_logs/lading/lading.yaml +++ b/regression/cases/datadog_agent_remap_datadog_logs/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/datadog_agent_remap_datadog_logs_acks/experiment.yaml b/regression/cases/datadog_agent_remap_datadog_logs_acks/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/datadog_agent_remap_datadog_logs_acks/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/datadog_agent_remap_datadog_logs_acks/lading/lading.yaml b/regression/cases/datadog_agent_remap_datadog_logs_acks/lading/lading.yaml index 0f67c2c14f151..dedc4de0b9819 100644 --- a/regression/cases/datadog_agent_remap_datadog_logs_acks/lading/lading.yaml +++ b/regression/cases/datadog_agent_remap_datadog_logs_acks/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/enterprise_http_to_http/data/.gitkeep b/regression/cases/enterprise_http_to_http/data/.gitkeep deleted file mode 100644 index 8b137891791fe..0000000000000 --- a/regression/cases/enterprise_http_to_http/data/.gitkeep +++ /dev/null @@ -1 +0,0 @@ - diff --git a/regression/cases/enterprise_http_to_http/lading/lading.yaml b/regression/cases/enterprise_http_to_http/lading/lading.yaml deleted file mode 100644 index 31461642dbf8f..0000000000000 --- a/regression/cases/enterprise_http_to_http/lading/lading.yaml +++ /dev/null @@ -1,16 +0,0 @@ -generator: - - http: - seed: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, - 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131] - target_uri: "http://localhost:8282/" - bytes_per_second: "100 Mb" - parallel_connections: 10 - method: - post: - variant: "ascii" - maximum_prebuild_cache_size_bytes: "256 Mb" - headers: {} - -blackhole: - - http: - binding_addr: "0.0.0.0:8080" diff --git a/regression/cases/enterprise_http_to_http/vector/vector.yaml b/regression/cases/enterprise_http_to_http/vector/vector.yaml deleted file mode 100644 index c092bdd84fdc9..0000000000000 --- a/regression/cases/enterprise_http_to_http/vector/vector.yaml +++ /dev/null @@ -1,45 +0,0 @@ -data_dir: "/var/lib/vector" - -## -## Enterprise -## -enterprise: - api_key: "${DD_API_KEY-}" - configuration_key: "${DD_CONFIGURATION_KEY-}" - endpoint: "http://localhost:8080" - -## -## Sources -## - -sources: - internal_metrics: - type: "internal_metrics" - - http_source: - type: "http" - acknowledgements: - enabled: false - address: "0.0.0.0:8282" - -## -## Sinks -## - -sinks: - prometheus: - type: "prometheus_exporter" - inputs: [ "internal_metrics" ] - address: "0.0.0.0:9090" - - http_sink: - type: "http" - inputs: [ "http_source" ] - uri: "http://localhost:8080" - encoding: - codec: "text" - healthcheck: - enabled: false - buffer: - type: "memory" - max_events: 50000 # buffer 50 payloads at a time diff --git a/regression/cases/file_to_blackhole/data/.gitkeep b/regression/cases/file_to_blackhole/data/.gitkeep index 8b137891791fe..e69de29bb2d1d 100644 --- a/regression/cases/file_to_blackhole/data/.gitkeep +++ 
b/regression/cases/file_to_blackhole/data/.gitkeep @@ -1 +0,0 @@ - diff --git a/regression/cases/file_to_blackhole/experiment.yaml b/regression/cases/file_to_blackhole/experiment.yaml index 9b6d2c53c5271..7675acfd306d0 100644 --- a/regression/cases/file_to_blackhole/experiment.yaml +++ b/regression/cases/file_to_blackhole/experiment.yaml @@ -1 +1,11 @@ optimization_goal: egress_throughput +erratic: true + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/file_to_blackhole/lading/lading.yaml b/regression/cases/file_to_blackhole/lading/lading.yaml index 091608f32d65f..5017287363f08 100644 --- a/regression/cases/file_to_blackhole/lading/lading.yaml +++ b/regression/cases/file_to_blackhole/lading/lading.yaml @@ -1,14 +1,22 @@ generator: - file_gen: - seed: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, - 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131] - path_template: "/tmp/file-gen-%NNN%.log" - duplicates: 4 - variant: "ascii" - bytes_per_second: "100Mb" - maximum_bytes_per_file: "100Mb" - maximum_prebuild_cache_size_bytes: "400Mb" + logrotate_fs: + seed: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, + 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131] + load_profile: + constant: 100MiB + concurrent_logs: 4 + maximum_bytes_per_log: 100MiB + total_rotations: 5 + max_depth: 0 + variant: "ascii" + maximum_prebuild_cache_size_bytes: 250MiB + mount_point: /smp-shared blackhole: - tcp: binding_addr: "127.0.0.1:15400" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/file_to_blackhole/vector/vector.yaml b/regression/cases/file_to_blackhole/vector/vector.yaml index 48edf30e0e851..377ae72154fa9 100644 --- a/regression/cases/file_to_blackhole/vector/vector.yaml +++ b/regression/cases/file_to_blackhole/vector/vector.yaml @@ -11,7 +11,7 @@ sources: file: type: "file" include: - - "/tmp/file-gen-*.log" + - "/smp-shared/*.log" ## ## Sinks diff --git a/regression/cases/fluent_elasticsearch/experiment.yaml b/regression/cases/fluent_elasticsearch/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/fluent_elasticsearch/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/fluent_elasticsearch/lading/lading.yaml b/regression/cases/fluent_elasticsearch/lading/lading.yaml index de7f7d6d30ad4..80829c35e3539 100644 --- a/regression/cases/fluent_elasticsearch/lading/lading.yaml +++ b/regression/cases/fluent_elasticsearch/lading/lading.yaml @@ -2,12 +2,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "fluent" bytes_per_second: "256 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/fluent_elasticsearch/vector/vector.yaml b/regression/cases/fluent_elasticsearch/vector/vector.yaml index 2098b71a773d9..217d8b9cf9eb6 100644 --- 
a/regression/cases/fluent_elasticsearch/vector/vector.yaml +++ b/regression/cases/fluent_elasticsearch/vector/vector.yaml @@ -31,7 +31,7 @@ sinks: elasticsearch: type: "elasticsearch" inputs: [ "fluent" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" mode: "bulk" pipeline: "pipeline-name" compression: "none" diff --git a/regression/cases/http_elasticsearch/experiment.yaml b/regression/cases/http_elasticsearch/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/http_elasticsearch/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/http_elasticsearch/lading/lading.yaml b/regression/cases/http_elasticsearch/lading/lading.yaml index 1fa16431e80d9..69cc36c1de926 100644 --- a/regression/cases/http_elasticsearch/lading/lading.yaml +++ b/regression/cases/http_elasticsearch/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/http_text_to_http_json/experiment.yaml b/regression/cases/http_text_to_http_json/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/http_text_to_http_json/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/http_text_to_http_json/lading/lading.yaml b/regression/cases/http_text_to_http_json/lading/lading.yaml index 1ec4b39036c1f..97b45f7087245 100644 --- a/regression/cases/http_text_to_http_json/lading/lading.yaml +++ b/regression/cases/http_text_to_http_json/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/http_text_to_http_json/vector/vector.yaml b/regression/cases/http_text_to_http_json/vector/vector.yaml index ecb5961f00897..36c3aa08578dc 100644 --- a/regression/cases/http_text_to_http_json/vector/vector.yaml +++ b/regression/cases/http_text_to_http_json/vector/vector.yaml @@ -5,6 +5,9 @@ data_dir: "/var/lib/vector" ## sources: + internal_metrics: + type: "internal_metrics" + logs: type: "http" address: "0.0.0.0:8282" @@ -16,6 +19,11 @@ sources: ## sinks: + prometheus: + type: "prometheus_exporter" + inputs: [ "internal_metrics" ] + address: "0.0.0.0:9090" + http_sink: type: "http" uri: "http://localhost:8080" diff --git a/regression/cases/http_to_http_acks/data/.gitkeep b/regression/cases/http_to_http_acks/data/.gitkeep index 8b137891791fe..e69de29bb2d1d 100644 --- a/regression/cases/http_to_http_acks/data/.gitkeep +++ b/regression/cases/http_to_http_acks/data/.gitkeep @@ -1 +0,0 @@ - diff --git a/regression/cases/http_to_http_acks/experiment.yaml b/regression/cases/http_to_http_acks/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/http_to_http_acks/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git 
a/regression/cases/http_to_http_acks/lading/lading.yaml b/regression/cases/http_to_http_acks/lading/lading.yaml index 31461642dbf8f..7f0bbeef95068 100644 --- a/regression/cases/http_to_http_acks/lading/lading.yaml +++ b/regression/cases/http_to_http_acks/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/http_to_http_json/data/.gitkeep b/regression/cases/http_to_http_json/data/.gitkeep index 8b137891791fe..e69de29bb2d1d 100644 --- a/regression/cases/http_to_http_json/data/.gitkeep +++ b/regression/cases/http_to_http_json/data/.gitkeep @@ -1 +0,0 @@ - diff --git a/regression/cases/http_to_http_json/experiment.yaml b/regression/cases/http_to_http_json/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/http_to_http_json/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/http_to_http_json/lading/lading.yaml b/regression/cases/http_to_http_json/lading/lading.yaml index 72122f41aa613..68b9b25a6b6ff 100644 --- a/regression/cases/http_to_http_json/lading/lading.yaml +++ b/regression/cases/http_to_http_json/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/http_to_http_noack/data/.gitkeep b/regression/cases/http_to_http_noack/data/.gitkeep index 8b137891791fe..e69de29bb2d1d 100644 --- a/regression/cases/http_to_http_noack/data/.gitkeep +++ b/regression/cases/http_to_http_noack/data/.gitkeep @@ -1 +0,0 @@ - diff --git a/regression/cases/http_to_http_noack/experiment.yaml b/regression/cases/http_to_http_noack/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/http_to_http_noack/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/http_to_http_noack/lading/lading.yaml b/regression/cases/http_to_http_noack/lading/lading.yaml index 31461642dbf8f..7f0bbeef95068 100644 --- a/regression/cases/http_to_http_noack/lading/lading.yaml +++ b/regression/cases/http_to_http_noack/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/http_to_s3/data/.gitkeep b/regression/cases/http_to_s3/data/.gitkeep index 8b137891791fe..e69de29bb2d1d 100644 --- a/regression/cases/http_to_s3/data/.gitkeep +++ b/regression/cases/http_to_s3/data/.gitkeep @@ -1 +0,0 @@ - diff --git a/regression/cases/http_to_s3/experiment.yaml b/regression/cases/http_to_s3/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/http_to_s3/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/http_to_s3/lading/lading.yaml 
b/regression/cases/http_to_s3/lading/lading.yaml index 170cec7fb2aa2..0ce93d58e2ab4 100644 --- a/regression/cases/http_to_s3/lading/lading.yaml +++ b/regression/cases/http_to_s3/lading/lading.yaml @@ -13,3 +13,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/otlp_grpc_to_blackhole/experiment.yaml b/regression/cases/otlp_grpc_to_blackhole/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/otlp_grpc_to_blackhole/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/otlp_grpc_to_blackhole/lading/lading.yaml b/regression/cases/otlp_grpc_to_blackhole/lading/lading.yaml index c4c9ea1f4fe3d..763b301d51bf9 100644 --- a/regression/cases/otlp_grpc_to_blackhole/lading/lading.yaml +++ b/regression/cases/otlp_grpc_to_blackhole/lading/lading.yaml @@ -5,10 +5,13 @@ generator: target_uri: "http://127.0.0.1:4317/opentelemetry.proto.collector.logs.v1.LogsService/Export" bytes_per_second: "100 Mb" parallel_connections: 5 - block_sizes: ["1Mb", "0.5Mb", "0.25Mb", "0.125Mb", "128Kb"] maximum_prebuild_cache_size_bytes: "8 Mb" variant: "opentelemetry_logs" blackhole: - tcp: binding_addr: "127.0.0.1:15400" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/otlp_http_to_blackhole/experiment.yaml b/regression/cases/otlp_http_to_blackhole/experiment.yaml index 88dfaecaa4a52..3ab841613771d 100644 --- a/regression/cases/otlp_http_to_blackhole/experiment.yaml +++ b/regression/cases/otlp_http_to_blackhole/experiment.yaml @@ -1 +1,10 @@ optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/otlp_http_to_blackhole/lading/lading.yaml b/regression/cases/otlp_http_to_blackhole/lading/lading.yaml index 47bd47f1242a6..7f71aa751b070 100644 --- a/regression/cases/otlp_http_to_blackhole/lading/lading.yaml +++ b/regression/cases/otlp_http_to_blackhole/lading/lading.yaml @@ -15,3 +15,7 @@ generator: blackhole: - tcp: binding_addr: "127.0.0.1:15400" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/socket_to_socket_blackhole/experiment.yaml b/regression/cases/socket_to_socket_blackhole/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/socket_to_socket_blackhole/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/socket_to_socket_blackhole/lading/lading.yaml b/regression/cases/socket_to_socket_blackhole/lading/lading.yaml index e00bc05f893bb..6a6b12171d473 100644 --- a/regression/cases/socket_to_socket_blackhole/lading/lading.yaml +++ b/regression/cases/socket_to_socket_blackhole/lading/lading.yaml @@ -5,9 +5,12 @@ generator: addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Mb", "0.5Mb", "0.25Mb", "0.125Mb", "128Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - tcp: binding_addr: 
"0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/splunk_hec_indexer_ack_blackhole/data/.gitkeep b/regression/cases/splunk_hec_indexer_ack_blackhole/data/.gitkeep index 8b137891791fe..e69de29bb2d1d 100644 --- a/regression/cases/splunk_hec_indexer_ack_blackhole/data/.gitkeep +++ b/regression/cases/splunk_hec_indexer_ack_blackhole/data/.gitkeep @@ -1 +0,0 @@ - diff --git a/regression/cases/splunk_hec_indexer_ack_blackhole/experiment.yaml b/regression/cases/splunk_hec_indexer_ack_blackhole/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/splunk_hec_indexer_ack_blackhole/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/splunk_hec_indexer_ack_blackhole/lading/lading.yaml b/regression/cases/splunk_hec_indexer_ack_blackhole/lading/lading.yaml index 7a91b8fb67b1a..560a72de2d84c 100644 --- a/regression/cases/splunk_hec_indexer_ack_blackhole/lading/lading.yaml +++ b/regression/cases/splunk_hec_indexer_ack_blackhole/lading/lading.yaml @@ -11,3 +11,7 @@ generator: acknowledgements: ack_query_interval_seconds: 10 ack_timeout_seconds: 300 + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/splunk_hec_route_s3/experiment.yaml b/regression/cases/splunk_hec_route_s3/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/splunk_hec_route_s3/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/splunk_hec_route_s3/lading/lading.yaml b/regression/cases/splunk_hec_route_s3/lading/lading.yaml index 0e7b95418cfc7..308e3c05ce98c 100644 --- a/regression/cases/splunk_hec_route_s3/lading/lading.yaml +++ b/regression/cases/splunk_hec_route_s3/lading/lading.yaml @@ -11,3 +11,7 @@ generator: blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/splunk_hec_to_splunk_hec_logs_acks/experiment.yaml b/regression/cases/splunk_hec_to_splunk_hec_logs_acks/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/splunk_hec_to_splunk_hec_logs_acks/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/splunk_hec_to_splunk_hec_logs_acks/lading/lading.yaml b/regression/cases/splunk_hec_to_splunk_hec_logs_acks/lading/lading.yaml index af5c4f3e1898e..a84c3fb60727b 100644 --- a/regression/cases/splunk_hec_to_splunk_hec_logs_acks/lading/lading.yaml +++ b/regression/cases/splunk_hec_to_splunk_hec_logs_acks/lading/lading.yaml @@ -14,3 +14,7 @@ generator: blackhole: - splunk_hec: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/splunk_hec_to_splunk_hec_logs_noack/experiment.yaml b/regression/cases/splunk_hec_to_splunk_hec_logs_noack/experiment.yaml new 
file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/splunk_hec_to_splunk_hec_logs_noack/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/splunk_hec_to_splunk_hec_logs_noack/lading/lading.yaml b/regression/cases/splunk_hec_to_splunk_hec_logs_noack/lading/lading.yaml index 7e432bda304e7..a487150adf69b 100644 --- a/regression/cases/splunk_hec_to_splunk_hec_logs_noack/lading/lading.yaml +++ b/regression/cases/splunk_hec_to_splunk_hec_logs_noack/lading/lading.yaml @@ -11,3 +11,7 @@ generator: blackhole: - splunk_hec: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_humio_logs/experiment.yaml b/regression/cases/syslog_humio_logs/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/syslog_humio_logs/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/syslog_humio_logs/lading/lading.yaml b/regression/cases/syslog_humio_logs/lading/lading.yaml index 2852db5d5155d..8c8c616ea4488 100644 --- a/regression/cases/syslog_humio_logs/lading/lading.yaml +++ b/regression/cases/syslog_humio_logs/lading/lading.yaml @@ -1,12 +1,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_humio_logs/vector/vector.yaml b/regression/cases/syslog_humio_logs/vector/vector.yaml index 2145e2f6b5638..485e2a8911d61 100644 --- a/regression/cases/syslog_humio_logs/vector/vector.yaml +++ b/regression/cases/syslog_humio_logs/vector/vector.yaml @@ -27,7 +27,7 @@ sinks: humio_logs: type: "humio_logs" inputs: [ "syslog" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" encoding: codec: "json" token: "humio_token" diff --git a/regression/cases/syslog_log2metric_humio_metrics/experiment.yaml b/regression/cases/syslog_log2metric_humio_metrics/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/syslog_log2metric_humio_metrics/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/syslog_log2metric_humio_metrics/lading/lading.yaml b/regression/cases/syslog_log2metric_humio_metrics/lading/lading.yaml index 2852db5d5155d..8c8c616ea4488 100644 --- a/regression/cases/syslog_log2metric_humio_metrics/lading/lading.yaml +++ b/regression/cases/syslog_log2metric_humio_metrics/lading/lading.yaml @@ -1,12 +1,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 
107, 109, 113, 127, 131, 137] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_log2metric_humio_metrics/vector/vector.yaml b/regression/cases/syslog_log2metric_humio_metrics/vector/vector.yaml index f0113d3423b38..780fc384c4ea2 100644 --- a/regression/cases/syslog_log2metric_humio_metrics/vector/vector.yaml +++ b/regression/cases/syslog_log2metric_humio_metrics/vector/vector.yaml @@ -41,7 +41,7 @@ sinks: humio_metrics: type: "humio_metrics" inputs: [ "log2metric" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" token: "humio_token" healthcheck: enabled: false diff --git a/regression/cases/syslog_log2metric_splunk_hec_metrics/experiment.yaml b/regression/cases/syslog_log2metric_splunk_hec_metrics/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/syslog_log2metric_splunk_hec_metrics/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/syslog_log2metric_splunk_hec_metrics/lading/lading.yaml b/regression/cases/syslog_log2metric_splunk_hec_metrics/lading/lading.yaml index 2852db5d5155d..8c8c616ea4488 100644 --- a/regression/cases/syslog_log2metric_splunk_hec_metrics/lading/lading.yaml +++ b/regression/cases/syslog_log2metric_splunk_hec_metrics/lading/lading.yaml @@ -1,12 +1,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_log2metric_splunk_hec_metrics/vector/vector.yaml b/regression/cases/syslog_log2metric_splunk_hec_metrics/vector/vector.yaml index 4c02e036f81da..86036373115d4 100644 --- a/regression/cases/syslog_log2metric_splunk_hec_metrics/vector/vector.yaml +++ b/regression/cases/syslog_log2metric_splunk_hec_metrics/vector/vector.yaml @@ -43,7 +43,7 @@ sinks: splunk_hec_metrics: type: "splunk_hec_metrics" inputs: [ "log2metric" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" token: "splunk_token" healthcheck: enabled: false diff --git a/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/experiment.yaml b/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/experiment.yaml index 88dfaecaa4a52..3ab841613771d 100644 --- a/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/experiment.yaml +++ b/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/experiment.yaml @@ -1 +1,10 @@ optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git 
a/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/lading/lading.yaml b/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/lading/lading.yaml index fca7626e0bc7f..e8d45f0936c2c 100644 --- a/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/lading/lading.yaml +++ b/regression/cases/syslog_log2metric_tag_cardinality_limit_blackhole/lading/lading.yaml @@ -1,8 +1,11 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_loki/experiment.yaml b/regression/cases/syslog_loki/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/syslog_loki/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/syslog_loki/lading/lading.yaml b/regression/cases/syslog_loki/lading/lading.yaml index 94a7c95746465..adf4f48e5ef5d 100644 --- a/regression/cases/syslog_loki/lading/lading.yaml +++ b/regression/cases/syslog_loki/lading/lading.yaml @@ -2,12 +2,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_loki/vector/vector.yaml b/regression/cases/syslog_loki/vector/vector.yaml index a6196b0fd2b8a..778842f59b526 100644 --- a/regression/cases/syslog_loki/vector/vector.yaml +++ b/regression/cases/syslog_loki/vector/vector.yaml @@ -27,7 +27,7 @@ sinks: loki: type: "loki" inputs: [ "syslog" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" encoding: codec: "json" out_of_order_action: "accept" diff --git a/regression/cases/syslog_regex_logs2metric_ddmetrics/experiment.yaml b/regression/cases/syslog_regex_logs2metric_ddmetrics/experiment.yaml index 88dfaecaa4a52..3ab841613771d 100644 --- a/regression/cases/syslog_regex_logs2metric_ddmetrics/experiment.yaml +++ b/regression/cases/syslog_regex_logs2metric_ddmetrics/experiment.yaml @@ -1 +1,10 @@ optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/syslog_regex_logs2metric_ddmetrics/lading/lading.yaml b/regression/cases/syslog_regex_logs2metric_ddmetrics/lading/lading.yaml index 2852db5d5155d..8c8c616ea4488 100644 --- a/regression/cases/syslog_regex_logs2metric_ddmetrics/lading/lading.yaml +++ b/regression/cases/syslog_regex_logs2metric_ddmetrics/lading/lading.yaml @@ -1,12 +1,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 
97, 101, 103, 107, 109, 113, 127, 131, 137] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_regex_logs2metric_ddmetrics/vector/vector.yaml b/regression/cases/syslog_regex_logs2metric_ddmetrics/vector/vector.yaml index b66ca6210dd7b..5d50c63e57e6e 100644 --- a/regression/cases/syslog_regex_logs2metric_ddmetrics/vector/vector.yaml +++ b/regression/cases/syslog_regex_logs2metric_ddmetrics/vector/vector.yaml @@ -48,6 +48,6 @@ sinks: datadog_metrics: type: "datadog_metrics" inputs: [ "log2metric" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" default_api_key: "DEADBEEF" default_namespace: "vector" diff --git a/regression/cases/syslog_splunk_hec_logs/experiment.yaml b/regression/cases/syslog_splunk_hec_logs/experiment.yaml new file mode 100644 index 0000000000000..3ab841613771d --- /dev/null +++ b/regression/cases/syslog_splunk_hec_logs/experiment.yaml @@ -0,0 +1,10 @@ +optimization_goal: ingress_throughput + +target: + name: vector + command: /usr/bin/vector + cpu_allotment: 7 + memory_allotment: 8GiB + + environment: + VECTOR_THREADS: 4 diff --git a/regression/cases/syslog_splunk_hec_logs/lading/lading.yaml b/regression/cases/syslog_splunk_hec_logs/lading/lading.yaml index 2852db5d5155d..8c8c616ea4488 100644 --- a/regression/cases/syslog_splunk_hec_logs/lading/lading.yaml +++ b/regression/cases/syslog_splunk_hec_logs/lading/lading.yaml @@ -1,12 +1,15 @@ generator: - tcp: seed: [2, 3, 5, 7, 11, 13, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137] - addr: "localhost:8282" + addr: "0.0.0.0:8282" variant: "syslog5424" bytes_per_second: "500 Mb" - block_sizes: ["1Kb", "2Kb", "4Kb", "8Kb", "256Kb", "512Kb", "1024Kb"] maximum_prebuild_cache_size_bytes: "256 Mb" blackhole: - http: binding_addr: "0.0.0.0:8080" + +target_metrics: + - prometheus: # internal telemetry + uri: "http://127.0.0.1:9090/metrics" diff --git a/regression/cases/syslog_splunk_hec_logs/vector/vector.yaml b/regression/cases/syslog_splunk_hec_logs/vector/vector.yaml index af4b6f0387cff..e4d63f4e23b41 100644 --- a/regression/cases/syslog_splunk_hec_logs/vector/vector.yaml +++ b/regression/cases/syslog_splunk_hec_logs/vector/vector.yaml @@ -27,7 +27,7 @@ sinks: splunk_hec_logs: type: "splunk_hec_logs" inputs: [ "syslog" ] - endpoint: "http://localhost:8080" + endpoint: "http://0.0.0.0:8080" encoding: codec: "json" token: "abcd1234" diff --git a/regression/config.yaml b/regression/config.yaml new file mode 100644 index 0000000000000..b9b54de25b372 --- /dev/null +++ b/regression/config.yaml @@ -0,0 +1,37 @@ +lading: + version: 0.25.4 + +target: + + +# Link templates for reports. +# +# Values may be removed to disable corresponding links in reports. +# +# Links that may be set: +# - `metrics_dashboard`: link to a metrics dashboard +# - `profiles`: link to profiles +# - `per_experiment_logs`: link to logs, for each experiment +# +# Additionally, arbitrary links may be added to experiment configuration files. +# These should be added as a list of key-value pairs in a 'report_links' +# section. 
For example: +# ``` +# report_links: +# - text: Link Text +# link: "link template" +# ``` +# +# Available variables: +# - `job_id`: the job ID +# - `start_time_ms`: start time of the job's metrics data, in ms +# - `end_time_ms`: end time of the job's metrics data, in ms +# - `filter_start`: arbitrary time before this job's replicates were run, +# suitable for filtering logs and profiles by job ID. +# - `filter_end`: arbitrary time after this job's replicates were run and logs +# were uploaded, suitable for filtering logs and profiles by job ID. +# +# Additional variables for per-experiment links: +# - `experiment`: the name of the experiment +report: + metrics_dashboard: "https://app.datadoghq.com/dashboard/ykh-ua8-vcu/SMP-Regression-Detector-Metrics?fromUser=true&refresh_mode=paused&tpl_var_run-id%5B0%5D={{ job_id }}&view=spans&from_ts={{ start_time_ms }}&to_ts={{ end_time_ms }}&live=false" diff --git a/regression/metadata.source b/regression/metadata.source deleted file mode 100644 index 79d756fb4d14a..0000000000000 --- a/regression/metadata.source +++ /dev/null @@ -1,2 +0,0 @@ -REPOSITORY="git@github.com:vectordotdev/vector.git" -TARGET_ENVARS="VECTOR_THREADS=8,VECTOR_REQUIRE_HEALTHY=true" diff --git a/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md b/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md index fbc130122a115..4b8b75c032d66 100644 --- a/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md +++ b/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md @@ -471,12 +471,12 @@ Here `event` is an encoded event to be produced by the transform, and `lane` is > An emitting function is called from a transform component called `example_transform` with `lane` parameter set to `example_lane`. Then the downstream `console` sink have to be defined as the following to be able to read the emitted event: > -> ```toml -> [sinks.example_console] -> type = "console" -> inputs = ["example_transform.example_lane"] # would output the event from `example_lane` -> encoding.codec = "text" -> ``` +> ```toml +> [sinks.example_console] +> type = "console" +> inputs = ["example_transform.example_lane"] # would output the event from `example_lane` +> encoding.codec = "text" +> ``` > > Other components connected to the same transform, but with different lanes names or without lane names at all would not receive any event. @@ -488,7 +488,7 @@ Events produced by the transforms through calling an emitting function can have Both log and metrics events are encoded using [external tagging](https://serde.rs/enum-representations.html#externally-tagged). -* [Log events](https://vector.dev/docs/about/data-model/log/) could be seen as tables created using +* [Log events](https://vector.dev/docs/architecture/data-model/log/) could be seen as tables created using ```lua { @@ -498,12 +498,12 @@ Both log and metrics events are encoded using [external tagging](https://serde.r } ``` - The content of the `log` field corresponds to the usual [log event](https://vector.dev/docs/about/data-model/log/#examples) structure, with possible nesting of the fields. + The content of the `log` field corresponds to the usual [log event](https://vector.dev/docs/architecture/data-model/log/#examples) structure, with possible nesting of the fields. 
If a log event is created by the user inside the transform is a table, then, if default fields named according to the [global schema](https://vector.dev/docs/reference/global-options/#log_schema) are not present in such a table, then they are automatically added to the event. This rule does not apply to events having `userdata` type. **Example 1** - > The global schema is configured so that `message_key` is `"message"`, `timestamp_key` is `"timestamp"`, and `host_key` is is `"instance_id"`. + > The global schema is configured so that `message_key` is `"message"`, `timestamp_key` is `"timestamp"`, and `host_key` is `"instance_id"`. > > If a new event is created inside the user-defined Lua code as a table > @@ -532,7 +532,7 @@ Both log and metrics events are encoded using [external tagging](https://serde.r > > And then emits the event. In that case Vector would not automatically insert the `timestamp` field. -* [Metric events](https://vector.dev/docs/about/data-model/metric/) could be seen as tables created using +* [Metric events](https://vector.dev/docs/architecture/data-model/metric/) could be seen as tables created using ```lua { @@ -542,7 +542,7 @@ Both log and metrics events are encoded using [external tagging](https://serde.r } ``` - The content of the `metric` field matches the [metric data model](https://vector.dev/docs/about/data-model/metric). The values use [external tagging](https://serde.rs/enum-representations.html#externally-tagged) with respect to the metric type, see the examples. + The content of the `metric` field matches the [metric data model](https://vector.dev/docs/architecture/data-model/metric). The values use [external tagging](https://serde.rs/enum-representations.html#externally-tagged) with respect to the metric type, see the examples. In case when the metric events are created as tables in user-defined code, the following default values are assumed if they are not provided: @@ -552,7 +552,7 @@ Both log and metrics events are encoded using [external tagging](https://serde.r | `kind` | `absolute` | | `tags` | empty map | - Furthermore, for [`aggregated_histogram`](https://vector.dev/docs/about/data-model/metric/#aggregated_histogram) the `count` field inside the `value` map can be omitted. + Furthermore, for [`aggregated_histogram`](https://vector.dev/docs/architecture/data-model/metric/#aggregated_histogram) the `count` field inside the `value` map can be omitted. **Example: `counter`** @@ -621,7 +621,7 @@ Both log and metrics events are encoded using [external tagging](https://serde.r > } > } > } - > Note that the field [`count`](https://vector.dev/docs/about/data-model/metric/#count) is not required because it can be inferred by Vector automatically by summing up the values from `counts`. + > Note that the field [`count`](https://vector.dev/docs/architecture/data-model/metric/#count) is not required because it can be inferred by Vector automatically by summing up the values from `counts`. 
**Example: `aggregated_summary`** > The minimal Lua code required to create an aggregated summary metric is the following: @@ -645,14 +645,14 @@ The mapping between Vector data types and Lua data types is the following: | Vector Type | Lua Type | Comment | | :----------- | :-------- | :------- | -| [`String`](https://vector.dev/docs/about/data-model/log/#strings) | [`string`](https://www.lua.org/pil/2.4.html) || -| [`Integer`](https://vector.dev/docs/about/data-model/log/#ints) | [`integer`](https://docs.rs/mlua/0.6.0/mlua/type.Integer.html) || -| [`Float`](https://vector.dev/docs/about/data-model/log/#floats) | [`number`](https://docs.rs/mlua/0.6.0/mlua/type.Number.html) || -| [`Boolean`](https://vector.dev/docs/about/data-model/log/#booleans) | [`boolean`](https://www.lua.org/pil/2.2.html) || -| [`Timestamp`](https://vector.dev/docs/about/data-model/log/#timestamps) | [`userdata`](https://www.lua.org/pil/28.1.html) | There is no dedicated timestamp type in Lua. However, there is a standard library function [`os.date`](https://www.lua.org/manual/5.1/manual.html#pdf-os.date) which returns a table with fields `year`, `month`, `day`, `hour`, `min`, `sec`, and some others. Other standard library functions, such as [`os.time`](https://www.lua.org/manual/5.1/manual.html#pdf-os.time), support tables with these fields as arguments. Because of that, Vector timestamps passed to the transform are represented as `userdata` with the same set of accessible fields. In order to have one-to-one correspondence between Vector timestamps and Lua timestamps, `os.date` function from the standard library is patched to return not a table, but `userdata` with the same set of fields as it usually would return instead. This approach makes it possible to have both compatibility with the standard library functions and a dedicated data type for timestamps. | -| [`Null`](https://vector.dev/docs/about/data-model/log/#null-values) | empty string | In Lua setting a table field to `nil` means deletion of this field. Furthermore, setting an array element to `nil` leads to deletion of this element. In order to avoid inconsistencies, already present `Null` values are visible represented as empty strings from Lua code, and it is impossible to create a new `Null` value in the user-defined code. | -| [`Map`](https://vector.dev/docs/about/data-model/log/#maps) | [`userdata`](https://www.lua.org/pil/28.1.html) or [`table`](https://www.lua.org/pil/2.5.html) | Maps which are parts of events passed to the transform from Vector have `userdata` type. User-created maps have `table` type. Both types are converted to Vector's `Map` type when they are emitted from the transform. | -| [`Array`](https://vector.dev/docs/about/data-model/log/#arrays) | [`sequence`](https://www.lua.org/pil/11.1.html) | Sequences in Lua are a special case of tables. Because of that fact, the indexes can in principle start from any number. However, the convention in Lua is to to start indexes from 1 instead of 0, so Vector should adhere it. 
|
+| [`String`](https://vector.dev/docs/architecture/data-model/log/#strings) | [`string`](https://www.lua.org/pil/2.4.html) ||
+| [`Integer`](https://vector.dev/docs/architecture/data-model/log/#ints) | [`integer`](https://docs.rs/mlua/0.6.0/mlua/type.Integer.html) ||
+| [`Float`](https://vector.dev/docs/architecture/data-model/log/#floats) | [`number`](https://docs.rs/mlua/0.6.0/mlua/type.Number.html) ||
+| [`Boolean`](https://vector.dev/docs/architecture/data-model/log/#booleans) | [`boolean`](https://www.lua.org/pil/2.2.html) ||
+| [`Timestamp`](https://vector.dev/docs/architecture/data-model/log/#timestamps) | [`userdata`](https://www.lua.org/pil/28.1.html) | There is no dedicated timestamp type in Lua. However, there is a standard library function [`os.date`](https://www.lua.org/manual/5.1/manual.html#pdf-os.date) which returns a table with fields `year`, `month`, `day`, `hour`, `min`, `sec`, and some others. Other standard library functions, such as [`os.time`](https://www.lua.org/manual/5.1/manual.html#pdf-os.time), support tables with these fields as arguments. Because of that, Vector timestamps passed to the transform are represented as `userdata` with the same set of accessible fields. In order to have one-to-one correspondence between Vector timestamps and Lua timestamps, the `os.date` function from the standard library is patched to return not a table, but `userdata` with the same set of fields as the table it would usually return. This approach makes it possible to have both compatibility with the standard library functions and a dedicated data type for timestamps. |
+| [`Null`](https://vector.dev/docs/architecture/data-model/log/#null-values) | empty string | In Lua setting a table field to `nil` means deletion of this field. Furthermore, setting an array element to `nil` leads to deletion of this element. In order to avoid inconsistencies, already present `Null` values are visibly represented as empty strings from Lua code, and it is impossible to create a new `Null` value in the user-defined code. |
+| [`Map`](https://vector.dev/docs/architecture/data-model/log/#maps) | [`userdata`](https://www.lua.org/pil/28.1.html) or [`table`](https://www.lua.org/pil/2.5.html) | Maps which are parts of events passed to the transform from Vector have `userdata` type. User-created maps have `table` type. Both types are converted to Vector's `Map` type when they are emitted from the transform. |
+| [`Array`](https://vector.dev/docs/architecture/data-model/log/#arrays) | [`sequence`](https://www.lua.org/pil/11.1.html) | Sequences in Lua are a special case of tables. Because of that fact, the indexes can in principle start from any number. However, the convention in Lua is to start indexes from 1 instead of 0, so Vector should adhere to it. |
 
 ### Configuration
 
@@ -679,7 +679,7 @@ The implementation of `lua` transform supports only log events. Processing of lo
 
 Events have type [`userdata`](https://www.lua.org/pil/28.1.html) with custom [metamethods](https://www.lua.org/pil/13.html), so they are views to Vector's events. Thus passing an event to Lua has zero cost, so only when fields are actually accessed the data is copied to Lua.
 
-The fields are accessed through string indexes using [Vector's field path notation](https://vector.dev/docs/about/data-model/log/).
+The fields are accessed through string indexes using [Vector's field path notation](https://vector.dev/docs/architecture/data-model/log/).
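For illustration, a minimal sketch of such access follows. The `process` hook and the `log` accessor are assumptions based on the API described in this RFC, not a fixed signature:

```lua
-- Hypothetical sketch of path-based field access in a `lua` transform.
function process (event, emit)
  local msg = event.log["message"]           -- top-level field
  local code = event.log["http.status"]      -- nested field via path notation
  event.log["parsed.message_length"] = #msg  -- writing a path creates nested maps
  emit(event)
end
```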
## Sales Pitch diff --git a/rfcs/2020-05-25-2692-more-usable-logevents.md b/rfcs/2020-05-25-2692-more-usable-logevents.md index 73e50f5231dd6..09ab89b35d7e5 100644 --- a/rfcs/2020-05-25-2692-more-usable-logevents.md +++ b/rfcs/2020-05-25-2692-more-usable-logevents.md @@ -162,7 +162,7 @@ There is no guide accompanying this RFC, it only minimally touches user facing s ## Doc Level Proposal -> **Placement:** Insert into [Log Event](https://vector.dev/docs/about/data-model/log/#types)'s [Types](https://vector.dev/docs/about/data-model/log/#types) section +> **Placement:** Insert into [Log Event](https://vector.dev/docs/architecture/data-model/log/#types)'s [Types](https://vector.dev/docs/architecture/data-model/log/#types) section ### Bytes diff --git a/rfcs/2020-07-28-3642-jmx_rfc.md b/rfcs/2020-07-28-3642-jmx_rfc.md index fb3a98d32cd56..b872b276574f5 100644 --- a/rfcs/2020-07-28-3642-jmx_rfc.md +++ b/rfcs/2020-07-28-3642-jmx_rfc.md @@ -223,7 +223,7 @@ principles of Vector: > One Tool. All Data. - One simple tool gets your logs, metrics, and traces > (coming soon) from A to B. -[Vector principles](https://vector.dev/docs/about/what-is-vector/#who-should-use-vector) +[Vector principles](https://vector.dev/docs/) If users are already running Prometheus though, they could opt for the Prometheus path. diff --git a/rfcs/2020-08-21-3092-apache-metrics-source.md b/rfcs/2020-08-21-3092-apache-metrics-source.md index c7ab4a9162ad4..4b285632f121b 100644 --- a/rfcs/2020-08-21-3092-apache-metrics-source.md +++ b/rfcs/2020-08-21-3092-apache-metrics-source.md @@ -166,8 +166,7 @@ principles of Vector: > One Tool. All Data. - One simple tool gets your logs, metrics, and traces > (coming soon) from A to B. -[Vector -principles](https://vector.dev/docs/about/what-is-vector/#who-should-use-vector) +[Vector principles](https://vector.dev/docs/) On the same page, it is mentioned that Vector should be a replacement for Telegraf. diff --git a/rfcs/2020-08-26-3191-host-metrics.md b/rfcs/2020-08-26-3191-host-metrics.md index f494030000fad..58243ddf1eeda 100644 --- a/rfcs/2020-08-26-3191-host-metrics.md +++ b/rfcs/2020-08-26-3191-host-metrics.md @@ -180,7 +180,7 @@ principles of Vector: > (coming soon) from A to B. [Vector -principles](https://vector.dev/docs/about/what-is-vector/#who-should-use-vector) +principles](https://vector.dev/docs/) On the same page, it is mentioned that Vector should be a replacement for Telegraf. diff --git a/rfcs/2020-08-27-3603-postgres-metrics.md b/rfcs/2020-08-27-3603-postgres-metrics.md index cbc7facbdca6c..2b47b4e07e12f 100644 --- a/rfcs/2020-08-27-3603-postgres-metrics.md +++ b/rfcs/2020-08-27-3603-postgres-metrics.md @@ -136,7 +136,7 @@ principles of Vector: > (coming soon) from A to B. [Vector -principles](https://vector.dev/docs/about/what-is-vector/#who-should-use-vector) +principles](https://vector.dev/docs/) On the same page, it is mentioned that Vector should be a replacement for Telegraf. diff --git a/rfcs/2020-08-31-3640-nginx-metrics-source.md b/rfcs/2020-08-31-3640-nginx-metrics-source.md index 5351aa1351660..93551a51492c9 100644 --- a/rfcs/2020-08-31-3640-nginx-metrics-source.md +++ b/rfcs/2020-08-31-3640-nginx-metrics-source.md @@ -131,7 +131,7 @@ principles of Vector: > (coming soon) from A to B. [Vector -principles](https://vector.dev/docs/about/what-is-vector/#who-should-use-vector) +principles](https://vector.dev/docs/) On the same page, it is mentioned that Vector should be a replacement for Telegraf. 
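The `register_counter!` to `counter!` change in the hunk above reflects the newer `metrics` crate API, in which the separate `register_*` macros were removed and `counter!` itself registers the metric and returns a usable handle. A minimal sketch of the resulting call shape, using the struct and field names from the hunk but otherwise simplified for illustration:

```rust
// Sketch assuming the post-0.22 `metrics` crate API, where `counter!`
// both registers the counter (if needed) and returns a `Counter` handle.
use metrics::{counter, Counter};

struct RegisteredEndpointBytesReceived {
    protocol: &'static str,
    endpoint: String,
}

impl RegisteredEndpointBytesReceived {
    fn register(self) -> Counter {
        counter!(
            "component_received_bytes_total",
            "protocol" => self.protocol,
            "endpoint" => self.endpoint.clone(),
        )
    }
}
```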
diff --git a/rfcs/2020-08-31-3641-mongo-metrics.md b/rfcs/2020-08-31-3641-mongo-metrics.md index ca100cf457302..32949f59b1273 100644 --- a/rfcs/2020-08-31-3641-mongo-metrics.md +++ b/rfcs/2020-08-31-3641-mongo-metrics.md @@ -1107,7 +1107,7 @@ principles of Vector: > (coming soon) from A to B. [Vector -principles](https://vector.dev/docs/about/what-is-vector/#who-should-use-vector) +principles](https://vector.dev/docs/) On the same page, it is mentioned that Vector should be a replacement for Telegraf. diff --git a/rfcs/2021-09-01-8547-accept-metrics-in-datadog-agent-source.md b/rfcs/2021-09-01-8547-accept-metrics-in-datadog-agent-source.md index aec7cd9db8e3a..ac95ef626762e 100644 --- a/rfcs/2021-09-01-8547-accept-metrics-in-datadog-agent-source.md +++ b/rfcs/2021-09-01-8547-accept-metrics-in-datadog-agent-source.md @@ -116,7 +116,7 @@ A few details about the Datadog Agents & [Datadog metrics](https://docs.datadogh [here](https://github.com/DataDog/agent-payload/blob/master/proto/metrics/agent_payload.proto#L47-L81). Vector has a nice description of its [metrics data -model](https://vector.dev/docs/about/under-the-hood/architecture/data-model/metric/) and a [concise enum for +model](https://vector.dev/docs/architecture/data-model/metric/) and a [concise enum for representing it](https://github.com/vectordotdev/vector/blob/master/lib/vector-core/src/event/metric.rs#L135-L169). diff --git a/rfcs/2022-07-28-13691-registered-internal-events.md b/rfcs/2022-07-28-13691-registered-internal-events.md index 126afb28c1550..b749d2d946f24 100644 --- a/rfcs/2022-07-28-13691-registered-internal-events.md +++ b/rfcs/2022-07-28-13691-registered-internal-events.md @@ -161,7 +161,7 @@ impl RegisterInternalEvent for RegisteredEndpointBytesReceived { type Handle = EndpointBytesReceivedHandle; fn register(self) -> Self::Handle { - let bytes_total = register_counter!( + let bytes_total = counter!( "component_received_bytes_total", "protocol" => self.protocol, "endpoint" => self.endpoint.clone(), diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 1c8cfba9f594b..c2ee2a8766ad1 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.75.0" +channel = "1.88" profile = "default" diff --git a/scripts/build-docker.sh b/scripts/build-docker.sh index 14a53e4a9722a..b05e488029270 100755 --- a/scripts/build-docker.sh +++ b/scripts/build-docker.sh @@ -15,7 +15,8 @@ VERSION="${VECTOR_VERSION:-"$(cargo vdev version)"}" DATE="${DATE:-"$(date -u +%Y-%m-%d)"}" PLATFORM="${PLATFORM:-}" PUSH="${PUSH:-"true"}" -REPO="${REPO:-"timberio/vector"}" +REPOS="${REPOS:-"timberio/vector"}" +IFS=, read -ra REPO_LIST <<< "$REPOS" IFS=, read -ra REQUESTED_PLATFORMS <<< "$PLATFORM" declare -A SUPPORTED_PLATFORMS=( @@ -50,34 +51,42 @@ evaluate_supported_platforms_for_base() { build() { local BASE="$1" local VERSION="$2" - - local TAG="$REPO:$VERSION-$BASE" local DOCKERFILE="distribution/docker/$BASE/Dockerfile" + local BUILDABLE_PLATFORMS="" + if [ -n "$PLATFORM" ]; then + BUILDABLE_PLATFORMS=$(evaluate_supported_platforms_for_base "$BASE") + fi + # Collect all tags + TAGS=() + for REPO in "${REPO_LIST[@]}"; do + TAGS+=(--tag "$REPO:$VERSION-$BASE") + done + + # Build once with all tags if [ -n "$PLATFORM" ]; then ARGS=() if [[ "$PUSH" == "true" ]]; then ARGS+=(--push) fi - local BUILDABLE_PLATFORMS - BUILDABLE_PLATFORMS=$(evaluate_supported_platforms_for_base "$BASE") - docker buildx build \ --platform="$BUILDABLE_PLATFORMS" \ - --tag "$TAG" \ + "${TAGS[@]}" \ 
target/artifacts \ -f "$DOCKERFILE" \ "${ARGS[@]}" else docker build \ - --tag "$TAG" \ + "${TAGS[@]}" \ target/artifacts \ -f "$DOCKERFILE" - if [[ "$PUSH" == "true" ]]; then + if [[ "$PUSH" == "true" ]]; then + for TAG in "${TAGS[@]}"; do docker push "$TAG" - fi + done + fi fi } @@ -85,7 +94,7 @@ build() { # Build # -echo "Building $REPO:* Docker images" +echo "Building Docker images for $REPOS" if [[ "$CHANNEL" == "release" ]]; then VERSION_EXACT="$VERSION" diff --git a/scripts/check-events b/scripts/check-events index 588ccc8347305..b0335cd7440a4 100755 --- a/scripts/check-events +++ b/scripts/check-events @@ -80,7 +80,7 @@ class Event # Scan for counter names and tags def scan_metrics(block) - block.scan(/ (counter|gauge|histogram)!\((?:\n\s+)?"([^"]+)",(.+?)\)[;\n]/ms) \ + block.scan(/ (counter|gauge|histogram)!\((?:\n\s+)?"([^"]+)",?(.+?)\)[;\n]/ms) \ do |type, name, tags| tags = Hash[tags.scan(/"([^"]+)" => (.+?)(?:,|$)/)] add_metric(type, name, tags) @@ -94,7 +94,7 @@ class Event handle_fields.scan(/^ *([a-z0-9_]+): *(.+?) *= *(.+?),$/m) do |name, type, assignment| self.scan_component_dropped_events(assignment) # This is a _slightly_ different regex than the above, couldn't figure a way to unify them - assignment.match(/register_(counter|gauge|histogram)!\((?:\n\s+)?"([^"]+)"(,.+)?\)/ms) \ + assignment.match(/ (counter|gauge|histogram)!\((?:\n\s+)?"([^"]+)"(,.+)?\)/ms) \ do |type, name, tags| tags = tags || '' diff --git a/scripts/check-style.sh b/scripts/check-style.sh index bf4f18fdf65ad..3754325f9b3ba 100755 --- a/scripts/check-style.sh +++ b/scripts/check-style.sh @@ -33,6 +33,7 @@ for FILE in $(git ls-files); do *ico) continue;; *sig) continue;; *html) continue;; + *desc) continue;; tests/data*) continue;; lib/codecs/tests/data*) continue;; lib/vector-core/tests/data*) continue;; @@ -74,7 +75,7 @@ for FILE in $(git ls-files); do fi # check that the lines don't contain trailing spaces - if grep ' $' "$FILE" > /dev/null; then + if grep -n ' $' "$FILE"; then case "$MODE" in check) echo "File \"$FILE\" contains trailing spaces in some of the lines" diff --git a/scripts/check_changelog_fragments.sh b/scripts/check_changelog_fragments.sh index 0cb2718d4764a..916ebe88dd53b 100755 --- a/scripts/check_changelog_fragments.sh +++ b/scripts/check_changelog_fragments.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is intended to run during CI, however it can be run locally by # committing changelog fragments before executing the script. If the script @@ -17,16 +17,18 @@ if [ ! 
-d "${CHANGELOG_DIR}" ]; then fi # diff-filter=A lists only added files -FRAGMENTS=$(git diff --name-only --diff-filter=A --merge-base origin/master ${CHANGELOG_DIR}) +FRAGMENTS=$(git diff --name-only --diff-filter=A --merge-base "${MERGE_BASE:-origin/master}" ${CHANGELOG_DIR}) if [ -z "$FRAGMENTS" ]; then echo "No changelog fragments detected" - echo "If no changes necessitate user-facing explanations, add the GH label 'no-changelog'" + echo "If no changes necessitate user-facing explanations, add the GH label 'no-changelog'" echo "Otherwise, add changelog fragments to changelog.d/" echo "For details, see 'changelog.d/README.md'" exit 1 fi +[[ "$(wc -l <<< "$FRAGMENTS")" -gt "${MAX_FRAGMENTS:-1000}" ]] && exit 1 + # extract the basename from the file path FRAGMENTS=$(xargs -n1 basename <<< "${FRAGMENTS}") @@ -56,22 +58,17 @@ while IFS= read -r fname; do exit 1 fi - # if specified, this option validates that the contents of the news fragment - # contains a properly formatted authors line at the end of the file, generally - # used for external contributor PRs. - if [[ $1 == "--authors" ]]; then - last=$( tail -n 1 "${CHANGELOG_DIR}/${fname}" ) - if [[ "${last}" == "authors: "*@* ]]; then - echo "invalid fragment contents: author should not be prefixed with @" - exit 1 - elif [[ "${last}" == "authors: "*,* ]]; then - echo "invalid fragment contents: authors should be space delimited, not comma delimited." - exit 1 - elif ! [[ "${last}" =~ ^(authors: .*)$ ]]; then - echo "invalid fragment contents: author option was specified but fragment ${fname} contains no authors." - exit 1 - fi - + # Each fragment should have a properly formatted authors line at the end of the file. + last=$( tail -n 1 "${CHANGELOG_DIR}/${fname}" ) + if [[ "${last}" == "authors: "*@* ]]; then + echo "invalid fragment contents: author should not be prefixed with @" + exit 1 + elif [[ "${last}" == "authors: "*,* ]]; then + echo "invalid fragment contents: authors should be space delimited, not comma delimited." + exit 1 + elif ! [[ "${last}" =~ ^(authors: .*)$ ]]; then + echo "invalid fragment contents: author option was specified but fragment ${fname} contains no authors." + exit 1 fi done <<< "$FRAGMENTS" diff --git a/scripts/ci-free-disk-space.sh b/scripts/ci-free-disk-space.sh new file mode 100755 index 0000000000000..00af4d1b1da25 --- /dev/null +++ b/scripts/ci-free-disk-space.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +# +# From: https://github.com/apache/flink +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# +# The Azure provided machines typically have the following disk allocation: +# Total space: 85GB +# Allocated: 67 GB +# Free: 17 GB +# This script frees up 28 GB of disk space by deleting unneeded packages and +# large directories. 
+# The Flink end to end tests download and generate more than 17 GB of files, +# causing unpredictable behavior and build failures. +# +echo "==============================================================================" +echo "Freeing up disk space on CI system" +echo "==============================================================================" + +echo "Listing 100 largest packages" +dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -n | tail -n 100 +df -h +echo "Removing large packages" +sudo apt-get remove -y '^dotnet-.*' +sudo apt-get remove -y '^llvm-.*' +sudo apt-get remove -y 'php.*' +sudo apt-get remove -y '^mongodb-.*' +sudo apt-get remove -y '^mysql-.*' +sudo apt-get remove -y azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel libgl1-mesa-dri +sudo apt-get autoremove -y +sudo apt-get clean +df -h +echo "Removing large directories" + +sudo rm -rf /usr/share/dotnet/ +sudo rm -rf /usr/local/graalvm/ +sudo rm -rf /usr/local/.ghcup/ +sudo rm -rf /usr/local/share/powershell +sudo rm -rf /usr/local/share/chromium +sudo rm -rf /usr/local/lib/android +# sudo rm -rf /usr/local/lib/node_modules # we use node +df -h diff --git a/scripts/ci-int-e2e-test.sh b/scripts/ci-int-e2e-test.sh deleted file mode 100755 index 3b569db530f5f..0000000000000 --- a/scripts/ci-int-e2e-test.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -# Used in CI to run and stop an integration test and upload the results of it. -# This is useful to allow retrying the integration test at a higher level than -# the nextest and reduce code duplication in the workflow file. - -set -u - -if [[ -z "${CI:-}" ]]; then - echo "Aborted: this script is for use in CI." >&2 - exit 1 -fi - -if [ $# -ne 2 ] -then - echo "usage: $0 [int|e2e] TEST_NAME" - exit 1 -fi - -set -x - -TEST_TYPE=$1 # either "int" or "e2e" -TEST_NAME=$2 - -cargo vdev -v "${TEST_TYPE}" start -a "${TEST_NAME}" -sleep 30 -cargo vdev -v "${TEST_TYPE}" test --retries 2 -a "${TEST_NAME}" -RET=$? 
-cargo vdev -v "${TEST_TYPE}" stop -a "${TEST_NAME}" -./scripts/upload-test-results.sh -exit $RET diff --git a/scripts/cross/bootstrap-centos.sh b/scripts/cross/bootstrap-centos.sh deleted file mode 100755 index 58b1cbba003c6..0000000000000 --- a/scripts/cross/bootstrap-centos.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh -set -o errexit - -yum install -y unzip centos-release-scl -yum install -y llvm-toolset-7 - -# needed to compile openssl -yum install -y perl-IPC-Cmd - diff --git a/scripts/cross/entrypoint-centos.sh b/scripts/cross/entrypoint-centos.sh deleted file mode 100755 index 8bfe60ab537e7..0000000000000 --- a/scripts/cross/entrypoint-centos.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# shellcheck source=/dev/null -source scl_source enable llvm-toolset-7 -exec "$@" diff --git a/scripts/cross/x86_64-unknown-linux-gnu.dockerfile b/scripts/cross/x86_64-unknown-linux-gnu.dockerfile index 8bddd861d419b..aabbd92c25249 100644 --- a/scripts/cross/x86_64-unknown-linux-gnu.dockerfile +++ b/scripts/cross/x86_64-unknown-linux-gnu.dockerfile @@ -1,6 +1,4 @@ -FROM ghcr.io/cross-rs/x86_64-unknown-linux-gnu:0.2.5-centos +FROM ghcr.io/cross-rs/x86_64-unknown-linux-gnu:0.2.5 -COPY scripts/cross/bootstrap-centos.sh scripts/cross/entrypoint-centos.sh scripts/environment/install-protoc.sh / -RUN /bootstrap-centos.sh && bash /install-protoc.sh - -ENTRYPOINT [ "/entrypoint-centos.sh" ] +COPY scripts/cross/bootstrap-ubuntu.sh scripts/environment/install-protoc.sh / +RUN /bootstrap-ubuntu.sh && bash /install-protoc.sh diff --git a/scripts/e2e/Dockerfile b/scripts/e2e/Dockerfile index f976155d47e8d..3af5d83489c27 100644 --- a/scripts/e2e/Dockerfile +++ b/scripts/e2e/Dockerfile @@ -1,8 +1,7 @@ -ARG RUST_VERSION +ARG RUST_VERSION=1.85 ARG FEATURES -ARG DEBIAN_RELEASE=slim-bookworm -FROM docker.io/rust:${RUST_VERSION}-${DEBIAN_RELEASE} +FROM docker.io/rust:${RUST_VERSION}-slim-bookworm RUN apt-get update && apt-get -y --no-install-recommends install \ build-essential \ @@ -17,18 +16,10 @@ RUN apt-get update && apt-get -y --no-install-recommends install \ libxxhash-dev \ unzip \ zlib1g-dev \ - zlib1g + zlib1g \ + mold -RUN git clone https://github.com/rui314/mold.git \ - && mkdir mold/build \ - && cd mold/build \ - && git checkout v2.0.0 \ - && ../install-build-deps.sh \ - && cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=c++ .. \ - && cmake --build . -j $(nproc) \ - && cmake --install . 
- -RUN rustup run "${RUST_VERSION}" cargo install cargo-nextest --version 0.9.64 --locked +RUN cargo install cargo-nextest --version 0.9.95 --locked COPY scripts/environment/install-protoc.sh / COPY tests/data/ca/certs /certs @@ -41,6 +32,6 @@ ARG FEATURES RUN --mount=type=cache,target=/vector/target \ --mount=type=cache,target=/usr/local/cargo/registry \ --mount=type=cache,target=/usr/local/cargo/git \ - /usr/local/bin/mold -run cargo build --tests --lib --bin vector \ + /usr/bin/mold -run cargo build --tests --lib --bin vector \ --no-default-features --features $FEATURES && \ cp target/debug/vector /usr/bin/vector diff --git a/scripts/e2e/datadog-logs/compose.yaml b/scripts/e2e/datadog-logs/compose.yaml index 4ca9633af2e4e..ae004588f46f3 100644 --- a/scripts/e2e/datadog-logs/compose.yaml +++ b/scripts/e2e/datadog-logs/compose.yaml @@ -1,4 +1,4 @@ -version: '3' +version: '3.8' services: # Generates random log data for consumption by the custom Agent check @@ -17,7 +17,9 @@ services: - "-o" - "/var/log/a_custom.log" volumes: - - log_path:/var/log/ + - type: volume + source: log_path + target: /var/log/ # Tails a custom log created by `log_generator` and sends log data to # the `fakeintake-agent` service @@ -32,12 +34,22 @@ services: - DD_ENABLE_PAYLOADS_SERVICE_CHECKS=false - DD_CONTAINER_EXCLUDE="name:.*" volumes: - # The Agent config file - - ${PWD}/tests/data/e2e/datadog/logs/agent_only.yaml:/etc/datadog-agent/datadog.yaml - # The custom logs check - - ${PWD}/tests/data/e2e/datadog/logs/logs.conf.d:/conf.d:ro - # The custom log to tail, created by the `log_generator` service - - log_path:/var/log/ + # The Agent config file + - type: bind + source: ../../../tests/data/e2e/datadog/logs/agent_only.yaml + target: /etc/datadog-agent/datadog.yaml + read_only: true + + # The custom logs check + - type: bind + source: ../../../tests/data/e2e/datadog/logs/logs.conf.d + target: /conf.d + read_only: true + + # The custom log to tail, created by the `log_generator` service + - type: volume + source: log_path + target: /var/log/ # Tails a custom log created by `log_generator` and sends log data to # the `vector` service @@ -52,12 +64,22 @@ services: - DD_ENABLE_PAYLOADS_SERVICE_CHECKS=false - DD_CONTAINER_EXCLUDE="name:.*" volumes: - # The Agent config file - - ${PWD}/tests/data/e2e/datadog/logs/agent_vector.yaml:/etc/datadog-agent/datadog.yaml - # The custom logs check - - ${PWD}/tests/data/e2e/datadog/logs/logs.conf.d:/conf.d:ro - # The custom log to tail, created by the `log_generator` service - - log_path:/var/log/ + # The Agent config file + - type: bind + source: ../../../tests/data/e2e/datadog/logs/agent_vector.yaml + target: /etc/datadog-agent/datadog.yaml + read_only: true + + # The custom logs check + - type: bind + source: ../../../tests/data/e2e/datadog/logs/logs.conf.d + target: /conf.d + read_only: true + + # The custom log to tail, created by the `log_generator` service + - type: volume + source: log_path + target: /var/log/ # Receives log data from the `datadog-agent-vector` service and sends # to the `fakeintake-vector` service. @@ -65,7 +87,7 @@ services: depends_on: - fakeintake-vector build: - context: ${PWD} + context: ../../.. # re-using the integration test runner image since it already has # compiled vector on it. image: ${CONFIG_VECTOR_IMAGE} @@ -79,17 +101,21 @@ services: - "-c" - "/home/vector/tests/data/e2e/datadog/logs/vector.toml" volumes: - - ${PWD}:/home/vector + - type: bind + source: ../../.. 
+ target: /home/vector # Receives log data from the `datadog-agent` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. fakeintake-agent: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:ved764626 # Receives log data from the `datadog-agent-vector` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. fakeintake-vector: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:ved764626 networks: default: diff --git a/scripts/e2e/datadog-metrics/compose.yaml b/scripts/e2e/datadog-metrics/compose.yaml index d0860b83b71e3..80523ac8144f4 100644 --- a/scripts/e2e/datadog-metrics/compose.yaml +++ b/scripts/e2e/datadog-metrics/compose.yaml @@ -27,8 +27,8 @@ services: - DD_API_KEY=${TEST_DATADOG_API_KEY:?TEST_DATADOG_API_KEY required} - DD_HOSTNAME=datadog-agent volumes: - # The Agent config file - - ${PWD}/tests/data/e2e/datadog/metrics/agent_only.yaml:/etc/datadog-agent/datadog.yaml + # The Agent config file + - ../../../tests/data/e2e/datadog/metrics/agent_only.yaml:/etc/datadog-agent/datadog.yaml # Sends metric data received from the Emitter to the `vector` service datadog-agent-vector: @@ -39,8 +39,8 @@ services: - DD_API_KEY=${TEST_DATADOG_API_KEY:?TEST_DATADOG_API_KEY required} - DD_HOSTNAME=datadog-agent-vector volumes: - # The Agent config file - - ${PWD}/tests/data/e2e/datadog/metrics/agent_vector.yaml:/etc/datadog-agent/datadog.yaml + # The Agent config file + - ../../../tests/data/e2e/datadog/metrics/agent_vector.yaml:/etc/datadog-agent/datadog.yaml # Receives metric data from the `datadog-agent-vector` service and sends # to the `fakeintake-vector` service. @@ -48,7 +48,7 @@ services: depends_on: - fakeintake-vector build: - context: ${PWD} + context: ../../.. # re-using the integration test runner image since it already has # compiled vector on it. image: ${CONFIG_VECTOR_IMAGE} @@ -62,17 +62,19 @@ services: - "-c" - "/home/vector/tests/data/e2e/datadog/metrics/vector.toml" volumes: - - ${PWD}:/home/vector + - ../../..:/home/vector # Receives metric data from the `datadog-agent` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. fakeintake-agent: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:v77a06f2b # Receives metric data from the `datadog-agent-vector` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. 
fakeintake-vector: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:v77a06f2b networks: default: diff --git a/scripts/e2e/datadog-metrics/test.yaml b/scripts/e2e/datadog-metrics/test.yaml index b9025d08ebf3d..d1b814fd02cb8 100644 --- a/scripts/e2e/datadog-metrics/test.yaml +++ b/scripts/e2e/datadog-metrics/test.yaml @@ -24,4 +24,5 @@ paths: - "src/sinks/datadog/metrics/**" - "src/sinks/util/**" - "scripts/integration/datadog-e2e/metrics/**" +- "tests/e2e/datadog/metrics/**" - "tests/data/e2e/datadog/metrics/**" diff --git a/scripts/e2e/opentelemetry-logs/README.md b/scripts/e2e/opentelemetry-logs/README.md new file mode 100644 index 0000000000000..af55f71d0f3d7 --- /dev/null +++ b/scripts/e2e/opentelemetry-logs/README.md @@ -0,0 +1,25 @@ +# OpenTelemetry Vector E2E Log Pipeline Test + +This end-to-end (E2E) test validates that log events generated in a container are correctly ingested by Vector, processed, and forwarded to an OpenTelemetry Collector sink, where they are exported to a file for verification. + +## How this test works + +- **Orchestrates all required services:** + - **Log generator**: Emits fake OTLP logs. + - **Vector**: Receives, transforms, and forwards logs to the OTEL sink and a file. + - **OTEL Collector Source**: Forwards or processes logs upstream. + - **OTEL Collector Sink**: Receives logs from Vector and writes them to a file. +- **Mounts volumes** to share configuration and output files between containers and the host. +- **Exposes ports** for OTLP HTTP ingestion and for accessing Vector/collector APIs if needed. + +## How to Run + +```shell +# from the repo root directory +./scripts/int-e2e-test.sh e2e opentelemetry-logs +``` + +## Notes + +- The test ensures true end-to-end delivery and format compliance for OTLP logs through Vector and the OpenTelemetry Collector stack. +- Adjust the log generator, remap logic, or assertions as needed for your use case. 
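For orientation, here is a minimal sketch of what the sink collector's configuration (`collector-sink.yaml`, referenced in the compose file below but not shown in this diff) might contain, assuming the standard `otlp` receiver and the `file` exporter from the contrib distribution; the actual file may differ:

```yaml
receivers:
  otlp:
    protocols:
      http:
        endpoint: 0.0.0.0:5318   # matches OTEL_COLLECTOR_SINK_HTTP_PORT

exporters:
  file:
    path: /output/collector-sink.log   # illustrative output path

service:
  pipelines:
    logs:
      receivers: [otlp]
      exporters: [file]
```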
diff --git a/scripts/e2e/opentelemetry-logs/compose.yaml b/scripts/e2e/opentelemetry-logs/compose.yaml new file mode 100644 index 0000000000000..c82b2e8a569d8 --- /dev/null +++ b/scripts/e2e/opentelemetry-logs/compose.yaml @@ -0,0 +1,70 @@ +name: opentelemetry-vector-e2e +services: + otel-collector-source: + container_name: otel-collector-source + image: otel/opentelemetry-collector-contrib:${CONFIG_COLLECTOR_VERSION:-latest} + init: true + volumes: + - type: bind + source: ../../../tests/data/e2e/opentelemetry/logs/collector-source.yaml + target: /etc/otelcol-contrib/config.yaml + read_only: true + ports: + - "${OTEL_COLLECTOR_SOURCE_GRPC_PORT:-4317}:4317" + - "${OTEL_COLLECTOR_SOURCE_HTTP_PORT:-4318}:4318" + command: [ "--config=/etc/otelcol-contrib/config.yaml" ] + + logs-generator: + container_name: logs-generator + build: + context: ./generator + init: true + depends_on: + - otel-collector-source + - vector + - otel-collector-sink + volumes: + - type: bind + source: ./generator + target: /generator + environment: + - PYTHONUNBUFFERED=1 + command: [ "python", "/generator/logs_generator.py", "-n", "100" ] + + otel-collector-sink: + container_name: otel-collector-sink + image: otel/opentelemetry-collector-contrib:${CONFIG_COLLECTOR_VERSION:-latest} + init: true + volumes: + - type: bind + source: ../../../tests/data/e2e/opentelemetry/logs/collector-sink.yaml + target: /etc/otelcol-contrib/config.yaml + read_only: true + - type: bind + source: ../../../tests/data/e2e/opentelemetry/logs/output + target: /output + ports: + - "${OTEL_COLLECTOR_SINK_HTTP_PORT:-5318}:5318" + + vector: + container_name: vector-otel-logs-e2e + build: + context: ../../../ + dockerfile: ./scripts/e2e/Dockerfile + args: + FEATURES: e2e-tests-opentelemetry + RUST_VERSION: ${RUST_VERSION:-1.88} + init: true + volumes: + - type: bind + source: ../../../tests/data/e2e/opentelemetry/logs/vector.yaml + target: /etc/vector/vector.yaml + read_only: true + - type: bind + source: ../../../tests/data/e2e/opentelemetry/logs/output + target: /output + environment: + - VECTOR_LOG=${VECTOR_LOG:-info} + - FEATURES=e2e-tests-opentelemetry + - OTEL_E2E_OUTPUT_PATH + command: [ "vector", "-c", "/etc/vector/vector.yaml" ] diff --git a/scripts/e2e/opentelemetry-logs/generator/Dockerfile b/scripts/e2e/opentelemetry-logs/generator/Dockerfile new file mode 100644 index 0000000000000..349b4a8a669aa --- /dev/null +++ b/scripts/e2e/opentelemetry-logs/generator/Dockerfile @@ -0,0 +1,4 @@ +FROM python:3.11-alpine +WORKDIR /generator +COPY requirements.txt logs_generator.py ./ +RUN pip install --no-cache-dir -r requirements.txt diff --git a/scripts/e2e/opentelemetry-logs/generator/logs_generator.py b/scripts/e2e/opentelemetry-logs/generator/logs_generator.py new file mode 100755 index 0000000000000..a5266485dc64a --- /dev/null +++ b/scripts/e2e/opentelemetry-logs/generator/logs_generator.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python3 + +import argparse +import json +import random +import time +import uuid + +import requests + +SEVERITIES = ["DEBUG", "INFO", "WARN", "ERROR"] +PATHS = ["/", "/login", "/api/data", "/metrics", "/health"] + +def generate_log(endpoint: str, count: int) -> dict: + now_nanos = time.time_ns() + timestamp = time.strftime("%Y-%m-%dT%H:%M:%S%z") + severity = random.choice(SEVERITIES) + log_id = str(uuid.uuid4())[:8] + + log_data = { + "resourceLogs": [ + { + "resource": { + "attributes": [ + {"key": "service.name", "value": {"stringValue": "opentelemetry-logs"}} + ] + }, + "scopeLogs": [ + { + "scope": {"name": 
"log-generator"}, + "logRecords": [ + { + "timeUnixNano": now_nanos, + "severityText": severity, + "body": {"stringValue": f"[{log_id}] {severity} log {count} at {timestamp}"}, + "attributes": [ + {"key": "count", "value": {"intValue": count}} + ] + } + ] + } + ] + } + ] + } + + try: + response = requests.post( + endpoint, + data=json.dumps(log_data), + headers={"Content-Type": "application/json"}, + timeout=2 + ) + if response.status_code == 200: + return { + "success": True, + "message": f"Log {count} sent successfully", + "log_id": log_id, + "status_code": response.status_code + } + else: + return { + "success": False, + "message": f"HTTP {response.status_code}: {response.text.strip() or '[empty]'}", + "log_id": log_id, + "status code": response.status_code, + } + + except requests.exceptions.RequestException as e: + return { + "success": False, + "message": f"RequestException: {str(e)}", + "log_id": log_id, + } + + +def non_negative_float(value): + f = float(value) + if f < 0: + raise argparse.ArgumentTypeError(f"Interval must be non-negative, got {value}") + return f + + +def main(): + parser = argparse.ArgumentParser(description="Generate OTLP logs periodically.") + parser.add_argument( + "--interval", + type=non_negative_float, + help="Seconds between log sends (non-negative, optional)" + ) + parser.add_argument("-n", type=int, default=0, help="Total logs to send (0 or negative = infinite)") + parser.add_argument("--host", type=str, default="otel-collector-source", help="Host for the OTLP collector") + parser.add_argument("--port", type=int, default=4318, help="Port for OTLP HTTP logs") + parser.add_argument("--path", type=str, default="/v1/logs", help="OTLP HTTP logs path") + + args = parser.parse_args() + endpoint = f"http://{args.host}:{args.port}{args.path}" + + print(f"Starting log generator → {endpoint}") + + count = 0 + sent = 0 + failed = 0 + + while True: + result = generate_log(endpoint, count) + count += 1 + if result["success"]: + print(f"✅ Sent log {count} (ID: {result['log_id']})") + sent += 1 + else: + print(f"❌ Failed log {count} (ID: {result['log_id']}): {result['message']}") + failed += 1 + + if 0 < args.n <= count: + break + + if args.interval is not None: + time.sleep(args.interval) + + print(f"\n📊 Finished: Sent={sent}, Failed={failed}") + + +if __name__ == "__main__": + main() diff --git a/scripts/e2e/opentelemetry-logs/generator/requirements.txt b/scripts/e2e/opentelemetry-logs/generator/requirements.txt new file mode 100644 index 0000000000000..f2293605cf1b0 --- /dev/null +++ b/scripts/e2e/opentelemetry-logs/generator/requirements.txt @@ -0,0 +1 @@ +requests diff --git a/scripts/e2e/opentelemetry-logs/test.yaml b/scripts/e2e/opentelemetry-logs/test.yaml new file mode 100644 index 0000000000000..70e3862ae3347 --- /dev/null +++ b/scripts/e2e/opentelemetry-logs/test.yaml @@ -0,0 +1,24 @@ +features: +- e2e-tests-opentelemetry + +test: "e2e" + +test_filter: "opentelemetry::logs::" + +runner: + env: + OTEL_COLLECTOR_SOURCE_GRPC_PORT: '4317' + OTEL_COLLECTOR_SOURCE_HTTP_PORT: '4318' + OTEL_COLLECTOR_SINK_HTTP_PORT: '5318' + +matrix: + # Determines which `otel/opentelemetry-collector-contrib` version to use + collector_version: [ 'latest' ] + +# Only trigger this integration test if relevant OTEL source/sink files change +paths: + - "src/sources/opentelemetry/**" + - "src/sinks/opentelemetry/**" + - "src/internal_events/opentelemetry_*" + - "tests/e2e/opentelemetry/logs/**" + - "scripts/e2e/opentelemetry-logs/**" diff --git a/scripts/ensure-wasm-pack-installed.sh 
b/scripts/ensure-wasm-pack-installed.sh deleted file mode 100644 index 2e4b8a9e7c9ef..0000000000000 --- a/scripts/ensure-wasm-pack-installed.sh +++ /dev/null @@ -1,13 +0,0 @@ -#! /usr/bin/env bash - -if [[ "$(wasm-pack --version)" != "wasm-pack 0.10.3" ]] ; then - echo "wasm-pack version 0.10.3 is not installed" - # We are using the version from git due to the bug: https://github.com/vectordotdev/vector/pull/16060#issuecomment-1428429602 - echo "running cargo install --git https://github.com/rustwasm/wasm-pack.git --rev e3582b7 wasm-pack" - cargo install --git https://github.com/rustwasm/wasm-pack.git --rev e3582b7 wasm-pack -else - echo "wasm-pack version 0.10.3 is installed already" -fi - -brew install llvm -export PATH="/opt/homebrew/opt/llvm/bin:$PATH" diff --git a/scripts/ensure-wasm-target-installed.sh b/scripts/ensure-wasm-target-installed.sh index e80b06c4d3f1c..da89901163941 100644 --- a/scripts/ensure-wasm-target-installed.sh +++ b/scripts/ensure-wasm-target-installed.sh @@ -1,4 +1,4 @@ -#! /usr/bin/env bash +#!/usr/bin/env bash if [[ "$(rustup target list --installed | grep wasm32-unknown-unknown)" != "wasm32-unknown-unknown" ]] ; then echo "wasm32-unknown-unknown target is not installed" diff --git a/scripts/environment/Dockerfile b/scripts/environment/Dockerfile index ac1b90a36dcc8..08c2c1692e645 100644 --- a/scripts/environment/Dockerfile +++ b/scripts/environment/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/ubuntu:20.04 AS ENVIRONMENT +FROM docker.io/ubuntu:24.04 ENV DEBIAN_FRONTEND=noninteractive \ TZ='America/New York' \ PATH=/root/.cargo/bin:/root/.local/bin/:$PATH \ @@ -12,7 +12,7 @@ RUN echo $TZ > /etc/timezone # Setup the env COPY scripts/environment/*.sh /git/vectordotdev/vector/scripts/environment/ -RUN cd git/vectordotdev/vector && ./scripts/environment/bootstrap-ubuntu-20.04.sh +RUN cd git/vectordotdev/vector && ./scripts/environment/bootstrap-ubuntu-24.04.sh # Setup the toolchain WORKDIR /git/vectordotdev/vector diff --git a/scripts/environment/binstall.sh b/scripts/environment/binstall.sh new file mode 100755 index 0000000000000..def6e9eb7b3cc --- /dev/null +++ b/scripts/environment/binstall.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env bash + +set -eux +set -o pipefail + +BINSTALL_VERSION="v1.14.1" +BINSTALL_SHA256SUM_X86_64_LINUX="e1d1231720e6ed497a4b0f8881b08f5df9ce1a938fb3ae6f2444e95eb601fe99" +BINSTALL_SHA256SUM_AARCH64_LINUX="17d69bcc07a0e38c912e7f596ed71b1f5f59dc8980da59890c5bc86c07e8506a" +BINSTALL_SHA256SUM_ARMV7_LINUX="e4ba720023e02b071aa805ae62412e94741c1bb0e0a2bb2b35896fec3d140128" +BINSTALL_SHA256SUM_AARCH64_DARWIN="07d46d31fb68ac10b906c5d39d611ded7787966f4ed15c598cb6175b45a2b069" +BINSTALL_SHA256SUM_X86_64_DARWIN="3de381bdcca08c418dc790d2a283711894a0577c6e55bba0d4e6cb8b0378b36" + +pushd "$(mktemp -d)" + +base_url="https://github.com/cargo-bins/cargo-binstall/releases/download/${BINSTALL_VERSION}/cargo-binstall" + +download() { + curl --retry 3 --proto '=https' --tlsv1.2 -fsSL "$@" +} + +os="$(uname -s)" +machine="$(uname -m)" + +if [ "$os" = "Darwin" ]; then + if [ "$machine" = "arm64" ]; then + url="${base_url}-aarch64-apple-darwin.zip" + download_sha256sum="${BINSTALL_SHA256SUM_AARCH64_DARWIN}" + elif [ "$machine" = "x86_64" ]; then + url="${base_url}-x86_64-apple-darwin.zip" + download_sha256sum="${BINSTALL_SHA256SUM_X86_64_DARWIN}" + else + echo "Unsupported OS ${os} machine ${machine}" + popd + exit 1 + fi + + download -o output.zip "$url" +elif [ "$os" = "Linux" ]; then + if [ "$machine" = "armv7l" ]; then + 
target="armv7-unknown-linux-musleabihf" + download_sha256sum="${BINSTALL_SHA256SUM_ARMV7_LINUX}" + elif [ "$machine" = "aarch64" ]; then + target="${machine}-unknown-linux-musl" + download_sha256sum="${BINSTALL_SHA256SUM_AARCH64_LINUX}" + elif [ "$machine" = "x86_64" ]; then + target="${machine}-unknown-linux-musl" + download_sha256sum="${BINSTALL_SHA256SUM_X86_64_LINUX}" + else + echo "Unsupported OS ${os} machine ${machine}" + popd + exit 1 + fi + + url="${base_url}-${target}.tgz" + + download -o output.tgz "$url" +# elif [ "${OS-}" = "Windows_NT" ]; then +# target="${machine}-pc-windows-msvc" +# url="${base_url}-${target}.zip" +# download -o output.zip "$url" +else + echo "Unsupported OS ${os}" + popd + exit 1 +fi + +echo "${download_sha256sum} $(echo output.*)" | sha256sum --check + +case "$(echo output.*)" in + *.zip) unzip output.* ;; + *.tgz) tar -xvzf output.* ;; + *) >&2 echo "output.* not found"; exit 1 ;; +esac + +./cargo-binstall --self-install || ./cargo-binstall -y --force cargo-binstall diff --git a/scripts/environment/bootstrap-macos-10.sh b/scripts/environment/bootstrap-macos.sh similarity index 57% rename from scripts/environment/bootstrap-macos-10.sh rename to scripts/environment/bootstrap-macos.sh index 98fb8910dad12..b1ddf5c58c489 100755 --- a/scripts/environment/bootstrap-macos-10.sh +++ b/scripts/environment/bootstrap-macos.sh @@ -1,16 +1,19 @@ -#! /usr/bin/env bash +#!/usr/bin/env bash set -e -o verbose -# https://github.com/Homebrew/homebrew-cask/issues/150323 -unset HOMEBREW_NO_INSTALL_FROM_API - brew update # `brew install` attempts to upgrade python as a dependency but fails # https://github.com/actions/setup-python/issues/577 brew list -1 | grep python | while read -r formula; do brew unlink "$formula"; brew link --overwrite "$formula"; done -brew install ruby@2.7 coreutils cue-lang/tap/cue protobuf +brew install ruby@3 coreutils cue-lang/tap/cue protobuf + +# rustup-init (renamed to rustup) is already installed in GHA, but seems to be lacking the rustup binary +# Reinstalling seems to fix it +# TODO(jszwedko): It's possible GHA just needs to update its images and this won't be needed in the +# future +brew reinstall rustup gem install bundler diff --git a/scripts/environment/bootstrap-ubuntu-20.04.sh b/scripts/environment/bootstrap-ubuntu-24.04.sh similarity index 70% rename from scripts/environment/bootstrap-ubuntu-20.04.sh rename to scripts/environment/bootstrap-ubuntu-24.04.sh index be6182e6cacd5..664efef7b68ab 100755 --- a/scripts/environment/bootstrap-ubuntu-20.04.sh +++ b/scripts/environment/bootstrap-ubuntu-24.04.sh @@ -1,4 +1,7 @@ -#! /usr/bin/env bash +#!/usr/bin/env bash +# Refer to https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2404-Readme.md +# for all runner information such as OS version and installed software. 
+ set -e -o verbose if [ -n "$RUSTFLAGS" ] @@ -20,11 +23,8 @@ apt-get install --yes \ apt-utils \ apt-transport-https -apt-get upgrade --yes - # Deps apt-get install --yes --no-install-recommends \ - awscli \ build-essential \ ca-certificates \ cmake \ @@ -41,7 +41,6 @@ apt-get install --yes --no-install-recommends \ llvm \ locales \ pkg-config \ - python3-pip \ rename \ rpm \ ruby-bundler \ @@ -53,38 +52,25 @@ apt-get install --yes --no-install-recommends \ # Cue TEMP=$(mktemp -d) curl \ - -L https://github.com/cue-lang/cue/releases/download/v0.7.0/cue_v0.7.0_linux_amd64.tar.gz \ - -o "${TEMP}/cue_v0.7.0_linux_amd64.tar.gz" + -L https://github.com/cue-lang/cue/releases/download/v0.10.0/cue_v0.10.0_linux_amd64.tar.gz \ + -o "${TEMP}/cue_v0.10.0_linux_amd64.tar.gz" tar \ - -xvf "${TEMP}/cue_v0.7.0_linux_amd64.tar.gz" \ + -xvf "${TEMP}/cue_v0.10.0_linux_amd64.tar.gz" \ -C "${TEMP}" cp "${TEMP}/cue" /usr/bin/cue rm -rf "$TEMP" -# Grease -# Grease is used for the `make release-github` task. -TEMP=$(mktemp -d) -curl \ - -L https://github.com/vectordotdev/grease/releases/download/v1.0.1/grease-1.0.1-linux-amd64.tar.gz \ - -o "${TEMP}/grease-1.0.1-linux-amd64.tar.gz" -tar \ - -xvf "${TEMP}/grease-1.0.1-linux-amd64.tar.gz" \ - -C "${TEMP}" -cp "${TEMP}/grease/bin/grease" /usr/bin/grease -rm -rf "$TEMP" - # Locales locale-gen en_US.UTF-8 dpkg-reconfigure locales if ! command -v rustup ; then - # Rust/Cargo should already be installed on both GH Actions-provided Ubuntu 20.04 images _and_ - # by our own Ubuntu 20.04 images + # https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2404-Readme.md#rust-tools curl https://sh.rustup.rs -sSf | sh -s -- -y --profile minimal fi -# Rust/Cargo should already be installed on both GH Actions-provided Ubuntu 20.04 images _and_ -# by our own Ubuntu 20.04 images, so this is really just make sure the path is configured. +# Rust/Cargo should already be installed on both GH Actions-provided Ubuntu 24.04 images _and_ +# by our own Ubuntu 24.04 images, so this is really just make sure the path is configured. if [ -n "${CI-}" ] ; then echo "${HOME}/.cargo/bin" >> "${GITHUB_PATH}" # we often run into OOM issues in CI due to the low memory vs. CPU ratio on c5 instances @@ -108,12 +94,6 @@ if ! [ -x "$(command -v docker)" ]; then usermod --append --groups docker ubuntu || true fi -# docker-compose -if ! [ -x "$(command -v docker-compose)" ]; then - curl -fsSL "https://github.com/docker/compose/releases/download/v2.20.3/docker-compose-linux-$(uname -m)" -o /usr/local/bin/docker-compose - chmod +x /usr/local/bin/docker-compose -fi - bash scripts/environment/install-protoc.sh # Node.js, npm and yarn. @@ -133,30 +113,6 @@ if ! [ -x "$(command -v node)" ]; then corepack enable fi -# Hugo (static site generator). -# Hugo is used to build the website content. 
-# Note: the installed version should match the version specified in 'netlify.toml' -TEMP=$(mktemp -d) -curl \ - -L https://github.com/gohugoio/hugo/releases/download/v0.84.0/hugo_extended_0.84.0_Linux-64bit.tar.gz \ - -o "${TEMP}/hugo_extended_0.84.0_Linux-64bit.tar.gz" -tar \ - -xvf "${TEMP}/hugo_extended_0.84.0_Linux-64bit.tar.gz" \ - -C "${TEMP}" -cp "${TEMP}/hugo" /usr/bin/hugo -rm -rf "$TEMP" - -# htmltest (HTML checker for the website content) -TEMP=$(mktemp -d) -curl \ - -L https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz \ - -o "${TEMP}/htmltest_0.17.0_linux_amd64.tar.gz" -tar \ - -xvf "${TEMP}/htmltest_0.17.0_linux_amd64.tar.gz" \ - -C "${TEMP}" -cp "${TEMP}/htmltest" /usr/bin/htmltest -rm -rf "$TEMP" - # Apt cleanup apt-get clean diff --git a/scripts/environment/bootstrap-windows-2019.ps1 b/scripts/environment/bootstrap-windows-2022.ps1 similarity index 73% rename from scripts/environment/bootstrap-windows-2019.ps1 rename to scripts/environment/bootstrap-windows-2022.ps1 index 2a220117e5f2f..7af1123b2df35 100644 --- a/scripts/environment/bootstrap-windows-2019.ps1 +++ b/scripts/environment/bootstrap-windows-2022.ps1 @@ -4,20 +4,23 @@ echo "$HOME\.cargo\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Ap # We have to limit our Cargo build concurrency otherwise we can overwhelm the machine during things # like running tests, where it will try and build many binaries at once, consuming all of the memory # and making things go veryyyyyyy slow. -$N_JOBS=(((Get-CimInstance -ClassName Win32_ComputerSystem).NumberOfLogicalProcessors / 2),1 | Measure-Object -Max).Maximum +$N_JOBS = (((Get-CimInstance -ClassName Win32_ComputerSystem).NumberOfLogicalProcessors / 2), 1 | Measure-Object -Max).Maximum echo "CARGO_BUILD_JOBS=$N_JOBS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append -if ($env:RELEASE_BUILDER -ne "true") { +if ($env:RELEASE_BUILDER -ne "true") +{ # Ensure we have cargo-next test installed. - rustup run stable cargo install cargo-nextest --version 0.9.64 --locked + rustup run stable cargo install cargo-nextest --version 0.9.95 --locked } -# Install some required dependencies / tools. +# Enable retries to avoid transient network issues. +$env:NUGET_ENABLE_ENHANCED_HTTP_RETRY = "true" + choco install make choco install protoc # Set a specific override path for libclang. -echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append +echo "LIBCLANG_PATH=$( (gcm clang).source -replace "clang.exe" )" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append # Explicitly instruct the `openssl` crate to use Strawberry Perl instead of the Perl bundled with # git-bash, since the GHA Windows 2022 image has a poorly arranged PATH. diff --git a/scripts/environment/entrypoint.sh b/scripts/environment/entrypoint.sh index ae6b55eb1ce55..b97484e991fc6 100755 --- a/scripts/environment/entrypoint.sh +++ b/scripts/environment/entrypoint.sh @@ -1,7 +1,16 @@ -#! 
/usr/bin/env bash +#!/usr/bin/env bash # set HOSTNAME to container id for `cross` -HOSTNAME="$(head -1 /proc/self/cgroup|cut -d/ -f3)" -export HOSTNAME +if [ -f /.docker-container-id ]; then + HOSTNAME="$(cat /.docker-container-id)" + export HOSTNAME +fi + +if [ -z "$HOSTNAME" ]; then + echo "Failed to properly set HOSTNAME, cross may not work" + # Fallback if everything else fails + HOSTNAME="vector-environment" + export HOSTNAME +fi exec "$@" diff --git a/scripts/environment/install-protoc.sh b/scripts/environment/install-protoc.sh index 3e65820813da9..dc0eaa09037a2 100755 --- a/scripts/environment/install-protoc.sh +++ b/scripts/environment/install-protoc.sh @@ -1,4 +1,4 @@ -#! /usr/bin/env bash +#!/usr/bin/env bash set -o errexit -o verbose # A parameter can be optionally passed to this script to specify an alternative @@ -60,7 +60,7 @@ install_protoc() { curl -fsSL "${url}" -o "${download_path}" unzip -qq "${download_path}" -d "${TMP_DIR}" - mv --force --verbose "${TMP_DIR}/bin/protoc" "${install_path}" + mv -f -v "${TMP_DIR}/bin/protoc" "${install_path}" } -install_protoc "3.19.5" "${INSTALL_PATH}/protoc" +install_protoc "3.20.2" "${INSTALL_PATH}/protoc" diff --git a/scripts/environment/prepare.sh b/scripts/environment/prepare.sh index 3f73ec9b30844..bf39d7205d690 100755 --- a/scripts/environment/prepare.sh +++ b/scripts/environment/prepare.sh @@ -1,36 +1,171 @@ -#! /usr/bin/env bash -set -e -o verbose +#!/usr/bin/env bash +set -euo pipefail + +ALL_MODULES=( + rustup + cargo-deb + cross + cargo-nextest + cargo-deny + cargo-msrv + dd-rust-license-tool + wasm-pack + markdownlint + datadog-ci + release-flags +) -git config --global --add safe.directory /git/vectordotdev/vector +# By default, install everything +MODULES=( "${ALL_MODULES[@]}" ) -rustup show # causes installation of version from rust-toolchain.toml -rustup default "$(rustup show active-toolchain | awk '{print $1;}')" -if [[ "$(cargo-deb --version)" != "2.0.2" ]] ; then - rustup run stable cargo install cargo-deb --version 2.0.0 --force --locked +# Helper to join an array by comma +join_by() { local IFS="$1"; shift; echo "$*"; } + +# If the INSTALL_MODULES env var is set, override MODULES +if [[ -n "${INSTALL_MODULES:-}" ]]; then + IFS=',' read -r -a MODULES <<< "$INSTALL_MODULES" fi -if [[ "$(cross --version | grep cross)" != "cross 0.2.5" ]] ; then - rustup run stable cargo install cross --version 0.2.5 --force --locked + +# Parse CLI args for --modules or -m +for arg in "$@"; do + case $arg in + --modules=*|-m=*) + val="${arg#*=}" + IFS=',' read -r -a MODULES <<< "$val" + shift + ;; + --help|-h) + cat </dev/null || ./scripts/environment/binstall.sh; then + install=(binstall -y) + else + echo "Failed to install cargo binstall, defaulting to cargo install" + fi + fi +fi +set -e -o verbose +if contains_module cargo-deb; then + if [[ "$(cargo-deb --version 2>/dev/null)" != "2.0.2" ]]; then + rustup run stable cargo "${install[@]}" cargo-deb --version 2.0.2 --force --locked + fi +fi + +if contains_module cross; then + if ! cross --version 2>/dev/null | grep -q '^cross 0.2.5'; then + rustup run stable cargo "${install[@]}" cross --version 0.2.5 --force --locked + fi +fi + +if contains_module cargo-nextest; then + if ! cargo-nextest --version 2>/dev/null | grep -q '^cargo-nextest 0.9.95'; then + rustup run stable cargo "${install[@]}" cargo-nextest --version 0.9.95 --force --locked + fi fi -if ! 
cargo deny --version >& /dev/null ; then - rustup run stable cargo install cargo-deny --force --locked + +if contains_module cargo-deny; then + if ! cargo-deny --version 2>/dev/null | grep -q '^cargo-deny 0.16.2'; then + rustup run stable cargo "${install[@]}" cargo-deny --version 0.16.2 --force --locked + fi fi -if ! dd-rust-license-tool --help >& /dev/null ; then - rustup run stable cargo install dd-rust-license-tool --version 1.0.2 --force --locked + +if contains_module cargo-msrv; then + if ! cargo-msrv --version 2>/dev/null | grep -q '^cargo-msrv 0.18.4'; then + rustup run stable cargo "${install[@]}" cargo-msrv --version 0.18.4 --force --locked + fi fi -# Currently fixing this to version 0.30 since version 0.31 has introduced -# a change that means it only works with versions of node > 10. -# https://github.com/igorshubovych/markdownlint-cli/issues/258 -# ubuntu 20.04 gives us version 10.19. We can revert once we update the -# ci image to install a newer version of node. -sudo npm -g install markdownlint-cli@0.30 -sudo npm -g install @datadog/datadog-ci +if contains_module dd-rust-license-tool; then + if ! dd-rust-license-tool --help &>/dev/null; then + rustup run stable cargo install dd-rust-license-tool --version 1.0.2 --force --locked + fi +fi -pip3 install jsonschema==3.2.0 -pip3 install remarshal==0.11.2 +if contains_module wasm-pack; then + if ! wasm-pack --version | grep -q '^wasm-pack 0.13.1'; then + rustup run stable cargo "${install[@]}" --locked --version 0.13.1 wasm-pack + fi +fi -# Make sure our release build settings are present. -. scripts/environment/release-flags.sh +if contains_module markdownlint; then + if [[ "$(markdownlint --version 2>/dev/null)" != "0.45.0" ]]; then + sudo npm install -g markdownlint-cli@0.45.0 + fi +fi + +if contains_module datadog-ci; then + if [[ "$(datadog-ci version 2>/dev/null)" != "v3.16.0" ]]; then + sudo npm install -g @datadog/datadog-ci@3.16.0 + fi +fi diff --git a/scripts/environment/release-flags.sh b/scripts/environment/release-flags.sh index 5ccbc70f60866..4115517c437e1 100755 --- a/scripts/environment/release-flags.sh +++ b/scripts/environment/release-flags.sh @@ -1,4 +1,4 @@ -#! /usr/bin/env bash +#!/usr/bin/env bash set -e -o verbose # We want to ensure we're building using "full" release capabilities when possible, which diff --git a/scripts/environment/setup-helm.sh b/scripts/environment/setup-helm.sh index 66c919f895c83..a16b731957b3c 100755 --- a/scripts/environment/setup-helm.sh +++ b/scripts/environment/setup-helm.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail KUBERNETES_VERSION="v1.18.6" diff --git a/scripts/generate-component-docs.rb b/scripts/generate-component-docs.rb index b6aa51ce2503f..940abc111bf73 100755 --- a/scripts/generate-component-docs.rb +++ b/scripts/generate-component-docs.rb @@ -515,88 +515,60 @@ def expand_schema_references(root_schema, unexpanded_schema) original_title = unexpanded_schema['title'] original_description = unexpanded_schema['description'] - loop do - expanded = false - - # If the schema has a top level reference, we expand it. - schema_ref = schema['$ref'] - if !schema_ref.nil? - expanded_schema_ref = get_cached_expanded_schema(schema_ref) - if expanded_schema_ref.nil? - @logger.debug "Expanding top-level schema ref of '#{schema_ref}'..." 
- - unexpanded_schema_ref = get_schema_by_name(root_schema, schema_ref) - expanded_schema_ref = expand_schema_references(root_schema, unexpanded_schema_ref) - - @expanded_schema_cache[schema_ref] = expanded_schema_ref - end - - schema.delete('$ref') - schema = nested_merge(expanded_schema_ref, schema) - - expanded = true - end - - # If the schema is an array type and has a reference for its items, we expand that. - items_ref = schema.dig('items', '$ref') - if !items_ref.nil? - expanded_items_schema_ref = expand_schema_references(root_schema, schema['items']) - - schema['items'].delete('$ref') - schema['items'] = nested_merge(expanded_items_schema_ref, schema['items']) - - expanded = true - end + # If the schema has a top level reference, we expand it. + schema_ref = schema['$ref'] + if !schema_ref.nil? + expanded_schema_ref = get_cached_expanded_schema(schema_ref) + if expanded_schema_ref.nil? + @logger.debug "Expanding top-level schema ref of '#{schema_ref}'..." - # If the schema has any object properties, we expand those. - if !schema['properties'].nil? - schema['properties'] = schema['properties'].transform_values { |property_schema| - new_property_schema = expand_schema_references(root_schema, property_schema) - if new_property_schema != property_schema - expanded = true - end + unexpanded_schema_ref = get_schema_by_name(root_schema, schema_ref) + expanded_schema_ref = expand_schema_references(root_schema, unexpanded_schema_ref) - new_property_schema - } + @expanded_schema_cache[schema_ref] = expanded_schema_ref end - # If the schema has any `allOf`/`oneOf` subschemas, we expand those, too. - if !schema['allOf'].nil? - schema['allOf'] = schema['allOf'].map { |subschema| - new_subschema = expand_schema_references(root_schema, subschema) - if new_subschema != subschema - expanded = true - end + schema.delete('$ref') + schema = nested_merge(expanded_schema_ref, schema) + end - new_subschema - } - end + # If the schema is an array type and has a reference for its items, we expand that. + items_ref = schema.dig('items', '$ref') + if !items_ref.nil? + expanded_items_schema_ref = expand_schema_references(root_schema, schema['items']) - if !schema['oneOf'].nil? - schema['oneOf'] = schema['oneOf'].map { |subschema| - new_subschema = expand_schema_references(root_schema, subschema) - if new_subschema != subschema - expanded = true - end + schema['items'].delete('$ref') + schema['items'] = nested_merge(expanded_items_schema_ref, schema['items']) + end - new_subschema - } - end + # If the schema has any object properties, we expand those. + if !schema['properties'].nil? + schema['properties'] = schema['properties'].transform_values { |property_schema| + new_property_schema = expand_schema_references(root_schema, property_schema) + new_property_schema + } + end - if !schema['anyOf'].nil? - schema['anyOf'] = schema['anyOf'].map { |subschema| - new_subschema = expand_schema_references(root_schema, subschema) - if new_subschema != subschema - expanded = true - end + # If the schema has any `allOf`/`oneOf` subschemas, we expand those, too. + if !schema['allOf'].nil? + schema['allOf'] = schema['allOf'].map { |subschema| + new_subschema = expand_schema_references(root_schema, subschema) + new_subschema + } + end - new_subschema - } - end + if !schema['oneOf'].nil? + schema['oneOf'] = schema['oneOf'].map { |subschema| + new_subschema = expand_schema_references(root_schema, subschema) + new_subschema + } + end - if !expanded - break - end + if !schema['anyOf'].nil? 
+    schema['anyOf'] = schema['anyOf'].map { |subschema|
+      new_subschema = expand_schema_references(root_schema, subschema)
+      new_subschema
+    }
   end
 
   # If the original schema had either a title or description, we forcefully reset both of them back
@@ -744,9 +716,23 @@ def resolve_schema(root_schema, schema)
   #
   # We intentially set no actual definition for these types, relying on the documentation generation
   # process to provide the actual details. We only need to specify the custom type name.
+  #
+  # To handle u8 types as ASCII characters rather than their uint representation between 0 and 255, we
+  # added special handling for these exact values. This means
+  # `#[configurable(metadata(docs::type_override = "ascii_char"))]` should only be used consciously
+  # for the Rust u8 type. See lib/codecs/src/encoding/format/csv.rs for an example and
+  # https://github.com/vectordotdev/vector/pull/20498
   type_override = get_schema_metadata(schema, 'docs::type_override')
   if !type_override.nil?
-    resolved = { 'type' => { type_override.to_s => {} } }
+    if type_override == 'ascii_char'
+      if !schema['default'].nil?
+        resolved = { 'type' => { type_override.to_s => { 'default' => schema['default'].chr } } }
+      else
+        resolved = { 'type' => { type_override.to_s => {} } }
+      end
+    else
+      resolved = { 'type' => { type_override.to_s => {} } }
+    end
     description = get_rendered_description_from_schema(schema)
     resolved['description'] = description unless description.empty?
     return resolved
@@ -784,6 +770,17 @@ def resolve_schema(root_schema, schema)
     end
   end
 
+  # Required for global option configuration.
+  is_common_field = get_schema_metadata(schema, 'docs::common')
+  if !is_common_field.nil?
+    resolved['common'] = is_common_field
+  end
+
+  is_required_field = get_schema_metadata(schema, 'docs::required')
+  if !is_required_field.nil?
+    resolved['required'] = is_required_field
+  end
+
   # Reconcile the resolve schema, which essentially gives us a chance to, once the schema is
   # entirely resolved, check it for logical inconsistencies, fix up anything that we reasonably can,
   # and so on.
@@ -1645,7 +1642,7 @@ def get_rendered_description_from_schema(schema)
   description.strip
 end
 
-def render_and_import_schema(root_schema, schema_name, friendly_name, config_map_path, cue_relative_path)
+def unwrap_resolved_schema(root_schema, schema_name, friendly_name)
   @logger.info "[*] Resolving schema definition for #{friendly_name}..."
 
   # Try and resolve the schema, unwrapping it as an object schema which is a requirement/expectation
@@ -1659,7 +1656,10 @@ def render_and_import_schema(root_schema, schema_name, friendly_name, config_map
     exit 1
   end
 
-  unwrapped_resolved_schema = sort_hash_nested(unwrapped_resolved_schema)
+  return sort_hash_nested(unwrapped_resolved_schema)
+end
+
+def render_and_import_schema(unwrapped_resolved_schema, friendly_name, config_map_path, cue_relative_path)
 
   # Set up the appropriate structure for the value based on the configuration map path. It defines
   # the nested levels of the map where our resolved schema should go, as well as a means to generate
@@ -1677,8 +1677,7 @@ def render_and_import_schema(root_schema, schema_name, friendly_name, config_map
   config_map_path.prepend('config-schema-base')
   tmp_file_prefix = config_map_path.join('-')
 
-  final = { 'base' => { 'components' => data } }
-  final_json = to_pretty_json(final)
+  final_json = to_pretty_json(data)
 
   # Write the resolved schema as JSON, which we'll then use to import into a Cue file.
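+  # (Illustration: for the generated sink base schema, `cue_relative_path` is
+  # "components/generated/sinks.cue", so the Cue import below writes to
+  # "website/cue/reference/components/generated/sinks.cue"; see the call sites further down.)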
json_output_file = write_to_temp_file(["config-schema-#{tmp_file_prefix}-", '.json'], final_json) @@ -1686,7 +1685,7 @@ def render_and_import_schema(root_schema, schema_name, friendly_name, config_map # Try importing it as Cue. @logger.info "[*] Importing #{friendly_name} schema as Cue file..." - cue_output_file = "website/cue/reference/components/#{cue_relative_path}" + cue_output_file = "website/cue/reference/#{cue_relative_path}" unless system(@cue_binary_path, 'import', '-f', '-o', cue_output_file, '-p', 'metadata', json_output_file) @logger.error "[!] Failed to import #{friendly_name} schema as valid Cue." exit 1 @@ -1694,23 +1693,65 @@ def render_and_import_schema(root_schema, schema_name, friendly_name, config_map @logger.info "[✓] Imported #{friendly_name} schema to '#{cue_output_file}'." end -def render_and_import_base_component_schema(root_schema, schema_name, component_type) +def render_and_import_generated_component_schema(root_schema, schema_name, component_type) + friendly_name = "generated #{component_type} configuration" + unwrapped_resolved_schema = unwrap_resolved_schema(root_schema, schema_name, friendly_name) render_and_import_schema( - root_schema, - schema_name, - "base #{component_type} configuration", - ["#{component_type}s"], - "base/#{component_type}s.cue" + unwrapped_resolved_schema, + friendly_name, + ["generated", "components", "#{component_type}s"], + "components/generated/#{component_type}s.cue" ) end def render_and_import_component_schema(root_schema, schema_name, component_type, component_name) + friendly_name = "'#{component_name}' #{component_type} configuration" + unwrapped_resolved_schema = unwrap_resolved_schema(root_schema, schema_name, friendly_name) render_and_import_schema( - root_schema, - schema_name, - "'#{component_name}' #{component_type} configuration", - ["#{component_type}s", component_name], - "#{component_type}s/base/#{component_name}.cue" + unwrapped_resolved_schema, + friendly_name, + ["generated", "components", "#{component_type}s", component_name], + "components/#{component_type}s/generated/#{component_name}.cue" + ) +end + +def render_and_import_generated_api_schema(root_schema, apis) + api_schema = {} + apis.each do |component_name, schema_name| + friendly_name = "'#{component_name}' #{schema_name} configuration" + resolved_schema = unwrap_resolved_schema(root_schema, schema_name, friendly_name) + api_schema[component_name] = resolved_schema + end + + render_and_import_schema( + api_schema, + "configuration", + ["generated", "api"], + "generated/api.cue" + ) +end + +def render_and_import_generated_global_option_schema(root_schema, global_options) + global_option_schema = {} + + global_options.each do |component_name, schema_name| + friendly_name = "'#{component_name}' #{schema_name} configuration" + + if component_name == "global_option" + # Flattening global options + unwrap_resolved_schema(root_schema, schema_name, friendly_name) + .each { |name, schema| global_option_schema[name] = schema } + else + # Resolving and assigning other global options + global_option_schema[component_name] = resolve_schema_by_name(root_schema, schema_name) + end + end + + render_and_import_schema( + global_option_schema, + "configuration", + ["generated", "configuration"], + "generated/configuration.cue" ) end @@ -1733,7 +1774,7 @@ def render_and_import_component_schema(root_schema, schema_name, component_type, # First off, we generate the component type configuration bases. 
These are the high-level # configuration settings that are universal on a per-component type basis. # -# For example, the "base" configuration for a sink would be the inputs, buffer settings, healthcheck +# For example, the "generated" configuration for a sink would be the inputs, buffer settings, healthcheck # settings, and proxy settings... and then the configuration for a sink would be those, plus # whatever the sink itself defines. component_bases = root_schema['definitions'].filter_map do |key, definition| @@ -1743,7 +1784,7 @@ def render_and_import_component_schema(root_schema, schema_name, component_type, .reduce { |acc, item| nested_merge(acc, item) } component_bases.each do |component_type, schema_name| - render_and_import_base_component_schema(root_schema, schema_name, component_type) + render_and_import_generated_component_schema(root_schema, schema_name, component_type) end # Now we'll generate the base configuration for each component. @@ -1759,3 +1800,23 @@ def render_and_import_component_schema(root_schema, schema_name, component_type, render_and_import_component_schema(root_schema, schema_name, component_type, component_name) end end + +apis = root_schema['definitions'].filter_map do |key, definition| + component_type = get_schema_metadata(definition, 'docs::component_type') + component_name = get_schema_metadata(definition, 'docs::component_name') + { component_name => key } if component_type == "api" +end +.reduce { |acc, item| nested_merge(acc, item) } + +render_and_import_generated_api_schema(root_schema, apis) + + +# At last, we generate the global options configuration. +global_options = root_schema['definitions'].filter_map do |key, definition| + component_type = get_schema_metadata(definition, 'docs::component_type') + component_name = get_schema_metadata(definition, 'docs::component_name') + { component_name => key } if component_type == "global_option" +end +.reduce { |acc, item| nested_merge(acc, item) } + +render_and_import_generated_global_option_schema(root_schema, global_options) diff --git a/scripts/generate-release-cue.rb b/scripts/generate-release-cue.rb index c308f44f8bb43..9a03cda437cdf 100755 --- a/scripts/generate-release-cue.rb +++ b/scripts/generate-release-cue.rb @@ -14,22 +14,56 @@ require "json" require "time" +require "optparse" +require 'pathname' require_relative "util/commit" require_relative "util/git_log_commit" require_relative "util/printer" require_relative "util/release" require_relative "util/version" +# Function to find the repository root by looking for .git directory +def find_repo_root + # Get the absolute path of the current script + script_path = File.expand_path(__FILE__) + dir = Pathname.new(script_path).dirname + + # Walk up the directory tree until we find .git or reach the filesystem root + loop do + return dir.to_s if File.exist?(File.join(dir, '.git')) + parent = dir.parent + raise "Could not find repository root (no .git directory found)" if parent == dir # Reached filesystem root + dir = parent + end +end + # # Constants # -ROOT = ".." 
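+# Deriving ROOT from find_repo_root (above) makes the script independent of the caller's
+# working directory; the old relative ".." was only correct after the script's former
+# `Dir.chdir "scripts"` (removed further down).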
+ROOT = find_repo_root
 RELEASE_REFERENCE_DIR = File.join(ROOT, "website", "cue", "reference", "releases")
 CHANGELOG_DIR = File.join(ROOT, "changelog.d")
 
 TYPES = ["chore", "docs", "feat", "fix", "enhancement", "perf"]
 TYPES_THAT_REQUIRE_SCOPES = ["feat", "enhancement", "fix"]
 
+# Parse command-line options
+options = { interactive: true }
+OptionParser.new do |opts|
+  opts.banner = "Usage: #{File.basename(__FILE__)} [options]"
+
+  opts.on("--new-version VERSION", "Specify the new version (e.g., 1.2.3)") do |v|
+    options[:new_version] = v
+  end
+  opts.on("--[no-]interactive", "Enable/disable interactive prompts (default: true)") do |i|
+    options[:interactive] = i
+  end
+  opts.on_tail("-h", "--help", "Show this help message") do
+    puts opts
+    exit
+  end
+end.parse!
+
 #
 # Functions
 #
@@ -43,66 +77,60 @@
 # This file is created from outstanding commits since the last release.
 # It's meant to be a starting point. The resulting file should be reviewed
 # and edited by a human before being turned into a cue file.
-def create_log_file!(current_commits, new_version)
+def create_log_file!(current_commits, new_version, interactive)
   release_log_path = "#{RELEASE_REFERENCE_DIR}/#{new_version}.log"
 
   # Grab all existing commits
   existing_commits = get_existing_commits!
 
-  # Ensure this release does not include duplicate commits. Notice that we
-  # check the parsed PR numbers. This is necessary to ensure we do not include
-  # cherry-picked commits made available in other releases.
-  #
-  # For example, if we cherry pick a commit from `master` to the `0.8` branch
-  # it will have a different commit sha. Without checking something besides the
-  # sha, this commit would also show up in the next release.
-  new_commits =
-    current_commits.select do |current_commit|
-      !existing_commits.any? do |existing_commit|
-        existing_commit.eql?(current_commit)
-      end
-    end
+  # Filter out duplicate commits
+  new_commits = current_commits.select do |current_commit|
+    !existing_commits.any? { |existing_commit| existing_commit.eql?(current_commit) }
+  end
 
   new_commit_lines = new_commits.collect { |c| c.to_git_log_commit.to_raw }.join("\n")
 
   if new_commits.any?
     if File.exists?(release_log_path)
-      words =
-        <<~EOF
-        I found #{new_commits.length} new commits since you last ran this
-        command. So that I don't erase any other work in that file, please
-        manually add the following commit lines:
+      if interactive
+        words = <<~EOF
+          I found #{new_commits.length} new commits since you last ran this
+          command. So that I don't erase any other work in that file, please
+          manually add the following commit lines:
 
-        #{new_commit_lines.split("\n").collect { |line| "    #{line}" }.join("\n")}
+          #{new_commit_lines.split("\n").collect { |line| "    #{line}" }.join("\n")}
 
-        To:
+          To:
 
-            #{release_log_path}
+          #{release_log_path}
 
-        All done? Ready to proceed?
+          All done? Ready to proceed?
         EOF
 
-      if Util::Printer.get(words, ["y", "n"]) == "n"
-        Util::Printer.error!("Ok, re-run this command when you're ready.")
+        if Util::Printer.get(words, ["y", "n"]) == "n"
+          Util::Printer.error!("Ok, re-run this command when you're ready.")
+        end
       end
     else
       File.open(release_log_path, 'w+') do |file|
        file.write(new_commit_lines)
      end
 
-      words =
-        <<~EOF
-        I've created a release log file here:
+      if interactive
+        words = <<~EOF
+          I've created a release log file here:
 
-        #{release_log_path}
+          #{release_log_path}
 
-        Please review the commits and *adjust the commit messages as necessary*.
+          Please review the commits and *adjust the commit messages as necessary*.
 
-        All done? Ready to proceed?
+          All done? Ready to proceed?
         EOF
 
-      if Util::Printer.get(words, ["y", "n"]) == "n"
-        Util::Printer.error!("Ok, re-run this command when you're ready.")
+        if Util::Printer.get(words, ["y", "n"]) == "n"
+          Util::Printer.error!("Ok, re-run this command when you're ready.")
+        end
       end
     end
   end
@@ -391,16 +419,32 @@ def migrate_highlights(new_version)
 #
 # Execute
 #
-
-Dir.chdir "scripts"
+script_dir = File.expand_path(File.dirname(__FILE__))
+Dir.chdir script_dir
 
 Util::Printer.title("Creating release meta file...")
 
 last_tag = `git describe --tags $(git rev-list --tags --max-count=1)`.chomp
 last_version = Util::Version.new(last_tag.gsub(/^v/, ''))
 current_commits = get_commits_since(last_version)
-new_version = get_new_version(last_version, current_commits)
-log_file_path = create_log_file!(current_commits, new_version)
+
+new_version_string = options[:new_version]
+if new_version_string
+  begin
+    new_version = Util::Version.new(new_version_string)
+    if last_version.bump_type(new_version).nil?
+      Util::Printer.error!("The specified version '#{new_version_string}' must be a single patch, minor, or major bump from #{last_version}")
+      exit 1
+    end
+  rescue ArgumentError => e
+    Util::Printer.error!("Invalid version specified: #{e.message}")
+    exit 1
+  end
+else
+  new_version = get_new_version(last_version, current_commits)
+end
+
+log_file_path = create_log_file!(current_commits, new_version, options[:interactive])
 create_release_file!(new_version)
 File.delete(log_file_path)
diff --git a/scripts/int-e2e-test.sh b/scripts/int-e2e-test.sh
new file mode 100755
index 0000000000000..c9981e248da88
--- /dev/null
+++ b/scripts/int-e2e-test.sh
@@ -0,0 +1,54 @@
+#!/usr/bin/env bash
+
+# Used in CI to run and stop an integration test and upload the results of it.
+# This makes it possible to retry the integration test at a higher level than
+# nextest and reduces code duplication in the workflow file.
+
+set -u
+
+if [ $# -ne 2 ]
+then
+  echo "usage: $0 [int|e2e] TEST_NAME"
+  exit 1
+fi
+
+set -x
+
+SCRIPT_DIR=$(realpath "$(dirname "${BASH_SOURCE[0]}")")
+TEST_TYPE=$1 # either "int" or "e2e"
+TEST_NAME=$2
+
+print_compose_logs_on_failure() {
+  local LAST_RETURN_CODE=$1
+
+  if [[ $LAST_RETURN_CODE -ne 0 || "${ACTIONS_RUNNER_DEBUG:-}" == "true" ]]; then
+    (docker compose --project-name "${TEST_NAME}" logs) || echo "Failed to collect logs"
+  fi
+}
+
+if [[ "$TEST_NAME" == "opentelemetry-logs" ]]; then
+  find "${SCRIPT_DIR}/../tests/data/e2e/opentelemetry/logs/output" -type f -name '*.log' -delete
+  chmod -R 777 "${SCRIPT_DIR}/../tests/data/e2e/opentelemetry/logs/output"
+fi
+
+cargo vdev -v "${TEST_TYPE}" start -a "${TEST_NAME}"
+START_RET=$?
+print_compose_logs_on_failure $START_RET
+
+if [[ $START_RET -eq 0 ]]; then
+  cargo vdev -v "${TEST_TYPE}" test --retries 2 -a "${TEST_NAME}"
+  RET=$?
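+  # RET captures the nextest exit status; on failure (or when ACTIONS_RUNNER_DEBUG is set),
+  # the compose logs are dumped below before the environment is torn down.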
+ print_compose_logs_on_failure $RET +else + echo "Skipping test phase because 'vdev start' failed" + RET=$START_RET +fi + +cargo vdev -v "${TEST_TYPE}" stop -a "${TEST_NAME}" + +# Only upload test results if CI is defined +if [[ -n "${CI:-}" ]]; then + ./scripts/upload-test-results.sh +fi + +exit $RET diff --git a/scripts/integration/Dockerfile b/scripts/integration/Dockerfile index 6205c96eb56fe..3e6b68dbfb669 100644 --- a/scripts/integration/Dockerfile +++ b/scripts/integration/Dockerfile @@ -1,4 +1,4 @@ -ARG RUST_VERSION +ARG RUST_VERSION=1.85 FROM docker.io/rust:${RUST_VERSION}-slim-bookworm RUN apt-get update && apt-get install -y --no-install-recommends \ @@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ git \ && rm -rf /var/lib/apt/lists/* -RUN rustup run "${RUST_VERSION}" cargo install cargo-nextest --version 0.9.64 --locked +RUN cargo install cargo-nextest --version 0.9.95 --locked COPY scripts/environment/install-protoc.sh / COPY tests/data/ca/certs /certs diff --git a/scripts/integration/README.md b/scripts/integration/README.md index 116d37553c47b..b38b5eef0571b 100644 --- a/scripts/integration/README.md +++ b/scripts/integration/README.md @@ -3,7 +3,7 @@ This directory contains a set of integration test frameworks for vector which ar Each directory contains two files: -1. A `compose.yaml` file containing the instructions to `docker-compose` or `podman-compose` for how +1. A `compose.yaml` file containing the instructions to `docker compose` or `podman compose` for how to set up the containers in which to run the integrations, and 2. A `test.yaml` file that describes how to run the integration tests, including a matrix of software versions or other parameters over which the tests will be run. diff --git a/scripts/integration/amqp/compose.yaml b/scripts/integration/amqp/compose.yaml index 865c35266f15f..a60a27155df0b 100644 --- a/scripts/integration/amqp/compose.yaml +++ b/scripts/integration/amqp/compose.yaml @@ -12,5 +12,4 @@ services: - RABBITMQ_SSL_CACERTFILE=/code/tests/data/ca/intermediate_server/certs/ca-chain.cert.pem - RABBITMQ_SSL_FAIL_IF_NO_PEER_CERT=false volumes: - - ${PWD}:/code - + - ../../..:/code diff --git a/scripts/integration/aws/compose.yaml b/scripts/integration/aws/compose.yaml index 07de953aaed52..55e926dcdc1af 100644 --- a/scripts/integration/aws/compose.yaml +++ b/scripts/integration/aws/compose.yaml @@ -2,13 +2,11 @@ version: '3' services: mock-ec2-metadata: - image: public.ecr.aws/aws-ec2/amazon-ec2-metadata-mock:v1.11.2 + image: public.ecr.aws/aws-ec2/amazon-ec2-metadata-mock:v1.13.0 mock-localstack: - image: docker.io/localstack/localstack:3 + image: docker.io/localstack/localstack:stable environment: - - SERVICES=kinesis,s3,cloudwatch,es,firehose,sqs,sns - mock-watchlogs: - image: docker.io/luciofranco/mockwatchlogs:latest + - SERVICES=kinesis,s3,cloudwatch,es,firehose,kms,sqs,sns,logs mock-ecs: image: docker.io/amazon/amazon-ecs-local-container-endpoints:latest volumes: diff --git a/scripts/integration/aws/test.yaml b/scripts/integration/aws/test.yaml index 706efe5ba6dd4..ba1d901dfc217 100644 --- a/scripts/integration/aws/test.yaml +++ b/scripts/integration/aws/test.yaml @@ -11,10 +11,10 @@ env: ECS_ADDRESS: http://mock-ecs ELASTICSEARCH_ADDRESS: http://mock-localstack:4566 KINESIS_ADDRESS: http://mock-localstack:4566 + KMS_ADDRESS: http://mock-localstack:4566 S3_ADDRESS: http://mock-localstack:4566 SQS_ADDRESS: http://mock-localstack:4566 SNS_ADDRESS: http://mock-localstack:4566 - WATCHLOGS_ADDRESS: 
http://mock-watchlogs:6000 matrix: version: [latest] diff --git a/scripts/integration/databend/test.yaml b/scripts/integration/databend/test.yaml index f84a979cb4b11..545813974eef5 100644 --- a/scripts/integration/databend/test.yaml +++ b/scripts/integration/databend/test.yaml @@ -5,9 +5,7 @@ test_filter: '::databend::' runner: env: - DATABEND_ENDPOINT: http://databend:8000 - DATABEND_USER: vector - DATABEND_PASSWORD: vector + DATABEND_ENDPOINT: databend://vector:vector@databend:8000?sslmode=disable&presign=detect matrix: version: ['latest'] diff --git a/scripts/integration/datadog-agent/compose.yaml b/scripts/integration/datadog-agent/compose.yaml index 5a36d822ac6ca..1b0b0a5a0fff6 100644 --- a/scripts/integration/datadog-agent/compose.yaml +++ b/scripts/integration/datadog-agent/compose.yaml @@ -14,6 +14,7 @@ services: - DD_CMD_PORT=5001 - DD_USE_DOGSTATSD=false - DD_HOSTNAME=datadog-agent + - DD_SERIALIZER_COMPRESSOR_KIND=zstd volumes: - ../../../tests/data/datadog-agent/conf.yaml:/etc/datadog-agent/conf.d/test.d/conf.yaml datadog-trace-agent: diff --git a/scripts/integration/datadog-logs/test.yaml b/scripts/integration/datadog-logs/test.yaml index 30a99f8a87ae7..7937db5d87158 100644 --- a/scripts/integration/datadog-logs/test.yaml +++ b/scripts/integration/datadog-logs/test.yaml @@ -1,7 +1,7 @@ features: - datadog-logs-integration-tests -test_filter: '::datadog::logs::' +test_filter: '::datadog::logs::integration_tests::' runner: env: diff --git a/scripts/integration/eventstoredb/test.yaml b/scripts/integration/eventstoredb/test.yaml index 43370281b158f..9a734db4881eb 100644 --- a/scripts/integration/eventstoredb/test.yaml +++ b/scripts/integration/eventstoredb/test.yaml @@ -4,7 +4,7 @@ features: test_filter: '::eventstoredb_metrics::' matrix: - version: [latest] + version: ["24.2", "24.6", "latest"] # changes to these files/paths will invoke the integration test in CI # expressions are evaluated using https://github.com/micromatch/picomatch diff --git a/scripts/integration/gcp/compose.yaml b/scripts/integration/gcp/compose.yaml index 3ab1673228b9d..b9e6c8a917d16 100644 --- a/scripts/integration/gcp/compose.yaml +++ b/scripts/integration/gcp/compose.yaml @@ -7,7 +7,7 @@ services: - PUBSUB_PROJECT1=testproject,topic1:subscription1 - PUBSUB_PROJECT2=sourceproject,topic2:subscription2 chronicle-emulator: - image: docker.io/plork/chronicle-emulator:${CONFIG_VERSION} + image: docker.io/timberio/chronicle-emulator:${CONFIG_VERSION} ports: - 3000:3000 volumes: diff --git a/scripts/integration/greptimedb/test.yaml b/scripts/integration/greptimedb/test.yaml index 0fe648b0e7db2..eb1754364172a 100644 --- a/scripts/integration/greptimedb/test.yaml +++ b/scripts/integration/greptimedb/test.yaml @@ -7,11 +7,12 @@ runner: env: GREPTIMEDB_ENDPOINT: greptimedb:4001 GREPTIMEDB_HTTP: http://greptimedb:4000 + no_proxy: greptimedb matrix: # Temporarily pegging to the latest known stable release # since using `latest` is failing consistently. 
- version: [v0.4.4] + version: [v0.13.2] # changes to these files/paths will invoke the integration test in CI # expressions are evaluated using https://github.com/micromatch/picomatch diff --git a/scripts/integration/humio/compose.yaml b/scripts/integration/humio/compose.yaml index fa95bec0826bd..a4b310c9f9557 100644 --- a/scripts/integration/humio/compose.yaml +++ b/scripts/integration/humio/compose.yaml @@ -2,4 +2,4 @@ version: '3' services: humio: - image: docker.io/humio/humio:${CONFIG_VERSION} + image: docker.io/humio/humio-single-node-demo:${CONFIG_VERSION} diff --git a/scripts/integration/humio/test.yaml b/scripts/integration/humio/test.yaml index 344ab6a7fc5fc..ad242e622f5b2 100644 --- a/scripts/integration/humio/test.yaml +++ b/scripts/integration/humio/test.yaml @@ -8,7 +8,7 @@ runner: HUMIO_ADDRESS: http://humio:8080 matrix: - version: [1.13.1] + version: [1.167.0] # changes to these files/paths will invoke the integration test in CI # expressions are evaluated using https://github.com/micromatch/picomatch diff --git a/scripts/integration/kafka/compose.yaml b/scripts/integration/kafka/compose.yaml index 1f1ee04cd7baa..4fb8fb45f2efe 100644 --- a/scripts/integration/kafka/compose.yaml +++ b/scripts/integration/kafka/compose.yaml @@ -2,34 +2,35 @@ version: '3' services: zookeeper: - image: docker.io/wurstmeister/zookeeper:${CONFIG_VERSION} + image: docker.io/confluentinc/cp-zookeeper:${CONFIG_VERSION} ports: - 2181:2181 + environment: + - ZOOKEEPER_CLIENT_PORT=2181 kafka: - image: docker.io/wurstmeister/kafka:2.13-2.6.0 + image: docker.io/confluentinc/cp-kafka:7.6.1 depends_on: - zookeeper environment: - KAFKA_BROKER_ID=1 - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 + - ZOOKEEPER_SASL_ENABLED=false + - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 + - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 - KAFKA_LISTENERS=PLAINTEXT://:9091,SSL://:9092,SASL_PLAINTEXT://:9093 - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9091,SSL://kafka:9092,SASL_PLAINTEXT://kafka:9093 - KAFKA_SSL_KEYSTORE_TYPE=PKCS12 - - KAFKA_SSL_KEYSTORE_LOCATION=/certs/kafka.p12 - - KAFKA_SSL_KEYSTORE_PASSWORD=NOPASS - - KAFKA_SSL_TRUSTSTORE_TYPE=PKCS12 - - KAFKA_SSL_TRUSTSTORE_LOCATION=/certs/kafka.p12 - - KAFKA_SSL_TRUSTSTORE_PASSWORD=NOPASS - - KAFKA_SSL_KEY_PASSWORD=NOPASS - - KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM=none + - KAFKA_SSL_KEY_CREDENTIALS=kafka.pass + - KAFKA_SSL_KEYSTORE_CREDENTIALS=kafka.pass + - KAFKA_SSL_KEYSTORE_FILENAME=kafka.p12 + - KAFKA_SSL_CLIENT_AUTH=none - KAFKA_OPTS=-Djava.security.auth.login.config=/etc/kafka/kafka_server_jaas.conf - - KAFKA_INTER_BROKER_LISTENER_NAME=SASL_PLAINTEXT - KAFKA_SASL_ENABLED_MECHANISMS=PLAIN - - KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL=PLAIN ports: - 9091:9091 - 9092:9092 - 9093:9093 volumes: - - ../../../tests/data/ca/intermediate_server/private/kafka.p12:/certs/kafka.p12:ro + - ../../../tests/data/ca/intermediate_server/private/kafka.pass:/etc/kafka/secrets/kafka.pass:ro + - ../../../tests/data/ca/intermediate_server/private/kafka.p12:/etc/kafka/secrets/kafka.p12:ro - ../../../tests/data/kafka_server_jaas.conf:/etc/kafka/kafka_server_jaas.conf diff --git a/scripts/integration/mqtt/test.yaml b/scripts/integration/mqtt/test.yaml index 607da45eeb5e1..ec0c637e094d1 100644 --- a/scripts/integration/mqtt/test.yaml +++ b/scripts/integration/mqtt/test.yaml @@ -10,3 +10,4 @@ paths: - "src/internal_events/mqtt.rs" - "src/sinks/mqtt/**" - "src/sinks/util/**" +- "src/sources/mqtt/**" diff --git a/scripts/integration/nats/test.yaml 
b/scripts/integration/nats/test.yaml index 1615b5f244ff6..553c9efa25257 100644 --- a/scripts/integration/nats/test.yaml +++ b/scripts/integration/nats/test.yaml @@ -21,7 +21,7 @@ paths: - "src/internal_events/nats.rs" - "src/sources/nats.rs" - "src/sources/util/**" -- "src/sinks/nats.rs" +- "src/sinks/nats/**" - "src/sinks/util/**" - "src/nats.rs" - "scripts/integration/nats/**" diff --git a/scripts/integration/postgres/test.yaml b/scripts/integration/postgres/test.yaml index 67aa2ddc10b50..a89049f545948 100644 --- a/scripts/integration/postgres/test.yaml +++ b/scripts/integration/postgres/test.yaml @@ -1,5 +1,6 @@ features: - postgresql_metrics-integration-tests +- postgres_sink-integration-tests test_filter: ::postgres @@ -18,6 +19,7 @@ matrix: # expressions are evaluated using https://github.com/micromatch/picomatch paths: - "src/internal_events/postgresql_metrics.rs" +- "src/sinks/postgres/**" - "src/sources/postgresql_metrics.rs" - "src/sources/util/**" - "scripts/integration/postgres/**" diff --git a/scripts/integration/pulsar/compose.yaml b/scripts/integration/pulsar/compose.yaml index b73d35909be9d..0e963cd2e0bd1 100644 --- a/scripts/integration/pulsar/compose.yaml +++ b/scripts/integration/pulsar/compose.yaml @@ -3,6 +3,16 @@ version: '3' services: pulsar: image: docker.io/apachepulsar/pulsar:${CONFIG_VERSION} - command: bin/pulsar standalone + command: sh -c "bin/apply-config-from-env.py conf/standalone.conf && bin/pulsar standalone" ports: - - 6650:6650 + - 6650:6650 + - 6651:6651 + environment: + - PULSAR_PREFIX_brokerServicePortTls=6651 + - PULSAR_PREFIX_tlsKeyFilePath=/etc/pulsar/certs/pulsar.key.pem + - PULSAR_PREFIX_tlsCertificateFilePath=/etc/pulsar/certs/pulsar.cert.pem + - PULSAR_PREFIX_tlsTrustCertsFilePath=/etc/pulsar/certs/ca-chain.cert.pem + volumes: + - ../../../tests/data/ca/intermediate_server/private/pulsar.key.pem:/etc/pulsar/certs/pulsar.key.pem:ro + - ../../../tests/data//ca/intermediate_server/certs/pulsar.cert.pem:/etc/pulsar/certs/pulsar.cert.pem:ro + - ../../../tests/data/ca/intermediate_server/certs/ca-chain.cert.pem:/etc/pulsar/certs/ca-chain.cert.pem:ro diff --git a/scripts/integration/pulsar/test.yaml b/scripts/integration/pulsar/test.yaml index 824f0e0f290d4..b629a582d0a62 100644 --- a/scripts/integration/pulsar/test.yaml +++ b/scripts/integration/pulsar/test.yaml @@ -4,7 +4,7 @@ features: test_filter: '::pulsar::integration_tests::' env: - PULSAR_ADDRESS: pulsar://pulsar:6650 + PULSAR_HOST: pulsar matrix: version: [latest] diff --git a/scripts/integration/redis/compose.yaml b/scripts/integration/redis/compose.yaml index a5dd865e43579..1399a12b2a655 100644 --- a/scripts/integration/redis/compose.yaml +++ b/scripts/integration/redis/compose.yaml @@ -1,5 +1,26 @@ version: '3' services: - redis: + redis-primary: image: docker.io/redis:${CONFIG_VERSION} + container_name: redis-primary + hostname: redis-primary + ports: + - "6379:6379" + + redis-sentinel: + image: docker.io/redis:${CONFIG_VERSION} + container_name: redis-sentinel + hostname: redis-sentinel + depends_on: + - redis-primary + ports: + - "26379:26379" + command: > + sh -c 'echo "bind 0.0.0.0" > /etc/sentinel.conf && + echo "sentinel monitor vector redis-primary 6379 1" >> /etc/sentinel.conf && + echo "sentinel resolve-hostnames yes" >> /etc/sentinel.conf && + echo "sentinel down-after-milliseconds vector 5000" >> /etc/sentinel.conf && + echo "sentinel failover-timeout vector 5000" >> /etc/sentinel.conf && + echo "sentinel parallel-syncs vector 1" >> /etc/sentinel.conf && + 
redis-sentinel /etc/sentinel.conf' diff --git a/scripts/integration/redis/test.yaml b/scripts/integration/redis/test.yaml index d2d0577e844ca..6b2d9bfa3745a 100644 --- a/scripts/integration/redis/test.yaml +++ b/scripts/integration/redis/test.yaml @@ -4,7 +4,8 @@ features: test_filter: "::redis::" env: - REDIS_URL: redis://redis:6379/0 + REDIS_URL: redis://redis-primary:6379/0 + SENTINEL_URL: redis://redis-sentinel:26379/ matrix: version: [6-alpine] diff --git a/scripts/integration/shutdown/compose.yaml b/scripts/integration/shutdown/compose.yaml index a74d753e841bc..4fb8fb45f2efe 100644 --- a/scripts/integration/shutdown/compose.yaml +++ b/scripts/integration/shutdown/compose.yaml @@ -2,32 +2,35 @@ version: '3' services: zookeeper: - image: docker.io/wurstmeister/zookeeper:${CONFIG_VERSION} + image: docker.io/confluentinc/cp-zookeeper:${CONFIG_VERSION} ports: - 2181:2181 + environment: + - ZOOKEEPER_CLIENT_PORT=2181 kafka: - image: docker.io/wurstmeister/kafka:2.13-2.6.0 + image: docker.io/confluentinc/cp-kafka:7.6.1 depends_on: - zookeeper environment: - KAFKA_BROKER_ID=1 - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 + - ZOOKEEPER_SASL_ENABLED=false + - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 + - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 - KAFKA_LISTENERS=PLAINTEXT://:9091,SSL://:9092,SASL_PLAINTEXT://:9093 - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9091,SSL://kafka:9092,SASL_PLAINTEXT://kafka:9093 - - KAFKA_SSL_KEYSTORE_LOCATION=/certs/kafka.p12 - - KAFKA_SSL_KEYSTORE_PASSWORD=NOPASS - - KAFKA_SSL_TRUSTSTORE_LOCATION=/certs/kafka.p12 - - KAFKA_SSL_TRUSTSTORE_PASSWORD=NOPASS - - KAFKA_SSL_KEY_PASSWORD=NOPASS - - KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM=none + - KAFKA_SSL_KEYSTORE_TYPE=PKCS12 + - KAFKA_SSL_KEY_CREDENTIALS=kafka.pass + - KAFKA_SSL_KEYSTORE_CREDENTIALS=kafka.pass + - KAFKA_SSL_KEYSTORE_FILENAME=kafka.p12 + - KAFKA_SSL_CLIENT_AUTH=none - KAFKA_OPTS=-Djava.security.auth.login.config=/etc/kafka/kafka_server_jaas.conf - - KAFKA_INTER_BROKER_LISTENER_NAME=SASL_PLAINTEXT - KAFKA_SASL_ENABLED_MECHANISMS=PLAIN - - KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL=PLAIN ports: - 9091:9091 - 9092:9092 - 9093:9093 volumes: - - ../../../tests/data/ca/intermediate_server/private/kafka.p12:/certs/kafka.p12:ro + - ../../../tests/data/ca/intermediate_server/private/kafka.pass:/etc/kafka/secrets/kafka.pass:ro + - ../../../tests/data/ca/intermediate_server/private/kafka.p12:/etc/kafka/secrets/kafka.p12:ro - ../../../tests/data/kafka_server_jaas.conf:/etc/kafka/kafka_server_jaas.conf diff --git a/scripts/test-e2e-kubernetes.sh b/scripts/test-e2e-kubernetes.sh index 38d7fc13b05b1..ec8332c85e66f 100755 --- a/scripts/test-e2e-kubernetes.sh +++ b/scripts/test-e2e-kubernetes.sh @@ -107,7 +107,7 @@ if [[ -z "${CONTAINER_IMAGE:-}" ]]; then # Build docker image with Vector - the same way it's done for releases. Don't # do the push - we'll handle it later. 
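+  # (REPOS, below, replaces the former REPO variable; presumably the release build
+  # scripts now accept a list of target image repositories.)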
- REPO="$CONTAINER_IMAGE_REPO" \ + REPOS="$CONTAINER_IMAGE_REPO" \ CHANNEL="test" \ BASE="$BASE_TAG" \ TAG="$VERSION_TAG" \ diff --git a/scripts/util/commit.rb b/scripts/util/commit.rb deleted file mode 100644 index 8b4f5baba1ca1..0000000000000 --- a/scripts/util/commit.rb +++ /dev/null @@ -1,166 +0,0 @@ -require_relative "conventional_commit" -require_relative "git_log_commit" - -module Vector - class Commit - class << self - def fetch_since(last_version) - git_log = GitLogCommit.fetch_since!(last_version) - git_log.collect do |git_log_commit| - from_git_log_commit(git_log_commit) - end - end - - def fetch_since!(last_version) - git_log = GitLogCommit.fetch_since!(last_version) - git_log.collect do |git_log_commit| - from_git_log_commit!(git_log_commit) - end - end - - def from_git_log!(git_log) - git_log.collect do |git_log_commit| - from_git_log_commit!(git_log_commit) - end - end - - private - def from_git_log_commit(git_log_commit) - conventional_commit = ConventionalCommit.parse(git_log_commit.message) - hash = git_log_commit.to_h.merge(conventional_commit.to_h) - new(hash) - end - - def from_git_log_commit!(git_log_commit) - conventional_commit = ConventionalCommit.parse!(git_log_commit.message) - hash = git_log_commit.to_h.merge(conventional_commit.to_h) - new(hash) - end - end - - attr_reader :author, - :breaking_change, - :date, - :deletions_count, - :description, - :files_count, - :insertions_count, - :pr_number, - :scopes, - :sha, - :type - - def initialize(hash) - @author = hash.fetch("author") - @breaking_change = hash.fetch("breaking_change") - @date = hash.fetch("date") - @deletions_count = hash.fetch("deletions_count") - @description = hash.fetch("description") - @files_count = hash.fetch("files_count") - @insertions_count = hash.fetch("insertions_count") - @pr_number = hash.fetch("pr_number") - @scopes = hash.fetch("scopes") - @sha = hash.fetch("sha") - @type = hash.fetch("type") - end - - def eql?(other) - sha == other.sha || pr_number == other.pr_number - end - - def breaking_change? - breaking_change == true - end - - def fix? - type == "fix" - end - - def new_feature? - type == "feat" - end - - def to_cue_struct - "{" + - "sha: #{sha.to_json}, " + - "date: #{date.to_json}, " + - "description: #{description.to_json}, " + - "pr_number: #{pr_number.to_json}, " + - "scopes: #{scopes.to_json}, " + - "type: #{type.to_json}, " + - "breaking_change: #{breaking_change.to_json}, " + - "author: #{author.to_json}, " + - "files_count: #{files_count.to_json}, " + - "insertions_count: #{insertions_count.to_json}, " + - "deletions_count: #{deletions_count.to_json}}" - end - - def validate! - if !type.nil? && !TYPES.include?(type) - raise <<~EOF - The following commit has an invalid type! - - #{to_s} - - The type must be one of #{TYPES.inspect}. - - #{type.inspect} - - Please correct in the release /.meta file and retry. - EOF - end - - if TYPES_THAT_REQUIRE_SCOPES.include?(type) && scopes.empty? - raise <<~EOF - The following commit does not have a scope - - #{to_s} - - A scope is required for commits of type #{TYPES_THAT_REQUIRE_SCOPES.inspect}. - - #{description} - - Please correct in the release /.meta file and retry. - EOF - end - - true - end - - def to_git_log_commit - message = "" - - if type - message = "#{message}#{type.clone}" - end - - if scopes.any? - message = "#{message}(#{scopes.join(", ")})" - end - - if breaking_change? - message = "#{message}!" 
- end - - message = "#{message}: #{description}" - - if pr_number - message = "#{message} (##{pr_number})" - end - - GitLogCommit.new({ - "author" => author, - "date" => date, - "deletions_count" => deletions_count, - "files_count" => files_count, - "insertions_count" => insertions_count, - "message" => message, - "sha" => sha - }) - end - - def to_s - "#{sha} #{type}(#{scopes.join(", ")})#{breaking_change? ? "!" : ""}: #{description} (##{pr_number})" - end - end -end diff --git a/scripts/util/conventional_commit.rb b/scripts/util/conventional_commit.rb deleted file mode 100644 index 5944bb407f663..0000000000000 --- a/scripts/util/conventional_commit.rb +++ /dev/null @@ -1,97 +0,0 @@ -module Vector - class ConventionalCommit - class << self - def parse(message) - hash = parse_commit_message(message) - new(hash) - end - - def parse!(message) - hash = parse_commit_message!(message) - new(hash) - end - - private - def parse_commit_message(message) - begin - parse_commit_message!(message) - rescue Exception => e - if message.include?("Use `namespace` field in metric sources") - raise e - end - - { - "breaking_change" => nil, - "description" => message, - "pr_number" => nil, - "scopes" => [], - "type" => nil - } - end - end - - def parse_commit_message!(message) - match = message.match(/^(?[a-z]*)(\((?[a-z0-9_, ]*)\))?(?!)?: (?.*?)( \(#(?[0-9]*)\))?$/) - - if match.nil? - raise <<~EOF - Commit message does not conform to the conventional commit format. - - Unable to parse at all! - - #{message} - - Please correct in the release /.meta file and retry. - EOF - end - - attributes = - { - "type" => match[:type], - "breaking_change" => !match[:breaking_change].nil?, - "description" => match[:description] - } - - attributes["scopes"] = - if match[:scope] - match[:scope].split(",").collect(&:strip) - else - [] - end - - attributes["pr_number"] = - if match[:pr_number] - match[:pr_number].to_i - else - nil - end - - attributes - end - end - - attr_reader :breaking_change, - :description, - :pr_number, - :type, - :scopes - - def initialize(hash) - @breaking_change = hash.fetch("breaking_change") - @description = hash.fetch("description") - @pr_number = hash.fetch("pr_number") - @type = hash.fetch("type") - @scopes = hash.fetch("scopes") - end - - def to_h - { - "breaking_change" => breaking_change, - "description" => description, - "pr_number" => pr_number, - "type" => type, - "scopes" => scopes - } - end - end -end diff --git a/scripts/util/git_log_commit.rb b/scripts/util/git_log_commit.rb deleted file mode 100644 index fe8425209a86d..0000000000000 --- a/scripts/util/git_log_commit.rb +++ /dev/null @@ -1,122 +0,0 @@ -module Vector - class GitLogCommit - class << self - def fetch_since!(last_version) - range = "v#{last_version}..." - commit_log = `git log #{range} --cherry-pick --right-only --no-merges --pretty=format:'%H\t%s\t%aN\t%ad'`.chomp - commit_lines = commit_log.split("\n").reverse - - commit_lines.collect do |commit_line| - hash = parse_commit_line!(commit_line) - new(hash) - end - end - - def from_file!(path) - contents = File.read(path) - contents.split("\n").collect do |line| - hash = parse_commit_line!(line) - new(hash) - end - end - - private - # This is used for the `files_count`, `insertions_count`, and `deletions_count` - # attributes. It helps to communicate stats and the depth of changes in our - # release notes. 
- def get_commit_stats(sha) - `git show --shortstat --oneline #{sha}`.split("\n").last - end - - def parse_commit_line!(commit_line) - # Parse the full commit line - line_parts = commit_line.split("\t") - sha = line_parts.fetch(0) - message = line_parts.fetch(1) - author = line_parts.fetch(2) - date = Time.parse(line_parts.fetch(3)).utc - - attributes = - { - "sha" => sha, - "author" => author, - "date" => date, - "message" => message - } - - # Parse the stats - stats = get_commit_stats(attributes.fetch("sha")) - if /^\W*\p{Digit}+ files? changed,/.match(stats) - stats_attributes = parse_commit_stats!(stats) - attributes.merge!(stats_attributes) - end - - attributes - end - - # Parses the data from `#get_commit_stats`. - def parse_commit_stats!(stats) - attributes = {} - - stats.split(", ").each do |stats_part| - stats_part.strip! - - key = - case stats_part - when /insertions?/ - "insertions_count" - when /deletions?/ - "deletions_count" - when /files? changed/ - "files_count" - else - raise "Invalid commit stat: #{stats_part}" - end - - count = stats_part.match(/^(?[0-9]*) /)[:count].to_i - attributes[key] = count - end - - attributes["insertions_count"] ||= 0 - attributes["deletions_count"] ||= 0 - attributes["files_count"] ||= 0 - attributes - end - end - - attr_reader :author, - :date, - :deletions_count, - :files_count, - :insertions_count, - :message, - :raw, - :sha - - def initialize(hash) - @author = hash.fetch("author") - @date = hash.fetch("date") - @deletions_count = hash.fetch("deletions_count", 0) - @files_count = hash.fetch("files_count", 0) - @insertions_count = hash.fetch("insertions_count", 0) - @message = hash.fetch("message") - @sha = hash.fetch("sha") - end - - def to_h - { - "author" => author, - "date" => date, - "deletions_count" => deletions_count, - "files_count" => files_count, - "insertions_count" => insertions_count, - "message" => message, - "sha" => sha - } - end - - def to_raw - "#{sha}\t#{message}\t#{author}\t#{date.strftime("%a %b %d %H:%M:%S %Y %z")}" - end - end -end diff --git a/scripts/util/printer.rb b/scripts/util/printer.rb deleted file mode 100644 index 4f65f2d904359..0000000000000 --- a/scripts/util/printer.rb +++ /dev/null @@ -1,74 +0,0 @@ -require "paint" - -module Util - module Printer - PROMPT = "---> " - INDENT = " " - SEPARATOR = "-" * 80 - TITLE_PROMPT = "#### " - - extend self - - def error!(message) - Printer.say(message, color: :red) - exit(1) - end - - def Printer.get(words, choices = nil) - question = "#{words.strip}" - - if !choices.nil? - question += " (" + choices.join("/") + ")" - end - - Printer.say(question) - - print INDENT - - input = gets().chomp - - if choices && !choices.include?(input) - Printer.say("You must enter one of #{choices.join(", ")}", color: :red) - Printer.get(words, choices) - else - input - end - end - - def invalid(words) - Printer.say(words, color: :yellow) - end - - def say(words, color: nil, new: true, prompt: PROMPT) - prefix = new ? 
prompt : INDENT - - if color - words = Paint[prefix + words, color] - else - words = prefix + words - end - - puts words.gsub("\n", "\n#{INDENT}") - end - - def separate(color: nil) - string = SEPARATOR - - if color - string = Paint[string, color] - end - - puts "" - puts string - end - - def success(words) - Printer.say(words, color: :green) - end - - def title(words) - separate(color: :cyan) - Printer.say(words, color: :cyan, prompt: TITLE_PROMPT) - end - end -end diff --git a/scripts/util/release.rb b/scripts/util/release.rb deleted file mode 100644 index 5533552af83b1..0000000000000 --- a/scripts/util/release.rb +++ /dev/null @@ -1,38 +0,0 @@ -require_relative "commit" -require_relative "version" - -module Vector - class Release - class << self - def all!(dir) - release_meta_paths = Dir.glob("#{dir}/*.cue").to_a - - release_meta_paths. - collect do |release_meta_path| - release_json = `cue export #{release_meta_path}/../../urls.cue #{release_meta_path}` - release_hash = JSON.parse(release_json) - name = release_hash.fetch("releases").keys.first - hash = release_hash.fetch("releases").values.first - new(hash.merge({"name" => name})) - end. - sort_by(&:version) - end - end - - attr_reader :codename, - :commits, - :date, - :name, - :version, - :whats_next - - def initialize(hash) - @codename = hash.fetch("codename", "") - @commits = hash.fetch("commits").collect { |commit_hash| Commit.new(commit_hash) } - @date = hash.fetch("date") - @name = hash.fetch("name") - @version = Util::Version.new(@name) - @whats_next = hash.fetch("whats_next", []) - end - end -end diff --git a/scripts/util/version.rb b/scripts/util/version.rb deleted file mode 100644 index afc69ab15483f..0000000000000 --- a/scripts/util/version.rb +++ /dev/null @@ -1,45 +0,0 @@ -module Util - class Version < Gem::Version - def bump_type(other_version) - # Return nil if the other version is not greater than the current version - if other_version <= self - return nil - end - - bumped_version = bump - next_major = segments.first + 1 - - if other_version.prerelease? 
- "pre" - elsif other_version < bumped_version - "patch" - elsif other_version == bumped_version - "minor" - elsif other_version.segments.first == next_major - "major" - else - nil - end - end - - def major - segments[0] - end - - def major_x - "#{segments[0]}.X" - end - - def minor - segments[1] - end - - def minor_x - "#{segments[0]}.#{segments[1]}.X" - end - - def patch - segments[2] - end - end -end diff --git a/scripts/verify-install.sh b/scripts/verify-install.sh index 48e80ee36ffa0..a73dccc7a52b4 100755 --- a/scripts/verify-install.sh +++ b/scripts/verify-install.sh @@ -5,7 +5,7 @@ set -euo pipefail # # SUMMARY # -# Verifies vector packages have been installed correctly +# Verifies vector packages have been built and installed correctly package="${1:?must pass package as argument}" @@ -38,3 +38,5 @@ getent group vector || (echo "vector group missing" && exit 1) vector --version || (echo "vector --version failed" && exit 1) grep -q "FOO=bar" "/etc/default/vector" || (echo "/etc/default/vector has incorrect contents" && exit 1) grep -q "foo: bar" "/etc/vector/vector.yaml" || (echo "/etc/vector/vector.yaml has incorrect contents" && exit 1) + +dd-pkg lint "$package" diff --git a/src/api/mod.rs b/src/api/mod.rs index 71db358e231c4..48b23c47907f3 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -2,14 +2,15 @@ mod handler; mod schema; mod server; -pub mod tap; -#[cfg(all(test, feature = "vector-api-tests"))] +#[cfg(all( + test, + feature = "vector-api-tests", + feature = "sinks-blackhole", + feature = "sources-demo_logs", + feature = "transforms-log_to_metric", + feature = "transforms-remap", +))] mod tests; pub use schema::build_schema; pub use server::Server; -use tokio::sync::oneshot; - -// Shutdown channel types used by the server and tap. -type ShutdownTx = oneshot::Sender<()>; -type ShutdownRx = oneshot::Receiver<()>; diff --git a/src/api/schema/components/mod.rs b/src/api/schema/components/mod.rs index a8cb08731c19d..b65b2a4cec230 100644 --- a/src/api/schema/components/mod.rs +++ b/src/api/schema/components/mod.rs @@ -6,10 +6,10 @@ pub mod transform; use std::{ cmp, collections::{HashMap, HashSet}, + sync::LazyLock, }; use async_graphql::{Enum, InputObject, Interface, Object, Subscription}; -use once_cell::sync::Lazy; use tokio_stream::{wrappers::BroadcastStream, Stream, StreamExt}; use vector_lib::internal_event::DEFAULT_OUTPUT; @@ -23,6 +23,7 @@ use crate::{ filter_check, }; +#[allow(clippy::duplicated_attributes)] // False positive caused by `ty = "String"` #[derive(Debug, Clone, Interface)] #[graphql( field(name = "component_id", ty = "String"), @@ -222,8 +223,8 @@ enum ComponentChanged { Removed(Component), } -static COMPONENT_CHANGED: Lazy> = - Lazy::new(|| { +static COMPONENT_CHANGED: LazyLock> = + LazyLock::new(|| { let (tx, _) = tokio::sync::broadcast::channel(10); tx }); @@ -234,7 +235,7 @@ pub struct ComponentsSubscription; #[Subscription] impl ComponentsSubscription { /// Subscribes to all newly added components - async fn component_added(&self) -> impl Stream { + async fn component_added(&self) -> impl Stream + use<> { BroadcastStream::new(COMPONENT_CHANGED.subscribe()).filter_map(|c| match c { Ok(ComponentChanged::Added(c)) => Some(c), _ => None, @@ -242,7 +243,7 @@ impl ComponentsSubscription { } /// Subscribes to all removed components - async fn component_removed(&self) -> impl Stream { + async fn component_removed(&self) -> impl Stream + use<> { BroadcastStream::new(COMPONENT_CHANGED.subscribe()).filter_map(|c| match c { Ok(ComponentChanged::Removed(c)) => 
Some(c), _ => None, @@ -255,7 +256,14 @@ pub fn update_config(config: &Config) { let mut new_components = HashMap::new(); // Sources - for (component_key, source) in config.sources() { + let table_sources = config + .enrichment_tables() + .filter_map(|(k, e)| e.as_source(k)) + .collect::>(); + for (component_key, source) in config + .sources() + .chain(table_sources.iter().map(|(k, s)| (k, s))) + { new_components.insert( component_key.clone(), Component::Source(source::Source(source::Data { @@ -300,7 +308,14 @@ pub fn update_config(config: &Config) { } // Sinks - for (component_key, sink) in config.sinks() { + let table_sinks = config + .enrichment_tables() + .filter_map(|(k, e)| e.as_sink(k)) + .collect::>(); + for (component_key, sink) in config + .sinks() + .chain(table_sinks.iter().map(|(k, s)| (k, s))) + { new_components.insert( component_key.clone(), Component::Sink(sink::Sink(sink::Data { diff --git a/src/api/schema/components/state.rs b/src/api/schema/components/state.rs index 8d42af2cd663a..615f3b2e26479 100644 --- a/src/api/schema/components/state.rs +++ b/src/api/schema/components/state.rs @@ -1,17 +1,15 @@ use std::{ collections::{HashMap, HashSet}, - sync::{Arc, RwLock}, + sync::{Arc, LazyLock, RwLock}, }; -use once_cell::sync::Lazy; - use super::{sink, source, transform, Component}; use crate::config::{ComponentKey, OutputId}; pub const INVARIANT: &str = "Couldn't acquire lock on Vector components. Please report this."; -pub static COMPONENTS: Lazy>>> = - Lazy::new(|| Arc::new(RwLock::new(HashMap::new()))); +pub static COMPONENTS: LazyLock>>> = + LazyLock::new(|| Arc::new(RwLock::new(HashMap::new()))); /// Filter components with the provided `map_func` pub fn filter_components(map_func: impl Fn((&ComponentKey, &Component)) -> Option) -> Vec { diff --git a/src/api/schema/events/encoding.rs b/src/api/schema/events/encoding.rs index ccd99f4804700..4b455b5e2a2fc 100644 --- a/src/api/schema/events/encoding.rs +++ b/src/api/schema/events/encoding.rs @@ -2,7 +2,7 @@ use async_graphql::Enum; #[derive(Enum, Copy, Clone, PartialEq, Eq)] /// Encoding format for the event -pub(crate) enum EventEncodingType { +pub enum EventEncodingType { Json, Yaml, Logfmt, diff --git a/src/api/schema/events/log.rs b/src/api/schema/events/log.rs index 1faedfa509ad6..2a08c1f13135d 100644 --- a/src/api/schema/events/log.rs +++ b/src/api/schema/events/log.rs @@ -3,10 +3,11 @@ use std::borrow::Cow; use async_graphql::Object; use chrono::{DateTime, Utc}; use vector_lib::encode_logfmt; +use vector_lib::event; +use vector_lib::tap::topology::TapOutput; use vrl::event_path; use super::EventEncodingType; -use crate::{event, topology::TapOutput}; #[derive(Debug, Clone)] pub struct Log { diff --git a/src/api/schema/events/metric.rs b/src/api/schema/events/metric.rs index 77247ac9b365f..f25b383f4a930 100644 --- a/src/api/schema/events/metric.rs +++ b/src/api/schema/events/metric.rs @@ -2,12 +2,10 @@ use async_graphql::{Enum, Object}; use chrono::{DateTime, Utc}; use serde_json::Value; use vector_lib::encode_logfmt; +use vector_lib::event; +use vector_lib::tap::topology::TapOutput; use super::EventEncodingType; -use crate::{ - event::{self, KeyString}, - topology::TapOutput, -}; #[derive(Debug, Clone)] pub struct Metric { @@ -128,7 +126,7 @@ impl Metric { .expect("logfmt serialization of metric event failed: conversion to serde Value failed. 
Please report."); match json { Value::Object(map) => encode_logfmt::encode_map( - &map.into_iter().map(|(k,v)| (KeyString::from(k), v)).collect(), + &map.into_iter().map(|(k,v)| (event::KeyString::from(k), v)).collect(), ) .expect("logfmt serialization of metric event failed. Please report."), _ => panic!("logfmt serialization of metric event failed: metric converted to unexpected serde Value. Please report."), diff --git a/src/api/schema/events/mod.rs b/src/api/schema/events/mod.rs index c311956ffde8e..e734169afd31d 100644 --- a/src/api/schema/events/mod.rs +++ b/src/api/schema/events/mod.rs @@ -1,47 +1,21 @@ -mod encoding; +pub mod encoding; pub mod log; pub mod metric; -pub mod notification; pub mod output; pub mod trace; -use std::collections::HashSet; - use async_graphql::{Context, Subscription}; use encoding::EventEncodingType; use futures::{stream, Stream, StreamExt}; -use output::OutputEventsPayload; +use output::{from_tap_payload_to_output_events, OutputEventsPayload}; use rand::{rngs::SmallRng, Rng, SeedableRng}; +use std::time::{SystemTime, UNIX_EPOCH}; use tokio::{select, sync::mpsc, time}; use tokio_stream::wrappers::ReceiverStream; - -use crate::{api::tap::TapController, topology::WatchRx}; - -/// Patterns (glob) used by tap to match against components and access events -/// flowing into (for_inputs) or out of (for_outputs) specified components -#[derive(Debug)] -pub struct TapPatterns { - pub for_outputs: HashSet, - pub for_inputs: HashSet, -} - -impl TapPatterns { - pub const fn new(for_outputs: HashSet, for_inputs: HashSet) -> Self { - Self { - for_outputs, - for_inputs, - } - } - - /// Get all user-specified patterns - pub fn all_patterns(&self) -> HashSet { - self.for_outputs - .iter() - .cloned() - .chain(self.for_inputs.iter().cloned()) - .collect() - } -} +use vector_lib::tap::{ + controller::{TapController, TapPatterns}, + topology::WatchRx, +}; #[derive(Debug, Default)] pub struct EventsSubscription; @@ -81,7 +55,7 @@ pub(crate) fn create_events_stream( // interval, this is capped to the same value. let (tap_tx, tap_rx) = mpsc::channel(limit); let mut tap_rx = ReceiverStream::new(tap_rx) - .flat_map(|payload| stream::iter(>::from(payload))); + .flat_map(|payload| stream::iter(from_tap_payload_to_output_events(payload))); // The resulting vector of `Event` sent to the client. Only one result set will be streamed // back to the client at a time. This value is set higher than `1` to prevent blocking the event @@ -108,7 +82,11 @@ pub(crate) fn create_events_stream( // Random number generator to allow for sampling. Speed trumps cryptographic security here. // The RNG must be Send + Sync to use with the `select!` loop below, hence `SmallRng`. - let mut rng = SmallRng::from_entropy(); + let seed = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_nanos() as u64; + let mut rng = SmallRng::seed_from_u64(seed); // Keep a count of the batch size, which will be used as a seed for random eviction // per the sampling strategy used below. 
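+    // (Migration note: rand 0.9 dropped `SmallRng::from_entropy` and renamed `gen_range`
+    // to `random_range`; hence the clock-based seeding above and the call in the next hunk.)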
@@ -140,7 +118,7 @@ pub(crate) fn create_events_stream( if limit > results.len() { results.push(payload); } else { - let random_number = rng.gen_range(0..batch); + let random_number = rng.random_range(0..batch); if random_number < results.len() { results[random_number] = payload; } diff --git a/src/api/schema/events/output.rs b/src/api/schema/events/output.rs index 0adb3a24a5ec7..d0a70fe84ddf2 100644 --- a/src/api/schema/events/output.rs +++ b/src/api/schema/events/output.rs @@ -1,7 +1,36 @@ -use async_graphql::Union; +use async_graphql::{Object, Union}; -use super::{log::Log, metric::Metric, notification::EventNotification, trace::Trace}; -use crate::api::tap::TapPayload; +use crate::api::schema::events::log::Log; +use crate::api::schema::events::metric::Metric; +use crate::api::schema::events::trace::Trace; +use vector_lib::tap::controller::TapPayload; +use vector_lib::tap::notification::Notification; + +/// This wrapper struct hoists `message` up from [`Notification`] for a more +/// natural querying experience. While ideally [`Notification`] would be a +/// GraphQL interface with a common `message` field, an interface cannot be +/// directly nested into the union of [`super::OutputEventsPayload`]. +/// +/// The GraphQL specification forbids such a nesting: +/// +#[derive(Debug, Clone)] +pub struct EventNotification { + pub notification: Notification, +} + +#[Object] +/// A notification regarding events observation +impl EventNotification { + /// Notification details + async fn notification(&self) -> &Notification { + &self.notification + } + + /// The human-readable message associated with the notification + async fn message(&self) -> &str { + self.notification.as_str() + } +} #[derive(Union, Debug, Clone)] #[allow(clippy::large_enum_variant)] @@ -21,26 +50,24 @@ pub enum OutputEventsPayload { } /// Convert an `api::TapPayload` to the equivalent GraphQL type. 
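+/// (A named free function replaces the `From` impl below; `TapPayload` now lives in
+/// `vector_lib`, and the explicit name keeps the one-to-many conversion clear at call sites.)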
-impl From<TapPayload> for Vec<OutputEventsPayload> {
-    fn from(t: TapPayload) -> Self {
-        match t {
-            TapPayload::Log(output, log_array) => log_array
-                .into_iter()
-                .map(|log| OutputEventsPayload::Log(Log::new(output.clone(), log)))
-                .collect(),
-            TapPayload::Metric(output, metric_array) => metric_array
-                .into_iter()
-                .map(|metric| OutputEventsPayload::Metric(Metric::new(output.clone(), metric)))
-                .collect(),
-            TapPayload::Notification(notification) => {
-                vec![OutputEventsPayload::Notification(EventNotification {
-                    notification,
-                })]
-            }
-            TapPayload::Trace(output, trace_array) => trace_array
-                .into_iter()
-                .map(|trace| OutputEventsPayload::Trace(Trace::new(output.clone(), trace)))
-                .collect(),
+pub(crate) fn from_tap_payload_to_output_events(t: TapPayload) -> Vec<OutputEventsPayload> {
+    match t {
+        TapPayload::Log(output, log_array) => log_array
+            .into_iter()
+            .map(|log| OutputEventsPayload::Log(Log::new(output.clone(), log)))
+            .collect(),
+        TapPayload::Metric(output, metric_array) => metric_array
+            .into_iter()
+            .map(|metric| OutputEventsPayload::Metric(Metric::new(output.clone(), metric)))
+            .collect(),
+        TapPayload::Notification(notification) => {
+            vec![OutputEventsPayload::Notification(EventNotification {
+                notification,
+            })]
         }
+        TapPayload::Trace(output, trace_array) => trace_array
+            .into_iter()
+            .map(|trace| OutputEventsPayload::Trace(Trace::new(output.clone(), trace)))
+            .collect(),
     }
 }
diff --git a/src/api/schema/events/trace.rs b/src/api/schema/events/trace.rs
index db2606fac5c95..fe81252c4fa1f 100644
--- a/src/api/schema/events/trace.rs
+++ b/src/api/schema/events/trace.rs
@@ -1,9 +1,10 @@
 use async_graphql::Object;
 use vector_lib::encode_logfmt;
+use vector_lib::event;
+use vector_lib::tap::topology::TapOutput;
 use vrl::event_path;
 
 use super::EventEncodingType;
-use crate::{event, topology::TapOutput};
 
 #[derive(Debug, Clone)]
 pub struct Trace {
diff --git a/src/api/schema/filter.rs b/src/api/schema/filter.rs
index c73b88a6c01d6..438a4d7587207 100644
--- a/src/api/schema/filter.rs
+++ b/src/api/schema/filter.rs
@@ -7,7 +7,7 @@ use super::components::{source, ComponentKind};
 /// Takes an `&Option<bool>` and returns early if false
 #[macro_export]
 macro_rules! filter_check {
-    ($($match:expr),+) => {
+    ($($match:expr_2021),+) => {
         $(
             if matches!($match, Some(t) if !t) {
                 return false;
diff --git a/src/api/schema/gen.rs b/src/api/schema/gen.rs
index 33e27f658416d..c359f0db3b640 100644
--- a/src/api/schema/gen.rs
+++ b/src/api/schema/gen.rs
@@ -110,7 +110,7 @@ async fn main() {
 
     fs::write(
         "lib/vector-api-client/graphql/schema.json",
-        format!("{}\n", json),
+        format!("{json}\n"),
     )
     .expect("Couldn't save schema file");
 }
diff --git a/src/api/schema/health.rs b/src/api/schema/health.rs
index ebd24184caf17..36500f21f7afa 100644
--- a/src/api/schema/health.rs
+++ b/src/api/schema/health.rs
@@ -34,7 +34,7 @@ impl HealthSubscription {
     async fn heartbeat(
         &self,
         #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32,
-    ) -> impl Stream<Item = Heartbeat> {
+    ) -> impl Stream<Item = Heartbeat> + use<> {
         IntervalStream::new(tokio::time::interval(Duration::from_millis(
             interval as u64,
         )))
diff --git a/src/api/schema/metrics/filter.rs b/src/api/schema/metrics/filter.rs
index f225667560cb0..1236119c87af3 100644
--- a/src/api/schema/metrics/filter.rs
+++ b/src/api/schema/metrics/filter.rs
@@ -52,7 +52,7 @@ pub trait MetricsFilter<'a> {
     fn sent_events_total(&self) -> Option<SentEventsTotal>;
 }
 
-impl<'a> MetricsFilter<'a> for Vec<Metric> {
+impl MetricsFilter<'_> for Vec<Metric> {
     fn received_bytes_total(&self) -> Option<ReceivedBytesTotal> {
         let sum = sum_metrics(
             self.iter()
@@ -303,7 +303,7 @@ pub fn component_sent_events_totals_metrics_with_outputs(
             match m.value() {
                 MetricValue::Counter { value }
                     if cache
-                        .insert(format!("{}.{}", id, output), *value)
+                        .insert(format!("{id}.{output}"), *value)
                         .unwrap_or(0.00) < *value =>
                 {
@@ -349,8 +349,7 @@ pub fn component_sent_events_total_throughputs_with_outputs(
             .iter()
             .filter_map(|output| {
                 let m = filter_output_metric(metrics.as_ref(), output.as_ref())?;
-                let throughput =
-                    throughput(&m, format!("{}.{}", id, output), &mut cache)?;
+                let throughput = throughput(&m, format!("{id}.{output}"), &mut cache)?;
                 Some(OutputThroughput::new(output.clone(), throughput as i64))
             })
             .collect::<Vec<_>>();
diff --git a/src/api/schema/metrics/host.rs b/src/api/schema/metrics/host.rs
index 4b0512e7f9287..6a814b1c5f311 100644
--- a/src/api/schema/metrics/host.rs
+++ b/src/api/schema/metrics/host.rs
@@ -259,6 +259,26 @@ impl DiskMetrics {
     }
 }
 
+pub struct TCPMetrics(Vec<Metric>);
+
+#[Object]
+impl TCPMetrics {
+    /// Total TCP connections
+    async fn tcp_conns_total(&self) -> f64 {
+        filter_host_metric(&self.0, "tcp_connections_total")
+    }
+
+    /// Total bytes in the send queue across all connections.
+    async fn tcp_tx_queued_bytes_total(&self) -> f64 {
+        filter_host_metric(&self.0, "tcp_tx_queued_bytes_total")
+    }
+
+    /// Total bytes in the receive queue across all connections.
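A note on the `+ use<>` bounds that recur in health.rs above and throughout the subscription resolvers below: under the Rust 2024 edition capture rules, a return-position `impl Trait` captures every in-scope lifetime by default, including the `&self` borrow, which would tie each returned stream to the subscription object. The empty capture list opts out. A minimal std-only sketch of the same effect:

    struct Counter;

    impl Counter {
        // Without `+ use<>`, the 2024 rules would capture the `&self`
        // lifetime, so the returned iterator could not outlive the borrow.
        fn evens(&self, limit: u64) -> impl Iterator<Item = u64> + use<> {
            (0..limit).filter(|n| n % 2 == 0)
        }
    }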
+ async fn tcp_rx_queued_bytes_total(&self) -> f64 { + filter_host_metric(&self.0, "tcp_rx_queued_bytes_total") + } +} + pub struct HostMetrics(host_metrics::HostMetrics); impl HostMetrics { @@ -324,6 +344,14 @@ impl HostMetrics { self.0.disk_metrics(&mut buffer).await; DiskMetrics(buffer.metrics) } + + #[cfg(target_os = "linux")] + /// TCP metrics + async fn tcp(&self) -> TCPMetrics { + let mut buffer = self.0.buffer(); + self.0.tcp_metrics(&mut buffer).await; + TCPMetrics(buffer.metrics) + } } /// Filters a [`Vec`] by name, returning the inner `value` or 0.00 if not found diff --git a/src/api/schema/metrics/mod.rs b/src/api/schema/metrics/mod.rs index 1a273dc5e9bee..116f1ca3777c2 100644 --- a/src/api/schema/metrics/mod.rs +++ b/src/api/schema/metrics/mod.rs @@ -15,7 +15,7 @@ mod uptime; mod host; pub use allocated_bytes::{AllocatedBytes, ComponentAllocatedBytes}; -use async_graphql::{Interface, Object, Subscription}; +use async_graphql::{Interface, Subscription}; use chrono::{DateTime, Utc}; pub use errors::{ComponentErrorsTotal, ErrorsTotal}; pub use filter::*; @@ -45,9 +45,9 @@ pub enum MetricType { #[derive(Default)] pub struct MetricsQuery; -#[Object] +#[cfg(feature = "sources-host_metrics")] +#[async_graphql::Object] impl MetricsQuery { - #[cfg(feature = "sources-host_metrics")] /// Vector host metrics async fn host_metrics(&self) -> host::HostMetrics { host::HostMetrics::new() @@ -63,7 +63,7 @@ impl MetricsSubscription { async fn uptime( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { get_metrics(interval).filter_map(|m| match m.name() { "uptime_seconds" => Some(Uptime::new(m)), _ => None, @@ -75,7 +75,7 @@ impl MetricsSubscription { async fn received_events_total( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { get_metrics(interval).filter_map(|m| match m.name() { "component_received_events_total" => Some(ReceivedEventsTotal::new(m)), _ => None, @@ -87,7 +87,7 @@ impl MetricsSubscription { async fn received_events_throughput( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { counter_throughput(interval, &|m| m.name() == "component_received_events_total") .map(|(_, throughput)| throughput as i64) } @@ -96,7 +96,7 @@ impl MetricsSubscription { async fn component_received_events_throughputs( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_throughputs(interval, &|m| m.name() == "component_received_events_total") .map(|m| { m.into_iter() @@ -114,7 +114,7 @@ impl MetricsSubscription { async fn component_received_events_totals( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_metrics(interval, &|m| m.name() == "component_received_events_total").map( |m| { m.into_iter() @@ -129,7 +129,7 @@ impl MetricsSubscription { async fn sent_events_total( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { get_metrics(interval).filter_map(|m| match m.name() { "component_sent_events_total" => Some(SentEventsTotal::new(m)), _ => None, @@ -141,7 +141,7 @@ impl MetricsSubscription { async fn sent_events_throughput( 
&self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { counter_throughput(interval, &|m| m.name() == "component_sent_events_total") .map(|(_, throughput)| throughput as i64) } @@ -150,7 +150,7 @@ impl MetricsSubscription { async fn component_sent_events_throughputs( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_sent_events_total_throughputs_with_outputs(interval).map(|m| { m.into_iter() .map(|(key, total_throughput, outputs)| { @@ -164,7 +164,7 @@ impl MetricsSubscription { async fn component_sent_events_totals( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_sent_events_totals_metrics_with_outputs(interval).map(|ms| { ms.into_iter() .map(|(m, m_by_outputs)| ComponentSentEventsTotal::new(m, m_by_outputs)) @@ -176,7 +176,7 @@ impl MetricsSubscription { async fn component_received_bytes_totals( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_metrics(interval, &|m| m.name() == "component_received_bytes_total").map( |m| { m.into_iter() @@ -190,7 +190,7 @@ impl MetricsSubscription { async fn component_received_bytes_throughputs( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_throughputs(interval, &|m| m.name() == "component_received_bytes_total") .map(|m| { m.into_iter() @@ -208,7 +208,7 @@ impl MetricsSubscription { async fn component_sent_bytes_totals( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_metrics(interval, &|m| m.name() == "component_sent_bytes_total") .map(|m| m.into_iter().map(ComponentSentBytesTotal::new).collect()) } @@ -217,7 +217,7 @@ impl MetricsSubscription { async fn component_sent_bytes_throughputs( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_throughputs(interval, &|m| m.name() == "component_sent_bytes_total").map( |m| { m.into_iter() @@ -236,7 +236,7 @@ impl MetricsSubscription { async fn errors_total( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { get_metrics(interval) .filter(|m| m.name().ends_with("_errors_total")) .map(ErrorsTotal::new) @@ -246,7 +246,7 @@ impl MetricsSubscription { async fn allocated_bytes( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { get_metrics(interval) .filter(|m| m.name() == "component_allocated_bytes") .map(AllocatedBytes::new) @@ -256,7 +256,7 @@ impl MetricsSubscription { async fn component_allocated_bytes( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_gauge_metrics(interval, &|m| m.name() == "component_allocated_bytes") .map(|m| m.into_iter().map(ComponentAllocatedBytes::new).collect()) } @@ -265,7 +265,7 @@ impl MetricsSubscription { async fn component_errors_totals( &self, #[graphql(default = 
1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream> { + ) -> impl Stream> + use<> { component_counter_metrics(interval, &|m| m.name().ends_with("_errors_total")) .map(|m| m.into_iter().map(ComponentErrorsTotal::new).collect()) } @@ -274,7 +274,7 @@ impl MetricsSubscription { async fn metrics( &self, #[graphql(default = 1000, validator(minimum = 10, maximum = 60_000))] interval: i32, - ) -> impl Stream { + ) -> impl Stream + use<> { get_metrics(interval).filter_map(|m| match m.name() { "uptime_seconds" => Some(MetricType::Uptime(m.into())), _ => None, diff --git a/src/api/schema/metrics/sink/generic.rs b/src/api/schema/metrics/sink/generic.rs index e0f96e38fbd6d..83c50bc6557b7 100644 --- a/src/api/schema/metrics/sink/generic.rs +++ b/src/api/schema/metrics/sink/generic.rs @@ -9,7 +9,7 @@ use crate::{ pub struct GenericSinkMetrics(Vec); impl GenericSinkMetrics { - pub fn new(metrics: Vec) -> Self { + pub const fn new(metrics: Vec) -> Self { Self(metrics) } } diff --git a/src/api/schema/metrics/source/file.rs b/src/api/schema/metrics/source/file.rs index 24e8fbf64b8d6..93a3141860d5f 100644 --- a/src/api/schema/metrics/source/file.rs +++ b/src/api/schema/metrics/source/file.rs @@ -31,7 +31,7 @@ impl<'a> FileSourceMetricFile<'a> { } #[Object] -impl<'a> FileSourceMetricFile<'a> { +impl FileSourceMetricFile<'_> { /// File name async fn name(&self) -> &str { &*self.name @@ -57,7 +57,7 @@ impl<'a> FileSourceMetricFile<'a> { pub struct FileSourceMetrics(Vec); impl FileSourceMetrics { - pub fn new(metrics: Vec) -> Self { + pub const fn new(metrics: Vec) -> Self { Self(metrics) } @@ -249,7 +249,7 @@ mod tests { sort::by_fields(&mut files, &fields); for (i, f) in ["1", "2", "3"].iter().enumerate() { - assert_eq!(files[i].name.as_str(), format!("/path/to/file/{}", f)); + assert_eq!(files[i].name.as_str(), format!("/path/to/file/{f}")); } } @@ -268,7 +268,7 @@ mod tests { sort::by_fields(&mut files, &fields); for (i, f) in ["3", "2", "1"].iter().enumerate() { - assert_eq!(files[i].name.as_str(), format!("/path/to/file/{}", f)); + assert_eq!(files[i].name.as_str(), format!("/path/to/file/{f}")); } } diff --git a/src/api/schema/metrics/source/generic.rs b/src/api/schema/metrics/source/generic.rs index c66d50a841c91..12435c1b36e90 100644 --- a/src/api/schema/metrics/source/generic.rs +++ b/src/api/schema/metrics/source/generic.rs @@ -9,7 +9,7 @@ use crate::{ pub struct GenericSourceMetrics(Vec); impl GenericSourceMetrics { - pub fn new(metrics: Vec) -> Self { + pub const fn new(metrics: Vec) -> Self { Self(metrics) } } diff --git a/src/api/schema/metrics/transform/generic.rs b/src/api/schema/metrics/transform/generic.rs index 0fd2569551473..854a845226cc1 100644 --- a/src/api/schema/metrics/transform/generic.rs +++ b/src/api/schema/metrics/transform/generic.rs @@ -9,7 +9,7 @@ use crate::{ pub struct GenericTransformMetrics(Vec); impl GenericTransformMetrics { - pub fn new(metrics: Vec) -> Self { + pub const fn new(metrics: Vec) -> Self { Self(metrics) } } diff --git a/src/api/schema/mod.rs b/src/api/schema/mod.rs index d3e664f000d94..58c22a03b687f 100644 --- a/src/api/schema/mod.rs +++ b/src/api/schema/mod.rs @@ -13,7 +13,7 @@ use async_graphql::{EmptyMutation, MergedObject, MergedSubscription, Schema, Sch pub struct Query( health::HealthQuery, components::ComponentsQuery, - metrics::MetricsQuery, + #[cfg(feature = "sources-host_metrics")] metrics::MetricsQuery, meta::MetaQuery, ); diff --git a/src/api/schema/relay.rs b/src/api/schema/relay.rs index 
366a71723aafe..83f397a9d8f76 100644 --- a/src/api/schema/relay.rs +++ b/src/api/schema/relay.rs @@ -49,7 +49,7 @@ impl Base64Cursor { let cursor = String::from_utf8(bytes).map_err(|_| Base64CursorError::Invalid)?; let index = cursor .split(':') - .last() + .next_back() .map(|s| s.parse::()) .ok_or(Base64CursorError::Invalid)? .map_err(|_| Base64CursorError::Invalid)?; diff --git a/src/api/server.rs b/src/api/server.rs index 5b2f5abe71aa6..caf6512b62d3e 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -14,18 +14,18 @@ use tokio::runtime::Handle; use tokio::sync::oneshot; use tower::ServiceBuilder; use tracing::Span; +use vector_lib::tap::topology; use warp::{filters::BoxedFilter, http::Response, ws::Ws, Filter, Reply}; -use super::{handler, schema, ShutdownTx}; +use super::{handler, schema}; use crate::{ config::{self, api}, http::build_http_trace_layer, internal_events::{SocketBindError, SocketMode}, - topology, }; pub struct Server { - _shutdown: ShutdownTx, + _shutdown: oneshot::Sender<()>, addr: SocketAddr, } @@ -45,12 +45,11 @@ impl Server { let _guard = handle.enter(); let addr = config.api.address.expect("No socket address"); - let incoming = AddrIncoming::bind(&addr).map_err(|error| { + let incoming = AddrIncoming::bind(&addr).inspect_err(|error| { emit!(SocketBindError { mode: SocketMode::Tcp, - error: &error, + error, }); - error })?; let span = Span::current(); diff --git a/src/api/tap.rs b/src/api/tap.rs deleted file mode 100644 index 7ba25bfd87f95..0000000000000 --- a/src/api/tap.rs +++ /dev/null @@ -1,1028 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - num::NonZeroUsize, -}; - -use futures::{future::try_join_all, FutureExt}; -use tokio::sync::{ - mpsc as tokio_mpsc, - mpsc::error::{SendError, TrySendError}, - oneshot, -}; -use tracing::{Instrument, Span}; -use uuid::Uuid; -use vector_lib::buffers::{topology::builder::TopologyBuilder, WhenFull}; - -use super::{ - schema::events::{ - notification::{InvalidMatch, Matched, NotMatched, Notification}, - TapPatterns, - }, - ShutdownRx, ShutdownTx, -}; -use crate::{ - config::ComponentKey, - event::{EventArray, LogArray, MetricArray, TraceArray}, - topology::{fanout, fanout::ControlChannel, TapOutput, TapResource, WatchRx}, -}; - -/// A tap sender is the control channel used to surface tap payloads to a client. -type TapSender = tokio_mpsc::Sender; - -const TAP_BUFFER_SIZE: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(100) }; - -/// Clients can supply glob patterns to find matched topology components. -trait GlobMatcher { - fn matches_glob(&self, rhs: T) -> bool; -} - -impl GlobMatcher<&str> for String { - fn matches_glob(&self, rhs: &str) -> bool { - match glob::Pattern::new(self) { - Ok(pattern) => pattern.matches(rhs), - _ => false, - } - } -} - -/// Distinguishing between pattern variants helps us preserve user-friendly tap -/// notifications. Otherwise, after translating an input pattern into relevant -/// output patterns, we'd be unable to send a [`TapPayload::Notification`] with -/// the original user-specified input pattern. -#[derive(Debug, Eq, PartialEq, Hash)] -enum Pattern { - /// A pattern used to tap into outputs of components - OutputPattern(glob::Pattern), - /// A pattern used to tap into inputs of components. - /// - /// For a tap user, an input pattern is effectively a shortcut for specifying - /// one or more output patterns since a component's inputs are other - /// components' outputs. 
This variant captures the original user-supplied - /// pattern alongside the output patterns it's translated into. - InputPattern(String, Vec), -} - -impl GlobMatcher<&str> for Pattern { - fn matches_glob(&self, rhs: &str) -> bool { - match self { - Pattern::OutputPattern(pattern) => pattern.matches(rhs), - Pattern::InputPattern(_, patterns) => { - patterns.iter().any(|pattern| pattern.matches(rhs)) - } - } - } -} - -/// A tap payload contains events or notifications that alert users about the -/// status of the tap request. -#[derive(Debug)] -pub enum TapPayload { - Log(TapOutput, LogArray), - Metric(TapOutput, MetricArray), - Trace(TapOutput, TraceArray), - Notification(Notification), -} - -impl TapPayload { - /// Raise a `matched` event against the provided pattern. - pub fn matched>(pattern: T) -> Self { - Self::Notification(Notification::Matched(Matched::new(pattern.into()))) - } - - /// Raise a `not_matched` event against the provided pattern. - pub fn not_matched>(pattern: T) -> Self { - Self::Notification(Notification::NotMatched(NotMatched::new(pattern.into()))) - } - - /// Raise an `invalid_match` event against the provided input pattern. - pub fn invalid_input_pattern_match>( - pattern: T, - invalid_matches: Vec, - ) -> Self { - let pattern = pattern.into(); - let message = format!("[tap] Warning: source inputs cannot be tapped. Input pattern '{}' matches sources {:?}", pattern, invalid_matches); - Self::Notification(Notification::InvalidMatch(InvalidMatch::new( - message, - pattern, - invalid_matches, - ))) - } - - /// Raise an `invalid_match`event against the provided output pattern. - pub fn invalid_output_pattern_match>( - pattern: T, - invalid_matches: Vec, - ) -> Self { - let pattern = pattern.into(); - let message = format!( - "[tap] Warning: sink outputs cannot be tapped. Output pattern '{}' matches sinks {:?}", - pattern, invalid_matches - ); - Self::Notification(Notification::InvalidMatch(InvalidMatch::new( - message, - pattern, - invalid_matches, - ))) - } -} - -/// A `TapTransformer` transforms raw events and ships them to the global tap receiver. -#[derive(Clone)] -pub struct TapTransformer { - tap_tx: TapSender, - output: TapOutput, -} - -impl TapTransformer { - pub const fn new(tap_tx: TapSender, output: TapOutput) -> Self { - Self { tap_tx, output } - } - - pub fn try_send(&mut self, events: EventArray) { - let payload = match events { - EventArray::Logs(logs) => TapPayload::Log(self.output.clone(), logs), - EventArray::Metrics(metrics) => TapPayload::Metric(self.output.clone(), metrics), - EventArray::Traces(traces) => TapPayload::Trace(self.output.clone(), traces), - }; - - if let Err(TrySendError::Closed(payload)) = self.tap_tx.try_send(payload) { - debug!( - message = "Couldn't send event.", - payload = ?payload, - component_id = ?self.output.output_id, - ); - } - } -} - -/// A tap sink spawns a process for listening for topology changes. If topology changes, -/// sinks are rewired to accommodate matched/unmatched patterns. -#[derive(Debug)] -pub struct TapController { - _shutdown: ShutdownTx, -} - -impl TapController { - /// Creates a new tap sink, and spawns a handler for watching for topology changes - /// and a separate inner handler for events. Uses a oneshot channel to trigger shutdown - /// of handlers when the `TapSink` drops out of scope. 
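That drop-driven shutdown is a compact pattern: the controller holds the `Sender` half of a oneshot channel, and the spawned task awaits the `Receiver`, which resolves with an error the moment the sender is dropped. Reduced to a sketch (assuming a Tokio runtime; names are illustrative):

    use tokio::sync::oneshot;

    struct Guard {
        _shutdown: oneshot::Sender<()>,
    }

    fn spawn_guarded() -> Guard {
        let (tx, rx) = oneshot::channel::<()>();
        tokio::spawn(async move {
            // Resolves with Err(RecvError) once the Guard is dropped.
            let _ = rx.await;
            // ...disconnect the tap sink here...
        });
        Guard { _shutdown: tx }
    }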
- pub fn new(watch_rx: WatchRx, tap_tx: TapSender, patterns: TapPatterns) -> Self { - let (_shutdown, shutdown_rx) = oneshot::channel(); - - tokio::spawn( - tap_handler(patterns, tap_tx, watch_rx, shutdown_rx).instrument(error_span!( - "tap_handler", - component_kind = "sink", - component_id = "_tap", // It isn't clear what the component_id should be here other than "_tap" - component_type = "tap", - )), - ); - - Self { _shutdown } - } -} - -/// Provides a `ShutdownTx` that disconnects a component sink when it drops out of scope. -fn shutdown_trigger(control_tx: ControlChannel, sink_id: ComponentKey) -> ShutdownTx { - let (shutdown_tx, shutdown_rx) = oneshot::channel(); - - tokio::spawn(async move { - _ = shutdown_rx.await; - if control_tx - .send(fanout::ControlMessage::Remove(sink_id.clone())) - .is_err() - { - debug!(message = "Couldn't disconnect sink.", ?sink_id); - } else { - debug!(message = "Disconnected sink.", ?sink_id); - } - }); - - shutdown_tx -} - -/// Sends a 'matched' tap payload. -async fn send_matched(tx: TapSender, pattern: String) -> Result<(), SendError> { - debug!(message = "Sending matched notification.", pattern = ?pattern); - tx.send(TapPayload::matched(pattern)).await -} - -/// Sends a 'not matched' tap payload. -async fn send_not_matched(tx: TapSender, pattern: String) -> Result<(), SendError> { - debug!(message = "Sending not matched notification.", pattern = ?pattern); - tx.send(TapPayload::not_matched(pattern)).await -} - -/// Sends an 'invalid input pattern match' tap payload. -async fn send_invalid_input_pattern_match( - tx: TapSender, - pattern: String, - invalid_matches: Vec, -) -> Result<(), SendError> { - debug!(message = "Sending invalid input pattern match notification.", pattern = ?pattern, invalid_matches = ?invalid_matches); - tx.send(TapPayload::invalid_input_pattern_match( - pattern, - invalid_matches, - )) - .await -} - -/// Sends an 'invalid output pattern match' tap payload. -async fn send_invalid_output_pattern_match( - tx: TapSender, - pattern: String, - invalid_matches: Vec, -) -> Result<(), SendError> { - debug!(message = "Sending invalid output pattern match notification.", pattern = ?pattern, invalid_matches = ?invalid_matches); - tx.send(TapPayload::invalid_output_pattern_match( - pattern, - invalid_matches, - )) - .await -} - -/// Returns a tap handler that listens for topology changes, and connects sinks to observe -/// `LogEvent`s` when a component matches one or more of the provided patterns. -async fn tap_handler( - patterns: TapPatterns, - tx: TapSender, - mut watch_rx: WatchRx, - mut shutdown_rx: ShutdownRx, -) { - debug!(message = "Started tap.", outputs_patterns = ?patterns.for_outputs, inputs_patterns = ?patterns.for_inputs); - - // Sinks register for the current tap. Contains the id of the matched component, and - // a shutdown trigger for sending a remove control message when matching sinks change. - let mut sinks: HashMap = HashMap::new(); - - // Recording user-provided patterns for later use in sending notifications - // (determining patterns which did not match) - let user_provided_patterns = patterns.all_patterns(); - - // The patterns that matched on the last iteration, to compare with the latest - // round of matches when sending notifications. - let mut last_matches = HashSet::new(); - - loop { - tokio::select! { - _ = &mut shutdown_rx => break, - Ok(_) = watch_rx.changed() => { - // Cache of matched patterns. A `HashSet` is used here to ignore repetition. 
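The input-to-output translation described in the `Pattern` docs earlier is easiest to see in isolation: match the input pattern against component ids, then collect the ids of those components' upstream outputs as the patterns to actually tap. A hypothetical sketch using the `glob` crate and a plain map from component id to its inputs:

    use std::collections::HashMap;

    fn expand_input_pattern(
        pattern: &glob::Pattern,
        inputs: &HashMap<String, Vec<String>>,
    ) -> Vec<String> {
        inputs
            .iter()
            .filter(|(component, _)| pattern.matches(component.as_str()))
            .flat_map(|(_, upstream)| upstream.iter().cloned())
            .collect()
    }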
- let mut matched = HashSet::new(); - - // Borrow and clone the latest resources to register sinks. Since this blocks the - // watch channel and the returned ref isn't `Send`, this requires a clone. - let TapResource { - outputs, - inputs, - source_keys, - sink_keys, - removals, - } = watch_rx.borrow().clone(); - - // Remove tap sinks from components that have gone away/can no longer match. - let output_keys = outputs.keys().map(|output| output.output_id.component.clone()).collect::>(); - sinks.retain(|key, _| { - !removals.contains(key) && output_keys.contains(key) || { - debug!(message = "Removing component.", component_id = %key); - false - } - }); - - let mut component_id_patterns = patterns.for_outputs.iter() - .filter_map(|p| glob::Pattern::new(p).ok()) - .map(Pattern::OutputPattern).collect::>(); - - // Matching an input pattern is equivalent to matching the outputs of the component's inputs - for pattern in patterns.for_inputs.iter() { - if let Ok(glob) = glob::Pattern::new(pattern) { - match inputs.iter().filter(|(key, _)| - glob.matches(&key.to_string()) - ).flat_map(|(_, related_inputs)| related_inputs.iter().map(|id| id.to_string()).collect::>()).collect::>() { - found if !found.is_empty() => { - component_id_patterns.insert(Pattern::InputPattern(pattern.clone(), found.into_iter() - .filter_map(|p| glob::Pattern::new(&p).ok()).collect::>())); - } - _ => { - debug!(message="Input pattern not expanded: no matching components.", ?pattern); - } - } - } - } - - // Loop over all outputs, and connect sinks for the components that match one - // or more patterns. - for (output, control_tx) in outputs.iter() { - match component_id_patterns - .iter() - .filter(|pattern| pattern.matches_glob(&output.output_id.to_string())) - .collect::>() - { - found if !found.is_empty() => { - debug!( - message="Component matched.", - ?output.output_id, ?component_id_patterns, matched = ?found - ); - - // Build a new intermediate buffer pair that we can insert as a sink - // target for the component, and spawn our transformer task which will - // wrap each event payload with the necessary metadata before forwarding - // it to our global tap receiver. - let (tap_buffer_tx, mut tap_buffer_rx) = TopologyBuilder::standalone_memory(TAP_BUFFER_SIZE, WhenFull::DropNewest, &Span::current()).await; - let mut tap_transformer = TapTransformer::new(tx.clone(), output.clone()); - - tokio::spawn(async move { - while let Some(events) = tap_buffer_rx.next().await { - tap_transformer.try_send(events); - } - }); - - // Attempt to connect the sink. - // - // This is necessary because a sink may be reconfigured with the same id - // as a previous, and we are not getting involved in config diffing at - // this point. - let sink_id = Uuid::new_v4().to_string(); - match control_tx - .send(fanout::ControlMessage::Add(ComponentKey::from(sink_id.as_str()), tap_buffer_tx)) - { - Ok(_) => { - debug!( - message = "Sink connected.", ?sink_id, ?output.output_id, - ); - - // Create a sink shutdown trigger to remove the sink - // when matched components change. 
- sinks.entry(output.output_id.component.clone()).or_insert_with(Vec::new).push( - shutdown_trigger(control_tx.clone(), ComponentKey::from(sink_id.as_str())) - ); - } - Err(error) => { - error!( - message = "Couldn't connect sink.", - ?error, - ?output.output_id, - ?sink_id, - ); - } - } - - matched.extend(found.iter().map(|pattern| { - match pattern { - Pattern::OutputPattern(p) => p.to_string(), - Pattern::InputPattern(p, _) => p.to_owned(), - } - })); - } - _ => { - debug!( - message="Component not matched.", ?output.output_id, ?component_id_patterns - ); - } - } - } - - // Notifications to send to the client. - let mut notifications = Vec::new(); - - // Matched notifications. - for pattern in matched.difference(&last_matches) { - notifications.push(send_matched(tx.clone(), pattern.clone()).boxed()); - } - - // Not matched notifications. - for pattern in user_provided_patterns.difference(&matched) { - notifications.push(send_not_matched(tx.clone(), pattern.clone()).boxed()); - } - - // Warnings on invalid matches. - - for pattern in patterns.for_inputs.iter() { - if let Ok(glob) = glob::Pattern::new(pattern) { - let invalid_matches = source_keys.iter().filter(|key| glob.matches(key)).cloned().collect::>(); - if !invalid_matches.is_empty() { - notifications.push(send_invalid_input_pattern_match(tx.clone(), pattern.clone(), invalid_matches).boxed()) - } - } - } - for pattern in patterns.for_outputs.iter() { - if let Ok(glob) = glob::Pattern::new(pattern) { - let invalid_matches = sink_keys.iter().filter(|key| glob.matches(key)).cloned().collect::>(); - if !invalid_matches.is_empty() { - notifications.push(send_invalid_output_pattern_match(tx.clone(), pattern.clone(), invalid_matches).boxed()) - } - } - } - - last_matches = matched; - - // Send all events. If any event returns an error, this means the client - // channel has gone away, so we can break the loop. - if try_join_all(notifications).await.is_err() { - debug!("Couldn't send notification(s); tap gone away."); - break; - } - } - } - } - - debug!(message = "Stopped tap.", outputs_patterns = ?patterns.for_outputs, inputs_patterns = ?patterns.for_inputs); -} - -#[cfg(all( - test, - feature = "sinks-blackhole", - feature = "sources-demo_logs", - feature = "transforms-log_to_metric", - feature = "transforms-remap", -))] -mod tests { - use std::time::Duration; - - use futures::StreamExt; - use tokio::sync::watch; - - use super::*; - use crate::api::schema::events::output::OutputEventsPayload; - use crate::api::schema::events::{create_events_stream, log, metric}; - use crate::config::{Config, OutputId}; - use crate::event::{LogEvent, Metric, MetricKind, MetricValue}; - use crate::sinks::blackhole::BlackholeConfig; - use crate::sources::demo_logs::{DemoLogsConfig, OutputFormat}; - use crate::test_util::{start_topology, trace_init}; - use crate::transforms::log_to_metric::{LogToMetricConfig, MetricConfig, MetricTypeConfig}; - use crate::transforms::remap::RemapConfig; - - #[test] - /// Patterns should accept globbing. - fn matches() { - let patterns = ["ab*", "12?", "xy?"]; - - // Should find. - for id in &["abc", "123", "xyz"] { - assert!(patterns.iter().any(|p| p.to_string().matches_glob(id))); - } - - // Should not find. 
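The notification bookkeeping in the handler above boils down to set differences: patterns newly matched since the previous round yield `matched` notifications, while user-supplied patterns absent from the current round yield `not_matched`. In miniature:

    use std::collections::HashSet;

    fn notify(current: &HashSet<String>, previous: &HashSet<String>, all: &HashSet<String>) {
        for p in current.difference(previous) {
            println!("matched: {p}");
        }
        for p in all.difference(current) {
            println!("not matched: {p}");
        }
    }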
- for id in &["xzy", "ad*", "1234"] { - assert!(!patterns.iter().any(|p| p.to_string().matches_glob(id))); - } - } - - #[tokio::test] - /// A tap sink should match a pattern, receive the correct notifications, - /// and receive events - async fn sink_events() { - let pattern_matched = "tes*"; - let pattern_not_matched = "xyz"; - let id = OutputId::from(&ComponentKey::from("test")); - - let (mut fanout, control_tx) = fanout::Fanout::new(); - let mut outputs = HashMap::new(); - outputs.insert( - TapOutput { - output_id: id.clone(), - component_kind: "source", - component_type: "demo".to_string(), - }, - control_tx, - ); - let tap_resource = TapResource { - outputs, - inputs: HashMap::new(), - source_keys: Vec::new(), - sink_keys: Vec::new(), - removals: HashSet::new(), - }; - - let (watch_tx, watch_rx) = watch::channel(TapResource::default()); - let (sink_tx, mut sink_rx) = tokio_mpsc::channel(10); - - let _controller = TapController::new( - watch_rx, - sink_tx, - TapPatterns::new( - HashSet::from([pattern_matched.to_string(), pattern_not_matched.to_string()]), - HashSet::new(), - ), - ); - - // Add the outputs to trigger a change event. - watch_tx.send(tap_resource).unwrap(); - - // First two events should contain a notification that one pattern matched, and - // one that didn't. - #[allow(clippy::mixed_read_write_in_expression)] - let notifications = vec![sink_rx.recv().await, sink_rx.recv().await]; - - for notification in notifications.into_iter() { - match notification { - Some(TapPayload::Notification(Notification::Matched(matched))) - if matched.pattern == pattern_matched => - { - continue - } - Some(TapPayload::Notification(Notification::NotMatched(not_matched))) - if not_matched.pattern == pattern_not_matched => - { - continue - } - _ => panic!("unexpected payload"), - } - } - - // Send some events down the wire. Waiting until the first notifications are in - // to ensure the event handler has been initialized. 
- let log_event = LogEvent::default(); - let metric_event = Metric::new( - id.to_string(), - MetricKind::Incremental, - MetricValue::Counter { value: 1.0 }, - ); - - fanout - .send(vec![metric_event].into(), None) - .await - .expect("should not fail"); - fanout - .send(vec![log_event].into(), None) - .await - .expect("should not fail"); - - // 3rd payload should be the metric event - assert!(matches!( - sink_rx.recv().await, - Some(TapPayload::Metric(output, _)) if output.output_id == id - )); - - // 4th payload should be the log event - assert!(matches!( - sink_rx.recv().await, - Some(TapPayload::Log(output, _)) if output.output_id == id - )); - } - - fn assert_notification(payload: OutputEventsPayload) -> Notification { - if let OutputEventsPayload::Notification(event_notification) = payload { - event_notification.notification - } else { - panic!("Expected payload to be a Notification") - } - } - - fn assert_log(payload: OutputEventsPayload) -> log::Log { - if let OutputEventsPayload::Log(log) = payload { - log - } else { - panic!("Expected payload to be a Log") - } - } - - fn assert_metric(payload: OutputEventsPayload) -> metric::Metric { - if let OutputEventsPayload::Metric(metric) = payload { - metric - } else { - panic!("Expected payload to be a Metric") - } - } - - #[tokio::test] - async fn integration_test_source_log() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 200, - format: OutputFormat::Json, - ..Default::default() - }, - ); - config.add_sink( - "out", - &["in"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let source_tap_stream = create_events_stream( - topology.watch(), - TapPatterns::new(HashSet::from(["in".to_string()]), HashSet::new()), - 500, - 100, - ); - - let source_tap_events: Vec<_> = source_tap_stream.take(2).collect().await; - - assert_eq!( - assert_notification(source_tap_events[0][0].clone()), - Notification::Matched(Matched::new("in".to_string())) - ); - let _log = assert_log(source_tap_events[1][0].clone()); - } - - #[tokio::test] - async fn integration_test_source_metric() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 200, - format: OutputFormat::Shuffle { - sequence: false, - lines: vec!["1".to_string()], - }, - ..Default::default() - }, - ); - config.add_transform( - "to_metric", - &["in"], - LogToMetricConfig { - metrics: vec![MetricConfig { - field: "message".try_into().expect("Fixed template string"), - name: None, - namespace: None, - tags: None, - metric: MetricTypeConfig::Gauge, - }], - all_metrics: None, - }, - ); - config.add_sink( - "out", - &["to_metric"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let source_tap_stream = create_events_stream( - topology.watch(), - TapPatterns::new(HashSet::from(["to_metric".to_string()]), HashSet::new()), - 500, - 100, - ); - - let source_tap_events: Vec<_> = source_tap_stream.take(2).collect().await; - - assert_eq!( - assert_notification(source_tap_events[0][0].clone()), - Notification::Matched(Matched::new("to_metric".to_string())) - ); - 
assert_metric(source_tap_events[1][0].clone()); - } - - #[tokio::test] - async fn integration_test_transform() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 200, - format: OutputFormat::Json, - ..Default::default() - }, - ); - config.add_transform( - "transform", - &["in"], - RemapConfig { - source: Some("".to_string()), - ..Default::default() - }, - ); - config.add_sink( - "out", - &["transform"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let transform_tap_stream = create_events_stream( - topology.watch(), - TapPatterns::new(HashSet::from(["transform".to_string()]), HashSet::new()), - 500, - 100, - ); - - let transform_tap_events: Vec<_> = transform_tap_stream.take(2).collect().await; - - assert_eq!( - assert_notification(transform_tap_events[0][0].clone()), - Notification::Matched(Matched::new("transform".to_string())) - ); - let _log = assert_log(transform_tap_events[1][0].clone()); - } - - #[tokio::test] - async fn integration_test_transform_input() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 200, - format: OutputFormat::Shuffle { - sequence: false, - lines: vec!["test".to_string()], - }, - ..Default::default() - }, - ); - config.add_transform( - "transform", - &["in"], - RemapConfig { - source: Some(".message = \"new message\"".to_string()), - ..Default::default() - }, - ); - config.add_sink( - "out", - &["in"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let tap_stream = create_events_stream( - topology.watch(), - TapPatterns::new( - HashSet::new(), - HashSet::from(["transform".to_string(), "in".to_string()]), - ), - 500, - 100, - ); - - let tap_events: Vec<_> = tap_stream.take(4).collect().await; - - let notifications = [ - assert_notification(tap_events[0][0].clone()), - assert_notification(tap_events[1][0].clone()), - assert_notification(tap_events[2][0].clone()), - ]; - assert!(notifications - .iter() - .any(|n| *n == Notification::Matched(Matched::new("transform".to_string())))); - // "in" is not matched since it corresponds to a source - assert!(notifications - .iter() - .any(|n| *n == Notification::NotMatched(NotMatched::new("in".to_string())))); - // "in" generates an invalid match notification to warn against an - // attempt to tap the input of a source - assert!(notifications.iter().any(|n| *n - == Notification::InvalidMatch(InvalidMatch::new("[tap] Warning: source inputs cannot be tapped. 
Input pattern 'in' matches sources [\"in\"]".to_string(), "in".to_string(), vec!["in".to_string()])))); - - assert_eq!( - assert_log(tap_events[3][0].clone()) - .get_message() - .unwrap_or_default(), - "test" - ); - } - - #[tokio::test] - async fn integration_test_sink() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 200, - format: OutputFormat::Shuffle { - sequence: false, - lines: vec!["test".to_string()], - }, - ..Default::default() - }, - ); - config.add_transform( - "transform", - &["in"], - RemapConfig { - source: Some(".message = \"new message\"".to_string()), - ..Default::default() - }, - ); - config.add_sink( - "out", - &["transform"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let tap_stream = create_events_stream( - topology.watch(), - TapPatterns::new(HashSet::new(), HashSet::from(["out".to_string()])), - 500, - 100, - ); - - let tap_events: Vec<_> = tap_stream.take(2).collect().await; - - assert_eq!( - assert_notification(tap_events[0][0].clone()), - Notification::Matched(Matched::new("out".to_string())) - ); - assert_eq!( - assert_log(tap_events[1][0].clone()) - .get_message() - .unwrap_or_default(), - "new message" - ); - } - - #[tokio::test] - async fn integration_test_tap_non_default_output() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 200, - format: OutputFormat::Shuffle { - sequence: false, - lines: vec!["test2".to_string()], - }, - ..Default::default() - }, - ); - config.add_transform( - "transform", - &["in"], - RemapConfig { - source: Some("assert_eq!(.message, \"test1\")".to_string()), - drop_on_error: true, - reroute_dropped: true, - ..Default::default() - }, - ); - config.add_sink( - "out", - &["transform"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let transform_tap_remap_dropped_stream = create_events_stream( - topology.watch(), - TapPatterns::new( - HashSet::from(["transform.dropped".to_string()]), - HashSet::new(), - ), - 500, - 100, - ); - - let transform_tap_events: Vec<_> = - transform_tap_remap_dropped_stream.take(2).collect().await; - - assert_eq!( - assert_notification(transform_tap_events[0][0].clone()), - Notification::Matched(Matched::new("transform.dropped".to_string())) - ); - assert_eq!( - assert_log(transform_tap_events[1][0].clone()) - .get_message() - .unwrap_or_default(), - "test2" - ); - } - - #[tokio::test] - async fn integration_test_tap_multiple_outputs() { - trace_init(); - - let mut config = Config::builder(); - config.add_source( - "in-test1", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 1, - format: OutputFormat::Shuffle { - sequence: false, - lines: vec!["test1".to_string()], - }, - ..Default::default() - }, - ); - config.add_source( - "in-test2", - DemoLogsConfig { - interval: Duration::from_secs_f64(0.01), - count: 1, - format: OutputFormat::Shuffle { - sequence: false, - lines: vec!["test2".to_string()], - }, - ..Default::default() - }, - ); - config.add_transform( - "transform", - &["in*"], - RemapConfig { - source: Some("assert_eq!(.message, 
\"test1\")".to_string()), - drop_on_error: true, - reroute_dropped: true, - ..Default::default() - }, - ); - config.add_sink( - "out", - &["transform"], - BlackholeConfig { - print_interval_secs: Duration::from_secs(1), - rate: None, - acknowledgements: Default::default(), - }, - ); - - let (topology, _) = start_topology(config.build().unwrap(), false).await; - - let mut transform_tap_all_outputs_stream = create_events_stream( - topology.watch(), - TapPatterns::new(HashSet::from(["transform*".to_string()]), HashSet::new()), - 500, - 100, - ); - - let transform_tap_notifications = transform_tap_all_outputs_stream.next().await.unwrap(); - assert_eq!( - assert_notification(transform_tap_notifications[0].clone()), - Notification::Matched(Matched::new("transform*".to_string())) - ); - - let mut default_output_found = false; - let mut dropped_output_found = false; - for _ in 0..2 { - if default_output_found && dropped_output_found { - break; - } - - match transform_tap_all_outputs_stream.next().await { - Some(tap_events) => { - if !default_output_found { - default_output_found = tap_events - .iter() - .map(|payload| assert_log(payload.clone())) - .any(|log| log.get_message().unwrap_or_default() == "test1"); - } - if !dropped_output_found { - dropped_output_found = tap_events - .iter() - .map(|payload| assert_log(payload.clone())) - .any(|log| log.get_message().unwrap_or_default() == "test2"); - } - } - None => break, - } - } - - assert!(default_output_found && dropped_output_found); - } -} diff --git a/src/api/tests.rs b/src/api/tests.rs index 9abe2c6277e7c..b9f06ac3ed20e 100644 --- a/src/api/tests.rs +++ b/src/api/tests.rs @@ -1,34 +1,22 @@ -use futures::SinkExt; -use futures::StreamExt; -use tokio::sync::watch; +use std::collections::{HashMap, HashSet}; +use std::time::Duration; -use super::*; -use crate::api::schema::events::notification::{EventNotification, EventNotificationType}; use crate::api::schema::events::output::OutputEventsPayload; use crate::api::schema::events::{create_events_stream, log, metric}; -use crate::config::Config; -use crate::event::{Metric, MetricKind, MetricValue}; +use crate::config::{Config, OutputId}; +use crate::event::{LogEvent, Metric, MetricKind, MetricValue}; use crate::sinks::blackhole::BlackholeConfig; use crate::sources::demo_logs::{DemoLogsConfig, OutputFormat}; -use crate::test_util::start_topology; -use crate::transforms::log_to_metric::{GaugeConfig, LogToMetricConfig, MetricConfig}; +use crate::test_util::{start_topology, trace_init}; +use crate::transforms::log_to_metric::{LogToMetricConfig, MetricConfig, MetricTypeConfig}; use crate::transforms::remap::RemapConfig; - -#[test] -/// Patterns should accept globbing. -fn matches() { - let patterns = ["ab*", "12?", "xy?"]; - - // Should find. - for id in &["abc", "123", "xyz"] { - assert!(patterns.iter().any(|p| p.to_string().matches_glob(id))); - } - - // Should not find. 
- for id in &["xzy", "ad*", "1234"] { - assert!(!patterns.iter().any(|p| p.to_string().matches_glob(id))); - } -} +use futures::StreamExt; +use tokio::sync::{mpsc, watch}; +use vector_lib::config::ComponentKey; +use vector_lib::fanout; +use vector_lib::tap::controller::{TapController, TapPatterns, TapPayload}; +use vector_lib::tap::notification::{InvalidMatch, Matched, NotMatched, Notification}; +use vector_lib::tap::topology::{TapOutput, TapResource}; #[tokio::test] /// A tap sink should match a pattern, receive the correct notifications, @@ -40,34 +28,51 @@ async fn sink_events() { let (mut fanout, control_tx) = fanout::Fanout::new(); let mut outputs = HashMap::new(); - outputs.insert(id.clone(), control_tx); + outputs.insert( + TapOutput { + output_id: id.clone(), + component_kind: "source", + component_type: "demo".to_string(), + }, + control_tx, + ); + let tap_resource = TapResource { + outputs, + inputs: HashMap::new(), + source_keys: Vec::new(), + sink_keys: Vec::new(), + removals: HashSet::new(), + }; - let (watch_tx, watch_rx) = watch::channel(HashMap::new()); - let (sink_tx, mut sink_rx) = tokio_mpsc::channel(10); + let (watch_tx, watch_rx) = watch::channel(TapResource::default()); + let (sink_tx, mut sink_rx) = mpsc::channel(10); let _controller = TapController::new( watch_rx, sink_tx, - &[pattern_matched.to_string(), pattern_not_matched.to_string()], + TapPatterns::new( + HashSet::from([pattern_matched.to_string(), pattern_not_matched.to_string()]), + HashSet::new(), + ), ); // Add the outputs to trigger a change event. - watch_tx.send(outputs).unwrap(); + watch_tx.send(tap_resource).unwrap(); // First two events should contain a notification that one pattern matched, and // one that didn't. - #[allow(clippy::eval_order_dependence)] + #[allow(clippy::mixed_read_write_in_expression)] let notifications = vec![sink_rx.recv().await, sink_rx.recv().await]; for notification in notifications.into_iter() { match notification { - Some(TapPayload::Notification(returned_id, TapNotification::Matched)) - if returned_id == pattern_matched => + Some(TapPayload::Notification(Notification::Matched(matched))) + if matched.pattern == pattern_matched => { continue } - Some(TapPayload::Notification(returned_id, TapNotification::NotMatched)) - if returned_id == pattern_not_matched => + Some(TapPayload::Notification(Notification::NotMatched(not_matched))) + if not_matched.pattern == pattern_not_matched => { continue } @@ -77,32 +82,38 @@ async fn sink_events() { // Send some events down the wire. Waiting until the first notifications are in // to ensure the event handler has been initialized. 
- let log_event = Event::from(LogEvent::default()); - let metric_event = Event::from(Metric::new( + let log_event = LogEvent::default(); + let metric_event = Metric::new( id.to_string(), MetricKind::Incremental, MetricValue::Counter { value: 1.0 }, - )); + ); - _ = fanout.send(metric_event).await.unwrap(); - _ = fanout.send(log_event).await.unwrap(); + fanout + .send(vec![metric_event].into(), None) + .await + .expect("should not fail"); + fanout + .send(vec![log_event].into(), None) + .await + .expect("should not fail"); // 3rd payload should be the metric event assert!(matches!( - sink_rx.recv().await, - Some(TapPayload::Metric(returned_id, _)) if returned_id == id + sink_rx.recv().await, + Some(TapPayload::Metric(output, _)) if output.output_id == id )); // 4th payload should be the log event assert!(matches!( - sink_rx.recv().await, - Some(TapPayload::Log(returned_id, _)) if returned_id == id + sink_rx.recv().await, + Some(TapPayload::Log(output, _)) if output.output_id == id )); } -fn assert_notification(payload: OutputEventsPayload) -> EventNotification { - if let OutputEventsPayload::Notification(notification) = payload { - notification +fn assert_notification(payload: OutputEventsPayload) -> Notification { + if let OutputEventsPayload::Notification(event_notification) = payload { + event_notification.notification } else { panic!("Expected payload to be a Notification") } @@ -126,11 +137,13 @@ fn assert_metric(payload: OutputEventsPayload) -> metric::Metric { #[tokio::test] async fn integration_test_source_log() { + trace_init(); + let mut config = Config::builder(); config.add_source( "in", DemoLogsConfig { - interval: 0.01, + interval: Duration::from_secs_f64(0.01), count: 200, format: OutputFormat::Json, ..Default::default() @@ -140,32 +153,39 @@ async fn integration_test_source_log() { "out", &["in"], BlackholeConfig { - print_interval_secs: 1, + print_interval_secs: Duration::from_secs(1), rate: None, + acknowledgements: Default::default(), }, ); - let (topology, _crash) = start_topology(config.build().unwrap(), false).await; + let (topology, _) = start_topology(config.build().unwrap(), false).await; - let source_tap_stream = - create_events_stream(topology.watch(), vec!["in".to_string()], 500, 100); + let source_tap_stream = create_events_stream( + topology.watch(), + TapPatterns::new(HashSet::from(["in".to_string()]), HashSet::new()), + 500, + 100, + ); let source_tap_events: Vec<_> = source_tap_stream.take(2).collect().await; assert_eq!( assert_notification(source_tap_events[0][0].clone()), - EventNotification::new("in".to_string(), EventNotificationType::Matched) + Notification::Matched(Matched::new("in".to_string())) ); let _log = assert_log(source_tap_events[1][0].clone()); } #[tokio::test] async fn integration_test_source_metric() { + trace_init(); + let mut config = Config::builder(); config.add_source( "in", DemoLogsConfig { - interval: 0.01, + interval: Duration::from_secs_f64(0.01), count: 200, format: OutputFormat::Shuffle { sequence: false, @@ -178,44 +198,53 @@ async fn integration_test_source_metric() { "to_metric", &["in"], LogToMetricConfig { - metrics: vec![MetricConfig::Gauge(GaugeConfig { - field: "message".to_string(), + metrics: vec![MetricConfig { + field: "message".try_into().expect("Fixed template string"), name: None, namespace: None, tags: None, - })], + metric: MetricTypeConfig::Gauge, + }], + all_metrics: None, }, ); config.add_sink( "out", &["to_metric"], BlackholeConfig { - print_interval_secs: 1, + print_interval_secs: Duration::from_secs(1), 
rate: None, + acknowledgements: Default::default(), }, ); - let (topology, _crash) = start_topology(config.build().unwrap(), false).await; + let (topology, _) = start_topology(config.build().unwrap(), false).await; - let source_tap_stream = - create_events_stream(topology.watch(), vec!["to_metric".to_string()], 500, 100); + let source_tap_stream = create_events_stream( + topology.watch(), + TapPatterns::new(HashSet::from(["to_metric".to_string()]), HashSet::new()), + 500, + 100, + ); let source_tap_events: Vec<_> = source_tap_stream.take(2).collect().await; assert_eq!( assert_notification(source_tap_events[0][0].clone()), - EventNotification::new("to_metric".to_string(), EventNotificationType::Matched) + Notification::Matched(Matched::new("to_metric".to_string())) ); assert_metric(source_tap_events[1][0].clone()); } #[tokio::test] async fn integration_test_transform() { + trace_init(); + let mut config = Config::builder(); config.add_source( "in", DemoLogsConfig { - interval: 0.01, + interval: Duration::from_secs_f64(0.01), count: 200, format: OutputFormat::Json, ..Default::default() @@ -233,32 +262,171 @@ async fn integration_test_transform() { "out", &["transform"], BlackholeConfig { - print_interval_secs: 1, + print_interval_secs: Duration::from_secs(1), rate: None, + acknowledgements: Default::default(), }, ); - let (topology, _crash) = start_topology(config.build().unwrap(), false).await; + let (topology, _) = start_topology(config.build().unwrap(), false).await; - let transform_tap_stream = - create_events_stream(topology.watch(), vec!["transform".to_string()], 500, 100); + let transform_tap_stream = create_events_stream( + topology.watch(), + TapPatterns::new(HashSet::from(["transform".to_string()]), HashSet::new()), + 500, + 100, + ); let transform_tap_events: Vec<_> = transform_tap_stream.take(2).collect().await; assert_eq!( assert_notification(transform_tap_events[0][0].clone()), - EventNotification::new("transform".to_string(), EventNotificationType::Matched) + Notification::Matched(Matched::new("transform".to_string())) ); let _log = assert_log(transform_tap_events[1][0].clone()); } +#[tokio::test] +async fn integration_test_transform_input() { + trace_init(); + + let mut config = Config::builder(); + config.add_source( + "in", + DemoLogsConfig { + interval: Duration::from_secs_f64(0.01), + count: 200, + format: OutputFormat::Shuffle { + sequence: false, + lines: vec!["test".to_string()], + }, + ..Default::default() + }, + ); + config.add_transform( + "transform", + &["in"], + RemapConfig { + source: Some(".message = \"new message\"".to_string()), + ..Default::default() + }, + ); + config.add_sink( + "out", + &["in"], + BlackholeConfig { + print_interval_secs: Duration::from_secs(1), + rate: None, + acknowledgements: Default::default(), + }, + ); + + let (topology, _) = start_topology(config.build().unwrap(), false).await; + + let tap_stream = create_events_stream( + topology.watch(), + TapPatterns::new( + HashSet::new(), + HashSet::from(["transform".to_string(), "in".to_string()]), + ), + 500, + 100, + ); + + let tap_events: Vec<_> = tap_stream.take(4).collect().await; + + let notifications = [ + assert_notification(tap_events[0][0].clone()), + assert_notification(tap_events[1][0].clone()), + assert_notification(tap_events[2][0].clone()), + ]; + assert!(notifications + .iter() + .any(|n| *n == Notification::Matched(Matched::new("transform".to_string())))); + // "in" is not matched since it corresponds to a source + assert!(notifications + .iter() + .any(|n| *n == 
Notification::NotMatched(NotMatched::new("in".to_string())))); + // "in" generates an invalid match notification to warn against an + // attempt to tap the input of a source + assert!(notifications.iter().any(|n| *n + == Notification::InvalidMatch(InvalidMatch::new("[tap] Warning: source inputs cannot be tapped. Input pattern 'in' matches sources [\"in\"]".to_string(), "in".to_string(), vec!["in".to_string()])))); + + assert_eq!( + assert_log(tap_events[3][0].clone()) + .get_message() + .unwrap_or_default(), + "test" + ); +} + +#[tokio::test] +async fn integration_test_sink() { + trace_init(); + + let mut config = Config::builder(); + config.add_source( + "in", + DemoLogsConfig { + interval: Duration::from_secs_f64(0.01), + count: 200, + format: OutputFormat::Shuffle { + sequence: false, + lines: vec!["test".to_string()], + }, + ..Default::default() + }, + ); + config.add_transform( + "transform", + &["in"], + RemapConfig { + source: Some(".message = \"new message\"".to_string()), + ..Default::default() + }, + ); + config.add_sink( + "out", + &["transform"], + BlackholeConfig { + print_interval_secs: Duration::from_secs(1), + rate: None, + acknowledgements: Default::default(), + }, + ); + + let (topology, _) = start_topology(config.build().unwrap(), false).await; + + let tap_stream = create_events_stream( + topology.watch(), + TapPatterns::new(HashSet::new(), HashSet::from(["out".to_string()])), + 500, + 100, + ); + + let tap_events: Vec<_> = tap_stream.take(2).collect().await; + + assert_eq!( + assert_notification(tap_events[0][0].clone()), + Notification::Matched(Matched::new("out".to_string())) + ); + assert_eq!( + assert_log(tap_events[1][0].clone()) + .get_message() + .unwrap_or_default(), + "new message" + ); +} + #[tokio::test] async fn integration_test_tap_non_default_output() { + trace_init(); + let mut config = Config::builder(); config.add_source( "in", DemoLogsConfig { - interval: 0.01, + interval: Duration::from_secs_f64(0.01), count: 200, format: OutputFormat::Shuffle { sequence: false, @@ -281,16 +449,20 @@ async fn integration_test_tap_non_default_output() { "out", &["transform"], BlackholeConfig { - print_interval_secs: 1, + print_interval_secs: Duration::from_secs(1), rate: None, + acknowledgements: Default::default(), }, ); - let (topology, _crash) = start_topology(config.build().unwrap(), false).await; + let (topology, _) = start_topology(config.build().unwrap(), false).await; let transform_tap_remap_dropped_stream = create_events_stream( topology.watch(), - vec!["transform.dropped".to_string()], + TapPatterns::new( + HashSet::from(["transform.dropped".to_string()]), + HashSet::new(), + ), 500, 100, ); @@ -299,10 +471,7 @@ async fn integration_test_tap_non_default_output() { assert_eq!( assert_notification(transform_tap_events[0][0].clone()), - EventNotification::new( - "transform.dropped".to_string(), - EventNotificationType::Matched - ) + Notification::Matched(Matched::new("transform.dropped".to_string())) ); assert_eq!( assert_log(transform_tap_events[1][0].clone()) @@ -314,11 +483,13 @@ async fn integration_test_tap_non_default_output() { #[tokio::test] async fn integration_test_tap_multiple_outputs() { + trace_init(); + let mut config = Config::builder(); config.add_source( "in-test1", DemoLogsConfig { - interval: 0.01, + interval: Duration::from_secs_f64(0.01), count: 1, format: OutputFormat::Shuffle { sequence: false, @@ -330,7 +501,7 @@ async fn integration_test_tap_multiple_outputs() { config.add_source( "in-test2", DemoLogsConfig { - interval: 0.01, + 
interval: Duration::from_secs_f64(0.01), count: 1, format: OutputFormat::Shuffle { sequence: false, @@ -353,20 +524,25 @@ async fn integration_test_tap_multiple_outputs() { "out", &["transform"], BlackholeConfig { - print_interval_secs: 1, + print_interval_secs: Duration::from_secs(1), rate: None, + acknowledgements: Default::default(), }, ); - let (topology, _crash) = start_topology(config.build().unwrap(), false).await; + let (topology, _) = start_topology(config.build().unwrap(), false).await; - let mut transform_tap_all_outputs_stream = - create_events_stream(topology.watch(), vec!["transform*".to_string()], 500, 100); + let mut transform_tap_all_outputs_stream = create_events_stream( + topology.watch(), + TapPatterns::new(HashSet::from(["transform*".to_string()]), HashSet::new()), + 500, + 100, + ); let transform_tap_notifications = transform_tap_all_outputs_stream.next().await.unwrap(); assert_eq!( assert_notification(transform_tap_notifications[0].clone()), - EventNotification::new("transform*".to_string(), EventNotificationType::Matched) + Notification::Matched(Matched::new("transform*".to_string())) ); let mut default_output_found = false; diff --git a/src/app.rs b/src/app.rs index 495e3d743c046..2095ce57fbbb6 100644 --- a/src/app.rs +++ b/src/app.rs @@ -1,26 +1,24 @@ #![allow(missing_docs)] -use std::{num::NonZeroUsize, path::PathBuf, process::ExitStatus, time::Duration}; +use std::{ + num::{NonZeroU64, NonZeroUsize}, + path::PathBuf, + process::ExitStatus, + sync::atomic::{AtomicUsize, Ordering}, + time::Duration, +}; use exitcode::ExitCode; use futures::StreamExt; -#[cfg(feature = "enterprise")] -use futures_util::future::BoxFuture; -use once_cell::race::OnceNonZeroUsize; use tokio::runtime::{self, Runtime}; use tokio::sync::{broadcast::error::RecvError, MutexGuard}; use tokio_stream::wrappers::UnboundedReceiverStream; -#[cfg(feature = "enterprise")] -use crate::config::enterprise::{ - attach_enterprise_components, report_configuration, EnterpriseError, EnterpriseMetadata, - EnterpriseReporter, -}; use crate::extra_context::ExtraContext; #[cfg(feature = "api")] use crate::{api, internal_events::ApiStarted}; use crate::{ - cli::{handle_config_errors, LogFormat, Opts, RootOpts}, - config::{self, Config, ConfigPath}, + cli::{handle_config_errors, LogFormat, Opts, RootOpts, WatchConfigMethod}, + config::{self, ComponentConfig, Config, ConfigPath}, heartbeat, internal_events::{VectorConfigLoadError, VectorQuit, VectorStarted, VectorStopped}, signal::{SignalHandler, SignalPair, SignalRx, SignalTo}, @@ -37,7 +35,11 @@ use std::os::unix::process::ExitStatusExt; use std::os::windows::process::ExitStatusExt; use tokio::runtime::Handle; -pub static WORKER_THREADS: OnceNonZeroUsize = OnceNonZeroUsize::new(); +static WORKER_THREADS: AtomicUsize = AtomicUsize::new(0); + +pub fn worker_threads() -> Option { + NonZeroUsize::new(WORKER_THREADS.load(Ordering::Relaxed)) +} pub struct ApplicationConfig { pub config_paths: Vec, @@ -46,8 +48,6 @@ pub struct ApplicationConfig { pub internal_topologies: Vec, #[cfg(feature = "api")] pub api: config::api::Options, - #[cfg(feature = "enterprise")] - pub enterprise: Option>>, pub extra_context: ExtraContext, } @@ -68,9 +68,18 @@ impl ApplicationConfig { let graceful_shutdown_duration = (!opts.no_graceful_shutdown_limit) .then(|| Duration::from_secs(u64::from(opts.graceful_shutdown_limit_secs))); + let watcher_conf = if opts.watch_config { + Some(watcher_config( + opts.watch_config_method, + opts.watch_config_poll_interval_seconds, + )) + } else { + None + 
}; + let config = load_configs( &config_paths, - opts.watch_config, + watcher_conf, opts.require_healthy, opts.allow_empty_config, graceful_shutdown_duration, @@ -86,14 +95,6 @@ impl ApplicationConfig { config: Config, extra_context: ExtraContext, ) -> Result { - // This is ugly, but needed to allow `config` to be mutable for building the enterprise - // features, but also avoid a "does not need to be mutable" warning when the enterprise - // feature is not enabled. - #[cfg(feature = "enterprise")] - let mut config = config; - #[cfg(feature = "enterprise")] - let enterprise = build_enterprise(&mut config, config_paths.clone())?; - #[cfg(feature = "api")] let api = config.api; @@ -109,8 +110,6 @@ impl ApplicationConfig { internal_topologies: Vec::new(), #[cfg(feature = "api")] api, - #[cfg(feature = "enterprise")] - enterprise, extra_context, }) } @@ -222,7 +221,7 @@ impl Application { let config = runtime.block_on(ApplicationConfig::from_opts( &opts.root, &mut signals.handler, - extra_context.clone(), + extra_context, ))?; Ok(( @@ -255,8 +254,6 @@ impl Application { topology: config.topology, config_paths: config.config_paths.clone(), require_healthy: root_opts.require_healthy, - #[cfg(feature = "enterprise")] - enterprise_reporter: config.enterprise, extra_context: config.extra_context, }); @@ -339,6 +336,27 @@ async fn handle_signal( allow_empty_config: bool, ) -> Option { match signal { + Ok(SignalTo::ReloadComponents(components_to_reload)) => { + let mut topology_controller = topology_controller.lock().await; + topology_controller + .topology + .extend_reload_set(components_to_reload); + + // Reload paths + if let Some(paths) = config::process_paths(config_paths) { + topology_controller.config_paths = paths; + } + + // Reload config + let new_config = config::load_from_paths_with_provider_and_secrets( + &topology_controller.config_paths, + signal_handler, + allow_empty_config, + ) + .await; + + reload_config_from_result(topology_controller, new_config).await + } Ok(SignalTo::ReloadFromConfigBuilder(config_builder)) => { let topology_controller = topology_controller.lock().await; reload_config_from_result(topology_controller, config_builder.build()).await @@ -455,12 +473,11 @@ impl FinishedApplication { fn get_log_levels(default: &str) -> String { std::env::var("VECTOR_LOG") .or_else(|_| { - std::env::var("LOG").map(|log| { + std::env::var("LOG").inspect(|_log| { warn!( message = "DEPRECATED: Use of $LOG is deprecated. Please use $VECTOR_LOG instead." ); - log }) }) .unwrap_or_else(|_| default.into()) @@ -472,14 +489,14 @@ pub fn build_runtime(threads: Option, thread_name: &str) -> Result, thread_name: &str) -> Result, require_healthy: Option, allow_empty_config: bool, graceful_shutdown_duration: Option, @@ -495,25 +512,16 @@ pub async fn load_configs( ) -> Result { let config_paths = config::process_paths(config_paths).ok_or(exitcode::CONFIG)?; - if watch_config { - // Start listening for config changes immediately. - config::watcher::spawn_thread(config_paths.iter().map(Into::into), None).map_err( - |error| { - error!(message = "Unable to start config watcher.", %error); - exitcode::CONFIG - }, - )?; - } + let watched_paths = config_paths + .iter() + .map(<&PathBuf>::from) + .collect::>(); info!( message = "Loading configs.", - paths = ?config_paths.iter().map(<&PathBuf>::from).collect::>() + paths = ?watched_paths ); - // config::init_log_schema should be called before initializing sources. 
- #[cfg(not(feature = "enterprise-tests"))] - config::init_log_schema(&config_paths, true).map_err(handle_config_errors)?; - let mut config = config::load_from_paths_with_provider_and_secrets( &config_paths, signal_handler, @@ -522,6 +530,47 @@ pub async fn load_configs( .await .map_err(handle_config_errors)?; + let mut watched_component_paths = Vec::new(); + + if let Some(watcher_conf) = watcher_conf { + for (name, transform) in config.transforms() { + let files = transform.inner.files_to_watch(); + let component_config = + ComponentConfig::new(files.into_iter().cloned().collect(), name.clone()); + watched_component_paths.push(component_config); + } + + for (name, sink) in config.sinks() { + let files = sink.inner.files_to_watch(); + let component_config = + ComponentConfig::new(files.into_iter().cloned().collect(), name.clone()); + watched_component_paths.push(component_config); + } + + info!( + message = "Starting watcher.", + paths = ?watched_paths + ); + info!( + message = "Components to watch.", + paths = ?watched_component_paths + ); + + // Start listening for config changes. + config::watcher::spawn_thread( + watcher_conf, + signal_handler.clone_tx(), + watched_paths, + watched_component_paths, + None, + ) + .map_err(|error| { + error!(message = "Unable to start config watcher.", %error); + exitcode::CONFIG + })?; + } + + config::init_log_schema(config.global.log_schema.clone(), true); config::init_telemetry(config.global.telemetry.clone(), true); if !config.healthchecks.enabled { @@ -533,41 +582,6 @@ pub async fn load_configs( Ok(config) } -#[cfg(feature = "enterprise")] -// Enable enterprise features, if applicable. -fn build_enterprise( - config: &mut Config, - config_paths: Vec, -) -> Result>>, ExitCode> { - use crate::ENTERPRISE_ENABLED; - - ENTERPRISE_ENABLED - .set( - config - .enterprise - .as_ref() - .map(|e| e.enabled) - .unwrap_or_default(), - ) - .expect("double initialization of enterprise enabled flag"); - - match EnterpriseMetadata::try_from(&*config) { - Ok(metadata) => { - let enterprise = EnterpriseReporter::new(); - - attach_enterprise_components(config, &metadata); - enterprise.send(report_configuration(config_paths, metadata)); - - Ok(Some(enterprise)) - } - Err(EnterpriseError::MissingApiKey) => { - error!("Enterprise configuration incomplete: missing API key."); - Err(exitcode::CONFIG) - } - Err(_) => Ok(None), - } -} - pub fn init_logging(color: bool, format: LogFormat, log_level: &str, rate: u64) { let level = get_log_levels(log_level); let json = match format { @@ -576,9 +590,16 @@ pub fn init_logging(color: bool, format: LogFormat, log_level: &str, rate: u64) }; trace::init(color, json, &level, rate); - debug!( - message = "Internal log rate limit configured.", - internal_log_rate_secs = rate, - ); + debug!(message = "Internal log rate limit configured.",); info!(message = "Log level is enabled.", level = ?level); } + +pub fn watcher_config( + method: WatchConfigMethod, + interval: NonZeroU64, +) -> config::watcher::WatcherConfig { + match method { + WatchConfigMethod::Recommended => config::watcher::WatcherConfig::RecommendedWatcher, + WatchConfigMethod::Poll => config::watcher::WatcherConfig::PollWatcher(interval.into()), + } +} diff --git a/src/async_read.rs b/src/async_read.rs index 00a3c53024684..6dc0f0f7d83dd 100644 --- a/src/async_read.rs +++ b/src/async_read.rs @@ -39,7 +39,7 @@ impl AllowReadUntil { &self.reader } - pub fn get_mut(&mut self) -> &mut S { + pub const fn get_mut(&mut self) -> &mut S { &mut self.reader } } diff --git 
a/src/aws/auth.rs b/src/aws/auth.rs index b975c2a51d02c..1ed174096dfa1 100644 --- a/src/aws/auth.rs +++ b/src/aws/auth.rs @@ -2,17 +2,12 @@ use std::time::Duration; use aws_config::{ - default_provider::credentials::DefaultCredentialsChain, - identity::IdentityCache, - imds, - profile::{ - profile_file::{ProfileFileKind, ProfileFiles}, - ProfileFileCredentialsProvider, - }, - provider_config::ProviderConfig, + default_provider::credentials::DefaultCredentialsChain, identity::IdentityCache, imds, + profile::ProfileFileCredentialsProvider, provider_config::ProviderConfig, sts::AssumeRoleProviderBuilder, }; use aws_credential_types::{provider::SharedCredentialsProvider, Credentials}; +use aws_runtime::env_config::file::{EnvConfigFileKind, EnvConfigFiles}; use aws_smithy_async::time::SystemTimeSource; use aws_smithy_runtime_api::client::identity::SharedIdentityCache; use aws_types::{region::Region, SdkConfig}; @@ -28,7 +23,7 @@ const DEFAULT_PROFILE_NAME: &str = "default"; /// IMDS Client Configuration for authenticating with AWS. #[serde_as] #[configurable_component] -#[derive(Copy, Clone, Debug, Derivative)] +#[derive(Copy, Clone, Debug, Derivative, Eq, PartialEq)] #[derivative(Default)] #[serde(deny_unknown_fields)] pub struct ImdsAuthentication { @@ -62,7 +57,7 @@ const fn default_timeout() -> Duration { /// Configuration of the authentication strategy for interacting with AWS services. #[configurable_component] -#[derive(Clone, Debug, Derivative)] +#[derive(Clone, Debug, Derivative, Eq, PartialEq)] #[derivative(Default)] #[serde(deny_unknown_fields, untagged)] pub enum AwsAuthentication { @@ -76,6 +71,11 @@ pub enum AwsAuthentication { #[configurable(metadata(docs::examples = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"))] secret_access_key: SensitiveString, + /// The AWS session token. + /// See [AWS temporary credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_use-resources.html) + #[configurable(metadata(docs::examples = "AQoDYXdz...AQoDYXdz..."))] + session_token: Option, + /// The ARN of an [IAM role][iam_role] to assume. /// /// [iam_role]: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles.html @@ -96,6 +96,15 @@ pub enum AwsAuthentication { /// [aws_region]: https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints #[configurable(metadata(docs::examples = "us-west-2"))] region: Option, + + /// The optional [RoleSessionName][role_session_name] is a unique session identifier for your assumed role. + /// + /// Should be unique per principal or reason. + /// If not set, the session name is autogenerated like assume-role-provider-1736428351340 + /// + /// [role_session_name]: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html + #[configurable(metadata(docs::examples = "vector-indexer-role"))] + session_name: Option, }, /// Authenticate using credentials stored in a file. @@ -114,6 +123,15 @@ pub enum AwsAuthentication { #[configurable(metadata(docs::examples = "develop"))] #[serde(default = "default_profile")] profile: String, + + /// The [AWS region][aws_region] to send STS requests to. + /// + /// If not set, this defaults to the configured region + /// for the service itself. + /// + /// [aws_region]: https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints + #[configurable(metadata(docs::examples = "us-west-2"))] + region: Option, }, /// Assume the given role ARN. 
@@ -150,6 +168,15 @@ pub enum AwsAuthentication { /// [aws_region]: https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints #[configurable(metadata(docs::examples = "us-west-2"))] region: Option, + + /// The optional [RoleSessionName][role_session_name] is a unique session identifier for your assumed role. + /// + /// Should be unique per principal or reason. + /// If not set, the session name is autogenerated like assume-role-provider-1736428351340 + /// + /// [role_session_name]: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html + #[configurable(metadata(docs::examples = "vector-indexer-role"))] + session_name: Option, }, /// Default authentication strategy which tries a variety of substrategies in sequential order. @@ -206,12 +233,44 @@ impl AwsAuthentication { } } + /// Create the AssumeRoleProviderBuilder, ensuring we create the HTTP client with + /// the correct proxy and TLS options. + fn assume_role_provider_builder( + proxy: &ProxyConfig, + tls_options: Option<&TlsConfig>, + region: &Region, + assume_role: &str, + external_id: Option<&str>, + session_name: Option<&str>, + ) -> crate::Result { + let connector = super::connector(proxy, tls_options)?; + let config = SdkConfig::builder() + .http_client(connector) + .region(region.clone()) + .time_source(SystemTimeSource::new()) + .build(); + + let mut builder = AssumeRoleProviderBuilder::new(assume_role) + .region(region.clone()) + .configure(&config); + + if let Some(external_id) = external_id { + builder = builder.external_id(external_id) + } + + if let Some(session_name) = session_name { + builder = builder.session_name(session_name) + } + + Ok(builder) + } + /// Returns the provider for the credentials based on the authentication mechanism chosen. pub async fn credentials_provider( &self, service_region: Region, proxy: &ProxyConfig, - tls_options: &Option, + tls_options: Option<&TlsConfig>, ) -> crate::Result { match self { Self::AccessKey { @@ -220,20 +279,24 @@ impl AwsAuthentication { assume_role, external_id, region, + session_name, + session_token, } => { let provider = SharedCredentialsProvider::new(Credentials::from_keys( access_key_id.inner(), secret_access_key.inner(), - None, + session_token.clone().map(|v| v.inner().into()), )); if let Some(assume_role) = assume_role { let auth_region = region.clone().map(Region::new).unwrap_or(service_region); - let mut builder = - AssumeRoleProviderBuilder::new(assume_role).region(auth_region); - - if let Some(external_id) = external_id { - builder = builder.external_id(external_id) - } + let builder = Self::assume_role_provider_builder( + proxy, + tls_options, + &auth_region, + assume_role, + external_id.as_deref(), + session_name.as_deref(), + )?; let provider = builder.build_from_provider(provider).await; @@ -244,15 +307,25 @@ impl AwsAuthentication { AwsAuthentication::File { credentials_file, profile, + region, } => { + let connector = super::connector(proxy, tls_options)?; + // The SDK uses the default profile out of the box, but doesn't provide an optional // type in the builder. We can just hardcode it so that everything works. 
- let profile_files = ProfileFiles::builder() - .with_file(ProfileFileKind::Credentials, credentials_file) + let profile_files = EnvConfigFiles::builder() + .with_file(EnvConfigFileKind::Credentials, credentials_file) .build(); + + let auth_region = region.clone().map(Region::new).unwrap_or(service_region); + let provider_config = ProviderConfig::empty() + .with_region(Option::from(auth_region)) + .with_http_client(connector); + let profile_provider = ProfileFileCredentialsProvider::builder() .profile_files(profile_files) .profile_name(profile) + .configure(&provider_config) .build(); Ok(SharedCredentialsProvider::new(profile_provider)) } @@ -261,23 +334,18 @@ impl AwsAuthentication { external_id, imds, region, + session_name, .. } => { let auth_region = region.clone().map(Region::new).unwrap_or(service_region); - let connector = super::connector(proxy, tls_options)?; - let config = SdkConfig::builder() - .http_client(connector) - .region(auth_region.clone()) - .time_source(SystemTimeSource::new()) - .build(); - - let mut builder = AssumeRoleProviderBuilder::new(assume_role) - .region(auth_region.clone()) - .configure(&config); - - if let Some(external_id) = external_id { - builder = builder.external_id(external_id) - } + let builder = Self::assume_role_provider_builder( + proxy, + tls_options, + &auth_region, + assume_role, + external_id.as_deref(), + session_name.as_deref(), + )?; let provider = builder .build_from_provider( @@ -309,6 +377,8 @@ impl AwsAuthentication { assume_role: None, external_id: None, region: None, + session_name: None, + session_token: None, } } } @@ -316,7 +386,7 @@ impl AwsAuthentication { async fn default_credentials_provider( region: Region, proxy: &ProxyConfig, - tls_options: &Option, + tls_options: Option<&TlsConfig>, imds: ImdsAuthentication, ) -> crate::Result { let connector = super::connector(proxy, tls_options)?; @@ -360,11 +430,7 @@ mod tests { #[test] fn parsing_default() { - let config = toml::from_str::( - r#" - "#, - ) - .unwrap(); + let config = toml::from_str::("").unwrap(); assert!(matches!(config.auth, AwsAuthentication::Default { .. })); } @@ -372,9 +438,9 @@ mod tests { #[test] fn parsing_default_with_load_timeout() { let config = toml::from_str::( - r#" + " auth.load_timeout_secs = 10 - "#, + ", ) .unwrap(); @@ -408,11 +474,11 @@ mod tests { #[test] fn parsing_default_with_imds_client() { let config = toml::from_str::( - r#" + " auth.imds.max_attempts = 5 auth.imds.connect_timeout_seconds = 30 auth.imds.read_timeout_seconds = 10 - "#, + ", ) .unwrap(); @@ -469,6 +535,25 @@ mod tests { assert!(matches!(config.auth, AwsAuthentication::Role { .. })); } + #[test] + fn parsing_session_name_with_assume_role() { + let config = toml::from_str::( + r#" + auth.assume_role = "root" + auth.session_name = "session_name" + auth.load_timeout_secs = 10 + "#, + ) + .unwrap(); + + match config.auth { + AwsAuthentication::Role { session_name, .. 
} => { + assert_eq!(session_name.unwrap(), "session_name"); + } + _ => panic!(), + } + } + #[test] fn parsing_assume_role_with_imds_client() { let config = toml::from_str::( @@ -488,10 +573,12 @@ mod tests { load_timeout_secs, imds, region, + session_name, } => { assert_eq!(&assume_role, "root"); assert_eq!(external_id, None); assert_eq!(load_timeout_secs, None); + assert_eq!(session_name, None); assert!(matches!( imds, ImdsAuthentication { @@ -525,10 +612,12 @@ mod tests { load_timeout_secs, imds, region, + session_name, } => { assert_eq!(&assume_role, "auth.root"); assert_eq!(external_id, None); assert_eq!(load_timeout_secs, Some(10)); + assert_eq!(session_name, None); assert!(matches!(imds, ImdsAuthentication { .. })); assert_eq!(region.unwrap(), "us-west-2"); } @@ -616,6 +705,7 @@ mod tests { r#" auth.credentials_file = "/path/to/file" auth.profile = "foo" + auth.region = "us-west-2" "#, ) .unwrap(); @@ -624,9 +714,11 @@ mod tests { AwsAuthentication::File { credentials_file, profile, + region, } => { assert_eq!(&credentials_file, "/path/to/file"); assert_eq!(&profile, "foo"); + assert_eq!(region.unwrap(), "us-west-2"); } _ => panic!(), } @@ -642,6 +734,7 @@ mod tests { AwsAuthentication::File { credentials_file, profile, + .. } => { assert_eq!(&credentials_file, "/path/to/file"); assert_eq!(profile, "default".to_string()); diff --git a/src/aws/mod.rs b/src/aws/mod.rs index 2f4ea2678a637..dbfc742878ae8 100644 --- a/src/aws/mod.rs +++ b/src/aws/mod.rs @@ -1,12 +1,15 @@ //! Shared functionality for the AWS components. pub mod auth; pub mod region; +pub mod timeout; pub use auth::{AwsAuthentication, ImdsAuthentication}; -use aws_config::{meta::region::ProvideRegion, retry::RetryConfig, Region, SdkConfig}; +use aws_config::{ + meta::region::ProvideRegion, retry::RetryConfig, timeout::TimeoutConfig, Region, SdkConfig, +}; use aws_credential_types::provider::{ProvideCredentials, SharedCredentialsProvider}; use aws_sigv4::{ - http_request::{SignableBody, SignableRequest, SigningSettings}, + http_request::{PayloadChecksumKind, SignableBody, SignableRequest, SigningSettings}, sign::v4, }; use aws_smithy_async::rt::sleep::TokioSleep; @@ -30,7 +33,6 @@ use pin_project::pin_project; use regex::RegexSet; pub use region::RegionOrEndpoint; use snafu::Snafu; -use std::time::SystemTime; use std::{ error::Error, pin::Pin, @@ -39,7 +41,9 @@ use std::{ Arc, OnceLock, }, task::{Context, Poll}, + time::{Duration, SystemTime}, }; +pub use timeout::AwsTimeout; use crate::config::ProxyConfig; use crate::http::{build_proxy_connector, build_tls_connector, status}; @@ -100,7 +104,7 @@ fn check_response(res: &HttpResponse) -> bool { /// have turned off as we want to consistently use openssl. fn connector( proxy: &ProxyConfig, - tls_options: &Option, + tls_options: Option<&TlsConfig>, ) -> crate::Result { let tls_settings = MaybeTlsSettings::tls_client(tls_options)?; @@ -119,13 +123,14 @@ pub trait ClientBuilder { type Client; /// Build the client using the given config settings. - fn build(config: &SdkConfig) -> Self::Client; + fn build(&self, config: &SdkConfig) -> Self::Client; } -fn region_provider( +/// Provides the configured AWS region. 
+pub fn region_provider( proxy: &ProxyConfig, - tls_options: &Option, -) -> crate::Result { + tls_options: Option<&TlsConfig>, +) -> crate::Result> { let config = aws_config::provider_config::ProviderConfig::default() .with_http_client(connector(proxy, tls_options)?); @@ -142,7 +147,7 @@ fn region_provider( async fn resolve_region( proxy: &ProxyConfig, - tls_options: &Option, + tls_options: Option<&TlsConfig>, region: Option, ) -> crate::Result { match region { @@ -157,26 +162,36 @@ async fn resolve_region( } /// Create the SDK client using the provided settings. -pub async fn create_client( +pub async fn create_client( + builder: &T, auth: &AwsAuthentication, region: Option, endpoint: Option, proxy: &ProxyConfig, - tls_options: &Option, -) -> crate::Result { - create_client_and_region::(auth, region, endpoint, proxy, tls_options) + tls_options: Option<&TlsConfig>, + timeout: Option<&AwsTimeout>, +) -> crate::Result +where + T: ClientBuilder, +{ + create_client_and_region::(builder, auth, region, endpoint, proxy, tls_options, timeout) .await .map(|(client, _)| client) } /// Create the SDK client and resolve the region using the provided settings. -pub async fn create_client_and_region( +pub async fn create_client_and_region( + builder: &T, auth: &AwsAuthentication, region: Option, endpoint: Option, proxy: &ProxyConfig, - tls_options: &Option, -) -> crate::Result<(T::Client, Region)> { + tls_options: Option<&TlsConfig>, + timeout: Option<&AwsTimeout>, +) -> crate::Result<(T::Client, Region)> +where + T: ClientBuilder, +{ let retry_config = RetryConfig::disabled(); // The default credentials chains will look for a region if not given but we'd like to @@ -208,6 +223,10 @@ pub async fn create_client_and_region( if let Some(endpoint_override) = endpoint { config_builder = config_builder.endpoint_url(endpoint_override); + } else if let Some(endpoint_from_config) = + aws_config::default_provider::endpoint_url::endpoint_url_provider(&provider_config).await + { + config_builder = config_builder.endpoint_url(endpoint_from_config); } if let Some(use_fips) = @@ -216,9 +235,24 @@ pub async fn create_client_and_region( config_builder = config_builder.use_fips(use_fips); } + if let Some(timeout) = timeout { + let mut timeout_config_builder = TimeoutConfig::builder(); + + let operation_timeout = timeout.operation_timeout(); + let connect_timeout = timeout.connect_timeout(); + let read_timeout = timeout.read_timeout(); + + timeout_config_builder + .set_operation_timeout(operation_timeout.map(Duration::from_secs)) + .set_connect_timeout(connect_timeout.map(Duration::from_secs)) + .set_read_timeout(read_timeout.map(Duration::from_secs)); + + config_builder = config_builder.timeout_config(timeout_config_builder.build()); + } + let config = config_builder.build(); - Ok((T::build(&config), region)) + Ok((T::build(builder, &config), region)) } #[derive(Snafu, Debug)] @@ -233,7 +267,8 @@ pub async fn sign_request( service_name: &str, request: &mut http::Request, credentials_provider: &SharedCredentialsProvider, - region: &Option, + region: Option<&Region>, + payload_checksum_sha256: bool, ) -> crate::Result<()> { let headers = request .headers() @@ -255,12 +290,21 @@ pub async fn sign_request( let credentials = credentials_provider.provide_credentials().await?; let identity = Identity::new(credentials, None); + + let mut signing_settings = SigningSettings::default(); + + // Include the x-amz-content-sha256 header when calculating the AWS v4 signature; + // this is required by some AWS services, e.g. 
S3 and OpenSearch Serverless + if payload_checksum_sha256 { + signing_settings.payload_checksum_kind = PayloadChecksumKind::XAmzSha256; + } + let signing_params_builder = v4::SigningParams::builder() .identity(&identity) .region(region.as_ref().map(|r| r.as_ref()).unwrap_or("")) .name(service_name) .time(SystemTime::now()) - .settings(SigningSettings::default()); + .settings(signing_settings); let signing_params = signing_params_builder .build() @@ -342,7 +386,7 @@ struct MeasuredBody { } impl MeasuredBody { - fn new(body: SdkBody, shared_bytes_sent: Arc) -> Self { + const fn new(body: SdkBody, shared_bytes_sent: Arc) -> Self { Self { inner: body, shared_bytes_sent, diff --git a/src/aws/region.rs b/src/aws/region.rs index 4824755920617..4c13d53b02f79 100644 --- a/src/aws/region.rs +++ b/src/aws/region.rs @@ -55,8 +55,8 @@ mod tests { #[test] fn optional() { - assert!(toml::from_str::(indoc! {r#" - "#}) + assert!(toml::from_str::(indoc! {" + "}) .is_ok()); } diff --git a/src/aws/timeout.rs b/src/aws/timeout.rs new file mode 100644 index 0000000000000..bf8cec8b149d7 --- /dev/null +++ b/src/aws/timeout.rs @@ -0,0 +1,86 @@ +//! Client timeout configuration for AWS operations. +use vector_lib::configurable::configurable_component; + +use serde_with::serde_as; + +/// Client timeout configuration for AWS operations. +#[serde_as] +#[configurable_component] +#[derive(Copy, Clone, Debug, Derivative)] +#[derivative(Default)] +#[serde(deny_unknown_fields)] +pub struct AwsTimeout { + /// The connection timeout for AWS requests. + /// + /// Limits the amount of time allowed to initiate a socket connection. + #[configurable(metadata(docs::examples = 20))] + #[configurable(metadata(docs::human_name = "Connect Timeout"))] + #[configurable(metadata(docs::type_unit = "seconds"))] + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "connect_timeout_seconds")] + connect_timeout: Option<u64>, + + /// The operation timeout for AWS requests. + /// + /// Limits the amount of time allowed for an operation to be fully serviced; an + /// operation represents the full request/response lifecycle of a call to a service. + /// Take care when configuring this setting to allow enough time for the polling + /// interval configured in `poll_secs`. + #[configurable(metadata(docs::examples = 20))] + #[configurable(metadata(docs::human_name = "Operation Timeout"))] + #[configurable(metadata(docs::type_unit = "seconds"))] + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "operation_timeout_seconds")] + operation_timeout: Option<u64>, + + /// The read timeout for AWS requests. + /// + /// Limits the amount of time allowed to read the first byte of a response from the + /// time the request is initiated.
Take care when configuring this setting to allow + /// enough time for the polling interval configured in `poll_secs`. + #[configurable(metadata(docs::examples = 20))] + #[configurable(metadata(docs::human_name = "Read Timeout"))] + #[configurable(metadata(docs::type_unit = "seconds"))] + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "read_timeout_seconds")] + read_timeout: Option<u64>, +} + +impl AwsTimeout { + /// Returns the connection timeout. + pub const fn connect_timeout(&self) -> Option<u64> { + self.connect_timeout + } + + /// Returns the operation timeout. + pub const fn operation_timeout(&self) -> Option<u64> { + self.operation_timeout + } + + /// Returns the read timeout. + pub const fn read_timeout(&self) -> Option<u64> { + self.read_timeout + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parsing_timeout_configuration() { + let config = toml::from_str::<AwsTimeout>( + r" + connect_timeout_seconds = 20 + operation_timeout_seconds = 20 + read_timeout_seconds = 60 + ", + ) + .unwrap(); + + assert_eq!(config.connect_timeout, Some(20)); + assert_eq!(config.operation_timeout, Some(20)); + assert_eq!(config.read_timeout, Some(60)); + } +} diff --git a/src/cli.rs b/src/cli.rs index c1092d3cab996..93e1dfea4d6bd 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,6 +1,5 @@ #![allow(missing_docs)] -use std::sync::atomic::Ordering; use std::{num::NonZeroU64, path::PathBuf}; use clap::{ArgAction, CommandFactory, FromArgMatches, Parser}; @@ -153,7 +152,33 @@ pub struct RootOpts { #[arg(short, long, env = "VECTOR_WATCH_CONFIG")] pub watch_config: bool, + /// Method for configuration watching. + /// + /// By default, `vector` uses the recommended watcher for the host OS: + /// - `inotify` for Linux-based systems, + /// - `kqueue` for Unix/macOS, + /// - `ReadDirectoryChangesWatcher` for Windows. + /// + /// The `poll` watcher can be used in cases where `inotify` doesn't work, e.g., when attaching the configuration via NFS. + #[arg( + long, + default_value = "recommended", + env = "VECTOR_WATCH_CONFIG_METHOD" + )] + pub watch_config_method: WatchConfigMethod, + + /// Poll for changes in the configuration file at the given interval. + /// + /// This setting is only applicable if `poll` is set in `--watch-config-method`. + #[arg( + long, + env = "VECTOR_WATCH_CONFIG_POLL_INTERVAL_SECONDS", + default_value = "30" + )] + pub watch_config_poll_interval_seconds: NonZeroU64, + /// Set the internal log rate limit. + /// Note that traces are throttled by default unless tagged with `internal_log_rate_limit = false`. #[arg( short, long, @@ -212,22 +237,6 @@ pub struct RootOpts { /// `--watch-config`. #[arg(long, env = "VECTOR_ALLOW_EMPTY_CONFIG", default_value = "false")] pub allow_empty_config: bool, - - /// Turn on strict mode for environment variable interpolation. When set, interpolation of - /// a missing environment variable in configuration files will cause an error instead of - /// a warning, which will result in a failure to load any such configuration file. This option - /// is deprecated and will be removed in a future version to remove the ability to downgrade - /// missing environment variables to warnings.
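// A minimal invocation sketch using the watch options defined above in `RootOpts`
// (the config path is a placeholder; long flag names follow clap's derivation from
// the field names shown):
//
//     vector --config /etc/vector/vector.yaml \
//            --watch-config \
//            --watch-config-method poll \
//            --watch-config-poll-interval-seconds 60
//
// `recommended` (the default) picks the OS-native watcher; `poll` suits NFS/EFS-style mounts.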
- #[arg( - long, - env = "VECTOR_STRICT_ENV_VARS", - default_value = "true", - default_missing_value = "true", - num_args = 0..=1, - require_equals = true, - action = ArgAction::Set - )] - pub strict_env_vars: bool, } impl RootOpts { @@ -249,13 +258,12 @@ impl RootOpts { } pub fn init_global(&self) { - crate::config::STRICT_ENV_VARS.store(self.strict_env_vars, Ordering::Relaxed); - if !self.openssl_no_probe { - openssl_probe::init_ssl_cert_env_vars(); + unsafe { + openssl_probe::init_openssl_env_vars(); + } } - #[cfg(not(feature = "enterprise-tests"))] crate::metrics::init_global().expect("metrics initialization failed") } } @@ -279,11 +287,13 @@ pub enum SubCommand { /// Generate the configuration schema for this version of Vector. (experimental) /// - /// A JSON Schema document will be written to stdout that represents the valid schema for a + /// A JSON Schema document will be generated that represents the valid schema for a /// Vector configuration. This schema is based on the "full" configuration, such that for usages /// where a configuration is split into multiple files, the schema would apply to those files /// only when concatenated together. - GenerateSchema, + /// + /// By default, all output is written to stdout. The `output_path` option can be used to redirect it to a file. + GenerateSchema(generate_schema::Opts), /// Output a provided Vector configuration file/dir as a single JSON object, useful for checking in to version control. #[command(hide = true)] @@ -325,7 +335,7 @@ impl SubCommand { Self::Config(c) => config::cmd(c), Self::ConvertConfig(opts) => convert_config::cmd(opts), Self::Generate(g) => generate::cmd(g), - Self::GenerateSchema => generate_schema::cmd(), + Self::GenerateSchema(opts) => generate_schema::cmd(opts), Self::Graph(g) => graph::cmd(g), Self::List(l) => list::cmd(l), #[cfg(windows)] @@ -374,6 +384,15 @@ pub enum LogFormat { Json, } +#[derive(clap::ValueEnum, Debug, Clone, Copy, PartialEq, Eq)] +pub enum WatchConfigMethod { + /// Recommended watcher for the current OS, usually `inotify` for Linux-based systems. + Recommended, + /// Poll-based watcher, typically used for watching files on EFS/NFS-like network storage systems. + /// The interval is determined by [`RootOpts::watch_config_poll_interval_seconds`]. + Poll, +} + pub fn handle_config_errors(errors: Vec<String>) -> exitcode::ExitCode { for error in errors { error!(message = "Configuration error.", %error); diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs index c742f14f84f41..b68f2e7427c07 100644 --- a/src/codecs/encoding/config.rs +++ b/src/codecs/encoding/config.rs @@ -8,7 +8,8 @@ use vector_lib::configurable::configurable_component; /// Encoding configuration. #[configurable_component] #[derive(Clone, Debug)] -#[configurable(description = "Configures how events are encoded into raw bytes.")] +/// Configures how events are encoded into raw bytes. +/// The selected encoding also determines which input types (logs, metrics, traces) are supported.
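// A deserialization sketch, assuming the flattened `SerializerConfig` keeps its
// usual `codec` tag (illustrative only; the exact tag is defined by that type):
//
//     let encoding: EncodingConfig = toml::from_str(r#"codec = "json""#).unwrap();
//     // The parsed value selects the JSON serializer and, with it, the input
//     // types the sink will accept.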
pub struct EncodingConfig { #[serde(flatten)] encoding: SerializerConfig, @@ -101,11 +102,11 @@ impl EncodingConfigWithFraming { let framer = match (framer, &serializer) { (Some(framer), _) => framer, (None, Serializer::Json(_)) => match sink_type { - SinkType::StreamBased => NewlineDelimitedEncoder::new().into(), + SinkType::StreamBased => NewlineDelimitedEncoder::default().into(), SinkType::MessageBased => CharacterDelimitedEncoder::new(b',').into(), }, (None, Serializer::Avro(_) | Serializer::Native(_)) => { - LengthDelimitedEncoder::new().into() + LengthDelimitedEncoder::default().into() } (None, Serializer::Gelf(_)) => { // Graylog/GELF always uses null byte delimiter on TCP, see @@ -115,17 +116,18 @@ impl EncodingConfigWithFraming { (None, Serializer::Protobuf(_)) => { // Protobuf uses length-delimited messages, see: // https://developers.google.com/protocol-buffers/docs/techniques#streaming - LengthDelimitedEncoder::new().into() + LengthDelimitedEncoder::default().into() } ( None, - Serializer::Csv(_) + Serializer::Cef(_) + | Serializer::Csv(_) | Serializer::Logfmt(_) | Serializer::NativeJson(_) | Serializer::RawMessage(_) | Serializer::Text(_) | Serializer::Syslog(_), - ) => NewlineDelimitedEncoder::new().into(), + ) => NewlineDelimitedEncoder::default().into(), }; Ok((framer, serializer)) diff --git a/src/codecs/encoding/encoder.rs b/src/codecs/encoding/encoder.rs index 5bc74dcb258a2..7f5886db709e6 100644 --- a/src/codecs/encoding/encoder.rs +++ b/src/codecs/encoding/encoder.rs @@ -23,7 +23,7 @@ where impl Default for Encoder<Framer> { fn default() -> Self { Self { - framer: NewlineDelimitedEncoder::new().into(), + framer: NewlineDelimitedEncoder::default().into(), serializer: TextSerializerConfig::default().build().into(), } } } @@ -93,12 +93,14 @@ impl Encoder<Framer> { } /// Get the suffix that encloses a batch of events.
- pub const fn batch_suffix(&self) -> &[u8] { - match (&self.framer, &self.serializer) { + pub const fn batch_suffix(&self, empty: bool) -> &[u8] { + match (&self.framer, &self.serializer, empty) { ( Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' }), Serializer::Json(_) | Serializer::NativeJson(_), + _, ) => b"]", + (Framer::NewlineDelimited(_), _, false) => b"\n", _ => &[], } } @@ -116,6 +118,7 @@ impl Encoder<Framer> { (Serializer::Native(_), _) | (Serializer::Protobuf(_), _) => "application/octet-stream", ( Serializer::Avro(_) + | Serializer::Cef(_) | Serializer::Csv(_) | Serializer::Gelf(_) | Serializer::Json(_) @@ -237,7 +240,7 @@ mod tests { fn encode(&mut self, _: (), dst: &mut BytesMut) -> Result<(), Self::Error> { self.0.encode((), dst)?; let result = if self.1 == self.2 { - Err(Box::new(std::io::Error::new(std::io::ErrorKind::Other, "error")) as _) + Err(Box::new(std::io::Error::other("error")) as _) } else { Ok(()) }; @@ -323,4 +326,23 @@ mod tests { let sink = framed.into_inner(); assert_eq!(sink, b"(foo)(bar)"); } + + #[tokio::test] + async fn test_encode_batch_newline() { + let encoder = Encoder::<Framer>::new( + Framer::NewlineDelimited(NewlineDelimitedEncoder::default()), + TextSerializerConfig::default().build().into(), + ); + let source = futures::stream::iter(vec![ + Event::Log(LogEvent::from("bar")), + Event::Log(LogEvent::from("baz")), + Event::Log(LogEvent::from("bat")), + ]) + .map(Ok); + let sink: Vec<u8> = Vec::new(); + let mut framed = FramedWrite::new(sink, encoder); + source.forward(&mut framed).await.unwrap(); + let sink = framed.into_inner(); + assert_eq!(sink, b"bar\nbaz\nbat\n"); + } } diff --git a/src/codecs/encoding/transformer.rs b/src/codecs/encoding/transformer.rs index 5539df5dd332c..4b9583e1f53e6 100644 --- a/src/codecs/encoding/transformer.rs +++ b/src/codecs/encoding/transformer.rs @@ -396,7 +396,7 @@ mod tests { ), ]; for (fmt, expected) in cases { - let config: String = format!(r#"timestamp_format = "{}""#, fmt); + let config: String = format!(r#"timestamp_format = "{fmt}""#); let transformer: Transformer = toml::from_str(&config).unwrap(); let mut event = base.clone(); transformer.transform(&mut event); diff --git a/src/codecs/ready_frames.rs b/src/codecs/ready_frames.rs index 4aed021b57e83..a707140b23001 100644 --- a/src/codecs/ready_frames.rs +++ b/src/codecs/ready_frames.rs @@ -52,7 +52,7 @@ where } /// Returns a mutable reference to the underlying stream. - pub fn get_mut(&mut self) -> &mut T { + pub const fn get_mut(&mut self) -> &mut T { &mut self.inner } diff --git a/src/common/backoff.rs b/src/common/backoff.rs new file mode 100644 index 0000000000000..9c18593678f6c --- /dev/null +++ b/src/common/backoff.rs @@ -0,0 +1,81 @@ +use std::time::Duration; + +// `tokio-retry` crate // MIT License // Copyright (c) 2017 Sam Rijs // +/// A retry strategy driven by exponential back-off. +/// +/// The power corresponds to the number of past attempts. +#[derive(Debug, Clone)] +pub(crate) struct ExponentialBackoff { + current: u64, + base: u64, + factor: u64, + max_delay: Option<Duration>, +} + +impl ExponentialBackoff { + /// Constructs a new exponential back-off strategy, + /// given a base duration in milliseconds. + /// + /// The resulting duration is calculated by taking the base to the `n`-th power, + /// where `n` denotes the number of past attempts.
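// An illustrative sketch of the resulting schedule (not part of the patch): with
// a base of 2 and a factor of 1000, successive delays are 2s, 4s, 8s, ..., and
// once the computed delay exceeds `max_delay` the maximum is returned instead:
//
//     let mut backoff = ExponentialBackoff::from_millis(2)
//         .factor(1000)
//         .max_delay(Duration::from_secs(10));
//     assert_eq!(backoff.next(), Some(Duration::from_secs(2)));
//     assert_eq!(backoff.next(), Some(Duration::from_secs(4)));
//     assert_eq!(backoff.next(), Some(Duration::from_secs(8)));
//     assert_eq!(backoff.next(), Some(Duration::from_secs(10))); // clamped to max_delay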
+ pub(crate) const fn from_millis(base: u64) -> ExponentialBackoff { + ExponentialBackoff { + current: base, + base, + factor: 1u64, + max_delay: None, + } + } + + /// A multiplicative factor that will be applied to the retry delay. + /// + /// For example, using a factor of `1000` will make each delay in units of seconds. + /// + /// Default factor is `1`. + pub(crate) const fn factor(mut self, factor: u64) -> ExponentialBackoff { + self.factor = factor; + self + } + + /// Apply a maximum delay. No retry delay will be longer than this `Duration`. + pub(crate) const fn max_delay(mut self, duration: Duration) -> ExponentialBackoff { + self.max_delay = Some(duration); + self + } + + /// Resets the exponential back-off strategy to its initial state. + pub(crate) const fn reset(&mut self) { + self.current = self.base; + } +} + +impl Iterator for ExponentialBackoff { + type Item = Duration; + + fn next(&mut self) -> Option<Duration> { + // set delay duration by applying factor + let duration = if let Some(duration) = self.current.checked_mul(self.factor) { + Duration::from_millis(duration) + } else { + Duration::from_millis(u64::MAX) + }; + + // check if we reached max delay + if let Some(ref max_delay) = self.max_delay { + if duration > *max_delay { + return Some(*max_delay); + } + } + + if let Some(next) = self.current.checked_mul(self.base) { + self.current = next; + } else { + self.current = u64::MAX; + } + + Some(duration) + } +} diff --git a/src/common/datadog.rs b/src/common/datadog.rs index 5077453861fbd..9b84a2f8351dc 100644 --- a/src/common/datadog.rs +++ b/src/common/datadog.rs @@ -3,12 +3,42 @@ // Datadog component type, whether it's used in integration tests, etc. #![allow(dead_code)] #![allow(unreachable_pub)] + +use std::sync::LazyLock; + +use regex::Regex; use serde::{Deserialize, Serialize}; -use vector_lib::{event::DatadogMetricOriginMetadata, sensitive_string::SensitiveString}; +use vector_lib::{ + event::DatadogMetricOriginMetadata, schema::meaning, sensitive_string::SensitiveString, +}; pub(crate) const DD_US_SITE: &str = "datadoghq.com"; pub(crate) const DD_EU_SITE: &str = "datadoghq.eu"; +/// The Datadog tags event path. +pub const DDTAGS: &str = "ddtags"; +/// The Datadog message event path. +pub const MESSAGE: &str = "message"; + +/// Mapping of the semantic meaning of well known Datadog reserved attributes +/// to the field name that Datadog intake expects. +// https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/?s=severity#reserved-attributes +pub const DD_RESERVED_SEMANTIC_ATTRS: [(&str, &str); 6] = [ + (meaning::SEVERITY, "status"), // status is intentionally semantically defined as severity + (meaning::TIMESTAMP, "timestamp"), + (meaning::HOST, "hostname"), + (meaning::SERVICE, "service"), + (meaning::SOURCE, "ddsource"), + (meaning::TAGS, DDTAGS), +]; + +/// Returns true if the parameter `attr` is one of the reserved Datadog log attributes. +pub fn is_reserved_attribute(attr: &str) -> bool { + DD_RESERVED_SEMANTIC_ATTRS + .iter() + .any(|(_, attr_str)| &attr == attr_str) +} + /// DatadogSeriesMetric #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct DatadogSeriesMetric { @@ -62,10 +92,28 @@ pub struct DatadogPoint<T>(pub i64, pub T); /// Gets the base API endpoint to use for any calls to Datadog. /// /// If `endpoint` is not specified, we fall back to `site`.
-pub(crate) fn get_api_base_endpoint(endpoint: Option<&String>, site: &str) -> String { - endpoint - .cloned() - .unwrap_or_else(|| format!("https://api.{}", site)) +pub(crate) fn get_api_base_endpoint(endpoint: Option<&str>, site: &str) -> String { + endpoint.map_or_else(|| format!("https://api.{site}"), compute_api_endpoint) +} + +/// Computes the Datadog API endpoint from a given endpoint string. +/// +/// This scans the given endpoint for the common Datadog domain names and, if found, rewrites the +/// endpoint string using the standard API URI. If not found, the endpoint is used as-is. +fn compute_api_endpoint(endpoint: &str) -> String { + // This mechanism is derived from the forwarder health check in the Datadog Agent: + // https://github.com/DataDog/datadog-agent/blob/cdcf0fc809b9ac1cd6e08057b4971c7dbb8dbe30/comp/forwarder/defaultforwarder/forwarder_health.go#L45-L47 + // https://github.com/DataDog/datadog-agent/blob/cdcf0fc809b9ac1cd6e08057b4971c7dbb8dbe30/comp/forwarder/defaultforwarder/forwarder_health.go#L188-L190 + static DOMAIN_REGEX: LazyLock<Regex> = LazyLock::new(|| { + Regex::new(r"(?:[a-z]{2}\d\.)?(datadoghq\.[a-z]+|ddog-gov\.com)/*$") + .expect("Could not build Datadog domain regex") + }); + + if let Some(caps) = DOMAIN_REGEX.captures(endpoint) { + format!("https://api.{}", &caps[1]) + } else { + endpoint.into() + } } /// Default settings to use for Datadog components. @@ -92,3 +140,54 @@ fn default_api_key() -> Option { pub(crate) fn default_site() -> String { std::env::var("DD_SITE").unwrap_or(DD_US_SITE.to_string()) } + +#[cfg(test)] +mod tests { + use similar_asserts::assert_eq; + + use super::*; + + #[test] + fn computes_correct_api_endpoint() { + assert_eq!( + compute_api_endpoint("https://http-intake.logs.datadoghq.com"), + "https://api.datadoghq.com" + ); + assert_eq!( + compute_api_endpoint("https://http-intake.logs.datadoghq.com/"), + "https://api.datadoghq.com" + ); + assert_eq!( + compute_api_endpoint("http://http-intake.logs.datadoghq.com/"), + "https://api.datadoghq.com" + ); + assert_eq!( + compute_api_endpoint("https://anythingelse.datadoghq.com/"), + "https://api.datadoghq.com" + ); + assert_eq!( + compute_api_endpoint("https://this.datadoghq.eu/"), + "https://api.datadoghq.eu" + ); + assert_eq!( + compute_api_endpoint("http://datadog.com/"), + "http://datadog.com/" + ); + } + + #[test] + fn gets_correct_api_base_endpoint() { + assert_eq!( + get_api_base_endpoint(None, DD_US_SITE), + "https://api.datadoghq.com" + ); + assert_eq!( + get_api_base_endpoint(None, "datadog.net"), + "https://api.datadog.net" + ); + assert_eq!( + get_api_base_endpoint(Some("https://logs.datadoghq.eu"), DD_US_SITE), + "https://api.datadoghq.eu" + ); + } } diff --git a/src/common/expansion.rs b/src/common/expansion.rs new file mode 100644 index 0000000000000..04c737498f259 --- /dev/null +++ b/src/common/expansion.rs @@ -0,0 +1,48 @@ +use regex::Regex; +use std::{collections::HashMap, sync::LazyLock}; + +use crate::event::Value; + +static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[^0-9A-Za-z_]").unwrap()); +fn slugify_text(input: &str) -> String { + let result = RE.replace_all(input, "_"); + result.to_lowercase() +} + +/// Expands the given possibly template-able `key_s` and `value_s`, and returns the expanded owned pairs. +/// It also inserts the pairs into either `static_pairs` or `dynamic_pairs`, depending on whether `key_s` is templated.
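// An illustrative sketch of the two paths (hypothetical inputs): a key ending in
// `*` is expanded from its JSON value into slugified dynamic pairs, while any
// other key is copied verbatim as a static pair:
//
//     let (mut statics, mut dynamics) = (HashMap::new(), HashMap::new());
//     pair_expansion(
//         "label_*",
//         r#"{"App":"foo","Env.Prod":"true"}"#,
//         &mut statics,
//         &mut dynamics,
//     )
//     .unwrap();
//     // dynamics == {"label_app": "foo", "label_env_prod": "true"}
//     pair_expansion("team", "sre", &mut statics, &mut dynamics).unwrap();
//     // statics == {"team": "sre"}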
+pub(crate) fn pair_expansion( + key_s: &str, + value_s: &str, + static_pairs: &mut HashMap<String, String>, + dynamic_pairs: &mut HashMap<String, String>, +) -> Result<HashMap<String, String>, serde_json::Error> { + let mut expanded_pairs = HashMap::new(); + if let Some(opening_prefix) = key_s.strip_suffix('*') { + let output: serde_json::map::Map<String, serde_json::Value> = + serde_json::from_str(value_s)?; + + // key_* -> key_one, key_two, key_three + // * -> one, two, three + for (k, v) in output { + let key = slugify_text(&format!("{opening_prefix}{k}")); + let val = Value::from(v).to_string_lossy().into_owned(); + if val.is_empty() { + warn!("Encountered \"null\" value for dynamic pair. key: {}", key); + continue; + } + if let Some(prev) = dynamic_pairs.insert(key.clone(), val.clone()) { + warn!( + "Encountered duplicated dynamic pair. \ + key: {}, value: {:?}, discarded value: {:?}", + key, val, prev + ); + }; + expanded_pairs.insert(key, val); + } + } else { + static_pairs.insert(key_s.to_string(), value_s.to_string()); + expanded_pairs.insert(key_s.to_string(), value_s.to_string()); + } + Ok(expanded_pairs) +} diff --git a/src/sources/util/http/error.rs b/src/common/http/error.rs similarity index 83% rename from src/sources/util/http/error.rs rename to src/common/http/error.rs index 06298558b2abd..2edef0b948d99 100644 --- a/src/sources/util/http/error.rs +++ b/src/common/http/error.rs @@ -2,6 +2,7 @@ use std::{error::Error, fmt}; use serde::Serialize; +/// HTTP error, containing an HTTP status code and a message. #[derive(Serialize, Debug)] pub struct ErrorMessage { code: u16, @@ -15,6 +16,7 @@ pub struct ErrorMessage { feature = "sources-datadog_agent" ))] impl ErrorMessage { + /// Creates a new `ErrorMessage` from an HTTP status code and a message. #[allow(unused)] // triggered by check-component-features pub fn new(code: http::StatusCode, message: String) -> Self { ErrorMessage { @@ -23,6 +25,7 @@ impl ErrorMessage { } } + /// Returns the HTTP status code. #[allow(unused)] // triggered by check-component-features pub fn status_code(&self) -> http::StatusCode { http::StatusCode::from_u16(self.code).unwrap_or(http::StatusCode::INTERNAL_SERVER_ERROR) @@ -31,10 +34,12 @@ impl ErrorMessage { #[cfg(feature = "sources-utils-http-prelude")] impl ErrorMessage { + /// Returns the raw HTTP status code. pub const fn code(&self) -> u16 { self.code } + /// Returns the error message. pub fn message(&self) -> &str { self.message.as_str() } diff --git a/src/common/http/mod.rs b/src/common/http/mod.rs new file mode 100644 index 0000000000000..8608f5e153dcb --- /dev/null +++ b/src/common/http/mod.rs @@ -0,0 +1,12 @@ +//! Functionality shared between modules that use HTTP. #[cfg(all( + feature = "sources-utils-http-auth", + feature = "sources-utils-http-error" +))] +pub mod server_auth; + +#[cfg(feature = "sources-utils-http-error")] +mod error; + +#[cfg(feature = "sources-utils-http-error")] +pub use error::ErrorMessage; diff --git a/src/common/http/server_auth.rs b/src/common/http/server_auth.rs new file mode 100644 index 0000000000000..0b3eda4c43184 --- /dev/null +++ b/src/common/http/server_auth.rs @@ -0,0 +1,604 @@ +//! Shared authentication config between components that use HTTP.
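// A configuration sketch for the two strategies defined below (YAML shown in
// comments; the enclosing `auth:` key is illustrative and depends on the
// component, but the field names match this module's deserializer):
//
//     auth:
//       strategy: basic        # the default when `strategy` is omitted
//       username: "${USERNAME}"
//       password: "${PASSWORD}"
//
//     auth:
//       strategy: custom
//       source: |
//         .headers.authorization == "Basic dXNlcjpwYXNz"
//
// A `custom` source must be a VRL expression that resolves to a boolean.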
+use std::{collections::HashMap, fmt, net::SocketAddr}; + +use bytes::Bytes; +use headers::{authorization::Credentials, Authorization}; +use http::{header::AUTHORIZATION, HeaderMap, HeaderValue, StatusCode}; +use serde::{ + de::{Error, MapAccess, Visitor}, + Deserialize, +}; +use vector_config::configurable_component; +use vector_lib::{ + compile_vrl, + event::{Event, LogEvent, VrlTarget}, + sensitive_string::SensitiveString, + TimeZone, +}; +use vrl::{ + compiler::{runtime::Runtime, CompilationResult, CompileConfig, Program}, + core::Value, + diagnostic::Formatter, + prelude::TypeState, + value::{KeyString, ObjectMap}, +}; + +use super::ErrorMessage; + +/// Configuration of the authentication strategy for server mode sinks and sources. +/// +/// Use HTTP authentication with HTTPS only. The authentication credentials are passed as an +/// HTTP header without any additional encryption beyond what is provided by the transport itself. +#[configurable_component(no_deser)] +#[derive(Clone, Debug, Eq, PartialEq)] +#[configurable(metadata(docs::enum_tag_description = "The authentication strategy to use."))] +#[serde(tag = "strategy", rename_all = "snake_case")] +pub enum HttpServerAuthConfig { + /// Basic authentication. + /// + /// The username and password are concatenated and encoded using [base64][base64]. + /// + /// [base64]: https://en.wikipedia.org/wiki/Base64 + Basic { + /// The basic authentication username. + #[configurable(metadata(docs::examples = "${USERNAME}"))] + #[configurable(metadata(docs::examples = "username"))] + username: String, + + /// The basic authentication password. + #[configurable(metadata(docs::examples = "${PASSWORD}"))] + #[configurable(metadata(docs::examples = "password"))] + password: SensitiveString, + }, + + /// Custom authentication using VRL code. + /// + /// Takes in the request and validates it using VRL code. + Custom { + /// The VRL boolean expression. + source: String, + }, +} + +// Custom deserializer implementation to default `strategy` to `basic`. +impl<'de> Deserialize<'de> for HttpServerAuthConfig { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: serde::Deserializer<'de>, + { + struct HttpServerAuthConfigVisitor; + + const FIELD_KEYS: [&str; 4] = ["strategy", "username", "password", "source"]; + + impl<'de> Visitor<'de> for HttpServerAuthConfigVisitor { + type Value = HttpServerAuthConfig; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a valid authentication strategy (basic or custom)") + } + + fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> + where + A: MapAccess<'de>, + { + let mut fields: HashMap<&str, String> = HashMap::default(); + + while let Some(key) = map.next_key::<String>()?
{ + if let Some(field_index) = FIELD_KEYS.iter().position(|k| *k == key.as_str()) { + if fields.contains_key(FIELD_KEYS[field_index]) { + return Err(Error::duplicate_field(FIELD_KEYS[field_index])); + } + fields.insert(FIELD_KEYS[field_index], map.next_value()?); + } else { + return Err(Error::unknown_field(&key, &FIELD_KEYS)); + } + } + + // Default to "basic" if strategy is missing + let strategy = fields + .get("strategy") + .map(String::as_str) + .unwrap_or_else(|| "basic"); + + match strategy { + "basic" => { + let username = fields + .remove("username") + .ok_or_else(|| Error::missing_field("username"))?; + let password = fields + .remove("password") + .ok_or_else(|| Error::missing_field("password"))?; + Ok(HttpServerAuthConfig::Basic { + username, + password: SensitiveString::from(password), + }) + } + "custom" => { + let source = fields + .remove("source") + .ok_or_else(|| Error::missing_field("source"))?; + Ok(HttpServerAuthConfig::Custom { source }) + } + _ => Err(Error::unknown_variant(strategy, &["basic", "custom"])), + } + } + } + + deserializer.deserialize_map(HttpServerAuthConfigVisitor) + } +} + +impl HttpServerAuthConfig { + /// Builds an auth matcher from the provided configuration. + /// Can be used to validate the configuration, if needed, before passing it to the + /// actual component for usage. + pub fn build( + &self, + enrichment_tables: &vector_lib::enrichment::TableRegistry, + ) -> crate::Result<HttpServerAuthMatcher> { + match self { + HttpServerAuthConfig::Basic { username, password } => { + Ok(HttpServerAuthMatcher::AuthHeader( + Authorization::basic(username, password.inner()).0.encode(), + "Invalid username/password", + )) + } + HttpServerAuthConfig::Custom { source } => { + let functions = vrl::stdlib::all() + .into_iter() + .chain(vector_lib::enrichment::vrl_functions()) + .chain(vector_vrl_functions::all()) + .collect::<Vec<_>>(); + + let state = TypeState::default(); + + let mut config = CompileConfig::default(); + config.set_custom(enrichment_tables.clone()); + config.set_read_only(); + + let CompilationResult { + program, + warnings, + config: _, + } = compile_vrl(source, &functions, &state, config).map_err(|diagnostics| { + Formatter::new(source, diagnostics).colored().to_string() + })?; + + if !program.final_type_info().result.is_boolean() { + return Err("VRL conditions must return a boolean.".into()); + } + + if !warnings.is_empty() { + let warnings = Formatter::new(source, warnings).colored().to_string(); + warn!(message = "VRL compilation warning.", %warnings); + } + + Ok(HttpServerAuthMatcher::Vrl { program }) + } + } + } +} + +/// Built auth matcher with validated configuration. +/// Can be used directly in a component to validate authentication in HTTP requests. +#[allow(clippy::large_enum_variant)] +#[derive(Clone, Debug)] +pub enum HttpServerAuthMatcher { + /// Matcher for comparing the exact value of the Authorization header. + AuthHeader(HeaderValue, &'static str), + /// Matcher that runs a VRL script against requests, to allow custom validation. + Vrl { + /// Compiled VRL script + program: Program, + }, +} + +impl HttpServerAuthMatcher { + /// Compares the passed headers to the matcher. + pub fn handle_auth( + &self, + address: Option<&SocketAddr>, + headers: &HeaderMap, + path: &str, + ) -> Result<(), ErrorMessage> { + match self { + HttpServerAuthMatcher::AuthHeader(expected, err_message) => { + if let Some(header) = headers.get(AUTHORIZATION) { + if expected == header { + Ok(()) + } else { + Err(ErrorMessage::new( + StatusCode::UNAUTHORIZED, + err_message.to_string(), + )) + } + } else {
Err(ErrorMessage::new( + StatusCode::UNAUTHORIZED, + "No authorization header".to_owned(), + )) + } + } + HttpServerAuthMatcher::Vrl { program } => { + self.handle_vrl_auth(address, headers, path, program) + } + } + } + + fn handle_vrl_auth( + &self, + address: Option<&SocketAddr>, + headers: &HeaderMap, + path: &str, + program: &Program, + ) -> Result<(), ErrorMessage> { + let mut target = VrlTarget::new( + Event::Log(LogEvent::from_map( + ObjectMap::from([ + ( + "headers".into(), + Value::Object( + headers + .iter() + .map(|(k, v)| { + ( + KeyString::from(k.to_string()), + Value::Bytes(Bytes::copy_from_slice(v.as_bytes())), + ) + }) + .collect::(), + ), + ), + ( + "address".into(), + address.map_or(Value::Null, |a| Value::from(a.ip().to_string())), + ), + ("path".into(), Value::from(path.to_owned())), + ]), + Default::default(), + )), + program.info(), + false, + ); + let timezone = TimeZone::default(); + + let result = Runtime::default().resolve(&mut target, program, &timezone); + match result.map_err(|e| { + warn!("Handling auth failed: {}", e); + ErrorMessage::new(StatusCode::UNAUTHORIZED, "Auth failed".to_owned()) + })? { + vrl::core::Value::Boolean(result) => { + if result { + Ok(()) + } else { + Err(ErrorMessage::new( + StatusCode::UNAUTHORIZED, + "Auth failed".to_owned(), + )) + } + } + _ => Err(ErrorMessage::new( + StatusCode::UNAUTHORIZED, + "Invalid return value".to_owned(), + )), + } + } +} + +#[cfg(test)] +mod tests { + use crate::test_util::{next_addr, random_string}; + use indoc::indoc; + + use super::*; + + impl HttpServerAuthMatcher { + fn auth_header(self) -> (HeaderValue, &'static str) { + match self { + HttpServerAuthMatcher::AuthHeader(header_value, error_message) => { + (header_value, error_message) + } + HttpServerAuthMatcher::Vrl { .. } => { + panic!("Expected HttpServerAuthMatcher::AuthHeader") + } + } + } + } + + #[test] + fn config_should_default_to_basic() { + let config: HttpServerAuthConfig = serde_yaml::from_str(indoc! { r#" + username: foo + password: bar + "# + }) + .unwrap(); + + if let HttpServerAuthConfig::Basic { username, password } = config { + assert_eq!(username, "foo"); + assert_eq!(password.inner(), "bar"); + } else { + panic!("Expected HttpServerAuthConfig::Basic"); + } + } + + #[test] + fn config_should_support_explicit_basic_strategy() { + let config: HttpServerAuthConfig = serde_yaml::from_str(indoc! { r#" + strategy: basic + username: foo + password: bar + "# + }) + .unwrap(); + + if let HttpServerAuthConfig::Basic { username, password } = config { + assert_eq!(username, "foo"); + assert_eq!(password.inner(), "bar"); + } else { + panic!("Expected HttpServerAuthConfig::Basic"); + } + } + + #[test] + fn config_should_support_custom_strategy() { + let config: HttpServerAuthConfig = serde_yaml::from_str(indoc! { r#" + strategy: custom + source: "true" + "# + }) + .unwrap(); + + assert!(matches!(config, HttpServerAuthConfig::Custom { .. })); + if let HttpServerAuthConfig::Custom { source } = config { + assert_eq!(source, "true"); + } else { + panic!("Expected HttpServerAuthConfig::Custom"); + } + } + + #[test] + fn build_basic_auth_should_always_work() { + let basic_auth = HttpServerAuthConfig::Basic { + username: random_string(16), + password: random_string(16).into(), + }; + + let matcher = basic_auth.build(&Default::default()); + + assert!(matcher.is_ok()); + assert!(matches!( + matcher.unwrap(), + HttpServerAuthMatcher::AuthHeader { .. 
} + )); + } + + #[test] + fn build_basic_auth_should_use_username_password_related_message() { + let basic_auth = HttpServerAuthConfig::Basic { + username: random_string(16), + password: random_string(16).into(), + }; + + let (_, error_message) = basic_auth.build(&Default::default()).unwrap().auth_header(); + assert_eq!("Invalid username/password", error_message); + } + + #[test] + fn build_basic_auth_should_use_encode_basic_header() { + let username = random_string(16); + let password = random_string(16); + let basic_auth = HttpServerAuthConfig::Basic { + username: username.clone(), + password: password.clone().into(), + }; + + let (header, _) = basic_auth.build(&Default::default()).unwrap().auth_header(); + assert_eq!( + Authorization::basic(&username, &password).0.encode(), + header + ); + } + + #[test] + fn build_custom_should_fail_on_invalid_source() { + let custom_auth = HttpServerAuthConfig::Custom { + source: "invalid VRL source".to_string(), + }; + + assert!(custom_auth.build(&Default::default()).is_err()); + } + + #[test] + fn build_custom_should_fail_on_non_boolean_return_type() { + let custom_auth = HttpServerAuthConfig::Custom { + source: indoc! {r#" + .success = true + . + "#} + .to_string(), + }; + + assert!(custom_auth.build(&Default::default()).is_err()); + } + + #[test] + fn build_custom_should_success_on_proper_source_with_boolean_return_type() { + let custom_auth = HttpServerAuthConfig::Custom { + source: indoc! {r#" + .headers.authorization == "Basic test" + "#} + .to_string(), + }; + + assert!(custom_auth.build(&Default::default()).is_ok()); + } + + #[test] + fn basic_auth_matcher_should_return_401_when_missing_auth_header() { + let basic_auth = HttpServerAuthConfig::Basic { + username: random_string(16), + password: random_string(16).into(), + }; + + let matcher = basic_auth.build(&Default::default()).unwrap(); + + let result = matcher.handle_auth(Some(&next_addr()), &HeaderMap::new(), "/"); + + assert!(result.is_err()); + let error = result.unwrap_err(); + assert_eq!(401, error.code()); + assert_eq!("No authorization header", error.message()); + } + + #[test] + fn basic_auth_matcher_should_return_401_and_with_wrong_credentials() { + let basic_auth = HttpServerAuthConfig::Basic { + username: random_string(16), + password: random_string(16).into(), + }; + + let matcher = basic_auth.build(&Default::default()).unwrap(); + + let mut headers = HeaderMap::new(); + headers.insert(AUTHORIZATION, HeaderValue::from_static("Basic wrong")); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/"); + + assert!(result.is_err()); + let error = result.unwrap_err(); + assert_eq!(401, error.code()); + assert_eq!("Invalid username/password", error.message()); + } + + #[test] + fn basic_auth_matcher_should_return_ok_for_correct_credentials() { + let username = random_string(16); + let password = random_string(16); + let basic_auth = HttpServerAuthConfig::Basic { + username: username.clone(), + password: password.clone().into(), + }; + + let matcher = basic_auth.build(&Default::default()).unwrap(); + + let mut headers = HeaderMap::new(); + headers.insert( + AUTHORIZATION, + Authorization::basic(&username, &password).0.encode(), + ); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/"); + + assert!(result.is_ok()); + } + + #[test] + fn custom_auth_matcher_should_return_ok_for_true_vrl_script_result() { + let custom_auth = HttpServerAuthConfig::Custom { + source: r#".headers.authorization == "test""#.to_string(), + }; + + let matcher = 
custom_auth.build(&Default::default()).unwrap(); + + let mut headers = HeaderMap::new(); + headers.insert(AUTHORIZATION, HeaderValue::from_static("test")); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/"); + + assert!(result.is_ok()); + } + + #[test] + fn custom_auth_matcher_should_be_able_to_check_address() { + let addr = next_addr(); + let addr_string = addr.ip().to_string(); + let custom_auth = HttpServerAuthConfig::Custom { + source: format!(".address == \"{addr_string}\""), + }; + + let matcher = custom_auth.build(&Default::default()).unwrap(); + + let headers = HeaderMap::new(); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/"); + + assert!(result.is_ok()); + } + + #[test] + fn custom_auth_matcher_should_work_with_missing_address_too() { + let addr = next_addr(); + let addr_string = addr.ip().to_string(); + let custom_auth = HttpServerAuthConfig::Custom { + source: format!(".address == \"{addr_string}\""), + }; + + let matcher = custom_auth.build(&Default::default()).unwrap(); + + let headers = HeaderMap::new(); + let result = matcher.handle_auth(None, &headers, "/"); + + assert!(result.is_err()); + } + + #[test] + fn custom_auth_matcher_should_be_able_to_check_path() { + let custom_auth = HttpServerAuthConfig::Custom { + source: r#".path == "/ok""#.to_string(), + }; + + let matcher = custom_auth.build(&Default::default()).unwrap(); + + let headers = HeaderMap::new(); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/ok"); + + assert!(result.is_ok()); + } + + #[test] + fn custom_auth_matcher_should_return_401_with_wrong_path() { + let custom_auth = HttpServerAuthConfig::Custom { + source: r#".path == "/ok""#.to_string(), + }; + + let matcher = custom_auth.build(&Default::default()).unwrap(); + + let headers = HeaderMap::new(); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/bad"); + + assert!(result.is_err()); + } + + #[test] + fn custom_auth_matcher_should_return_401_for_false_vrl_script_result() { + let custom_auth = HttpServerAuthConfig::Custom { + source: r#".headers.authorization == "test""#.to_string(), + }; + + let matcher = custom_auth.build(&Default::default()).unwrap(); + + let mut headers = HeaderMap::new(); + headers.insert(AUTHORIZATION, HeaderValue::from_static("wrong value")); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/"); + + assert!(result.is_err()); + let error = result.unwrap_err(); + assert_eq!(401, error.code()); + assert_eq!("Auth failed", error.message()); + } + + #[test] + fn custom_auth_matcher_should_return_401_for_failed_script_execution() { + let custom_auth = HttpServerAuthConfig::Custom { + source: "abort".to_string(), + }; + + let matcher = custom_auth.build(&Default::default()).unwrap(); + + let mut headers = HeaderMap::new(); + headers.insert(AUTHORIZATION, HeaderValue::from_static("test")); + let result = matcher.handle_auth(Some(&next_addr()), &headers, "/"); + + assert!(result.is_err()); + let error = result.unwrap_err(); + assert_eq!(401, error.code()); + assert_eq!("Auth failed", error.message()); + } +} diff --git a/src/common/mod.rs b/src/common/mod.rs index 666e29896daa3..1c9a6eb45e6d6 100644 --- a/src/common/mod.rs +++ b/src/common/mod.rs @@ -1,11 +1,10 @@ -//! Modules that are common between sources and sinks. +//! Modules that are common between sources, transforms, and sinks. 
 #[cfg(any(
     feature = "sources-datadog_agent",
     feature = "sinks-datadog_events",
     feature = "sinks-datadog_logs",
     feature = "sinks-datadog_metrics",
     feature = "sinks-datadog_traces",
-    feature = "enterprise"
 ))]
 pub mod datadog;
 
@@ -18,3 +17,20 @@ pub(crate) mod sqs;
 
 #[cfg(any(feature = "sources-aws_s3", feature = "sinks-aws_s3"))]
 pub(crate) mod s3;
+
+#[cfg(any(feature = "sources-websocket", feature = "sinks-websocket"))]
+pub(crate) mod websocket;
+
+pub(crate) mod backoff;
+#[cfg(any(feature = "sources-mqtt", feature = "sinks-mqtt",))]
+/// Common MQTT configuration shared by MQTT components.
+pub mod mqtt;
+
+#[cfg(any(feature = "transforms-log_to_metric", feature = "sinks-loki"))]
+pub(crate) mod expansion;
+
+#[cfg(any(
+    feature = "sources-utils-http-auth",
+    feature = "sources-utils-http-error"
+))]
+pub mod http;
diff --git a/src/common/mqtt.rs b/src/common/mqtt.rs
new file mode 100644
index 0000000000000..35f3c27812bcc
--- /dev/null
+++ b/src/common/mqtt.rs
@@ -0,0 +1,123 @@
+use rumqttc::{AsyncClient, EventLoop, MqttOptions};
+use snafu::Snafu;
+use vector_config_macros::configurable_component;
+use vector_lib::tls::{TlsEnableableConfig, TlsError};
+
+use crate::template::TemplateParseError;
+
+/// Shared MQTT configuration for sources and sinks.
+#[configurable_component]
+#[derive(Clone, Debug, Derivative)]
+#[derivative(Default)]
+#[serde(deny_unknown_fields)]
+pub struct MqttCommonConfig {
+    /// MQTT server address (the broker's domain name or IP address).
+    #[configurable(metadata(docs::examples = "mqtt.example.com", docs::examples = "127.0.0.1"))]
+    pub host: String,
+
+    /// TCP port of the MQTT server to connect to.
+    #[configurable(derived)]
+    #[serde(default = "default_port")]
+    #[derivative(Default(value = "default_port()"))]
+    pub port: u16,
+
+    /// MQTT username.
+    #[serde(default)]
+    #[configurable(derived)]
+    pub user: Option<String>,
+
+    /// MQTT password.
+    #[serde(default)]
+    #[configurable(derived)]
+    pub password: Option<String>,
+
+    /// MQTT client ID.
+    #[serde(default)]
+    #[configurable(derived)]
+    pub client_id: Option<String>,
+
+    /// Connection keep-alive interval.
+    #[serde(default = "default_keep_alive")]
+    #[derivative(Default(value = "default_keep_alive()"))]
+    pub keep_alive: u16,
+
+    /// TLS configuration.
+    #[configurable(derived)]
+    pub tls: Option<TlsEnableableConfig>,
+}
+
+const fn default_port() -> u16 {
+    1883
+}
+
+const fn default_keep_alive() -> u16 {
+    60
+}
+
+/// MQTT error types
+#[derive(Debug, Snafu)]
+#[snafu(visibility(pub))]
+pub enum MqttError {
+    /// Topic template parsing failed
+    #[snafu(display("invalid topic template: {source}"))]
+    TopicTemplate {
+        /// Source of error
+        source: TemplateParseError,
+    },
+    /// TLS error
+    #[snafu(display("TLS error: {source}"))]
+    Tls {
+        /// Source of error
+        source: TlsError,
+    },
+    /// Configuration error
+    #[snafu(display("MQTT configuration error: {source}"))]
+    Configuration {
+        /// Source of error
+        source: ConfigurationError,
+    },
+}
+
+/// MQTT configuration error types
+#[derive(Clone, Debug, Eq, PartialEq, Snafu)]
+pub enum ConfigurationError {
+    /// Empty client ID error
+    #[snafu(display("Client ID is not allowed to be empty."))]
+    EmptyClientId,
+    /// Invalid credentials provided error
+    #[snafu(display("Username and password must be either both provided or both missing."))]
+    InvalidCredentials,
+    /// Invalid client ID provided error
+    #[snafu(display(
+        "Client ID must be 1-23 characters long and must consist of only alphanumeric characters."
+    ))]
+    InvalidClientId,
+    /// Credentials provided were incomplete
+    #[snafu(display("Username and password must be either both or neither provided."))]
+    IncompleteCredentials,
+}
+
+#[derive(Clone)]
+/// MQTT connector wrapper
+pub struct MqttConnector {
+    /// MQTT connection options
+    pub options: MqttOptions,
+}
+
+impl MqttConnector {
+    /// Creates a new MqttConnector
+    pub const fn new(options: MqttOptions) -> Self {
+        Self { options }
+    }
+
+    /// Connects the connector and generates a client and eventloop
+    pub fn connect(&self) -> (AsyncClient, EventLoop) {
+        let (client, eventloop) = AsyncClient::new(self.options.clone(), 1024);
+        (client, eventloop)
+    }
+
+    /// TODO: Right now there is no way to implement the healthcheck properly.
+    pub async fn healthcheck(&self) -> crate::Result<()> {
+        Ok(())
+    }
+}
diff --git a/src/common/s3.rs b/src/common/s3.rs
index cdb69725b4c66..7c7a9810b1850 100644
--- a/src/common/s3.rs
+++ b/src/common/s3.rs
@@ -2,13 +2,16 @@ use aws_sdk_s3::config;
 
 use crate::aws::ClientBuilder;
 
-pub(crate) struct S3ClientBuilder;
+pub(crate) struct S3ClientBuilder {
+    pub force_path_style: Option<bool>,
+}
 
 impl ClientBuilder for S3ClientBuilder {
     type Client = aws_sdk_s3::client::Client;
 
-    fn build(config: &aws_types::SdkConfig) -> Self::Client {
-        let config = config::Builder::from(config).force_path_style(true).build();
-        aws_sdk_s3::client::Client::from_conf(config)
+    fn build(&self, config: &aws_types::SdkConfig) -> Self::Client {
+        let builder =
+            config::Builder::from(config).force_path_style(self.force_path_style.unwrap_or(true));
+        aws_sdk_s3::client::Client::from_conf(builder.build())
     }
 }
diff --git a/src/common/sqs.rs b/src/common/sqs.rs
index f02aa2f7c4021..8f4ff3ecab69e 100644
--- a/src/common/sqs.rs
+++ b/src/common/sqs.rs
@@ -5,7 +5,7 @@ pub(crate) struct SqsClientBuilder;
 impl ClientBuilder for SqsClientBuilder {
     type Client = aws_sdk_sqs::client::Client;
 
-    fn build(config: &aws_types::SdkConfig) -> Self::Client {
+    fn build(&self, config: &aws_types::SdkConfig) -> Self::Client {
         aws_sdk_sqs::client::Client::new(config)
     }
 }
diff --git a/src/common/websocket.rs b/src/common/websocket.rs
new file mode 100644
index 0000000000000..f545bc92adb1b
--- /dev/null
+++ b/src/common/websocket.rs
@@ -0,0 +1,241 @@
+use std::{
+    fmt::Debug,
+    net::SocketAddr,
+    num::NonZeroU64,
+    task::{Context, Poll},
+    time::Duration,
+};
+use vector_config_macros::configurable_component;
+
+use snafu::{ResultExt, Snafu};
+use tokio::{net::TcpStream, time};
+use tokio_tungstenite::{
+    client_async_with_config,
+    tungstenite::{
+        client::{uri_mode, IntoClientRequest},
+        error::{Error as TungsteniteError, ProtocolError, UrlError},
+        handshake::client::Request,
+        protocol::WebSocketConfig,
+        stream::Mode as UriMode,
+    },
+    WebSocketStream,
+};
+
+use crate::{
+    common::backoff::ExponentialBackoff,
+    dns,
+    http::Auth,
+    internal_events::{WebSocketConnectionEstablished, WebSocketConnectionFailedError},
+    tls::{MaybeTlsSettings, MaybeTlsStream, TlsEnableableConfig, TlsError},
+};
+
+#[allow(unreachable_pub)]
+#[derive(Debug, Snafu)]
+#[snafu(visibility(pub))]
+pub enum WebSocketError {
+    #[snafu(display("Creating WebSocket client failed: {}", source))]
+    CreateFailed { source: TungsteniteError },
+    #[snafu(display("Connect error: {}", source))]
+    ConnectError { source: TlsError },
+    #[snafu(display("Unable to resolve DNS: {}", source))]
+    DnsError { source: dns::DnsError },
+    #[snafu(display("No addresses returned."))]
+    NoAddresses,
+}
+
+#[derive(Clone)]
+pub(crate) struct WebSocketConnector {
+    uri: String,
+    host: String,
+    port: u16,
+    tls: MaybeTlsSettings,
+    auth: Option<Auth>,
+}
+
+impl WebSocketConnector {
+    pub(crate) fn new(
+        uri: String,
+        tls: MaybeTlsSettings,
+        auth: Option<Auth>,
+    ) -> Result<Self, WebSocketError> {
+        let request = (&uri).into_client_request().context(CreateFailedSnafu)?;
+        let (host, port) = Self::extract_host_and_port(&request).context(CreateFailedSnafu)?;
+
+        Ok(Self {
+            uri,
+            host,
+            port,
+            tls,
+            auth,
+        })
+    }
+
+    fn extract_host_and_port(request: &Request) -> Result<(String, u16), TungsteniteError> {
+        let host = request
+            .uri()
+            .host()
+            .ok_or(TungsteniteError::Url(UrlError::NoHostName))?
+            .to_string();
+        let mode = uri_mode(request.uri())?;
+        let port = request.uri().port_u16().unwrap_or(match mode {
+            UriMode::Tls => 443,
+            UriMode::Plain => 80,
+        });
+
+        Ok((host, port))
+    }
+
+    const fn fresh_backoff() -> ExponentialBackoff {
+        ExponentialBackoff::from_millis(2)
+            .factor(250)
+            .max_delay(Duration::from_secs(60))
+    }
+
+    async fn tls_connect(&self) -> Result<MaybeTlsStream<TcpStream>, WebSocketError> {
+        let ip = dns::Resolver
+            .lookup_ip(self.host.clone())
+            .await
+            .context(DnsSnafu)?
+            .next()
+            .ok_or(WebSocketError::NoAddresses)?;
+
+        let addr = SocketAddr::new(ip, self.port);
+        self.tls
+            .connect(&self.host, &addr)
+            .await
+            .context(ConnectSnafu)
+    }
+
+    async fn connect(&self) -> Result<WebSocketStream<MaybeTlsStream<TcpStream>>, WebSocketError> {
+        let mut request = (&self.uri)
+            .into_client_request()
+            .context(CreateFailedSnafu)?;
+
+        if let Some(auth) = &self.auth {
+            auth.apply(&mut request);
+        }
+
+        let maybe_tls = self.tls_connect().await?;
+
+        let ws_config = WebSocketConfig::default();
+
+        let (ws_stream, _response) = client_async_with_config(request, maybe_tls, Some(ws_config))
+            .await
+            .context(CreateFailedSnafu)?;
+
+        Ok(ws_stream)
+    }
+
+    pub(crate) async fn connect_backoff(&self) -> WebSocketStream<MaybeTlsStream<TcpStream>> {
+        let mut backoff = Self::fresh_backoff();
+        loop {
+            match self.connect().await {
+                Ok(ws_stream) => {
+                    emit!(WebSocketConnectionEstablished {});
+                    return ws_stream;
+                }
+                Err(error) => {
+                    emit!(WebSocketConnectionFailedError {
+                        error: Box::new(error)
+                    });
+                    time::sleep(backoff.next().unwrap()).await;
+                }
+            }
+        }
+    }
+
+    #[cfg(feature = "sinks-websocket")]
+    pub(crate) async fn healthcheck(&self) -> crate::Result<()> {
+        self.connect().await.map(|_| ()).map_err(Into::into)
+    }
+}
+
+pub(crate) const fn is_closed(error: &TungsteniteError) -> bool {
+    matches!(
+        error,
+        TungsteniteError::ConnectionClosed
+            | TungsteniteError::AlreadyClosed
+            | TungsteniteError::Protocol(ProtocolError::ResetWithoutClosingHandshake)
+    )
+}
+
+pub(crate) struct PingInterval {
+    interval: Option<time::Interval>,
+}
+
+impl PingInterval {
+    pub(crate) fn new(period: Option<u64>) -> Self {
+        Self {
+            interval: period.map(|period| time::interval(Duration::from_secs(period))),
+        }
+    }
+
+    pub(crate) fn poll_tick(&mut self, cx: &mut Context<'_>) -> Poll<time::Instant> {
+        match self.interval.as_mut() {
+            Some(interval) => interval.poll_tick(cx),
+            None => Poll::Pending,
+        }
+    }
+
+    pub(crate) async fn tick(&mut self) -> time::Instant {
+        std::future::poll_fn(|cx| self.poll_tick(cx)).await
+    }
+}
+
+/// Shared websocket configuration for sources and sinks.
+#[configurable_component]
+#[derive(Clone, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct WebSocketCommonConfig {
+    /// The WebSocket URI to connect to.
+    ///
+    /// This should include the protocol and host, but can also include the port, path, and any other valid part of a URI.
+    /// **Note**: Using the `wss://` protocol requires enabling `tls`.
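+    ///
+    /// For example, `ws://localhost:8080` for a plaintext connection, or
+    /// `wss://example.com/socket` together with `tls.enabled: true`.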
+    #[configurable(metadata(docs::examples = "ws://localhost:8080"))]
+    #[configurable(metadata(docs::examples = "wss://example.com/socket"))]
+    pub uri: String,
+
+    /// The interval, in seconds, between sending [Ping][ping]s to the remote peer.
+    ///
+    /// If this option is not configured, pings are not sent on an interval.
+    ///
+    /// If the `ping_timeout` is not set, pings are still sent but there is no expectation of pong
+    /// response times.
+    ///
+    /// [ping]: https://www.rfc-editor.org/rfc/rfc6455#section-5.5.2
+    #[configurable(metadata(docs::type_unit = "seconds"))]
+    #[configurable(metadata(docs::advanced))]
+    #[configurable(metadata(docs::examples = 30))]
+    pub ping_interval: Option<NonZeroU64>,
+
+    /// The number of seconds to wait for a [Pong][pong] response from the remote peer.
+    ///
+    /// If a response is not received within this time, the connection is re-established.
+    ///
+    /// [pong]: https://www.rfc-editor.org/rfc/rfc6455#section-5.5.3
+    // NOTE: this option is not relevant if the `ping_interval` is not configured.
+    #[configurable(metadata(docs::type_unit = "seconds"))]
+    #[configurable(metadata(docs::advanced))]
+    #[configurable(metadata(docs::examples = 5))]
+    pub ping_timeout: Option<NonZeroU64>,
+
+    /// TLS configuration.
+    #[configurable(derived)]
+    pub tls: Option<TlsEnableableConfig>,
+
+    /// HTTP Authentication.
+    #[configurable(derived)]
+    pub auth: Option<Auth>,
+}
+
+impl Default for WebSocketCommonConfig {
+    fn default() -> Self {
+        Self {
+            uri: "ws://127.0.0.1:8080".to_owned(),
+            ping_interval: None,
+            ping_timeout: None,
+            tls: None,
+            auth: None,
+        }
+    }
+}
diff --git a/src/components/validation/mod.rs b/src/components/validation/mod.rs
index e8126ba4159be..a786a56571120 100644
--- a/src/components/validation/mod.rs
+++ b/src/components/validation/mod.rs
@@ -6,6 +6,8 @@ mod test_case;
 pub mod util;
 mod validators;
 
+use vector_lib::config::LogNamespace;
+
 use crate::config::{BoxedSink, BoxedSource, BoxedTransform};
 
 /// For components implementing `ValidatableComponent`
@@ -125,42 +127,49 @@ pub struct ValidationConfiguration {
     /// There may be only one `ComponentTestCaseConfig` necessary to execute all test cases, but some cases
     /// require more advanced configuration in order to hit the code path desired.
     component_configurations: Vec<ComponentTestCaseConfig>,
+    log_namespace: LogNamespace,
 }
 
 impl ValidationConfiguration {
     /// Creates a new `ValidationConfiguration` for a source.
-    pub fn from_source(
+    pub const fn from_source(
         component_name: &'static str,
+        log_namespace: LogNamespace,
         component_configurations: Vec<ComponentTestCaseConfig>,
     ) -> Self {
         Self {
             component_name,
             component_type: ComponentType::Source,
             component_configurations,
+            log_namespace,
         }
     }
 
     /// Creates a new `ValidationConfiguration` for a transform.
-    pub fn from_transform(
+    pub const fn from_transform(
         component_name: &'static str,
+        log_namespace: LogNamespace,
         component_configurations: Vec<ComponentTestCaseConfig>,
     ) -> Self {
         Self {
             component_name,
             component_type: ComponentType::Transform,
             component_configurations,
+            log_namespace,
         }
     }
 
     /// Creates a new `ValidationConfiguration` for a sink.
-    pub fn from_sink(
+    pub const fn from_sink(
         component_name: &'static str,
+        log_namespace: LogNamespace,
        component_configurations: Vec<ComponentTestCaseConfig>,
     ) -> Self {
         Self {
             component_name,
             component_type: ComponentType::Sink,
             component_configurations,
+            log_namespace,
         }
     }
 
@@ -179,6 +188,11 @@ impl ValidationConfiguration {
         self.component_configurations.clone()
     }
 
+    /// Gets the LogNamespace that the component is using.
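+    ///
+    /// With `LogNamespace::Legacy`, metadata fields such as `timestamp` live at
+    /// the event root; with `LogNamespace::Vector`, the event value itself is at
+    /// the root and metadata is kept under `%`-prefixed paths.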
+ pub const fn log_namespace(&self) -> LogNamespace { + self.log_namespace + } + fn get_comp_test_case(&self, test_case: Option<&String>) -> Option { let empty = String::from(""); let test_case = test_case.unwrap_or(&empty); @@ -305,7 +319,7 @@ fn run_validation(configuration: ValidationConfiguration, test_case_data_path: s } else { let formatted = success .iter() - .map(|s| format!(" - {}\n", s)) + .map(|s| format!(" - {s}\n")) .collect::>(); details.push(format!( @@ -326,7 +340,7 @@ fn run_validation(configuration: ValidationConfiguration, test_case_data_path: s } else { let formatted = failure .iter() - .map(|s| format!(" - {}\n", s)) + .map(|s| format!(" - {s}\n")) .collect::>(); details.push(format!( @@ -354,10 +368,9 @@ fn run_validation(configuration: ValidationConfiguration, test_case_data_path: s ); } } - Err(e) => panic!( - "Failed to complete validation run for component '{}': {}", - component_name, e - ), + Err(e) => { + panic!("Failed to complete validation run for component '{component_name}': {e}") + } } }); } diff --git a/src/components/validation/resources/event.rs b/src/components/validation/resources/event.rs index 4c03e3b4eed56..46ae48378ae1f 100644 --- a/src/components/validation/resources/event.rs +++ b/src/components/validation/resources/event.rs @@ -2,8 +2,10 @@ use std::collections::HashMap; use bytes::BytesMut; use serde::Deserialize; +use serde_json::Value; use snafu::Snafu; use tokio_util::codec::Encoder as _; +use vector_lib::codecs::encoding::format::JsonSerializerOptions; use crate::codecs::Encoder; use vector_lib::codecs::{ @@ -43,7 +45,7 @@ pub enum EventData { /// A simple log event. Log(String), /// A log event built from key-value pairs - LogBuilder(HashMap), + LogBuilder(HashMap), } impl EventData { @@ -99,7 +101,7 @@ impl TestEvent { } } - pub fn get_event(&mut self) -> &mut Event { + pub const fn get_event(&mut self) -> &mut Event { match self { Self::Passthrough(event) => event, Self::FailWithAlternateEncoder(event) => event, @@ -172,13 +174,17 @@ pub fn encode_test_event( // versa. 
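     // (concretely, per the branches below: a JSON-capable serializer is paired
     // with a logfmt alternate, and any other serializer with a JSON alternate)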
let mut alt_encoder = if encoder.serializer().supports_json() { Encoder::::new( - LengthDelimitedEncoder::new().into(), - LogfmtSerializer::new().into(), + LengthDelimitedEncoder::default().into(), + LogfmtSerializer.into(), ) } else { Encoder::::new( - NewlineDelimitedEncoder::new().into(), - JsonSerializer::new(MetricTagValues::default()).into(), + NewlineDelimitedEncoder::default().into(), + JsonSerializer::new( + MetricTagValues::default(), + JsonSerializerOptions::default(), + ) + .into(), ) }; diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs index 4932a2ff456c9..59c4504654ce9 100644 --- a/src/components/validation/resources/http.rs +++ b/src/components/validation/resources/http.rs @@ -11,7 +11,7 @@ use axum::{ routing::{MethodFilter, MethodRouter}, Router, }; -use bytes::BytesMut; +use bytes::{BufMut as _, BytesMut}; use http::{Method, Request, StatusCode, Uri}; use hyper::{Body, Client, Server}; use tokio::{ @@ -24,7 +24,11 @@ use crate::components::validation::{ sync::{Configuring, TaskCoordinator}, RunnerMetrics, }; -use vector_lib::{event::Event, EstimatedJsonEncodedSizeOf}; +use vector_lib::{ + codecs::encoding::Framer, codecs::encoding::Serializer::Json, + codecs::CharacterDelimitedEncoder, config::LogNamespace, event::Event, + EstimatedJsonEncodedSizeOf, +}; use super::{encode_test_event, ResourceCodec, ResourceDirection, TestEvent}; @@ -65,37 +69,17 @@ impl HttpResourceConfig { } // We'll push data to the source. ResourceDirection::Push => { - spawn_input_http_client(self, codec, input_rx, task_coordinator) + spawn_input_http_client(self, codec, input_rx, task_coordinator, runner_metrics) } } } - pub fn spawn_as_output( - self, - direction: ResourceDirection, - codec: ResourceCodec, - output_tx: mpsc::Sender>, - task_coordinator: &TaskCoordinator, - input_events: Vec, - runner_metrics: &Arc>, - ) -> vector_lib::Result<()> { - match direction { + pub fn spawn_as_output(self, ctx: HttpResourceOutputContext) -> vector_lib::Result<()> { + match ctx.direction { // We'll pull data from the sink. - ResourceDirection::Pull => Ok(spawn_output_http_client( - self, - codec, - output_tx, - task_coordinator, - )), + ResourceDirection::Pull => Ok(ctx.spawn_output_http_client(self)), // The sink will push data to us. - ResourceDirection::Push => spawn_output_http_server( - self, - codec, - output_tx, - task_coordinator, - input_events, - runner_metrics, - ), + ResourceDirection::Push => ctx.spawn_output_http_server(self), } } } @@ -129,16 +113,19 @@ fn spawn_input_http_server( async move { let mut sendable_events = sendable_events.lock().await; - if let Some(event) = sendable_events.pop_front() { - let mut buffer = BytesMut::new(); - encode_test_event(&mut encoder, &mut buffer, event); - - buffer.into_response() - } else { - // We'll send an empty 200 in the response since some - // sources throw errors for anything other than a valid - // response. - StatusCode::OK.into_response() + match sendable_events.pop_front() { + Some(event) => { + let mut buffer = BytesMut::new(); + encode_test_event(&mut encoder, &mut buffer, event); + + buffer.into_response() + } + _ => { + // We'll send an empty 200 in the response since some + // sources throw errors for anything other than a valid + // response. 
+ StatusCode::OK.into_response() + } } } }, @@ -213,12 +200,14 @@ fn spawn_input_http_client( codec: ResourceCodec, mut input_rx: mpsc::Receiver, task_coordinator: &TaskCoordinator, + runner_metrics: &Arc>, ) { // Spin up an HTTP client that will push the input data to the source on a // request-per-input-item basis. This runs serially and has no parallelism. let started = task_coordinator.track_started(); let completed = task_coordinator.track_completed(); let mut encoder = codec.into_encoder(); + let runner_metrics = Arc::clone(runner_metrics); tokio::spawn(async move { // Mark ourselves as started. We don't actually do anything until we get our first input @@ -235,8 +224,32 @@ fn spawn_input_http_client( debug!("Got event to send from runner."); let mut buffer = BytesMut::new(); + + let is_json = matches!(encoder.serializer(), Json(_)) + && matches!( + encoder.framer(), + Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' }) + ); + + if is_json { + buffer.put_u8(b'['); + } + encode_test_event(&mut encoder, &mut buffer, event); + if is_json { + if !buffer.is_empty() { + // remove trailing comma from last record + buffer.truncate(buffer.len() - 1); + } + buffer.put_u8(b']'); + + // in this edge case we have removed the trailing comma (one byte) and added + // opening and closing braces (2 bytes) for a net add of one byte. + let mut runner_metrics = runner_metrics.lock().await; + runner_metrics.sent_bytes_total += 1; + } + let mut request_builder = Request::builder() .uri(request_uri.clone()) .method(request_method.clone()); @@ -268,127 +281,144 @@ fn spawn_input_http_client( }); } -/// Spawns an HTTP server that accepts events sent by a sink. -#[allow(clippy::missing_const_for_fn)] -fn spawn_output_http_server( - config: HttpResourceConfig, - codec: ResourceCodec, - output_tx: mpsc::Sender>, - task_coordinator: &TaskCoordinator, - input_events: Vec, - runner_metrics: &Arc>, -) -> vector_lib::Result<()> { - // This HTTP server will wait for events to be sent by a sink, and collect them and send them on - // via an output sender. We accept/collect events until we're told to shutdown. - - // First, we'll build and spawn our HTTP server. - let decoder = codec.into_decoder()?; +/// Anything that the output side HTTP external resource needs +pub struct HttpResourceOutputContext<'a> { + pub direction: ResourceDirection, + pub codec: ResourceCodec, + pub output_tx: mpsc::Sender>, + pub task_coordinator: &'a TaskCoordinator, + pub input_events: Vec, + pub runner_metrics: &'a Arc>, + pub log_namespace: LogNamespace, +} - // Note that we currently don't differentiate which events should and shouldn't be rejected- - // we reject all events in this server if any are marked for rejection. - // In the future it might be useful to be able to select which to reject. That will involve - // adding logic to the test case which is passed down here, and to the event itself. Since - // we can't guarantee the order of events, we'd need a way to flag which ones need to be - // rejected. - let should_reject = input_events.iter().filter(|te| te.should_reject()).count() > 0; +impl HttpResourceOutputContext<'_> { + /// Spawns an HTTP server that accepts events sent by a sink. + #[allow(clippy::missing_const_for_fn)] + fn spawn_output_http_server(&self, config: HttpResourceConfig) -> vector_lib::Result<()> { + // This HTTP server will wait for events to be sent by a sink, and collect them and send them on + // via an output sender. We accept/collect events until we're told to shutdown. 
+ + // First, we'll build and spawn our HTTP server. + let decoder = self.codec.into_decoder(self.log_namespace)?; + + // Note that we currently don't differentiate which events should and shouldn't be rejected- + // we reject all events in this server if any are marked for rejection. + // In the future it might be useful to be able to select which to reject. That will involve + // adding logic to the test case which is passed down here, and to the event itself. Since + // we can't guarantee the order of events, we'd need a way to flag which ones need to be + // rejected. + let should_reject = self + .input_events + .iter() + .filter(|te| te.should_reject()) + .count() + > 0; + + let output_tx = self.output_tx.clone(); + let (_, http_server_shutdown_tx) = spawn_http_server( + self.task_coordinator, + &config, + self.runner_metrics, + move |request, output_runner_metrics| { + let output_tx = output_tx.clone(); + let mut decoder = decoder.clone(); - let (_, http_server_shutdown_tx) = spawn_http_server( - task_coordinator, - &config, - runner_metrics, - move |request, output_runner_metrics| { - let output_tx = output_tx.clone(); - let mut decoder = decoder.clone(); - - async move { - match hyper::body::to_bytes(request.into_body()).await { - Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), - Ok(body) => { - let mut body = BytesMut::from(&body[..]); - loop { - match decoder.decode_eof(&mut body) { - Ok(Some((events, byte_size))) => { - if should_reject { - info!("HTTP server external output resource decoded {byte_size} bytes but test case configured to reject."); - } else { - let mut output_runner_metrics = - output_runner_metrics.lock().await; - info!("HTTP server external output resource decoded {byte_size} bytes."); - - // Update the runner metrics for the received events. This will later - // be used in the Validators, as the "expected" case. - output_runner_metrics.received_bytes_total += - byte_size as u64; - - output_runner_metrics.received_events_total += - events.len() as u64; - - events.iter().for_each(|event| { - output_runner_metrics.received_event_bytes_total += - event.estimated_json_encoded_size_of().get() as u64; - }); - - output_tx - .send(events.to_vec()) - .await - .expect("should not fail to send output event"); + async move { + match hyper::body::to_bytes(request.into_body()).await { + Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), + Ok(body) => { + let byte_size = body.len(); + let mut body = BytesMut::from(&body[..]); + loop { + match decoder.decode_eof(&mut body) { + // `decoded_byte_size` is the decoded size of an individual frame. `byte_size` represents the size of the + // entire payload which may contain multiple frames and their delimiters. + Ok(Some((events, decoded_byte_size))) => { + if should_reject { + info!("HTTP server external output resource decoded {decoded_byte_size} bytes but test case configured to reject."); + } else { + let mut output_runner_metrics = + output_runner_metrics.lock().await; + info!("HTTP server external output resource decoded {decoded_byte_size} bytes."); + + // Update the runner metrics for the received events. This will later + // be used in the Validators, as the "expected" case. 
+ output_runner_metrics.received_bytes_total += + byte_size as u64; + + output_runner_metrics.received_events_total += + events.len() as u64; + + events.iter().for_each(|event| { + output_runner_metrics.received_event_bytes_total += + event.estimated_json_encoded_size_of().get() + as u64; + }); + + output_tx + .send(events.to_vec()) + .await + .expect("should not fail to send output event"); + } } - } - Ok(None) => { - if should_reject { - // This status code is not retried and should result in the component under test - // emitting error events - return StatusCode::BAD_REQUEST.into_response(); - } else { - return StatusCode::OK.into_response(); + Ok(None) => { + if should_reject { + // This status code is not retried and should result in the component under test + // emitting error events + return StatusCode::BAD_REQUEST.into_response(); + } else { + return StatusCode::OK.into_response(); + } + } + Err(_) => { + error!( + "HTTP server failed to decode {:?}", + String::from_utf8_lossy(&body) + ); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); } } - Err(_) => return StatusCode::INTERNAL_SERVER_ERROR.into_response(), } } } } - } - }, - ); + }, + ); - // Now we'll create and spawn the resource's core logic loop which simply waits for the runner - // to instruct us to shutdown, and when that happens, cascades to shutting down the HTTP server. - let resource_started = task_coordinator.track_started(); - let resource_completed = task_coordinator.track_completed(); - let mut resource_shutdown_rx = task_coordinator.register_for_shutdown(); + // Now we'll create and spawn the resource's core logic loop which simply waits for the runner + // to instruct us to shutdown, and when that happens, cascades to shutting down the HTTP server. + let resource_started = self.task_coordinator.track_started(); + let resource_completed = self.task_coordinator.track_completed(); + let mut resource_shutdown_rx = self.task_coordinator.register_for_shutdown(); - tokio::spawn(async move { - resource_started.mark_as_done(); - info!("HTTP server external output resource started."); + tokio::spawn(async move { + resource_started.mark_as_done(); + info!("HTTP server external output resource started."); - // Wait for the runner to tell us to shutdown - resource_shutdown_rx.wait().await; + // Wait for the runner to tell us to shutdown + resource_shutdown_rx.wait().await; - // signal the server to shutdown - let _ = http_server_shutdown_tx.send(()); + // signal the server to shutdown + let _ = http_server_shutdown_tx.send(()); - // mark ourselves as done - resource_completed.mark_as_done(); + // mark ourselves as done + resource_completed.mark_as_done(); - info!("HTTP server external output resource completed."); - }); + info!("HTTP server external output resource completed."); + }); - Ok(()) -} + Ok(()) + } -/// Spawns an HTTP client that pulls events by making requests to an HTTP server driven by a sink. -#[allow(clippy::missing_const_for_fn)] -fn spawn_output_http_client( - _config: HttpResourceConfig, - _codec: ResourceCodec, - _output_tx: mpsc::Sender>, - _task_coordinator: &TaskCoordinator, -) { - // TODO: The `prometheus_exporter` sink is the only sink that exposes an HTTP server which must be - // scraped... but since we need special logic to aggregate/deduplicate scraped metrics, we can't - // use this generically for that purpose. - todo!() + /// Spawns an HTTP client that pulls events by making requests to an HTTP server driven by a sink. 
+ #[allow(clippy::missing_const_for_fn)] + fn spawn_output_http_client(&self, _config: HttpResourceConfig) { + // TODO: The `prometheus_exporter` sink is the only sink that exposes an HTTP server which must be + // scraped... but since we need special logic to aggregate/deduplicate scraped metrics, we can't + // use this generically for that purpose. + todo!() + } } fn spawn_http_server( @@ -461,7 +491,12 @@ where } }); - let router = Router::new().route(&request_path, method_router); + let router = Router::new().route(&request_path, method_router).fallback( + |req: Request| async move { + error!(?req, "Component sent request the server could not route."); + StatusCode::NOT_FOUND + }, + ); // Now actually run/drive the HTTP server and process requests until we're told to shutdown. http_server_started.mark_as_done(); diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs index 4b5dc66cad0eb..c0d9d324c3d81 100644 --- a/src/components/validation/resources/mod.rs +++ b/src/components/validation/resources/mod.rs @@ -4,12 +4,16 @@ mod http; use std::sync::Arc; use tokio::sync::{mpsc, Mutex}; -use vector_lib::codecs::{ - decoding::{self, DeserializerConfig}, - encoding::{ - self, Framer, FramingConfig, JsonSerializerConfig, SerializerConfig, TextSerializerConfig, +use vector_lib::{ + codecs::{ + decoding::{self, DeserializerConfig}, + encoding::{ + self, Framer, FramingConfig, JsonSerializerConfig, SerializerConfig, + TextSerializerConfig, + }, + BytesEncoder, }, - BytesEncoder, + config::LogNamespace, }; use vector_lib::{config::DataType, event::Event}; @@ -17,6 +21,7 @@ use crate::codecs::{Decoder, DecodingConfig, Encoder, EncodingConfig, EncodingCo pub use self::event::{encode_test_event, TestEvent}; pub use self::http::HttpResourceConfig; +use self::http::HttpResourceOutputContext; use super::{ sync::{Configuring, TaskCoordinator}, @@ -73,7 +78,7 @@ impl ResourceCodec { pub fn into_encoder(&self) -> Encoder { let (framer, serializer) = match self { Self::Encoding(config) => ( - Framer::Bytes(BytesEncoder::new()), + Framer::Bytes(BytesEncoder), config.build().expect("should not fail to build serializer"), ), Self::EncodingWithFraming(config) => { @@ -101,7 +106,7 @@ impl ResourceCodec { /// /// The decoder is generated as an inverse to the input codec: if an encoding configuration was /// given, we generate a decoder that satisfies that encoding configuration, and vice versa. - pub fn into_decoder(&self) -> vector_lib::Result { + pub fn into_decoder(&self, log_namespace: LogNamespace) -> vector_lib::Result { let (framer, deserializer) = match self { Self::Decoding(config) => return config.build(), Self::Encoding(config) => ( @@ -118,7 +123,7 @@ impl ResourceCodec { } }; - Ok(Decoder::new(framer, deserializer)) + Ok(Decoder::new(framer, deserializer).with_log_namespace(log_namespace)) } } @@ -164,6 +169,8 @@ fn deserializer_config_to_serializer(config: &DeserializerConfig) -> encoding::S DeserializerConfig::NativeJson { .. } => SerializerConfig::NativeJson, DeserializerConfig::Gelf { .. } => SerializerConfig::Gelf, DeserializerConfig::Avro { avro } => SerializerConfig::Avro { avro: avro.into() }, + // TODO: Influxdb has no serializer yet + DeserializerConfig::Influxdb { .. } => todo!(), DeserializerConfig::Vrl { .. 
} => unimplemented!(), }; @@ -182,11 +189,17 @@ fn decoder_framing_to_encoding_framer(framing: &decoding::FramingConfig) -> enco }, }) } - decoding::FramingConfig::LengthDelimited => encoding::FramingConfig::LengthDelimited, + decoding::FramingConfig::LengthDelimited(config) => { + encoding::FramingConfig::LengthDelimited(encoding::LengthDelimitedEncoderConfig { + length_delimited: config.length_delimited.clone(), + }) + } decoding::FramingConfig::NewlineDelimited(_) => encoding::FramingConfig::NewlineDelimited, // TODO: There's no equivalent octet counting framer for encoding... although // there's no particular reason that would make it hard to write. decoding::FramingConfig::OctetCounting(_) => todo!(), + // TODO: chunked gelf is not supported yet in encoding + decoding::FramingConfig::ChunkedGelf(_) => todo!(), }; framing_config.build() @@ -197,6 +210,7 @@ fn serializer_config_to_deserializer( ) -> vector_lib::Result { let deserializer_config = match config { SerializerConfig::Avro { .. } => todo!(), + SerializerConfig::Cef { .. } => todo!(), SerializerConfig::Csv { .. } => todo!(), SerializerConfig::Gelf => DeserializerConfig::Gelf(Default::default()), SerializerConfig::Json(_) => DeserializerConfig::Json(Default::default()), @@ -229,7 +243,11 @@ fn encoder_framing_to_decoding_framer(framing: encoding::FramingConfig) -> decod }, }) } - encoding::FramingConfig::LengthDelimited => decoding::FramingConfig::LengthDelimited, + encoding::FramingConfig::LengthDelimited(config) => { + decoding::FramingConfig::LengthDelimited(decoding::LengthDelimitedDecoderConfig { + length_delimited: config.length_delimited.clone(), + }) + } encoding::FramingConfig::NewlineDelimited => { decoding::FramingConfig::NewlineDelimited(Default::default()) } @@ -343,16 +361,20 @@ impl ExternalResource { task_coordinator: &TaskCoordinator, input_events: Vec, runner_metrics: &Arc>, + log_namespace: LogNamespace, ) -> vector_lib::Result<()> { match self.definition { - ResourceDefinition::Http(http_config) => http_config.spawn_as_output( - self.direction, - self.codec, - output_tx, - task_coordinator, - input_events, - runner_metrics, - ), + ResourceDefinition::Http(http_config) => { + http_config.spawn_as_output(HttpResourceOutputContext { + direction: self.direction, + codec: self.codec, + output_tx, + task_coordinator, + input_events, + runner_metrics, + log_namespace, + }) + } } } } diff --git a/src/components/validation/runner/config.rs b/src/components/validation/runner/config.rs index f2adb90765ce1..1afc21e5d5fdb 100644 --- a/src/components/validation/runner/config.rs +++ b/src/components/validation/runner/config.rs @@ -1,3 +1,5 @@ +use vector_lib::config::LogNamespace; + use crate::{ components::validation::{ component_names::*, @@ -31,9 +33,9 @@ impl TopologyBuilder { let component_configuration = configuration .component_configuration_for_test_case(config_name) .ok_or(format!( - "No test case name defined for configuration {:?}.", - config_name + "No test case name defined for configuration {config_name:?}." 
))?; + Ok(match component_configuration { ComponentConfiguration::Source(source) => { debug_assert_eq!(configuration.component_type(), ComponentType::Source); @@ -41,11 +43,11 @@ impl TopologyBuilder { } ComponentConfiguration::Transform(transform) => { debug_assert_eq!(configuration.component_type(), ComponentType::Transform); - Self::from_transform(transform) + Self::from_transform(transform, configuration.log_namespace) } ComponentConfiguration::Sink(sink) => { debug_assert_eq!(configuration.component_type(), ComponentType::Sink); - Self::from_sink(sink) + Self::from_sink(sink, configuration.log_namespace) } }) } @@ -65,8 +67,8 @@ impl TopologyBuilder { } } - fn from_transform(transform: BoxedTransform) -> Self { - let (input_edge, input_source) = build_input_edge(); + fn from_transform(transform: BoxedTransform, log_namespace: LogNamespace) -> Self { + let (input_edge, input_source) = build_input_edge(log_namespace); let (output_edge, output_sink) = build_output_edge(); let mut config_builder = ConfigBuilder::default(); @@ -81,8 +83,8 @@ impl TopologyBuilder { } } - fn from_sink(sink: BoxedSink) -> Self { - let (input_edge, input_source) = build_input_edge(); + fn from_sink(sink: BoxedSink, log_namespace: LogNamespace) -> Self { + let (input_edge, input_source) = build_input_edge(log_namespace); let mut config_builder = ConfigBuilder::default(); config_builder.add_source(TEST_INPUT_SOURCE_NAME, input_source); @@ -123,11 +125,14 @@ impl TopologyBuilder { } } -fn build_input_edge() -> (InputEdge, impl Into) { +fn build_input_edge(log_namespace: LogNamespace) -> (InputEdge, impl Into) { let input_listen_addr = GrpcAddress::from(next_addr()); debug!(listen_addr = %input_listen_addr, "Creating controlled input edge."); - let input_source = VectorSourceConfig::from_address(input_listen_addr.as_socket_addr()); + let mut input_source = VectorSourceConfig::from_address(input_listen_addr.as_socket_addr()); + + input_source.log_namespace = Some(log_namespace == LogNamespace::Vector); + let input_edge = InputEdge::from_address(input_listen_addr); (input_edge, input_source) diff --git a/src/components/validation/runner/io.rs b/src/components/validation/runner/io.rs index 9e4e51e85651d..bc06761b17028 100644 --- a/src/components/validation/runner/io.rs +++ b/src/components/validation/runner/io.rs @@ -5,7 +5,8 @@ use hyper::Body; use tokio::{pin, select, sync::mpsc}; use tonic::{ body::BoxBody, - transport::{Channel, Endpoint, NamedService}, + server::NamedService, + transport::{Channel, Endpoint}, Status, }; use tower::Service; @@ -159,7 +160,7 @@ pub fn spawn_grpc_server( let (trigger_shutdown, shutdown_signal, _) = ShutdownSignal::new_wired(); let mut trigger_shutdown = Some(trigger_shutdown); - let tls_settings = MaybeTlsSettings::from_config(&None, true) + let tls_settings = MaybeTlsSettings::from_config(None, true) .expect("should not fail to get empty TLS settings"); let server = run_grpc_server( diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs index 704444ded4c21..1316499bb13ac 100644 --- a/src/components/validation/runner/mod.rs +++ b/src/components/validation/runner/mod.rs @@ -188,13 +188,11 @@ impl Runner { .insert(validator_name.to_string(), validator) .is_some() { - panic!( - "attempted to add duplicate validator '{}' to runner", - validator_name - ); + panic!("attempted to add duplicate validator '{validator_name}' to runner"); } } + #[allow(clippy::print_stdout)] pub async fn run_validation(self) -> Result, vector_lib::Error> { // Initialize 
our test environment. initialize_test_environment(); @@ -205,8 +203,8 @@ impl Runner { let test_cases = load_component_test_cases(&self.test_case_data_path)?; for test_case in test_cases { - println!(""); - println!(""); + println!(); + println!(); info!( "Running test '{}' case for component '{}' (type: {:?})...", test_case.name, @@ -322,6 +320,7 @@ impl Runner { &runner_metrics, maybe_runner_encoder.as_ref().cloned(), self.configuration.component_type, + self.configuration.log_namespace(), ); // the number of events we expect to receive from the output. @@ -431,18 +430,10 @@ impl Runner { /// returned explaining the cause. fn load_component_test_cases(test_case_data_path: &PathBuf) -> Result, String> { std::fs::File::open(test_case_data_path) - .map_err(|e| { - format!( - "I/O error during open of component validation test cases file: {}", - e - ) - }) + .map_err(|e| format!("I/O error during open of component validation test cases file: {e}")) .and_then(|file| { serde_yaml::from_reader(file).map_err(|e| { - format!( - "Deserialization error for component validation test cases file: {}", - e - ) + format!("Deserialization error for component validation test cases file: {e}") }) }) } @@ -498,6 +489,7 @@ fn build_external_resource( output_task_coordinator, test_case.events.clone(), runner_metrics, + configuration.log_namespace(), )?; Ok(( @@ -567,10 +559,10 @@ fn spawn_input_driver( runner_metrics: &Arc>, mut maybe_encoder: Option>, component_type: ComponentType, + log_namespace: LogNamespace, ) -> JoinHandle<()> { let input_runner_metrics = Arc::clone(runner_metrics); - let log_namespace = LogNamespace::Legacy; let now = Utc::now(); tokio::spawn(async move { @@ -593,7 +585,7 @@ fn spawn_input_driver( } } - let (failure_case, event) = input_event.clone().get(); + let (failure_case, mut event) = input_event.clone().get(); if let Some(encoder) = maybe_encoder.as_mut() { let mut buffer = BytesMut::new(); @@ -614,6 +606,29 @@ fn spawn_input_driver( if !failure_case || component_type == ComponentType::Sink { input_runner_metrics.sent_events_total += 1; + // Convert unix timestamp in input events to the Datetime string. + // This is necessary when a source expects the incoming event to have a + // unix timestamp but we convert it into a datetime string in the source. + // For example, the `datadog_agent` source. This only takes effect when + // the test case YAML file defining the event, constructs it with the log + // builder variant, and specifies an integer in milliseconds for the timestamp. + if component_type == ComponentType::Source { + if let Event::Log(ref mut log) = event { + if let Some(ts) = log.remove_timestamp() { + let ts = match ts.as_integer() { + Some(ts) => chrono::DateTime::from_timestamp_millis(ts) + .unwrap_or_else(|| { + panic!("invalid timestamp in input test event {ts}") + }) + .into(), + None => ts, + }; + log.parse_path_and_insert("timestamp", ts) + .expect("failed to insert timestamp"); + } + } + } + // This particular metric is tricky because a component can run the // EstimatedJsonSizeOf calculation on a single event or an array of // events. 
If it's an array of events, the size calculation includes diff --git a/src/components/validation/validators/component_spec/mod.rs b/src/components/validation/validators/component_spec/mod.rs index 855a7633a9b71..26ffd292c5cf7 100644 --- a/src/components/validation/validators/component_spec/mod.rs +++ b/src/components/validation/validators/component_spec/mod.rs @@ -256,7 +256,7 @@ fn sum_counters( sum += *value; } } - _ => errs.push(format!("{}: metric value is not a counter", metric_name,)), + _ => errs.push(format!("{metric_name}: metric value is not a counter",)), } } @@ -281,7 +281,7 @@ fn compare_actual_to_expected( let actual = sum_counters(metric_type, &metrics)?; - info!("{}: expected {}, actual {}.", metric_type, expected, actual,); + info!("{metric_type}: expected {expected}, actual {actual}."); if actual != expected && // This is a bit messy. The issue is that EstimatedJsonSizeOf can be called by a component @@ -293,8 +293,7 @@ fn compare_actual_to_expected( || (actual != (expected + (expect_received_events * 2)))) { errs.push(format!( - "{}: expected {}, but received {}", - metric_type, expected, actual + "{metric_type}: expected {expected}, actual {actual}", )); } diff --git a/src/conditions/datadog_search.rs b/src/conditions/datadog_search.rs index d192f9b910436..a7ecea5f4f590 100644 --- a/src/conditions/datadog_search.rs +++ b/src/conditions/datadog_search.rs @@ -1,30 +1,48 @@ -use std::borrow::Cow; +use std::{borrow::Cow, str::FromStr}; +use vrl::path::PathParseError; use bytes::Bytes; use vector_lib::configurable::configurable_component; use vector_lib::event::{Event, LogEvent, Value}; -use vrl::datadog_filter::{ - build_matcher, - regex::{wildcard_regex, word_regex}, - Filter, Matcher, Resolver, Run, -}; -use vrl::datadog_search_syntax::parse; -use vrl::datadog_search_syntax::{Comparison, ComparisonValue, Field}; +use vrl::datadog_filter::regex::{wildcard_regex, word_regex}; +use vrl::datadog_filter::{build_matcher, Filter, Matcher, Resolver, Run}; +use vrl::datadog_search_syntax::{Comparison, ComparisonValue, Field, QueryNode}; -use crate::conditions::{Condition, Conditional, ConditionalConfig}; +use super::{Condition, Conditional, ConditionalConfig}; /// A condition that uses the [Datadog Search](https://docs.datadoghq.com/logs/explorer/search_syntax/) query syntax against an event. #[configurable_component] -#[derive(Clone, Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq)] pub struct DatadogSearchConfig { /// The query string. - source: String, + source: QueryNode, +} + +impl Default for DatadogSearchConfig { + fn default() -> Self { + Self { + source: QueryNode::MatchAllDocs, + } + } +} + +impl FromStr for DatadogSearchConfig { + type Err = ::Err; + fn from_str(s: &str) -> Result { + s.parse().map(|source| Self { source }) + } +} + +impl From for DatadogSearchConfig { + fn from(source: QueryNode) -> Self { + Self { source } + } } impl_generate_config_from_default!(DatadogSearchConfig); /// Runner that contains the boxed `Matcher` function to check whether an `Event` matches -/// a Datadog Search Syntax query. +/// a [Datadog Search Syntax query](https://docs.datadoghq.com/logs/explorer/search_syntax/). 
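+/// (for example `source:nginx`, `@http.status_code:[400 TO 499]`, or a bare
+/// keyword, which is matched by word boundary against default fields)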
#[derive(Debug, Clone)] pub struct DatadogSearchRunner { matcher: Box>, @@ -42,8 +60,7 @@ impl ConditionalConfig for DatadogSearchConfig { &self, _enrichment_tables: &vector_lib::enrichment::TableRegistry, ) -> crate::Result { - let node = parse(&self.source)?; - let matcher = as_log(build_matcher(&node, &EventFilter)); + let matcher = as_log(build_matcher(&self.source, &EventFilter).map_err(|e| e.to_string())?); Ok(Condition::DatadogSearch(DatadogSearchRunner { matcher })) } @@ -64,32 +81,34 @@ struct EventFilter; impl Resolver for EventFilter {} impl Filter for EventFilter { - fn exists(&self, field: Field) -> Box> { - match field { + fn exists(&self, field: Field) -> Result>, PathParseError> { + Ok(match field { Field::Tag(tag) => { - let starts_with = format!("{}:", tag); + let starts_with = format!("{tag}:"); - any_string_match("tags", move |value| { + any_string_match_multiple(vec!["ddtags", "tags"], move |value| { value == tag || value.starts_with(&starts_with) }) } // Literal field 'tags' needs to be compared by key. Field::Reserved(field) if field == "tags" => { - any_string_match("tags", move |value| value == field) + any_string_match_multiple(vec!["ddtags", "tags"], move |value| value == field) } - Field::Default(f) | Field::Facet(f) | Field::Reserved(f) => { - Run::boxed(move |log: &LogEvent| { - log.parse_path_and_get_value(f.as_str()) - .ok() - .flatten() - .is_some() - }) + // A literal "source" field should string match in "source" and "ddsource" fields (OR condition). + Field::Reserved(field) if field == "source" => { + exists_match_multiple(vec!["ddsource", "source"]) } - } + + Field::Default(f) | Field::Attribute(f) | Field::Reserved(f) => exists_match(f), + }) } - fn equals(&self, field: Field, to_match: &str) -> Box> { - match field { + fn equals( + &self, + field: Field, + to_match: &str, + ) -> Result>, PathParseError> { + Ok(match field { // Default fields are compared by word boundary. Field::Default(field) => { let re = word_regex(to_match); @@ -100,66 +119,105 @@ impl Filter for EventFilter { Field::Reserved(field) if field == "tags" => { let to_match = to_match.to_owned(); - array_match(field, move |values| { + array_match_multiple(vec!["ddtags", "tags"], move |values| { values.contains(&Value::Bytes(Bytes::copy_from_slice(to_match.as_bytes()))) }) } // Individual tags are compared by element key:value. Field::Tag(tag) => { - let value_bytes = Value::Bytes(format!("{}:{}", tag, to_match).into()); + let value_bytes = Value::Bytes(format!("{tag}:{to_match}").into()); - array_match("tags", move |values| values.contains(&value_bytes)) + array_match_multiple(vec!["ddtags", "tags"], move |values| { + values.contains(&value_bytes) + }) } - // Everything else is matched by string equality. - Field::Reserved(field) | Field::Facet(field) => { + // A literal "source" field should string match in "source" and "ddsource" fields (OR condition). + Field::Reserved(field) if field == "source" => { + let to_match = to_match.to_owned(); + + string_match_multiple(vec!["ddsource", "source"], move |value| value == to_match) + } + // Reserved values are matched by string equality. 
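+            // (e.g. the query `service:web` matches only when the event's
+            // `service` field is exactly the string "web")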
+ Field::Reserved(field) => { let to_match = to_match.to_owned(); string_match(field, move |value| value == to_match) } - } + // Attribute values can be strings or numeric types + Field::Attribute(field) => { + let to_match = to_match.to_owned(); + + simple_scalar_match(field, move |value| value == to_match) + } + }) } - fn prefix(&self, field: Field, prefix: &str) -> Box> { - match field { + fn prefix( + &self, + field: Field, + prefix: &str, + ) -> Result>, PathParseError> { + Ok(match field { // Default fields are matched by word boundary. Field::Default(field) => { - let re = word_regex(&format!("{}*", prefix)); + let re = word_regex(&format!("{prefix}*")); string_match(field, move |value| re.is_match(&value)) } // Tags are recursed until a match is found. Field::Tag(tag) => { - let starts_with = format!("{}:{}", tag, prefix); + let starts_with = format!("{tag}:{prefix}"); - any_string_match("tags", move |value| value.starts_with(&starts_with)) + any_string_match_multiple(vec!["ddtags", "tags"], move |value| { + value.starts_with(&starts_with) + }) + } + // A literal "source" field should string match in "source" and "ddsource" fields (OR condition). + Field::Reserved(field) if field == "source" => { + let prefix = prefix.to_owned(); + + string_match_multiple(vec!["ddsource", "source"], move |value| { + value.starts_with(&prefix) + }) } + // All other field types are compared by complete value. - Field::Reserved(field) | Field::Facet(field) => { + Field::Reserved(field) | Field::Attribute(field) => { let prefix = prefix.to_owned(); string_match(field, move |value| value.starts_with(&prefix)) } - } + }) } - fn wildcard(&self, field: Field, wildcard: &str) -> Box> { - match field { + fn wildcard( + &self, + field: Field, + wildcard: &str, + ) -> Result>, PathParseError> { + Ok(match field { Field::Default(field) => { let re = word_regex(wildcard); string_match(field, move |value| re.is_match(&value)) } Field::Tag(tag) => { - let re = wildcard_regex(&format!("{}:{}", tag, wildcard)); + let re = wildcard_regex(&format!("{tag}:{wildcard}")); + + any_string_match_multiple(vec!["ddtags", "tags"], move |value| re.is_match(&value)) + } + // A literal "source" field should string match in "source" and "ddsource" fields (OR condition). + Field::Reserved(field) if field == "source" => { + let re = wildcard_regex(wildcard); - any_string_match("tags", move |value| re.is_match(&value)) + string_match_multiple(vec!["ddsource", "source"], move |value| re.is_match(&value)) } - Field::Reserved(field) | Field::Facet(field) => { + Field::Reserved(field) | Field::Attribute(field) => { let re = wildcard_regex(wildcard); string_match(field, move |value| re.is_match(&value)) } - } + }) } fn compare( @@ -167,12 +225,12 @@ impl Filter for EventFilter { field: Field, comparator: Comparison, comparison_value: ComparisonValue, - ) -> Box> { + ) -> Result>, PathParseError> { let rhs = Cow::from(comparison_value.to_string()); - match field { - // Facets are compared numerically if the value is numeric, or as strings otherwise. - Field::Facet(f) => { + Ok(match field { + // Attributes are compared numerically if the value is numeric, or as strings otherwise. + Field::Attribute(f) => { Run::boxed(move |log: &LogEvent| { match ( log.parse_path_and_get_value(f.as_str()).ok().flatten(), @@ -242,19 +300,30 @@ impl Filter for EventFilter { }) } // Tag values need extracting by "key:value" to be compared. 
- Field::Tag(tag) => any_string_match("tags", move |value| match value.split_once(':') { - Some((t, lhs)) if t == tag => { - let lhs = Cow::from(lhs); + Field::Tag(tag) => any_string_match_multiple(vec!["ddtags", "tags"], move |value| { + match value.split_once(':') { + Some((t, lhs)) if t == tag => { + let lhs = Cow::from(lhs); - match comparator { - Comparison::Lt => lhs < rhs, - Comparison::Lte => lhs <= rhs, - Comparison::Gt => lhs > rhs, - Comparison::Gte => lhs >= rhs, + match comparator { + Comparison::Lt => lhs < rhs, + Comparison::Lte => lhs <= rhs, + Comparison::Gt => lhs > rhs, + Comparison::Gte => lhs >= rhs, + } } + _ => false, } - _ => false, }), + // A literal "source" field should string match in "source" and "ddsource" fields (OR condition). + Field::Reserved(field) if field == "source" => { + string_match_multiple(vec!["ddsource", "source"], move |lhs| match comparator { + Comparison::Lt => lhs < rhs, + Comparison::Lte => lhs <= rhs, + Comparison::Gt => lhs > rhs, + Comparison::Gte => lhs >= rhs, + }) + } // All other tag types are compared by string. Field::Default(field) | Field::Reserved(field) => { string_match(field, move |lhs| match comparator { @@ -264,13 +333,28 @@ impl Filter for EventFilter { Comparison::Gte => lhs >= rhs, }) } - } + }) } } -/// Returns a `Matcher` that returns true if the log event resolves to a string which -/// matches the provided `func`. -fn string_match(field: S, func: F) -> Box> +// Returns a `Matcher` that returns true if the field exists. +fn exists_match(field: S) -> Box> +where + S: Into, +{ + let field = field.into(); + + Run::boxed(move |log: &LogEvent| { + log.parse_path_and_get_value(field.as_str()) + .ok() + .flatten() + .is_some() + }) +} + +/// Returns a `Matcher` that returns true if the field resolves to a string, +/// numeric, or boolean which matches the provided `func`. +fn simple_scalar_match(field: S, func: F) -> Box> where S: Into, F: Fn(Cow) -> bool + Send + Sync + Clone + 'static, @@ -279,59 +363,99 @@ where Run::boxed(move |log: &LogEvent| { match log.parse_path_and_get_value(field.as_str()).ok().flatten() { + Some(Value::Boolean(v)) => func(v.to_string().into()), Some(Value::Bytes(v)) => func(String::from_utf8_lossy(v)), + Some(Value::Integer(v)) => func(v.to_string().into()), + Some(Value::Float(v)) => func(v.to_string().into()), _ => false, } }) } -/// Returns a `Matcher` that returns true if the log event resolves to an array, where -/// the vector of `Value`s the array contains matches the provided `func`. -fn array_match(field: S, func: F) -> Box> +/// Returns a `Matcher` that returns true if the field resolves to a string which +/// matches the provided `func`. +fn string_match(field: S, func: F) -> Box> where S: Into, - F: Fn(&Vec) -> bool + Send + Sync + Clone + 'static, + F: Fn(Cow) -> bool + Send + Sync + Clone + 'static, { let field = field.into(); Run::boxed(move |log: &LogEvent| { match log.parse_path_and_get_value(field.as_str()).ok().flatten() { - Some(Value::Array(values)) => func(values), + Some(Value::Bytes(v)) => func(String::from_utf8_lossy(v)), _ => false, } }) } -/// Returns a `Matcher` that returns true if the log event resolves to an array, where -/// at least one `Value` it contains matches the provided `func`. -fn any_match(field: S, func: F) -> Box> +// Returns a `Matcher` that returns true if any provided field exists. 
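Each `*_match_multiple` helper follows the same shape: run the single-field matcher over every candidate field and OR the results, so `source:foo` matches whether the event carries `ddsource` or `source`. The pattern in isolation, reduced to plain functions over a string map (names here are illustrative only):

    use std::collections::HashMap;

    fn field_matches(log: &HashMap<String, String>, field: &str, want: &str) -> bool {
        log.get(field).is_some_and(|v| v.as_str() == want)
    }

    fn any_field_matches(log: &HashMap<String, String>, fields: &[&str], want: &str) -> bool {
        // OR across the candidate fields; the first hit short-circuits.
        fields.iter().any(|f| field_matches(log, f, want))
    }

    fn main() {
        let log = HashMap::from([("ddsource".to_string(), "foo".to_string())]);
        assert!(any_field_matches(&log, &["ddsource", "source"], "foo"));
        assert!(!any_field_matches(&log, &["ddsource", "source"], "bar"));
    }
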
+fn exists_match_multiple(fields: Vec) -> Box> where - S: Into, - F: Fn(&Value) -> bool + Send + Sync + Clone + 'static, + S: Into + Clone + Send + Sync + 'static, +{ + Run::boxed(move |log: &LogEvent| { + fields + .iter() + .any(|field| exists_match(field.clone()).run(log)) + }) +} + +/// Returns a `Matcher` that returns true if any provided field resolves to a string which +/// matches the provided `func`. +fn string_match_multiple(fields: Vec, func: F) -> Box> +where + S: Into + Clone + Send + Sync + 'static, + F: Fn(Cow) -> bool + Send + Sync + Clone + 'static, { - array_match(field, move |values| values.iter().any(&func)) + Run::boxed(move |log: &LogEvent| { + fields + .iter() + .any(|field| string_match(field.clone(), func.clone()).run(log)) + }) } -/// Returns a `Matcher` that returns true if the log event resolves to an array of strings, -/// where at least one string matches the provided `func`. -fn any_string_match(field: S, func: F) -> Box> +fn any_string_match_multiple(fields: Vec, func: F) -> Box> where - S: Into, + S: Into + Clone + Send + Sync + 'static, F: Fn(Cow) -> bool + Send + Sync + Clone + 'static, { - any_match(field, move |value| { + any_match_multiple(fields, move |value| { let bytes = value.coerce_to_bytes(); func(String::from_utf8_lossy(&bytes)) }) } +/// Returns a `Matcher` that returns true if any provided field of the log event resolves to an array, where +/// at least one `Value` it contains matches the provided `func`. +fn any_match_multiple(fields: Vec, func: F) -> Box> +where + S: Into + Clone + Send + Sync + 'static, + F: Fn(&Value) -> bool + Send + Sync + Clone + 'static, +{ + array_match_multiple(fields, move |values| values.iter().any(&func)) +} + +/// Returns a `Matcher` that returns true if any provided field of the log event resolves to an array, where +/// the vector of `Value`s the array contains matches the provided `func`. +fn array_match_multiple(fields: Vec, func: F) -> Box> +where + S: Into + Clone + Send + Sync + 'static, + F: Fn(&Vec) -> bool + Send + Sync + Clone + 'static, +{ + Run::boxed(move |log: &LogEvent| { + fields.iter().any(|field| { + let field = field.clone().into(); + match log.parse_path_and_get_value(field.as_str()).ok().flatten() { + Some(Value::Array(values)) => func(values), + _ => false, + } + }) + }) +} + #[cfg(test)] mod test { - use serde_json::json; - use vector_lib::event::Event; - use vrl::datadog_filter::{build_matcher, Filter, Resolver}; - use vrl::datadog_search_syntax::parse; - use super::*; use crate::log_event; @@ -347,6 +471,12 @@ mod test { log_event!["tags" => vec!["a:foo"]], // Pass log_event!["tags" => vec!["b:foo"]], // Fail ), + // Tag exists with - in name. + ( + "_exists_:a-b", // Source + log_event!["tags" => vec!["a-b:foo"]], // Pass + log_event!["tags" => vec!["ab:foo"]], // Fail + ), // Tag exists (negate). ( "NOT _exists_:a", @@ -359,23 +489,32 @@ mod test { log_event!["tags" => vec!["b:foo"]], log_event!["tags" => vec!["a:foo"]], ), - // Facet exists. + // Attribute exists. ( "_exists_:@b", - log_event!["custom" => json!({"b": "foo"})], - log_event!["custom" => json!({"a": "foo"})], - ), - // Facet exists (negate). + log_event!["b" => "foo"], + log_event!["a" => "foo"], + ), + // Attribute with - in name, exists. + // TODO: this is a test case which exists in the Datadog implementation of the feature. + // in our implementation, it fails because parse_path_and_get_value, indicates that + // the `-` in the field name is an invalid field name. 
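The `_exists_:@foo-bar` case disabled just below fails because `parse_path_and_get_value` rejects a bare `-` inside a field name. Assuming VRL's usual path-quoting rules (an assumption, not something this patch exercises), such a field would have to be addressed as a quoted segment:

    // Hypothetical lookups, illustrating the failure mode:
    // log.parse_path_and_get_value("foo-bar")     // Err: `-` is invalid in a bare field name
    // log.parse_path_and_get_value("\"foo-bar\"") // quoted segment, assumed to parse
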
+ // ( + // "_exists_:@foo-bar", + // log_event!["foo-bar" => "foo"], + // log_event!["foobar" => "foo"], + // ), + // Attribute exists (negate). ( "NOT _exists_:@b", - log_event!["custom" => json!({"a": "foo"})], - log_event!["custom" => json!({"b": "foo"})], + log_event!["a" => "foo"], + log_event!["b" => "foo"], ), - // Facet exists (negate w/-). + // Attribute exists (negate w/-). ( "-_exists_:@b", - log_event!["custom" => json!({"a": "foo"})], - log_event!["custom" => json!({"b": "foo"})], + log_event!["a" => "foo"], + log_event!["b" => "foo"], ), // Tag doesn't exist. ( @@ -395,23 +534,23 @@ mod test { log_event!["tags" => vec!["a:foo"]], log_event![], ), - // Facet doesn't exist. + // Attribute doesn't exist. ( "_missing_:@b", - log_event!["custom" => json!({"a": "foo"})], - log_event!["custom" => json!({"b": "foo"})], + log_event!["a" => "foo"], + log_event!["b" => "foo"], ), - // Facet doesn't exist (negate). + // Attribute doesn't exist (negate). ( "NOT _missing_:@b", - log_event!["custom" => json!({"b": "foo"})], - log_event!["custom" => json!({"a": "foo"})], + log_event!["b" => "foo"], + log_event!["a" => "foo"], ), - // Facet doesn't exist (negate w/-). + // Attribute doesn't exist (negate w/-). ( "-_missing_:@b", - log_event!["custom" => json!({"b": "foo"})], - log_event!["custom" => json!({"a": "foo"})], + log_event!["b" => "foo"], + log_event!["a" => "foo"], ), // Keyword. ("bla", log_event!["message" => "bla"], log_event![]), @@ -543,56 +682,138 @@ mod test { ( r#"a:"bla""#, log_event!["tags" => vec!["a:bla"]], - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], ), // Quoted tag match (negate). ( r#"NOT a:"bla""#, - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], log_event!["tags" => vec!["a:bla"]], ), // Quoted tag match (negate w/-). ( r#"-a:"bla""#, - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], log_event!["tags" => vec!["a:bla"]], ), - // Facet match. + // Boolean attribute match. + ("@a:true", log_event!["a" => true], log_event!["a" => false]), + // Boolean attribute match (negate). + ( + "NOT @a:false", + log_event!["a" => true], + log_event!["a" => false], + ), + // String attribute match. ( "@a:bla", - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], log_event!["tags" => vec!["a:bla"]], ), - // Facet match (negate). + // String attribute match (negate). ( "NOT @a:bla", log_event!["tags" => vec!["a:bla"]], - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], ), - // Facet match (negate w/-). + // String attribute match single character. 
+ ("@a:b", log_event!["a" => "b"], log_event!["a" => "c"]), + // String attribute match special chars + ( + "@a:va\\/lue", + log_event!["a" => "va/lue"], + log_event!["a" => "value"], + ), + // String attribute match escaped && chars + ( + "@a:va\\&&lue", + log_event!["a" => "va&&lue"], + log_event!["a" => "value"], + ), + // String attribute match escaped spaces + ( + "@a:va\\ lue", + log_event!["a" => "va lue"], + log_event!["a" => "value"], + ), + // String attribute match escaped || chars + ( + "@a:va\\||lue", + log_event!["a" => "va||lue"], + log_event!["a" => "value"], + ), + // String attribute match escaped () chars + ( + "@a:va\\(lue", + log_event!["a" => "va(lue"], + log_event!["a" => "value"], + ), + // String attribute match escaped * chars + ( + "@a:va\\*lue", + log_event!["a" => "va*lue"], + log_event!["a" => "value"], + ), + // String attribute match ~ chars + // TODO: in Datadog, this character does not need to be escaped. + ( + "@a:va\\~lue", + log_event!["a" => "va~lue"], + log_event!["a" => "value"], + ), + // String attribute match ^ chars + // TODO: in Datadog, this character does not need to be escaped. + ( + "@a:va\\^lue", + log_event!["a" => "va^lue"], + log_event!["a" => "value"], + ), + // String attribute match / chars + ( + "@a:va/lue", + log_event!["a" => "va/lue"], + log_event!["a" => "value"], + ), + // String attribute match (negate w/-). ( "-@a:bla", log_event!["tags" => vec!["a:bla"]], - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], ), - // Quoted facet match. + // Quoted attribute match. ( r#"@a:"bla""#, - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], log_event!["tags" => vec!["a:bla"]], ), - // Quoted facet match (negate). + // Quoted attribute match (negate). ( r#"NOT @a:"bla""#, log_event!["tags" => vec!["a:bla"]], - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], ), - // Quoted facet match (negate w/-). + // Quoted attribute match (negate w/-). ( r#"-@a:"bla""#, log_event!["tags" => vec!["a:bla"]], - log_event!["custom" => json!({"a": "bla"})], + log_event!["a" => "bla"], ), + // Integer attribute match. + ( + "@a:200", + log_event!["a" => 200], + log_event!["tags" => vec!["a:200"]], + ), + // Integer attribute match (negate w/-). + ("-@a:200", log_event!["a" => 199], log_event!["a" => 200]), + // Float attribute match. + ( + "@a:0.75", + log_event!["a" => 0.75], + log_event!["tags" => vec!["a:0.75"]], + ), + // Float attribute match (negate w/-). + ("-@a:0.75", log_event!["a" => 0.74], log_event!["a" => 0.75]), // Wildcard prefix. ( "*bla", @@ -630,23 +851,15 @@ mod test { log_event!["message" => "blafoo"], ), // Multiple wildcards. - ( - "*b*la*", - log_event!["custom" => json!({"title": "foobla"})], - log_event![], - ), + ("*b*la*", log_event!["message" => "foobla"], log_event![]), // Multiple wildcards (negate). ( "NOT *b*la*", log_event![], - log_event!["custom" => json!({"title": "foobla"})], + log_event!["message" => "foobla"], ), // Multiple wildcards (negate w/-). - ( - "-*b*la*", - log_event![], - log_event!["custom" => json!({"title": "foobla"})], - ), + ("-*b*la*", log_event![], log_event!["message" => "foobla"]), // Wildcard prefix - tag. ( "a:*bla", @@ -687,73 +900,73 @@ mod test { ( "c:*b*la*", log_event!["tags" => vec!["c:foobla"]], - log_event!["custom" => r#"{"title": "foobla"}"#], + log_event!["custom" => r#"{"title" => "foobla"}"#], ), // Multiple wildcards - tag (negate). 
( "NOT c:*b*la*", - log_event!["custom" => r#"{"title": "foobla"}"#], + log_event!["custom" => r#"{"title" => "foobla"}"#], log_event!["tags" => vec!["c:foobla"]], ), // Multiple wildcards - tag (negate w/-). ( "-c:*b*la*", - log_event!["custom" => r#"{"title": "foobla"}"#], + log_event!["custom" => r#"{"title" => "foobla"}"#], log_event!["tags" => vec!["c:foobla"]], ), - // Wildcard prefix - facet. + // Wildcard prefix - attribute. ( "@a:*bla", - log_event!["custom" => json!({"a": "foobla"})], + log_event!["a" => "foobla"], log_event!["tags" => vec!["a:foobla"]], ), - // Wildcard prefix - facet (negate). + // Wildcard prefix - attribute (negate). ( "NOT @a:*bla", log_event!["tags" => vec!["a:foobla"]], - log_event!["custom" => json!({"a": "foobla"})], + log_event!["a" => "foobla"], ), - // Wildcard prefix - facet (negate w/-). + // Wildcard prefix - attribute (negate w/-). ( "-@a:*bla", log_event!["tags" => vec!["a:foobla"]], - log_event!["custom" => json!({"a": "foobla"})], + log_event!["a" => "foobla"], ), - // Wildcard suffix - facet. + // Wildcard suffix - attribute. ( "@b:bla*", - log_event!["custom" => json!({"b": "blabop"})], + log_event!["b" => "blabop"], log_event!["tags" => vec!["b:blabop"]], ), - // Wildcard suffix - facet (negate). + // Wildcard suffix - attribute (negate). ( "NOT @b:bla*", log_event!["tags" => vec!["b:blabop"]], - log_event!["custom" => json!({"b": "blabop"})], + log_event!["b" => "blabop"], ), - // Wildcard suffix - facet (negate w/-). + // Wildcard suffix - attribute (negate w/-). ( "-@b:bla*", log_event!["tags" => vec!["b:blabop"]], - log_event!["custom" => json!({"b": "blabop"})], + log_event!["b" => "blabop"], ), - // Multiple wildcards - facet. + // Multiple wildcards - attribute. ( "@c:*b*la*", - log_event!["custom" => json!({"c": "foobla"})], + log_event!["c" => "foobla"], log_event!["tags" => vec!["c:foobla"]], ), - // Multiple wildcards - facet (negate). + // Multiple wildcards - attribute (negate). ( "NOT @c:*b*la*", log_event!["tags" => vec!["c:foobla"]], - log_event!["custom" => json!({"c": "foobla"})], + log_event!["c" => "foobla"], ), - // Multiple wildcards - facet (negate w/-). + // Multiple wildcards - attribute (negate w/-). ( "-@c:*b*la*", log_event!["tags" => vec!["c:foobla"]], - log_event!["custom" => json!({"c": "foobla"})], + log_event!["c" => "foobla"], ), // Special case for tags. ( @@ -905,62 +1118,50 @@ mod test { log_event!["tags" => vec!["b:test"]], log_event!["tags" => vec!["a:test"]], ), - // Range - numeric, inclusive, facet. - ( - "@b:[1 TO 10]", - log_event!["custom" => json!({"b": 5})], - log_event!["custom" => json!({"b": 11})], - ), + // Range - numeric, inclusive, attribute. + ("@b:[1 TO 10]", log_event!["b" => 5], log_event!["b" => 11]), ( "@b:[1 TO 100]", - log_event!["custom" => json!({"b": "10"})], - log_event!["custom" => json!({"b": "2"})], + log_event!["b" => "10"], + log_event!["b" => "2"], ), - // Range - numeric, inclusive, facet (negate). + // Range - numeric, inclusive, attribute (negate). ( "NOT @b:[1 TO 10]", - log_event!["custom" => json!({"b": 11})], - log_event!["custom" => json!({"b": 5})], + log_event!["b" => 11], + log_event!["b" => 5], ), ( "NOT @b:[1 TO 100]", - log_event!["custom" => json!({"b": "2"})], - log_event!["custom" => json!({"b": "10"})], - ), - // Range - numeric, inclusive, facet (negate w/-). 
- ( - "-@b:[1 TO 10]", - log_event!["custom" => json!({"b": 11})], - log_event!["custom" => json!({"b": 5})], + log_event!["b" => "2"], + log_event!["b" => "10"], ), + // Range - numeric, inclusive, attribute (negate w/-). + ("-@b:[1 TO 10]", log_event!["b" => 11], log_event!["b" => 5]), ( "NOT @b:[1 TO 100]", - log_event!["custom" => json!({"b": "2"})], - log_event!["custom" => json!({"b": "10"})], + log_event!["b" => "2"], + log_event!["b" => "10"], ), - // Range - alpha, inclusive, facet. - ( - "@b:[a TO z]", - log_event!["custom" => json!({"b": "c"})], - log_event!["custom" => json!({"b": 5})], - ), - // Range - alphanumeric, inclusive, facet. + // Range - alpha, inclusive, attribute. + ("@b:[a TO z]", log_event!["b" => "c"], log_event!["b" => 5]), + // Range - alphanumeric, inclusive, attribute. ( r#"@b:["1" TO "100"]"#, - log_event!["custom" => json!({"b": "10"})], - log_event!["custom" => json!({"b": "2"})], + log_event!["b" => "10"], + log_event!["b" => "2"], ), - // Range - alphanumeric, inclusive, facet (negate). + // Range - alphanumeric, inclusive, attribute (negate). ( r#"NOT @b:["1" TO "100"]"#, - log_event!["custom" => json!({"b": "2"})], - log_event!["custom" => json!({"b": "10"})], + log_event!["b" => "2"], + log_event!["b" => "10"], ), - // Range - alphanumeric, inclusive, facet (negate). + // Range - alphanumeric, inclusive, attribute (negate). ( r#"-@b:["1" TO "100"]"#, - log_event!["custom" => json!({"b": "2"})], - log_event!["custom" => json!({"b": "10"})], + log_event!["b" => "2"], + log_event!["b" => "10"], ), // Range - tag, exclusive. ( @@ -995,38 +1196,378 @@ mod test { log_event!["tags" => vec!["f:100"]], log_event!["tags" => vec!["f:10"]], ), - // Range - facet, exclusive. - ( - "@f:{1 TO 100}", - log_event!["custom" => json!({"f": 50})], - log_event!["custom" => json!({"f": 1})], - ), + // Range - attribute, exclusive. + ("@f:{1 TO 100}", log_event!["f" => 50], log_event!["f" => 1]), ( "@f:{1 TO 100}", - log_event!["custom" => json!({"f": 50})], - log_event!["custom" => json!({"f": 100})], + log_event!["f" => 50], + log_event!["f" => 100], ), - // Range - facet, exclusive (negate). + // Range - attribute, exclusive (negate). ( "NOT @f:{1 TO 100}", - log_event!["custom" => json!({"f": 1})], - log_event!["custom" => json!({"f": 50})], + log_event!["f" => 1], + log_event!["f" => 50], ), ( "NOT @f:{1 TO 100}", - log_event!["custom" => json!({"f": 100})], - log_event!["custom" => json!({"f": 50})], + log_event!["f" => 100], + log_event!["f" => 50], ), - // Range - facet, exclusive (negate w/-). + // Range - attribute, exclusive (negate w/-). 
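These range cases pin down the comparison semantics: an attribute is compared numerically when the event value is numeric, and lexicographically when it is a string, which is why `@b:[1 TO 100]` passes for the string "10" but fails for the string "2". The rule in isolation (a sketch of the behavior inferred from these tests, not the vrl code itself):

    enum Scalar {
        Num(f64),
        Str(String),
    }

    fn in_inclusive_range(value: &Scalar, lo: &str, hi: &str) -> bool {
        match value {
            // Numeric event values compare numerically, if the bounds parse.
            Scalar::Num(v) => matches!(
                (lo.parse::<f64>(), hi.parse::<f64>()),
                (Ok(l), Ok(h)) if l <= *v && *v <= h
            ),
            // String event values compare lexicographically, even when they look numeric.
            Scalar::Str(s) => lo <= s.as_str() && s.as_str() <= hi,
        }
    }

    // in_inclusive_range(&Scalar::Num(5.0), "1", "10")          -> true
    // in_inclusive_range(&Scalar::Str("10".into()), "1", "100") -> true
    // in_inclusive_range(&Scalar::Str("2".into()), "1", "100")  -> false
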
( "-@f:{1 TO 100}", - log_event!["custom" => json!({"f": 1})], - log_event!["custom" => json!({"f": 50})], + log_event!["f" => 1], + log_event!["f" => 50], ), ( "-@f:{1 TO 100}", - log_event!["custom" => json!({"f": 100})], - log_event!["custom" => json!({"f": 50})], + log_event!["f" => 100], + log_event!["f" => 50], + ), + // OR of two values + ( + "@field:(value1 OR value2)", + log_event!["field" => "value1"], + log_event!["field" => "value"], + ), + // OR of two values + ( + "@field:value1 OR @field:value2", + log_event!["field" => "value1"], + log_event!["field" => "value"], + ), + // negate OR of two values + ( + "-@field:value1 OR -@field:value2", + log_event!["field" => "value"], + log_event!["field" => "value2"], + ), + // default AND of two values + ( + "@field:value @field2:value2", + log_event!["field" => "value", "field2" => "value2"], + log_event!["field" => "value", "field2" => "value3"], + ), + // handles newline + ( + "@field:(value1 OR \n value2)", + log_event!["field" => "value1"], + log_event!["field" => "value"], + ), + // negate AND of bool and string + ( + "NOT (@field:true AND @field2:value2)", + log_event!["field" => false, "field2" => "value2"], + log_event!["field" => true, "field2" => "value2"], + ), + // tags checks with 'ddtags' (DD Agent Source naming) + + // Tag exists. + ( + "_exists_:a", // Source + log_event!["ddtags" => vec!["a:foo"]], // Pass + log_event!["ddtags" => vec!["b:foo"]], // Fail + ), + // Tag exists with - in name. + ( + "_exists_:a-b", // Source + log_event!["ddtags" => vec!["a-b:foo"]], // Pass + log_event!["ddtags" => vec!["ab:foo"]], // Fail + ), + // Tag exists (negate). + ( + "NOT _exists_:a", + log_event!["ddtags" => vec!["b:foo"]], + log_event!("ddtags" => vec!["a:foo"]), + ), + // Tag exists (negate w/-). + ( + "-_exists_:a", + log_event!["ddtags" => vec!["b:foo"]], + log_event!["ddtags" => vec!["a:foo"]], + ), + // Tag doesn't exist. + ( + "_missing_:a", + log_event![], + log_event!["ddtags" => vec!["a:foo"]], + ), + // Tag doesn't exist (negate). + ( + "NOT _missing_:a", + log_event!["ddtags" => vec!["a:foo"]], + log_event![], + ), + // Tag doesn't exist (negate w/-). + ( + "-_missing_:a", + log_event!["ddtags" => vec!["a:foo"]], + log_event![], + ), + // Tag match. + ( + "a:bla", + log_event!["ddtags" => vec!["a:bla"]], + log_event!["ddtags" => vec!["b:bla"]], + ), + // Tag match (negate). + ( + "NOT a:bla", + log_event!["ddtags" => vec!["b:bla"]], + log_event!["ddtags" => vec!["a:bla"]], + ), + // Reserved tag match (negate). + ( + "NOT host:foo", + log_event!["ddtags" => vec!["host:fo o"]], + log_event!["host" => "foo"], + ), + // Tag match (negate w/-). + ( + "-a:bla", + log_event!["ddtags" => vec!["b:bla"]], + log_event!["ddtags" => vec!["a:bla"]], + ), + // Quoted tag match. + ( + r#"a:"bla""#, + log_event!["ddtags" => vec!["a:bla"]], + log_event!["a" => "bla"], + ), + // Quoted tag match (negate). + ( + r#"NOT a:"bla""#, + log_event!["a" => "bla"], + log_event!["ddtags" => vec!["a:bla"]], + ), + // Quoted tag match (negate w/-). + ( + r#"-a:"bla""#, + log_event!["a" => "bla"], + log_event!["ddtags" => vec!["a:bla"]], + ), + // String attribute match. + ( + "@a:bla", + log_event!["a" => "bla"], + log_event!["ddtags" => vec!["a:bla"]], + ), + // String attribute match (negate). + ( + "NOT @a:bla", + log_event!["ddtags" => vec!["a:bla"]], + log_event!["a" => "bla"], + ), + // String attribute match (negate w/-). 
+ ( + "-@a:bla", + log_event!["ddtags" => vec!["a:bla"]], + log_event!["a" => "bla"], + ), + // Quoted attribute match. + ( + r#"@a:"bla""#, + log_event!["a" => "bla"], + log_event!["ddtags" => vec!["a:bla"]], + ), + // Quoted attribute match (negate). + ( + r#"NOT @a:"bla""#, + log_event!["ddtags" => vec!["a:bla"]], + log_event!["a" => "bla"], + ), + // Quoted attribute match (negate w/-). + ( + r#"-@a:"bla""#, + log_event!["ddtags" => vec!["a:bla"]], + log_event!["a" => "bla"], + ), + // Integer attribute match. + ( + "@a:200", + log_event!["a" => 200], + log_event!["ddtags" => vec!["a:200"]], + ), + // Float attribute match. + ( + "@a:0.75", + log_event!["a" => 0.75], + log_event!["ddtags" => vec!["a:0.75"]], + ), + ( + "a:*bla", + log_event!["ddtags" => vec!["a:foobla"]], + log_event!["ddtags" => vec!["a:blafoo"]], + ), + // Wildcard prefix - tag (negate). + ( + "NOT a:*bla", + log_event!["ddtags" => vec!["a:blafoo"]], + log_event!["ddtags" => vec!["a:foobla"]], + ), + // Wildcard prefix - tag (negate w/-). + ( + "-a:*bla", + log_event!["ddtags" => vec!["a:blafoo"]], + log_event!["ddtags" => vec!["a:foobla"]], + ), + // Wildcard suffix - tag. + ( + "b:bla*", + log_event!["ddtags" => vec!["b:blabop"]], + log_event!["ddtags" => vec!["b:bopbla"]], + ), + // Wildcard suffix - tag (negate). + ( + "NOT b:bla*", + log_event!["ddtags" => vec!["b:bopbla"]], + log_event!["ddtags" => vec!["b:blabop"]], + ), + // Wildcard suffix - tag (negate w/-). + ( + "-b:bla*", + log_event!["ddtags" => vec!["b:bopbla"]], + log_event!["ddtags" => vec!["b:blabop"]], + ), + // Multiple wildcards - tag. + ( + "c:*b*la*", + log_event!["ddtags" => vec!["c:foobla"]], + log_event!["custom" => r#"{"title" => "foobla"}"#], + ), + // Multiple wildcards - tag (negate). + ( + "NOT c:*b*la*", + log_event!["custom" => r#"{"title" => "foobla"}"#], + log_event!["ddtags" => vec!["c:foobla"]], + ), + // Multiple wildcards - tag (negate w/-). + ( + "-c:*b*la*", + log_event!["custom" => r#"{"title" => "foobla"}"#], + log_event!["ddtags" => vec!["c:foobla"]], + ), + // Wildcard prefix - attribute. + ( + "@a:*bla", + log_event!["a" => "foobla"], + log_event!["ddtags" => vec!["a:foobla"]], + ), + // Wildcard prefix - attribute (negate). + ( + "NOT @a:*bla", + log_event!["ddtags" => vec!["a:foobla"]], + log_event!["a" => "foobla"], + ), + // Wildcard prefix - attribute (negate w/-). + ( + "-@a:*bla", + log_event!["ddtags" => vec!["a:foobla"]], + log_event!["a" => "foobla"], + ), + // Wildcard suffix - attribute. + ( + "@b:bla*", + log_event!["b" => "blabop"], + log_event!["ddtags" => vec!["b:blabop"]], + ), + // Wildcard suffix - attribute (negate). + ( + "NOT @b:bla*", + log_event!["ddtags" => vec!["b:blabop"]], + log_event!["b" => "blabop"], + ), + // Wildcard suffix - attribute (negate w/-). + ( + "-@b:bla*", + log_event!["ddtags" => vec!["b:blabop"]], + log_event!["b" => "blabop"], + ), + // Multiple wildcards - attribute. + ( + "@c:*b*la*", + log_event!["c" => "foobla"], + log_event!["ddtags" => vec!["c:foobla"]], + ), + // Multiple wildcards - attribute (negate). + ( + "NOT @c:*b*la*", + log_event!["ddtags" => vec!["c:foobla"]], + log_event!["c" => "foobla"], + ), + // Multiple wildcards - attribute (negate w/-). + ( + "-@c:*b*la*", + log_event!["ddtags" => vec!["c:foobla"]], + log_event!["c" => "foobla"], + ), + // Special case for tags. + ( + "tags:a", + log_event!["ddtags" => vec!["a", "b", "c"]], + log_event!["ddtags" => vec!["d", "e", "f"]], + ), + // Special case for tags (negate). 
+ ( + "NOT tags:a", + log_event!["ddtags" => vec!["d", "e", "f"]], + log_event!["ddtags" => vec!["a", "b", "c"]], + ), + // Special case for tags (negate w/-). + ( + "-tags:a", + log_event!["ddtags" => vec!["d", "e", "f"]], + log_event!["ddtags" => vec!["a", "b", "c"]], + ), + // Special case: 'source' looks up on 'source' and 'ddsource' (OR condition) + // source + ( + "source:foo", + log_event!["source" => "foo"], + log_event!["tags" => vec!["source:foo"]], + ), + ( + "source:foo", + log_event!["source" => "foo"], + log_event!["source" => "foobar"], + ), + ( + "source:foo", + log_event!["source" => "foo"], + log_event!["source" => r#"{"value": "foo"}"#], + ), + // ddsource + ( + "source:foo", + log_event!["ddsource" => "foo"], + log_event!["tags" => vec!["ddsource:foo"]], + ), + ( + "source:foo", + log_event!["ddsource" => "foo"], + log_event!["ddsource" => "foobar"], + ), + ( + "source:foo", + log_event!["ddsource" => "foo"], + log_event!["ddsource" => r#"{"value": "foo"}"#], + ), + // both source and ddsource + ( + "source:foo", + log_event!["source" => "foo", "ddsource" => "foo"], + log_event!["source" => "foobar", "ddsource" => "foobar"], + ), + ( + "source:foo", + log_event!["source" => "foo", "ddsource" => "foobar"], + log_event!["source" => "foobar", "ddsource" => "foobar"], + ), + ( + "source:foo", + log_event!["source" => "foobar", "ddsource" => "foo"], + log_event!["source" => "foobar", "ddsource" => "foobar"], ), ] } @@ -1043,8 +1584,8 @@ mod test { let checks = get_checks(); for (source, pass, fail) in checks { - let node = parse(source).unwrap(); - let matcher = build_matcher(&node, &filter); + let node: QueryNode = source.parse().unwrap(); + let matcher = build_matcher(&node, &filter).unwrap(); assert!(matcher.run(&processor(pass))); assert!(!matcher.run(&processor(fail))); @@ -1065,27 +1606,25 @@ mod test { #[test] fn check_datadog() { for (source, pass, fail) in get_checks() { - let config = DatadogSearchConfig { - source: source.to_owned(), - }; + let config: DatadogSearchConfig = source.parse().unwrap(); // Every query should build successfully. 
let cond = config .build(&Default::default()) - .unwrap_or_else(|_| panic!("build failed: {}", source)); + .unwrap_or_else(|_| panic!("build failed: {source}")); assert!( cond.check_with_context(pass.clone()).0.is_ok(), - "should pass: {}\nevent: {:?}", + "should pass: {}\nevent: {}", source, - pass.as_log() + serde_json::to_string(&pass.as_log()).unwrap(), ); assert!( cond.check_with_context(fail.clone()).0.is_err(), - "should fail: {}\nevent: {:?}", + "should fail: {}\nevent: {}", source, - fail.as_log() + serde_json::to_string(&fail.as_log()).unwrap(), ); } } diff --git a/src/conditions/vrl.rs b/src/conditions/vrl.rs index adecd70d43cd0..267a7e01f19a4 100644 --- a/src/conditions/vrl.rs +++ b/src/conditions/vrl.rs @@ -21,7 +21,7 @@ pub struct VrlConfig { pub(crate) source: String, #[configurable(derived, metadata(docs::hidden))] - #[serde(default)] + #[serde(default, skip_serializing_if = "crate::serde::is_default")] pub(crate) runtime: VrlRuntime, } @@ -44,7 +44,11 @@ impl ConditionalConfig for VrlConfig { let functions = vrl::stdlib::all() .into_iter() - .chain(vector_lib::enrichment::vrl_functions()) + .chain(vector_lib::enrichment::vrl_functions()); + #[cfg(feature = "sources-dnstap")] + let functions = functions.chain(dnstap_parser::vrl_functions()); + + let functions = functions .chain(vector_vrl_functions::all()) .collect::>(); @@ -138,7 +142,7 @@ impl Conditional for Vrl { ) .colored() .to_string(); - format!("source execution aborted: {}", err) + format!("source execution aborted: {err}") } Terminate::Error(err) => { let err = Formatter::new( @@ -149,7 +153,7 @@ impl Conditional for Vrl { ) .colored() .to_string(); - format!("source execution failed: {}", err) + format!("source execution failed: {err}") } }); diff --git a/src/config/api.rs b/src/config/api.rs index c9ed3802d9bef..fca088f751b39 100644 --- a/src/config/api.rs +++ b/src/config/api.rs @@ -4,27 +4,42 @@ use url::Url; use vector_lib::configurable::configurable_component; /// API options. -#[configurable_component] +#[configurable_component(api("api"))] #[derive(Clone, Copy, Debug, Eq, PartialEq)] #[serde(default, deny_unknown_fields)] pub struct Options { - /// Whether or not the API endpoint is available. + /// Whether the GraphQL API is enabled for this Vector instance. #[serde(default = "default_enabled")] + #[configurable(metadata(docs::common = true, docs::required = false))] pub enabled: bool, - /// The socket address to listen on for the API endpoint. + /// The network address to which the API should bind. If you're running + /// Vector in a Docker container, bind to `0.0.0.0`. Otherwise + /// the API will not be exposed outside the container. #[serde(default = "default_address")] + #[configurable(metadata(docs::examples = "0.0.0.0:8686"))] + #[configurable(metadata(docs::examples = "127.0.0.1:1234"))] + #[configurable(metadata(docs::common = true, docs::required = false))] pub address: Option, - /// Whether or not to expose the GraphQL playground on the API endpoint. + /// Whether the [GraphQL Playground](https://github.com/graphql/graphql-playground) is enabled + /// for the API. The Playground is accessible via the `/playground` endpoint + /// of the address set using the `bind` parameter. Note that the `playground` + /// endpoint will only be enabled if the `graphql` endpoint is also enabled. 
#[serde(default = "default_playground")] + #[configurable(metadata(docs::common = false, docs::required = false))] pub playground: bool, - /// Whether or not the GraphQL endpoint is enabled + /// Whether the endpoint for receiving and processing GraphQL queries is + /// enabled for the API. The endpoint is accessible via the `/graphql` + /// endpoint of the address set using the `bind` parameter. #[serde(default = "default_graphql", skip_serializing_if = "is_true")] + #[configurable(metadata(docs::common = true, docs::required = false))] pub graphql: bool, } +impl_generate_config_from_default!(Options); + impl Default for Options { fn default() -> Self { Self { @@ -56,7 +71,7 @@ pub fn default_address() -> Option { /// Default GraphQL API address pub fn default_graphql_url() -> Url { let addr = default_address().unwrap(); - Url::parse(&format!("http://{}/graphql", addr)) + Url::parse(&format!("http://{addr}/graphql")) .expect("Couldn't parse default API URL. Please report this.") } @@ -80,9 +95,7 @@ impl Options { // Prefer non default address (Some(a), Some(b)) => { match (Some(a) == default_address(), Some(b) == default_address()) { - (false, false) => { - return Err(format!("Conflicting `api` address: {}, {} .", a, b)) - } + (false, false) => return Err(format!("Conflicting `api` address: {a}, {b} .")), (false, true) => Some(a), (true, _) => Some(b), } diff --git a/src/config/builder.rs b/src/config/builder.rs index f67322036fbd1..33889a67833c1 100644 --- a/src/config/builder.rs +++ b/src/config/builder.rs @@ -1,10 +1,6 @@ -#[cfg(feature = "enterprise")] -use std::collections::BTreeMap; use std::{path::Path, time::Duration}; use indexmap::IndexMap; -#[cfg(feature = "enterprise")] -use serde_json::Value; use vector_lib::config::GlobalOptions; use vector_lib::configurable::configurable_component; @@ -12,8 +8,6 @@ use crate::{enrichment_tables::EnrichmentTables, providers::Providers, secrets:: #[cfg(feature = "api")] use super::api; -#[cfg(feature = "enterprise")] -use super::enterprise; use super::{ compiler, schema, BoxedSink, BoxedSource, BoxedTransform, ComponentKey, Config, EnrichmentTableOuter, HealthcheckOptions, SinkOuter, SourceOuter, TestDefinition, @@ -38,18 +32,13 @@ pub struct ConfigBuilder { #[serde(default)] pub schema: schema::Options, - #[cfg(feature = "enterprise")] - #[configurable(derived)] - #[serde(default)] - pub enterprise: Option, - #[configurable(derived)] #[serde(default)] pub healthchecks: HealthcheckOptions, /// All configured enrichment tables. #[serde(default)] - pub enrichment_tables: IndexMap, + pub enrichment_tables: IndexMap>, /// All configured sources. #[serde(default)] @@ -90,105 +79,6 @@ pub struct ConfigBuilder { pub allow_empty: bool, } -#[cfg(feature = "enterprise")] -#[derive(::serde::Serialize)] -struct ConfigBuilderHash<'a> { - version: String, - #[cfg(feature = "api")] - api: &'a api::Options, - schema: &'a schema::Options, - global: &'a GlobalOptions, - healthchecks: &'a HealthcheckOptions, - enrichment_tables: BTreeMap<&'a ComponentKey, &'a EnrichmentTableOuter>, - sources: BTreeMap<&'a ComponentKey, &'a SourceOuter>, - sinks: BTreeMap<&'a ComponentKey, &'a SinkOuter>, - transforms: BTreeMap<&'a ComponentKey, &'a TransformOuter>, - tests: &'a Vec>, - provider: &'a Option, - secret: BTreeMap<&'a ComponentKey, &'a SecretBackends>, -} - -#[cfg(feature = "enterprise")] -impl ConfigBuilderHash<'_> { - /// Sort inner JSON values to maintain a consistent ordering. 
This prevents - /// non-deterministically serializable structures like HashMap from - /// affecting the resulting hash. As a consequence, ordering that does not - /// affect the actual semantics of a configuration is not considered when - /// calculating the hash. - fn into_hash(self) -> String { - use sha2::{Digest, Sha256}; - - let value = to_sorted_json_string(self); - let output = Sha256::digest(value.as_bytes()); - - hex::encode(output) - } -} - -/// It may seem like converting to Value prior to serializing to JSON string is -/// sufficient to sort our underlying keys. By default, Value::Map is backed by -/// BTreeMap which maintains an implicit key order, so it's an enticing and -/// simple approach. The issue however is the "by default". The underlying -/// Value::Map structure can actually change depending on which serde features -/// are enabled: IndexMap is the alternative and would break our intended -/// behavior. -/// -/// Rather than rely on the opaque underlying serde structures, we are explicit -/// about sorting, sacrificing a bit of potential convenience for correctness. -#[cfg(feature = "enterprise")] -fn to_sorted_json_string(value: T) -> String -where - T: ::serde::Serialize, -{ - let mut value = serde_json::to_value(value).expect("Should serialize to JSON. Please report."); - sort_json_value(&mut value); - - serde_json::to_string(&value).expect("Should serialize Value to JSON string. Please report.") -} - -#[cfg(feature = "enterprise")] -fn sort_json_value(value: &mut Value) { - match value { - Value::Array(arr) => { - for v in arr.iter_mut() { - sort_json_value(v); - } - } - Value::Object(map) => { - let mut ordered_map: BTreeMap = - serde_json::from_value(map.to_owned().into()) - .expect("Converting Value to BTreeMap failed."); - for v in ordered_map.values_mut() { - sort_json_value(v); - } - *value = serde_json::to_value(ordered_map) - .expect("Converting BTreeMap back to Value failed."); - } - _ => {} - } -} - -#[cfg(feature = "enterprise")] -impl<'a> From<&'a ConfigBuilder> for ConfigBuilderHash<'a> { - fn from(value: &'a ConfigBuilder) -> Self { - ConfigBuilderHash { - version: crate::get_version(), - #[cfg(feature = "api")] - api: &value.api, - schema: &value.schema, - global: &value.global, - healthchecks: &value.healthchecks, - enrichment_tables: value.enrichment_tables.iter().collect(), - sources: value.sources.iter().collect(), - sinks: value.sinks.iter().collect(), - transforms: value.transforms.iter().collect(), - tests: &value.tests, - provider: &value.provider, - secret: value.secret.iter().collect(), - } - } -} - impl From for ConfigBuilder { fn from(config: Config) -> Self { let Config { @@ -196,8 +86,6 @@ impl From for ConfigBuilder { #[cfg(feature = "api")] api, schema, - #[cfg(feature = "enterprise")] - enterprise, healthchecks, enrichment_tables, sources, @@ -206,7 +94,6 @@ impl From for ConfigBuilder { tests, secret, graceful_shutdown_duration, - hash: _, } = config; let transforms = transforms @@ -219,6 +106,11 @@ impl From for ConfigBuilder { .map(|(key, sink)| (key, sink.map_inputs(ToString::to_string))) .collect(); + let enrichment_tables = enrichment_tables + .into_iter() + .map(|(key, table)| (key, table.map_inputs(ToString::to_string))) + .collect(); + let tests = tests.into_iter().map(TestDefinition::stringify).collect(); ConfigBuilder { @@ -226,8 +118,6 @@ impl From for ConfigBuilder { #[cfg(feature = "api")] api, schema, - #[cfg(feature = "enterprise")] - enterprise, healthchecks, enrichment_tables, sources, @@ -260,11 +150,16 @@ impl 
ConfigBuilder { pub fn add_enrichment_table, E: Into>( &mut self, key: K, + inputs: &[&str], enrichment_table: E, ) { + let inputs = inputs + .iter() + .map(|value| value.to_string()) + .collect::>(); self.enrichment_tables.insert( ComponentKey::from(key.into()), - EnrichmentTableOuter::new(enrichment_table), + EnrichmentTableOuter::new(inputs, enrichment_table), ); } @@ -321,21 +216,6 @@ impl ConfigBuilder { errors.push(error); } - #[cfg(feature = "enterprise")] - { - match (self.enterprise.as_ref(), with.enterprise) { - (Some(_), Some(_)) => { - errors.push( - "duplicate 'enterprise' definition, only one definition allowed".to_owned(), - ); - } - (None, Some(other)) => { - self.enterprise = Some(other); - } - _ => {} - }; - } - self.provider = with.provider; match self.global.merge(with.global) { @@ -351,22 +231,22 @@ impl ConfigBuilder { with.enrichment_tables.keys().for_each(|k| { if self.enrichment_tables.contains_key(k) { - errors.push(format!("duplicate enrichment_table name found: {}", k)); + errors.push(format!("duplicate enrichment_table name found: {k}")); } }); with.sources.keys().for_each(|k| { if self.sources.contains_key(k) { - errors.push(format!("duplicate source id found: {}", k)); + errors.push(format!("duplicate source id found: {k}")); } }); with.sinks.keys().for_each(|k| { if self.sinks.contains_key(k) { - errors.push(format!("duplicate sink id found: {}", k)); + errors.push(format!("duplicate sink id found: {k}")); } }); with.transforms.keys().for_each(|k| { if self.transforms.contains_key(k) { - errors.push(format!("duplicate transform id found: {}", k)); + errors.push(format!("duplicate transform id found: {k}")); } }); with.tests.iter().for_each(|wt| { @@ -376,7 +256,7 @@ impl ConfigBuilder { }); with.secret.keys().for_each(|k| { if self.secret.contains_key(k) { - errors.push(format!("duplicate secret id found: {}", k)); + errors.push(format!("duplicate secret id found: {k}")); } }); if !errors.is_empty() { @@ -393,13 +273,6 @@ impl ConfigBuilder { Ok(()) } - #[cfg(feature = "enterprise")] - /// SHA256 hexadecimal representation of a config builder. This is generated by serializing - /// an order-stable JSON of the config builder and feeding its bytes into a SHA256 hasher. - pub fn sha256_hash(&self) -> String { - ConfigBuilderHash::from(self).into_hash() - } - #[cfg(test)] pub fn from_toml(input: &str) -> Self { crate::config::format::deserialize(input, crate::config::format::Format::Toml).unwrap() @@ -410,190 +283,3 @@ impl ConfigBuilder { crate::config::format::deserialize(input, crate::config::format::Format::Json).unwrap() } } - -#[cfg(all( - test, - feature = "enterprise", - feature = "api", - feature = "sources-demo_logs", - feature = "sinks-loki" -))] -mod tests { - use indexmap::IndexMap; - - use crate::config::{ - builder::{sort_json_value, to_sorted_json_string}, - enterprise, ConfigBuilder, - }; - - use super::ConfigBuilderHash; - - #[test] - /// If this test fails, it likely means an implementation detail has changed - /// which is likely to impact the final hash. - fn version_json_order() { - use serde_json::{json, Value}; - - use super::{ConfigBuilder, ConfigBuilderHash}; - - // Expected key order. This is important for guaranteeing that a hash is - // reproducible across versions. 
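The removed hashing machinery above sorted every JSON object before serializing, so the enterprise config hash did not depend on map iteration order (serde_json's `preserve_order` feature swaps the `Value::Object` backing from `BTreeMap` to `IndexMap`). The core of that idea, as a standalone sketch:

    use serde_json::{json, Value};

    fn sort_json(value: &mut Value) {
        match value {
            Value::Array(arr) => arr.iter_mut().for_each(sort_json),
            Value::Object(map) => {
                let mut entries: Vec<(String, Value)> =
                    std::mem::take(map).into_iter().collect();
                entries.sort_by(|a, b| a.0.cmp(&b.0));
                entries.iter_mut().for_each(|(_, v)| sort_json(v));
                // Re-inserting in sorted order keeps IndexMap-backed maps ordered too.
                *map = entries.into_iter().collect();
            }
            _ => {}
        }
    }

    fn main() {
        let mut v = json!({"z": 26, "a": {"d": 4, "c": 3}});
        sort_json(&mut v);
        assert_eq!(v.to_string(), r#"{"a":{"c":3,"d":4},"z":26}"#);
    }
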
- let expected_keys = [ - "api", - "enrichment_tables", - "global", - "healthchecks", - "provider", - "schema", - "secret", - "sinks", - "sources", - "tests", - "transforms", - "version", - ]; - - let builder = ConfigBuilder::default(); - - let mut value = json!(ConfigBuilderHash::from(&builder)); - sort_json_value(&mut value); - - match value { - // Should serialize to a map. - Value::Object(map) => { - // Check ordering. - assert!(map.keys().eq(expected_keys)); - } - _ => panic!("should serialize to object"), - } - } - - #[test] - /// If this hash changes, it means either the version of Vector has changed (here it's fixed), - /// the `ConfigBuilder` has changed what it serializes, or the implementation of `serde_json` has changed. - /// If this test fails, we should ideally be able to fix so that the original hash passes! - fn version_hash_match() { - let expected_hash = "6c98bea9d9e2f3133e2d39ba04592d17f96340a9bc4c8d697b09f5af388a76bd"; - let builder = ConfigBuilder::default(); - let mut hash_builder = ConfigBuilderHash::from(&builder); - hash_builder.version = "1.2.3".into(); - assert_eq!(expected_hash, hash_builder.into_hash()); - } - - #[test] - fn append_keeps_enterprise() { - let mut base = ConfigBuilder { - enterprise: Some(enterprise::Options::default()), - ..Default::default() - }; - let other = ConfigBuilder::default(); - base.append(other).unwrap(); - assert!(base.enterprise.is_some()); - } - - #[test] - fn append_sets_enterprise() { - let mut base = ConfigBuilder::default(); - let other = ConfigBuilder { - enterprise: Some(enterprise::Options::default()), - ..Default::default() - }; - base.append(other).unwrap(); - assert!(base.enterprise.is_some()); - } - - #[test] - fn append_overwrites_enterprise() { - let base_ent = enterprise::Options::default(); - let mut base = ConfigBuilder { - enterprise: Some(base_ent), - ..Default::default() - }; - let other_ent = enterprise::Options::default(); - let other = ConfigBuilder { - enterprise: Some(other_ent), - ..Default::default() - }; - let errors = base.append(other).unwrap_err(); - assert_eq!( - errors[0], - "duplicate 'enterprise' definition, only one definition allowed" - ); - } - - #[test] - fn version_hash_sorted() { - let control_config = toml::from_str::( - r#" - [enterprise] - api_key = "apikey" - configuration_key = "configkey" - - [sources.foo] - type = "internal_logs" - - [sinks.loki] - type = "loki" - endpoint = "https://localhost:1111" - inputs = ["foo"] - - [sinks.loki.labels] - foo = '{{ foo }}' - bar = '{{ bar }}' - baz = '{{ baz }}' - ingest = "hello-world" - level = '{{ level }}' - module = '{{ module }}' - service = '{{ service }}' - - [sinks.loki.encoding] - codec = "json" - "#, - ) - .unwrap(); - let expected_hash = ConfigBuilderHash::from(&control_config).into_hash(); - for _ in 0..100 { - let experiment_config = toml::from_str::( - r#" - [enterprise] - api_key = "apikey" - configuration_key = "configkey" - - [sources.foo] - type = "internal_logs" - - [sinks.loki] - type = "loki" - endpoint = "https://localhost:1111" - inputs = ["foo"] - - [sinks.loki.labels] - foo = '{{ foo }}' - bar = '{{ bar }}' - baz = '{{ baz }}' - ingest = "hello-world" - level = '{{ level }}' - module = '{{ module }}' - service = '{{ service }}' - - [sinks.loki.encoding] - codec = "json" - "#, - ) - .unwrap(); - assert_eq!( - expected_hash, - ConfigBuilderHash::from(&experiment_config).into_hash() - ); - } - } - - #[test] - fn test_to_sorted_json_string() { - let ordered_map = IndexMap::from([("z", 26), ("a", 1), ("d", 4), ("c", 3), 
("b", 2)]); - assert_eq!( - r#"{"a":1,"b":2,"c":3,"d":4,"z":26}"#.to_string(), - to_sorted_json_string(ordered_map) - ); - } -} diff --git a/src/config/cmd.rs b/src/config/cmd.rs index f022be288b140..e2372931087fa 100644 --- a/src/config/cmd.rs +++ b/src/config/cmd.rs @@ -78,7 +78,7 @@ impl Opts { /// Helper to merge JSON. Handles objects and array concatenation. fn merge_json(a: &mut Value, b: Value) { match (a, b) { - (Value::Object(ref mut a), Value::Object(b)) => { + (Value::Object(a), Value::Object(b)) => { for (k, v) in b { merge_json(a.entry(k).or_insert(Value::Null), v); } @@ -92,7 +92,7 @@ fn merge_json(a: &mut Value, b: Value) { /// Helper to sort array values. fn sort_json_array_values(json: &mut Value) { match json { - Value::Array(ref mut arr) => { + Value::Array(arr) => { for v in arr.iter_mut() { sort_json_array_values(v); } @@ -113,7 +113,7 @@ fn sort_json_array_values(json: &mut Value) { .map(|v| serde_json::from_str(v.as_str()).unwrap()) .collect::>(); } - Value::Object(ref mut json) => { + Value::Object(json) => { for (_, v) in json { sort_json_array_values(v); } @@ -168,7 +168,7 @@ pub fn cmd(opts: &Opts) -> exitcode::ExitCode { // builder fields which we'll use to error out if required. let (paths, builder) = match process_paths(&paths) { Some(paths) => match load_builder_from_paths(&paths) { - Ok((builder, _)) => (paths, builder), + Ok(builder) => (paths, builder), Err(errs) => return handle_config_errors(errs), }, None => return exitcode::CONFIG, @@ -176,7 +176,7 @@ pub fn cmd(opts: &Opts) -> exitcode::ExitCode { // Load source TOML. let source = match load_source_from_paths(&paths) { - Ok((map, _)) => map, + Ok(map) => map, Err(errs) => return handle_config_errors(errs), }; @@ -200,6 +200,7 @@ mod tests { SeedableRng, }; use serde_json::json; + use similar_asserts::assert_eq; use vector_lib::configurable::component::{ SinkDescription, SourceDescription, TransformDescription, }; @@ -241,21 +242,19 @@ mod tests { r#" [sources.in] type = "demo_logs" - format = "${{{}}}" + format = "${{{env_var}}}" [sinks.out] type = "blackhole" - inputs = ["${{{}}}"] - "#, - env_var, env_var_in_arr + inputs = ["${{{env_var_in_arr}}}"] + "# ); - let (interpolated_config_source, _) = vars::interpolate( + let interpolated_config_source = vars::interpolate( config_source.as_ref(), &HashMap::from([ (env_var.to_string(), "syslog".to_string()), (env_var_in_arr.to_string(), "in".to_string()), ]), - true, ) .unwrap(); @@ -283,7 +282,11 @@ mod tests { /// Select any 2-4 sources fn arb_sources() -> impl Strategy> { - sample::subsequence(SourceDescription::types(), 2..=4) + let mut types = SourceDescription::types(); + // The `file_descriptor` source produces different defaults each time it is used, and so + // will never compare equal below. 
+ types.retain(|t| *t != "file_descriptor"); + sample::subsequence(types, 2..=4) } /// Select any 2-4 transforms @@ -315,18 +318,18 @@ mod tests { "{}/{}/{}", sources .iter() - .map(|source| format!("{}:{}", source, source)) + .map(|source| format!("{source}:{source}")) .collect::>() .join(","), transforms .iter() - .map(|transform| format!("{}:{}", transform, transform)) + .map(|transform| format!("{transform}:{transform}")) .chain(vec!["manually-added-remap:remap".to_string()]) .collect::>() .join(","), sinks .iter() - .map(|sink| format!("{}:{}", sink, sink)) + .map(|sink| format!("{sink}:{sink}")) .collect::>() .join(","), ); diff --git a/src/config/compiler.rs b/src/config/compiler.rs index 8031c3dc260d3..bffb51bcaa93a 100644 --- a/src/config/compiler.rs +++ b/src/config/compiler.rs @@ -1,9 +1,10 @@ use super::{ - builder::ConfigBuilder, graph::Graph, id::Inputs, transform::get_transform_output_ids, - validation, Config, OutputId, + builder::ConfigBuilder, graph::Graph, transform::get_transform_output_ids, validation, Config, + OutputId, }; -use indexmap::IndexSet; +use indexmap::{IndexMap, IndexSet}; +use vector_lib::id::Inputs; pub fn compile(mut builder: ConfigBuilder) -> Result<(Config, Vec), Vec> { let mut errors = Vec::new(); @@ -35,19 +36,11 @@ pub fn compile(mut builder: ConfigBuilder) -> Result<(Config, Vec), Vec< errors.extend(output_errors); } - #[cfg(feature = "enterprise")] - let hash = Some(builder.sha256_hash()); - - #[cfg(not(feature = "enterprise"))] - let hash = None; - let ConfigBuilder { global, #[cfg(feature = "api")] api, schema, - #[cfg(feature = "enterprise")] - enterprise, healthchecks, enrichment_tables, sources, @@ -59,8 +52,32 @@ pub fn compile(mut builder: ConfigBuilder) -> Result<(Config, Vec), Vec< graceful_shutdown_duration, allow_empty: _, } = builder; + let all_sinks = sinks + .clone() + .into_iter() + .chain( + enrichment_tables + .iter() + .filter_map(|(key, table)| table.as_sink(key)), + ) + .collect::>(); + let sources_and_table_sources = sources + .clone() + .into_iter() + .chain( + enrichment_tables + .iter() + .filter_map(|(key, table)| table.as_source(key)), + ) + .collect::>(); - let graph = match Graph::new(&sources, &transforms, &sinks, schema) { + let graph = match Graph::new( + &sources_and_table_sources, + &transforms, + &all_sinks, + schema, + global.wildcard_matching.unwrap_or_default(), + ) { Ok(graph) => graph, Err(graph_errors) => { errors.extend(graph_errors); @@ -92,6 +109,13 @@ pub fn compile(mut builder: ConfigBuilder) -> Result<(Config, Vec), Vec< (key, transform.with_inputs(inputs)) }) .collect(); + let enrichment_tables = enrichment_tables + .into_iter() + .map(|(key, table)| { + let inputs = graph.inputs_for(&key); + (key, table.with_inputs(inputs)) + }) + .collect(); let tests = tests .into_iter() .map(|test| test.resolve_outputs(&graph)) @@ -103,9 +127,6 @@ pub fn compile(mut builder: ConfigBuilder) -> Result<(Config, Vec), Vec< #[cfg(feature = "api")] api, schema, - #[cfg(feature = "enterprise")] - enterprise, - hash, healthchecks, enrichment_tables, sources, diff --git a/src/config/diff.rs b/src/config/diff.rs index da5ed54faeb9d..fa5f8cf09b8ff 100644 --- a/src/config/diff.rs +++ b/src/config/diff.rs @@ -1,56 +1,70 @@ use std::collections::HashSet; use indexmap::IndexMap; +use vector_lib::config::OutputId; -use super::{ComponentKey, Config}; +use super::{ComponentKey, Config, EnrichmentTableOuter}; #[derive(Debug)] pub struct ConfigDiff { pub sources: Difference, pub transforms: Difference, pub sinks: Difference, + /// 
This difference does not only contain the actual enrichment_tables keys, but also keys that + /// may be used for their source and sink components (if available). pub enrichment_tables: Difference, + pub components_to_reload: HashSet, } impl ConfigDiff { pub fn initial(initial: &Config) -> Self { - Self::new(&Config::default(), initial) + Self::new(&Config::default(), initial, HashSet::new()) } - pub fn new(old: &Config, new: &Config) -> Self { + pub fn new(old: &Config, new: &Config, components_to_reload: HashSet) -> Self { ConfigDiff { - sources: Difference::new(&old.sources, &new.sources), - transforms: Difference::new(&old.transforms, &new.transforms), - sinks: Difference::new(&old.sinks, &new.sinks), - enrichment_tables: Difference::new(&old.enrichment_tables, &new.enrichment_tables), + sources: Difference::new(&old.sources, &new.sources, &components_to_reload), + transforms: Difference::new(&old.transforms, &new.transforms, &components_to_reload), + sinks: Difference::new(&old.sinks, &new.sinks, &components_to_reload), + enrichment_tables: Difference::new_tables( + &old.enrichment_tables, + &new.enrichment_tables, + ), + components_to_reload, } } /// Swaps removed with added in Differences. - pub fn flip(mut self) -> Self { + pub const fn flip(mut self) -> Self { self.sources.flip(); self.transforms.flip(); self.sinks.flip(); + self.enrichment_tables.flip(); self } - /// Checks whether or not the given component is present at all. + /// Checks whether the given component is present at all. pub fn contains(&self, key: &ComponentKey) -> bool { - self.sources.contains(key) || self.transforms.contains(key) || self.sinks.contains(key) + self.sources.contains(key) + || self.transforms.contains(key) + || self.sinks.contains(key) + || self.enrichment_tables.contains(key) } - /// Checks whether or not the given component is changed. + /// Checks whether the given component is changed. pub fn is_changed(&self, key: &ComponentKey) -> bool { self.sources.is_changed(key) || self.transforms.is_changed(key) || self.sinks.is_changed(key) + || self.enrichment_tables.contains(key) } - /// Checks whether or not the given component is removed. + /// Checks whether the given component is removed. pub fn is_removed(&self, key: &ComponentKey) -> bool { self.sources.is_removed(key) || self.transforms.is_removed(key) || self.sinks.is_removed(key) + || self.enrichment_tables.contains(key) } } @@ -62,13 +76,58 @@ pub struct Difference { } impl Difference { - fn new(old: &IndexMap, new: &IndexMap) -> Self + fn new( + old: &IndexMap, + new: &IndexMap, + need_change: &HashSet, + ) -> Self where C: serde::Serialize + serde::Deserialize<'static>, { let old_names = old.keys().cloned().collect::>(); let new_names = new.keys().cloned().collect::>(); + let to_change = old_names + .intersection(&new_names) + .filter(|&n| { + // This is a hack around the issue of comparing two + // trait objects. Json is used here over toml since + // toml does not support serializing `None` + // to_value is used specifically (instead of string) + // to avoid problems comparing serialized HashMaps, + // which can iterate in varied orders. 
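Comparing components via `serde_json::to_value` sidesteps both trait-object equality and map ordering: two `Value` objects compare equal whenever they hold the same key/value pairs, regardless of insertion order. For instance:

    use serde_json::json;

    fn main() {
        let old = json!({"type": "demo_logs", "interval": 1});
        let new = json!({"interval": 1, "type": "demo_logs"});
        // Same entries, different literal order: still equal as Values.
        assert_eq!(old, new);
    }
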
+                let old_value = serde_json::to_value(&old[n]).unwrap();
+                let new_value = serde_json::to_value(&new[n]).unwrap();
+                old_value != new_value || need_change.contains(n)
+            })
+            .cloned()
+            .collect::<HashSet<_>>();
+
+        let to_remove = &old_names - &new_names;
+        let to_add = &new_names - &old_names;
+
+        Self {
+            to_remove,
+            to_change,
+            to_add,
+        }
+    }
+
+    fn new_tables(
+        old: &IndexMap<ComponentKey, EnrichmentTableOuter<OutputId>>,
+        new: &IndexMap<ComponentKey, EnrichmentTableOuter<OutputId>>,
+    ) -> Self {
+        let old_names = old
+            .iter()
+            .flat_map(|(k, t)| vec![t.as_source(k).map(|(k, _)| k), t.as_sink(k).map(|(k, _)| k)])
+            .flatten()
+            .collect::<HashSet<_>>();
+        let new_names = new
+            .iter()
+            .flat_map(|(k, t)| vec![t.as_source(k).map(|(k, _)| k), t.as_sink(k).map(|(k, _)| k)])
+            .flatten()
+            .collect::<HashSet<_>>();
+
+        let to_change = old_names
             .intersection(&new_names)
             .filter(|&n| {
@@ -130,7 +189,7 @@ impl Difference {
         self.to_remove.contains(key)
     }
 
-    fn flip(&mut self) {
+    const fn flip(&mut self) {
         std::mem::swap(&mut self.to_remove, &mut self.to_add);
     }
 
diff --git a/src/config/dot_graph.rs b/src/config/dot_graph.rs
new file mode 100644
index 0000000000000..0d3d7ca1e4900
--- /dev/null
+++ b/src/config/dot_graph.rs
@@ -0,0 +1,29 @@
+use std::collections::HashMap;
+
+use vector_lib::configurable::configurable_component;
+
+/// Extra graph configuration
+///
+/// Configure output for component when generated with graph command
+#[configurable_component]
+#[configurable(metadata(docs::advanced))]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+#[serde(deny_unknown_fields)]
+pub struct GraphConfig {
+    /// Node attributes to add to this component's node in resulting graph
+    ///
+    /// They are added to the node as provided
+    #[configurable(metadata(
+        docs::additional_props_description = "A single graph node attribute in graphviz DOT language.",
+        docs::examples = "example_graph_options()"
+    ))]
+    pub node_attributes: HashMap<String, String>,
+}
+
+fn example_graph_options() -> HashMap<String, String> {
+    HashMap::<_, _>::from_iter([
+        ("name".to_string(), "Example Node".to_string()),
+        ("color".to_string(), "red".to_string()),
+        ("width".to_string(), "5.0".to_string()),
+    ])
+}
diff --git a/src/config/enrichment_table.rs b/src/config/enrichment_table.rs
index 7052b82005b68..83b52fc437228 100644
--- a/src/config/enrichment_table.rs
+++ b/src/config/enrichment_table.rs
@@ -1,28 +1,115 @@
-use async_trait::async_trait;
 use enum_dispatch::enum_dispatch;
+use serde::Serialize;
 use vector_lib::config::GlobalOptions;
-use vector_lib::configurable::{configurable_component, NamedComponent};
+use vector_lib::configurable::{configurable_component, Configurable, NamedComponent, ToValue};
+use vector_lib::id::{ComponentKey, Inputs};
 
 use crate::enrichment_tables::EnrichmentTables;
 
+use super::dot_graph::GraphConfig;
+use super::{SinkConfig, SinkOuter, SourceConfig, SourceOuter};
+
 /// Fully resolved enrichment table component.
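+/// The type parameter `T` is the type used to reference inputs: a plain
+/// `String` while the configuration is being deserialized, and a resolved
+/// `OutputId` once the topology has been compiled (see `map_inputs` and
+/// `with_inputs` below).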
 #[configurable_component]
 #[derive(Clone, Debug)]
-pub struct EnrichmentTableOuter {
+pub struct EnrichmentTableOuter<T>
+where
+    T: Configurable + Serialize + 'static + ToValue + Clone,
+{
     #[serde(flatten)]
     pub inner: EnrichmentTables,
+    #[configurable(derived)]
+    #[serde(default, skip_serializing_if = "vector_lib::serde::is_default")]
+    pub graph: GraphConfig,
+    #[configurable(derived)]
+    #[serde(
+        default = "Inputs::<T>::default",
+        skip_serializing_if = "Inputs::is_empty"
+    )]
+    pub inputs: Inputs<T>,
 }
 
-impl EnrichmentTableOuter {
-    pub fn new<I: Into<EnrichmentTables>>(inner: I) -> Self {
+impl<T> EnrichmentTableOuter<T>
+where
+    T: Configurable + Serialize + 'static + ToValue + Clone,
+{
+    pub fn new<I, IET>(inputs: I, inner: IET) -> Self
+    where
+        I: IntoIterator<Item = T>,
+        IET: Into<EnrichmentTables>,
+    {
         Self {
             inner: inner.into(),
+            graph: Default::default(),
+            inputs: Inputs::from_iter(inputs),
+        }
+    }
+
+    // Components are currently built in a way that they match exactly one of the roles (source,
+    // transform, sink, enrichment table). Due to specific requirements of the "memory" enrichment
+    // table, it has to fulfill 2 of these roles (sink and enrichment table). To reduce the impact
+    // of this very specific requirement, any enrichment table can now be optionally mapped into a
+    // sink, but this will only work for a "memory" enrichment table, since other tables will not
+    // have a "sink_config" present.
+    // This is also not ideal, since `SinkOuter` is not meant to represent the actual configuration,
+    // but it should just be a representation of that config used for deserialization.
+    // In the future, if more such components come up, it would be good to limit such "Outer"
+    // components to deserialization and build up the components and the topology in a more granular
+    // way, with each having "modules" for inputs (making them valid as sinks), for healthchecks,
+    // for providing outputs, etc.
+    pub fn as_sink(&self, default_key: &ComponentKey) -> Option<(ComponentKey, SinkOuter<T>)> {
+        self.inner.sink_config(default_key).map(|(key, sink)| {
+            (
+                key,
+                SinkOuter {
+                    graph: self.graph.clone(),
+                    inputs: self.inputs.clone(),
+                    healthcheck_uri: None,
+                    healthcheck: Default::default(),
+                    buffer: Default::default(),
+                    proxy: Default::default(),
+                    inner: sink,
+                },
+            )
+        })
+    }
+
+    pub fn as_source(&self, default_key: &ComponentKey) -> Option<(ComponentKey, SourceOuter)> {
+        self.inner.source_config(default_key).map(|(key, source)| {
+            (
+                key,
+                SourceOuter {
+                    graph: self.graph.clone(),
+                    sink_acknowledgements: false,
+                    proxy: Default::default(),
+                    inner: source,
+                },
+            )
+        })
+    }
+
+    pub(super) fn map_inputs<U>(self, f: impl Fn(&T) -> U) -> EnrichmentTableOuter<U>
+    where
+        U: Configurable + Serialize + 'static + ToValue + Clone,
+    {
+        let inputs = self.inputs.iter().map(f).collect::<Vec<_>>();
+        self.with_inputs(inputs)
+    }
+
+    pub(crate) fn with_inputs<I, U>(self, inputs: I) -> EnrichmentTableOuter<U>
+    where
+        I: IntoIterator<Item = U>,
+        U: Configurable + Serialize + 'static + ToValue + Clone,
+    {
+        EnrichmentTableOuter {
+            inputs: Inputs::from_iter(inputs),
+            inner: self.inner,
+            graph: self.graph,
         }
     }
 }
 
 /// Generalized interface for describing and building enrichment table components.
-#[async_trait]
 #[enum_dispatch]
 pub trait EnrichmentTableConfig: NamedComponent + core::fmt::Debug + Send + Sync {
     /// Builds the enrichment table with the given globals.
@@ -38,4 +125,18 @@ pub trait EnrichmentTableConfig: NamedComponent + core::fmt::Debug + Send + Sync
         &self,
         globals: &GlobalOptions,
     ) -> crate::Result<Box<dyn enrichment::Table + Send + Sync>>;
+
+    fn sink_config(
+        &self,
+        _default_key: &ComponentKey,
+    ) -> Option<(ComponentKey, Box<dyn SinkConfig>)> {
+        None
+    }
+
+    fn source_config(
+        &self,
+        _default_key: &ComponentKey,
+    ) -> Option<(ComponentKey, Box<dyn SourceConfig>)> {
+        None
+    }
 }
diff --git a/src/config/enterprise.rs b/src/config/enterprise.rs
deleted file mode 100644
index b1387c8bf0510..0000000000000
--- a/src/config/enterprise.rs
+++ /dev/null
@@ -1,1005 +0,0 @@
-use std::{
-    env,
-    fmt::{Display, Formatter},
-};
-
-use futures_util::{future::BoxFuture, stream::FuturesOrdered, Future, StreamExt};
-use http::Request;
-use hyper::{header::LOCATION, Body, StatusCode};
-use indexmap::IndexMap;
-use rand::Rng;
-use serde::Serialize;
-use tokio::{
-    sync::mpsc,
-    time::{sleep, Duration},
-};
-use url::{ParseError, Url};
-use vector_lib::config::proxy::ProxyConfig;
-
-use super::{
-    load_source_from_paths, process_paths, ComponentKey, Config, ConfigPath, OutputId, SinkOuter,
-    SourceOuter, TransformOuter,
-};
-use crate::{
-    common::datadog::{default_site, get_api_base_endpoint},
-    conditions::AnyCondition,
-    http::{HttpClient, HttpError},
-    sinks::{
-        datadog::{
-            logs::DatadogLogsConfig, metrics::DatadogMetricsConfig, LocalDatadogCommonConfig,
-        },
-        util::{http::RequestConfig, retries::ExponentialBackoff},
-    },
-    sources::{
-        host_metrics::{Collector, HostMetricsConfig},
-        internal_logs::InternalLogsConfig,
-        internal_metrics::InternalMetricsConfig,
-    },
-    transforms::{filter::FilterConfig, remap::RemapConfig},
-};
-use vector_lib::configurable::configurable_component;
-
-static HOST_METRICS_KEY: &str = "_datadog_host_metrics";
-static TAG_METRICS_KEY: &str = "_datadog_tag_metrics";
-static TAG_LOGS_KEY: &str = "_datadog_tag_logs";
-static FILTER_METRICS_KEY: &str = "_datadog_filter_metrics";
-static PIPELINES_NAMESPACE_METRICS_KEY: &str = "_datadog_pipelines_namespace_metrics";
-static INTERNAL_METRICS_KEY: &str = "_datadog_internal_metrics";
-static INTERNAL_LOGS_KEY: &str = "_datadog_internal_logs";
-static DATADOG_METRICS_KEY: &str = "_datadog_metrics";
-static DATADOG_LOGS_KEY: &str = "_datadog_logs";
-
-static DATADOG_REPORTING_PRODUCT: &str = "Datadog Observability Pipelines";
-static DATADOG_REPORTING_PATH_STUB: &str = "/api/unstable/observability_pipelines/configuration";
-
-// Users can pass their Datadog API key through environment variables directly
-// rather than placing it in their configuration.
-pub static DATADOG_API_KEY_ENV_VAR_SHORT: &str = "DD_API_KEY";
-pub static DATADOG_API_KEY_ENV_VAR_FULL: &str = "DATADOG_API_KEY";
-
-/// Enterprise options for using Datadog's [Observability Pipelines][datadog_op].
-///
-/// [datadog_op]: https://www.datadoghq.com/product/observability-pipelines/
-#[configurable_component]
-#[derive(Clone, Debug, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct Options {
-    /// Whether or not Observability Pipelines support is enabled.
-    #[serde(default = "default_enabled")]
-    pub enabled: bool,
-
-    /// Whether or not to report internal component logs to Observability Pipelines.
-    #[serde(default = "default_enable_logs_reporting")]
-    pub enable_logs_reporting: bool,
-
-    /// The Datadog [site][dd_site] to send data to.
-    ///
-    /// [dd_site]: https://docs.datadoghq.com/getting_started/site
-    #[serde(default = "default_site")]
-    site: String,
-
-    /// The Datadog endpoint to send data to.
- /// - /// This is an advanced setting that is generally meant only for testing, and overrides both - /// `site` and `region`. - /// - /// You should prefer to set `site`. - #[configurable(derived)] - endpoint: Option, - - /// The Datadog [API key][api_key] to send data with. - /// - /// [api_key]: https://docs.datadoghq.com/api/?lang=bash#authentication - #[serde(default)] - pub api_key: Option, - - /// The configuration key for Observability Pipelines. - pub configuration_key: String, - - /// The amount of time, in seconds, between reporting host metrics to Observability Pipelines. - #[serde(default = "default_reporting_interval_secs")] - pub reporting_interval_secs: f64, - - /// The maximum number of retries to report Vector's configuration to Observability Pipelines at startup. - #[serde(default = "default_max_retries")] - pub max_retries: u32, - - #[configurable(derived)] - #[serde(default, skip_serializing_if = "crate::serde::is_default")] - proxy: ProxyConfig, - - /// A map of additional tags for metrics sent to Observability Pipelines. - tags: Option>, -} - -impl Default for Options { - fn default() -> Self { - Self { - enabled: default_enabled(), - enable_logs_reporting: default_enable_logs_reporting(), - site: default_site(), - endpoint: None, - api_key: None, - configuration_key: "".to_owned(), - reporting_interval_secs: default_reporting_interval_secs(), - max_retries: default_max_retries(), - proxy: ProxyConfig::default(), - tags: None, - } - } -} - -/// By default, the Datadog feature is enabled. -const fn default_enabled() -> bool { - true -} - -/// By default, internal logs are reported to Datadog. -const fn default_enable_logs_reporting() -> bool { - true -} - -/// By default, scrape internal metrics and report to Datadog every 1 seconds. -const fn default_reporting_interval_secs() -> f64 { - 1.0 -} - -/// By default, keep retrying (recoverable) failed reporting -const fn default_max_retries() -> u32 { - u32::MAX -} - -/// Enterprise error, relevant to an upstream caller. -pub enum EnterpriseError { - Disabled, - MissingApiKey, -} - -/// Holds data required to authorize a request to the Datadog OP reporting endpoint. -struct PipelinesAuth<'a> { - api_key: &'a str, -} - -/// Holds the relevant fields for reporting a configuration to Datadog Observability Pipelines. -struct PipelinesStrFields<'a> { - configuration_version_hash: &'a str, - vector_version: &'a str, -} - -/// Top-level struct representing the field structure for reporting a config to Datadog OP. -#[derive(Debug, Serialize)] -struct PipelinesVersionPayload<'a> { - data: PipelinesData<'a>, -} - -#[derive(Debug, Serialize)] -struct PipelinesData<'a> { - attributes: PipelinesAttributes<'a>, - r#type: &'a str, -} - -#[derive(Debug, Serialize)] -struct PipelinesAttributes<'a> { - config_hash: &'a str, - vector_version: &'a str, - config: &'a toml::value::Table, -} - -/// Internal reporting error, necessary to determine the severity of an error response. 
-enum ReportingError { - Http(HttpError), - StatusCode(StatusCode), - EndpointError(ParseError), - TooManyRedirects, - InvalidRedirectUrl, - MaxRetriesReached, -} - -impl Display for ReportingError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - Self::Http(err) => write!(f, "{}", err), - Self::StatusCode(status) => { - write!( - f, - "Request was unsuccessful and could not be retried: {}", - status - ) - } - Self::EndpointError(err) => write!(f, "{}", err), - Self::TooManyRedirects => { - write!(f, "Too many redirects from the server") - } - Self::InvalidRedirectUrl => write!(f, "Server responded with an invalid redirect URL"), - Self::MaxRetriesReached => write!(f, "Maximum number of retries reached"), - } - } -} - -/// Exponential backoff with random jitter for retrying configuration reporting -struct ReportingRetryBackoff { - backoff: ExponentialBackoff, -} - -impl ReportingRetryBackoff { - /// Retry every 2^n seconds with a maximum delay of 60 seconds (and any - /// additional jitter) - const fn new() -> Self { - let backoff = ExponentialBackoff::from_millis(2) - .factor(1000) - .max_delay(Duration::from_secs(60)); - - Self { backoff } - } - - /// Wait before retrying as determined by the backoff and jitter - async fn wait(&mut self) { - let retry_backoff = self.next().unwrap(); - info!( - "Retrying configuration reporting to {} in {} seconds.", - DATADOG_REPORTING_PRODUCT, - retry_backoff.as_secs_f32() - ); - sleep(retry_backoff).await; - } -} - -impl Iterator for ReportingRetryBackoff { - type Item = Duration; - - fn next(&mut self) -> Option { - let jitter_milliseconds = Duration::from_millis(rand::thread_rng().gen_range(0..1000)); - Some( - self.backoff - .next() - .unwrap() - .saturating_add(jitter_milliseconds), - ) - } -} - -impl<'a> PipelinesVersionPayload<'a> { - /// Create a new Pipelines reporting payload from a config and string fields. - const fn new(config: &'a toml::value::Table, fields: &PipelinesStrFields<'a>) -> Self { - Self { - data: PipelinesData { - attributes: PipelinesAttributes { - config_hash: fields.configuration_version_hash, - vector_version: fields.vector_version, - config, - }, - r#type: "pipelines_configuration_version", - }, - } - } - - /// Helper method to serialize payload as a JSON string. - fn json_string(&self) -> String { - serde_json::to_string(self) - .expect("couldn't serialize Pipelines fields to JSON. Please report") - } -} - -#[derive(Clone)] -pub(crate) struct EnterpriseMetadata { - pub opts: Options, - pub api_key: String, - pub configuration_version_hash: String, -} - -impl TryFrom<&Config> for EnterpriseMetadata { - type Error = EnterpriseError; - - fn try_from(value: &Config) -> Result { - // Only valid if a [enterprise] section is present in config. - let opts = match value.enterprise.clone() { - Some(opts) => opts, - _ => return Err(EnterpriseError::Disabled), - }; - - // Return early if the feature isn't enabled. - if !opts.enabled { - return Err(EnterpriseError::Disabled); - } - - let api_key = match &opts.api_key { - // API key provided explicitly. - Some(api_key) => api_key.clone(), - // No API key; attempt to get it from the environment. - None => match env::var(DATADOG_API_KEY_ENV_VAR_FULL) - .or_else(|_| env::var(DATADOG_API_KEY_ENV_VAR_SHORT)) - { - Ok(api_key) => api_key, - _ => return Err(EnterpriseError::MissingApiKey), - }, - }; - - info!( - "Datadog API key provided. Integration with {} is enabled.", - DATADOG_REPORTING_PRODUCT - ); - - // Get the configuration version hash. 
In DD Pipelines, this is referred to as the 'config hash'. - let configuration_version_hash = value.hash.clone().expect("Config should be versioned"); - - Ok(Self { - opts, - api_key, - configuration_version_hash, - }) - } -} - -pub struct EnterpriseReporter { - reporting_tx: mpsc::UnboundedSender, -} - -impl EnterpriseReporter -where - T: Future + Send + 'static, -{ - pub fn new() -> Self { - let (reporting_tx, mut reporting_rx) = mpsc::unbounded_channel(); - - // A long running task to report configurations in order - tokio::spawn(async move { - let mut pending_reports = FuturesOrdered::new(); - loop { - tokio::select! { - maybe_report = reporting_rx.recv() => { - match maybe_report { - Some(report) => pending_reports.push_back(report), - None => break, - } - } - _ = pending_reports.next(), if !pending_reports.is_empty() => { - } - } - } - }); - - Self { reporting_tx } - } - - pub fn send(&self, reporting_task: T) { - if let Err(err) = self.reporting_tx.send(reporting_task) { - error!( - %err, - "Unable to report configuration due to internal Vector issue.", - ); - } - } -} - -impl Default for EnterpriseReporter -where - T: Future + Send + 'static, -{ - fn default() -> Self { - Self::new() - } -} - -/// Report a configuration in a reloading context. -/// -/// Returns an [`EnterpriseReporter`] if one was not provided. -pub(crate) fn report_on_reload( - config: &mut Config, - metadata: EnterpriseMetadata, - config_paths: Vec, - enterprise: Option<&EnterpriseReporter>>, -) -> Option>> { - attach_enterprise_components(config, &metadata); - - match enterprise { - Some(enterprise) => { - enterprise.send(report_configuration(config_paths, metadata)); - None - } - None => { - let enterprise = EnterpriseReporter::new(); - enterprise.send(report_configuration(config_paths, metadata)); - Some(enterprise) - } - } -} - -pub(crate) fn attach_enterprise_components(config: &mut Config, metadata: &EnterpriseMetadata) { - let api_key = metadata.api_key.clone(); - let configuration_version_hash = metadata.configuration_version_hash.clone(); - - setup_metrics_reporting( - config, - &metadata.opts, - api_key.clone(), - configuration_version_hash.clone(), - ); - - if metadata.opts.enable_logs_reporting { - setup_logs_reporting(config, &metadata.opts, api_key, configuration_version_hash); - } -} - -fn setup_logs_reporting( - config: &mut Config, - datadog: &Options, - api_key: String, - configuration_version_hash: String, -) { - let tag_logs_id = OutputId::from(ComponentKey::from(TAG_LOGS_KEY)); - let internal_logs_id = OutputId::from(ComponentKey::from(INTERNAL_LOGS_KEY)); - let datadog_logs_id = ComponentKey::from(DATADOG_LOGS_KEY); - - let internal_logs = InternalLogsConfig::default(); - - let custom_logs_tags_vrl = datadog - .tags - .as_ref() - .map_or("".to_string(), |tags| convert_tags_to_vrl(tags, false)); - - let configuration_key = &datadog.configuration_key; - let vector_version = crate::vector_version(); - let tag_logs = RemapConfig { - source: Some(format!( - r#" - .ddsource = "vector" - .vector.configuration_key = "{configuration_key}" - .vector.configuration_version_hash = "{configuration_version_hash}" - .vector.version = "{vector_version}" - {} - "#, - custom_logs_tags_vrl, - )), - ..Default::default() - }; - - // Create a Datadog logs sink to consume and emit internal logs. 
- let datadog_logs = DatadogLogsConfig { - local_dd_common: LocalDatadogCommonConfig::new( - datadog.endpoint.clone(), - Some(datadog.site.clone()), - Some(api_key.into()), - ), - request: RequestConfig { - headers: IndexMap::from([( - "DD-EVP-ORIGIN".to_string(), - "vector-enterprise".to_string(), - )]), - ..Default::default() - }, - ..Default::default() - }; - - config.sources.insert( - internal_logs_id.component.clone(), - SourceOuter::new(internal_logs), - ); - - config.transforms.insert( - tag_logs_id.component.clone(), - TransformOuter::new(vec![internal_logs_id], tag_logs), - ); - - config.sinks.insert( - datadog_logs_id, - SinkOuter::new(vec![tag_logs_id], datadog_logs), - ); -} - -fn setup_metrics_reporting( - config: &mut Config, - datadog: &Options, - api_key: String, - configuration_version_hash: String, -) { - let host_metrics_id = OutputId::from(ComponentKey::from(HOST_METRICS_KEY)); - let tag_metrics_id = OutputId::from(ComponentKey::from(TAG_METRICS_KEY)); - let internal_metrics_id = OutputId::from(ComponentKey::from(INTERNAL_METRICS_KEY)); - let filter_metrics_id = OutputId::from(ComponentKey::from(FILTER_METRICS_KEY)); - let pipelines_namespace_metrics_id = - OutputId::from(ComponentKey::from(PIPELINES_NAMESPACE_METRICS_KEY)); - let datadog_metrics_id = ComponentKey::from(DATADOG_METRICS_KEY); - - // Create internal sources for host and internal metrics. We're using distinct sources here and - // not attempting to reuse existing ones, to configure according to enterprise requirements. - - // By default, host_metrics generates many metrics and some with high - // cardinality which can negatively impact customers' costs and downstream - // systems' performance. To avoid this, we explicitly set `collectors`. - let host_metrics = HostMetricsConfig { - namespace: Some("vector.host".to_owned()), - scrape_interval_secs: Duration::from_secs_f64(datadog.reporting_interval_secs), - collectors: Some(vec![ - Collector::Cpu, - Collector::Disk, - Collector::Load, - Collector::Host, - Collector::Memory, - Collector::Network, - ]), - ..Default::default() - }; - - let internal_metrics = InternalMetricsConfig { - // While the default namespace for internal metrics is already "vector", - // setting the namespace here is meant for clarity and resistance - // against any future or accidental changes. - namespace: "vector".to_owned(), - scrape_interval_secs: Duration::from_secs_f64(datadog.reporting_interval_secs), - ..Default::default() - }; - - let custom_metric_tags_vrl = datadog - .tags - .as_ref() - .map_or("".to_string(), |tags| convert_tags_to_vrl(tags, true)); - - let configuration_key = &datadog.configuration_key; - let vector_version = crate::vector_version(); - let tag_metrics = RemapConfig { - source: Some(format!( - r#" - .tags.configuration_version_hash = "{configuration_version_hash}" - .tags.configuration_key = "{configuration_key}" - .tags.vector_version = "{vector_version}" - {} - "#, - custom_metric_tags_vrl - )), - ..Default::default() - }; - - // Preserve the `pipelines` namespace for specific metrics - let filter_metrics = FilterConfig::from(AnyCondition::String( - r#".name == "component_received_bytes_total""#.to_string(), - )); - - let pipelines_namespace_metrics = RemapConfig { - source: Some(r#".namespace = "pipelines""#.to_string()), - ..Default::default() - }; - - // Create a Datadog metrics sink to consume and emit internal + host metrics. 
- let datadog_metrics = DatadogMetricsConfig { - local_dd_common: LocalDatadogCommonConfig::new( - datadog.endpoint.clone(), - Some(datadog.site.clone()), - Some(api_key.into()), - ), - ..Default::default() - }; - - config.sources.insert( - host_metrics_id.component.clone(), - SourceOuter::new(host_metrics), - ); - config.sources.insert( - internal_metrics_id.component.clone(), - SourceOuter::new(internal_metrics), - ); - - config.transforms.insert( - tag_metrics_id.component.clone(), - TransformOuter::new(vec![host_metrics_id, internal_metrics_id], tag_metrics), - ); - - config.transforms.insert( - filter_metrics_id.component.clone(), - TransformOuter::new(vec![tag_metrics_id.clone()], filter_metrics), - ); - - config.transforms.insert( - pipelines_namespace_metrics_id.component.clone(), - TransformOuter::new(vec![filter_metrics_id], pipelines_namespace_metrics), - ); - - config.sinks.insert( - datadog_metrics_id, - SinkOuter::new( - vec![tag_metrics_id, pipelines_namespace_metrics_id], - datadog_metrics, - ), - ); -} - -/// Converts user configured tags to VRL source code for adding tags/fields to -/// events -fn convert_tags_to_vrl(tags: &IndexMap, is_metric: bool) -> String { - let json_tags = serde_json::to_string(&tags).unwrap(); - if is_metric { - format!(r#".tags = merge(.tags, {}, deep: true)"#, json_tags) - } else { - format!(r#". = merge(., {}, deep: true)"#, json_tags) - } -} - -/// Report the internal configuration to Datadog Observability Pipelines. -pub(crate) fn report_configuration( - config_paths: Vec, - metadata: EnterpriseMetadata, -) -> BoxFuture<'static, ()> { - let fut = async move { - let EnterpriseMetadata { - api_key, - configuration_version_hash, - opts, - } = metadata; - - // Get the Vector version. This is reported to Pipelines along with a config hash. - let vector_version = crate::get_version(); - - // We need to create a JSON representation of config, based on the original files - // that Vector was spawned with. - let (table, _) = process_paths(&config_paths) - .and_then(|paths| load_source_from_paths(&paths).ok()) - .expect("Couldn't load source from config paths. Please report."); - - // Set the relevant fields needed to report a config to Datadog. This is a struct rather than - // exploding as func arguments to avoid confusion with multiple &str fields. - let fields = PipelinesStrFields { - configuration_version_hash: &configuration_version_hash, - vector_version: &vector_version, - }; - - // Set the Datadog authorization fields. There's an API and app key, to allow read/write - // access in tandem with RBAC on the Datadog side. - let auth = PipelinesAuth { api_key: &api_key }; - - // Create a HTTP client for posting a Vector version to Datadog OP. This will - // respect any proxy settings provided in top-level config. - let client = HttpClient::new(None, &opts.proxy) - .expect("couldn't instrument Datadog HTTP client. Please report"); - - // Endpoint to report a config to Datadog OP. 
- let endpoint = get_reporting_endpoint( - opts.endpoint.as_ref(), - opts.site.as_str(), - &opts.configuration_key, - ); - // Datadog uses a JSON:API, so we'll serialize the config to a JSON - let payload = PipelinesVersionPayload::new(&table, &fields); - - match report_serialized_config_to_datadog( - &client, - &endpoint, - &auth, - &payload, - opts.max_retries, - ) - .await - { - Ok(()) => { - info!( - "Vector config {} successfully reported to {}.", - &configuration_version_hash, DATADOG_REPORTING_PRODUCT - ); - } - Err(err) => { - error!( - err = ?err.to_string(), - "Could not report Vector config to {}.", DATADOG_REPORTING_PRODUCT - ); - } - } - }; - - Box::pin(fut) -} - -/// Returns the full URL endpoint of where to POST a Datadog Vector configuration. -fn get_reporting_endpoint( - endpoint: Option<&String>, - site: &str, - configuration_key: &str, -) -> String { - let base = site; - - format!( - "{}{}/{}/versions", - get_api_base_endpoint(endpoint, base), - DATADOG_REPORTING_PATH_STUB, - configuration_key - ) -} - -/// Build a POST request for reporting a Vector config to Datadog OP. -fn build_request<'a>( - endpoint: &Url, - auth: &'a PipelinesAuth, - payload: &'a PipelinesVersionPayload, -) -> Request { - Request::post(endpoint.to_string()) - .header("DD-API-KEY", auth.api_key) - .body(Body::from(payload.json_string())) - .unwrap_or_else(|_| { - panic!( - "couldn't create {} HTTP request. Please report", - DATADOG_REPORTING_PRODUCT - ) - }) -} - -/// Reports a JSON serialized Vector config to Datadog, for use with Observability Pipelines. -async fn report_serialized_config_to_datadog<'a>( - client: &'a HttpClient, - endpoint: &'a str, - auth: &'a PipelinesAuth<'a>, - payload: &'a PipelinesVersionPayload<'a>, - max_retries: u32, -) -> Result<(), ReportingError> { - info!( - "Attempting to report configuration to {}.", - DATADOG_REPORTING_PRODUCT - ); - - let mut endpoint = Url::parse(endpoint).map_err(ReportingError::EndpointError)?; - let mut redirected = false; - let mut backoff = ReportingRetryBackoff::new(); - let mut retries = 0; - - while retries < max_retries { - retries += 1; - let req = build_request(&endpoint, auth, payload); - let res = client.send(req).await; - if let Err(HttpError::CallRequest { source: error }) = &res { - // Retry on request timeouts and network issues - if error.is_timeout() { - info!(message = "Configuration reporting request timed out.", error = %error); - backoff.wait().await; - continue; - } else if error.is_connect() { - warn!(error = %error, "Configuration reporting connection issue."); - backoff.wait().await; - continue; - } - } - let res = res.map_err(ReportingError::Http)?; - let status = res.status(); - - // Follow redirection responses a maximum of one time. - if status.is_redirection() && !redirected { - redirected = true; - // A `Location` header could contain a relative path. To guard against that, we'll - // join the location to the original URL to get a new absolute path. - endpoint = endpoint - .join( - res.headers() - .get(LOCATION) - .ok_or(ReportingError::InvalidRedirectUrl)? 
- .to_str() - .map_err(|_| ReportingError::InvalidRedirectUrl)?, - ) - .map_err(ReportingError::EndpointError)?; - info!(message = "Configuration reporting request redirected.", endpoint = %endpoint); - continue; - } else if status.is_redirection() && redirected { - return Err(ReportingError::TooManyRedirects); - } else if status.is_client_error() || status.is_server_error() { - info!(message = "Encountered retriable error.", status = %status); - backoff.wait().await; - continue; - } else if status.is_success() { - return Ok(()); - } else { - return Err(ReportingError::StatusCode(status)); - } - } - - Err(ReportingError::MaxRetriesReached) -} - -#[cfg(all( - test, - feature = "enterprise", - feature = "sources-demo_logs", - feature = "sinks-blackhole" -))] -mod test { - use std::{collections::BTreeMap, net::TcpListener, time::Duration}; - - use http::StatusCode; - use indexmap::IndexMap; - use tokio::time::sleep; - use vector_lib::config::proxy::ProxyConfig; - use vrl::btreemap; - use vrl::compiler::state::ExternalEnv; - use vrl::compiler::{compile, compile_with_external, CompileConfig}; - use vrl::value::kind::Collection; - use vrl::value::Kind; - use wiremock::{matchers, Mock, MockServer, ResponseTemplate}; - - use super::{ - report_serialized_config_to_datadog, PipelinesAuth, PipelinesStrFields, - PipelinesVersionPayload, - }; - use crate::{ - config::enterprise::{convert_tags_to_vrl, default_max_retries}, - http::HttpClient, - test_util::next_addr, - }; - - const fn get_pipelines_auth() -> PipelinesAuth<'static> { - PipelinesAuth { api_key: "api_key" } - } - - const fn get_pipelines_fields() -> PipelinesStrFields<'static> { - PipelinesStrFields { - configuration_version_hash: "configuration_version_hash", - vector_version: "vector_version", - } - } - - /// This mocked server will reply with the configured status code 3 times - /// before falling back to a 200 OK - async fn build_test_server_error_and_recover(status_code: StatusCode) -> MockServer { - let mock_server = MockServer::start().await; - - Mock::given(matchers::method("POST")) - .respond_with(ResponseTemplate::new(status_code)) - .up_to_n_times(3) - .with_priority(1) - .mount(&mock_server) - .await; - - Mock::given(matchers::method("POST")) - .respond_with(ResponseTemplate::new(StatusCode::OK)) - .with_priority(2) - .mount(&mock_server) - .await; - - mock_server - } - - #[tokio::test] - async fn retry_on_specific_client_error_status_codes() { - let server = build_test_server_error_and_recover(StatusCode::REQUEST_TIMEOUT).await; - - let endpoint = server.uri(); - let client = - HttpClient::new(None, &ProxyConfig::default()).expect("Failed to create http client"); - let auth = get_pipelines_auth(); - let fields = get_pipelines_fields(); - let config = toml::map::Map::new(); - let payload = PipelinesVersionPayload::new(&config, &fields); - - assert!(report_serialized_config_to_datadog( - &client, - endpoint.as_ref(), - &auth, - &payload, - default_max_retries() - ) - .await - .is_ok()); - } - - #[tokio::test] - async fn retry_on_server_error_status_codes() { - let server = build_test_server_error_and_recover(StatusCode::INTERNAL_SERVER_ERROR).await; - - let endpoint = server.uri(); - let client = - HttpClient::new(None, &ProxyConfig::default()).expect("Failed to create http client"); - let auth = get_pipelines_auth(); - let fields = get_pipelines_fields(); - let config = toml::map::Map::new(); - let payload = PipelinesVersionPayload::new(&config, &fields); - - assert!(report_serialized_config_to_datadog( - &client, - 
endpoint.as_ref(), - &auth, - &payload, - default_max_retries() - ) - .await - .is_ok()); - } - - #[tokio::test] - async fn retry_on_loss_of_network_connection() { - let addr = next_addr(); - let endpoint = format!("http://{}:{}", addr.ip(), addr.port()); - - let report = tokio::spawn(async move { - let client = HttpClient::new(None, &ProxyConfig::default()) - .expect("Failed to create http client"); - let auth = get_pipelines_auth(); - let fields = get_pipelines_fields(); - let config = toml::map::Map::new(); - let payload = PipelinesVersionPayload::new(&config, &fields); - - report_serialized_config_to_datadog( - &client, - endpoint.as_ref(), - &auth, - &payload, - default_max_retries(), - ) - .await - }); - sleep(Duration::from_secs(2)).await; - - // The server is completely unavailable when initially reporting to - // simulate a network/connection failure - let listener = TcpListener::bind(addr).unwrap(); - let server = MockServer::builder().listener(listener).start().await; - Mock::given(matchers::method("POST")) - .respond_with(ResponseTemplate::new(StatusCode::OK)) - .mount(&server) - .await; - - let res = report.await.unwrap(); - assert!(res.is_ok()); - } - - #[tokio::test] - async fn error_exceed_max_retries() { - let server = build_test_server_error_and_recover(StatusCode::INTERNAL_SERVER_ERROR).await; - - let endpoint = server.uri(); - let client = - HttpClient::new(None, &ProxyConfig::default()).expect("Failed to create http client"); - let auth = get_pipelines_auth(); - let fields = get_pipelines_fields(); - let config = toml::map::Map::new(); - let payload = PipelinesVersionPayload::new(&config, &fields); - - assert!(report_serialized_config_to_datadog( - &client, - endpoint.as_ref(), - &auth, - &payload, - 1 - ) - .await - .is_err()); - } - - #[test] - fn dynamic_tags_to_remap_config_for_metrics() { - let tags = IndexMap::from([ - ("pull_request".to_string(), "1234".to_string()), - ("replica".to_string(), "abcd".to_string()), - ("variant".to_string(), "baseline".to_string()), - ]); - - let vrl = convert_tags_to_vrl(&tags, true); - assert_eq!( - vrl, - r#".tags = merge(.tags, {"pull_request":"1234","replica":"abcd","variant":"baseline"}, deep: true)"# - ); - // We need to set up some state here to inform the VRL compiler that - // .tags is an object and merge() is thus a safe operation (mimicking - // the environment this code will actually run in). - let state = ExternalEnv::new_with_kind( - Kind::object(btreemap! { - "tags" => Kind::object(BTreeMap::new()), - }), - Kind::object(Collection::empty()), - ); - assert!(compile_with_external( - vrl.as_str(), - vrl::stdlib::all().as_ref(), - &state, - CompileConfig::default() - ) - .is_ok()); - } - - #[test] - fn dynamic_tags_to_remap_config_for_logs() { - let tags = IndexMap::from([ - ("pull_request".to_string(), "1234".to_string()), - ("replica".to_string(), "abcd".to_string()), - ("variant".to_string(), "baseline".to_string()), - ]); - let vrl = convert_tags_to_vrl(&tags, false); - - assert_eq!( - vrl, - r#". 
= merge(., {"pull_request":"1234","replica":"abcd","variant":"baseline"}, deep: true)"#
-        );
-        assert!(compile(vrl.as_str(), vrl::stdlib::all().as_ref()).is_ok());
-    }
-}
diff --git a/src/config/format.rs b/src/config/format.rs
index 1f21dd18da3e4..32c2bd7b0f12e 100644
--- a/src/config/format.rs
+++ b/src/config/format.rs
@@ -6,13 +6,28 @@ use std::fmt;
 use std::path::Path;
 use std::str::FromStr;
 
-use serde::de;
+use serde::{de, Deserialize, Serialize};
+use vector_config_macros::Configurable;
 
 /// A type alias to better capture the semantics.
 pub type FormatHint = Option<Format>;
 
 /// The format used to represent the configuration data.
-#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(
+    Debug,
+    Default,
+    Copy,
+    Clone,
+    Eq,
+    PartialEq,
+    Ord,
+    PartialOrd,
+    Hash,
+    Serialize,
+    Deserialize,
+    Configurable,
+)]
+#[serde(rename_all = "snake_case")]
 pub enum Format {
     /// TOML format is used.
     #[default]
@@ -31,7 +46,7 @@ impl FromStr for Format {
             "toml" => Ok(Format::Toml),
             "yaml" => Ok(Format::Yaml),
             "json" => Ok(Format::Json),
-            _ => Err(format!("Invalid format: {}", s)),
+            _ => Err(format!("Invalid format: {s}")),
         }
     }
 }
@@ -43,7 +58,7 @@ impl fmt::Display for Format {
             Format::Json => "json",
             Format::Yaml => "yaml",
         };
-        write!(f, "{}", format)
+        write!(f, "{format}")
     }
 }
 
@@ -66,7 +81,12 @@ where
 {
     match format {
         Format::Toml => toml::from_str(content).map_err(|e| vec![e.to_string()]),
-        Format::Yaml => serde_yaml::from_str(content).map_err(|e| vec![e.to_string()]),
+        Format::Yaml => serde_yaml::from_str::<serde_yaml::Value>(content)
+            .and_then(|mut v| {
+                v.apply_merge()?;
+                serde_yaml::from_value(v)
+            })
+            .map_err(|e| vec![e.to_string()]),
         Format::Json => serde_json::from_str(content).map_err(|e| vec![e.to_string()]),
     }
 }
@@ -143,7 +163,7 @@ mod tests {
         for (input, expected) in cases {
             let output = Format::from_path(std::path::PathBuf::from(input));
             assert_eq!(expected, output.ok(), "{input}")
         }
     }
 
@@ -159,7 +179,7 @@ mod tests {
     use crate::config::ConfigBuilder;
 
     macro_rules! concat_with_newlines {
-        ($($e:expr,)*) => { concat!( $($e, "\n"),+ ) };
+        ($($e:expr_2021,)*) => { concat!( $($e, "\n"),+ ) };
     }
 
     const SAMPLE_TOML: &str = r#"
@@ -171,6 +191,10 @@ mod tests {
        type = "socket"
        mode = "tcp"
        address = "127.0.0.1:1235"
+        [sources.in2]
+        type = "socket"
+        mode = "tcp"
+        address = "127.0.0.1:1234"
        [transforms.sample]
        type = "sample"
        inputs = ["in"]
@@ -208,10 +232,13 @@ mod tests {
         r#"  encoding:"#,
         r#"    type: "csv""#,
         r#"sources:"#,
-        r#"  in:"#,
+        r#"  in: &a"#,
         r#"    type: "socket""#,
-        r#"    mode: "tcp""#,
+        r#"    mode: &b "tcp""#,
         r#"    address: "127.0.0.1:1235""#,
+        r#"  in2:"#,
+        r#"    <<: *a"#,
+        r#"    address: "127.0.0.1:1234""#,
         r#"transforms:"#,
         r#"  sample:"#,
         r#"    type: "sample""#,
@@ -220,7 +247,7 @@ mod tests {
         r#"sinks:"#,
         r#"  out:"#,
         r#"    type: "socket""#,
-        r#"    mode: "tcp""#,
+        r#"    mode: *b"#,
         r#"    inputs: ["sample"]"#,
         r#"    encoding:"#,
         r#"      codec: "text""#,
@@ -248,6 +275,11 @@ mod tests {
                 "type": "socket",
                 "mode": "tcp",
                 "address": "127.0.0.1:1235"
+            },
+            "in2": {
+                "type": "socket",
+                "mode": "tcp",
+                "address": "127.0.0.1:1234"
             }
         },
         "transforms": {
@@ -284,23 +316,20 @@ mod tests {
                 Ok(expected) => {
                     #[allow(clippy::expect_fun_call)] // false positive
                     let output: ConfigBuilder = output.expect(&format!(
-                        "expected Ok, got Err with format {:?} and input {:?}",
-                        format, input
+                        "expected Ok, got Err with format {format:?} and input {input:?}"
                     ));
                     let output_json = serde_json::to_value(output).unwrap();
                     let expected_output: ConfigBuilder = deserialize(expected, Format::Toml)
                         .expect("Invalid TOML passed as an expectation");
                     let expected_json = serde_json::to_value(expected_output).unwrap();
-                    assert_eq!(expected_json, output_json, "{}", input)
+                    assert_eq!(expected_json, output_json, "{input}")
                 }
                 Err(expected) => assert_eq!(
                     expected,
                     output.expect_err(&format!(
-                        "expected Err, got Ok with format {:?} and input {:?}",
-                        format, input
+                        "expected Err, got Ok with format {format:?} and input {input:?}"
                    )),
-                    "{}",
-                    input
+                    "{input}"
                 ),
             }
         }
diff --git a/src/config/graph.rs b/src/config/graph.rs
index f97e63cfc0ddc..989f0a9eaf282 100644
--- a/src/config/graph.rs
+++ b/src/config/graph.rs
@@ -1,10 +1,10 @@
-use indexmap::{set::IndexSet, IndexMap};
-use std::collections::{HashMap, HashSet, VecDeque};
-
 use super::{
     schema, ComponentKey, DataType, OutputId, SinkOuter, SourceOuter, SourceOutput, TransformOuter,
-    TransformOutput,
+    TransformOutput, WildcardMatching,
 };
+use indexmap::{set::IndexSet, IndexMap};
+use std::collections::{HashMap, HashSet, VecDeque};
+use std::fmt;
 
 #[derive(Debug, Clone)]
 pub enum Node {
@@ -20,6 +20,33 @@ pub enum Node {
     },
 }
 
+impl fmt::Display for Node {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Node::Source { outputs } => {
+                write!(f, "component_kind: source\n outputs:")?;
+                for output in outputs {
+                    write!(f, "\n {output}")?;
+                }
+                Ok(())
+            }
+            Node::Transform { in_ty, outputs } => {
+                write!(
+                    f,
+                    "component_kind: transform\n input_types: {in_ty}\n outputs:"
+                )?;
+                for output in outputs {
+                    write!(f, "\n {output}")?;
+                }
+                Ok(())
+            }
+            Node::Sink { ty } => {
+                write!(f, "component_kind: sink\n types: {ty}")
+            }
+        }
+    }
+}
+
 #[derive(Debug, Clone)]
 struct Edge {
     from: OutputId,
@@ -38,8 +65,9 @@ impl Graph {
         transforms: &IndexMap<ComponentKey, TransformOuter<OutputId>>,
         sinks: &IndexMap<ComponentKey, SinkOuter<OutputId>>,
         schema: schema::Options,
+        wildcard_matching: WildcardMatching,
     ) -> Result<Self, Vec<String>> {
-        Self::new_inner(sources, transforms, sinks, false, schema)
+        Self::new_inner(sources, transforms, sinks, false, schema, wildcard_matching)
     }
 
     pub fn new_unchecked(
@@ -47,8 +75,10 @@ impl Graph {
         transforms: &IndexMap<ComponentKey, TransformOuter<OutputId>>,
         sinks: &IndexMap<ComponentKey, SinkOuter<OutputId>>,
         schema: schema::Options,
+        wildcard_matching: WildcardMatching,
     ) -> Self {
-        Self::new_inner(sources, transforms, sinks, true, schema).expect("errors ignored")
+        Self::new_inner(sources, transforms, sinks, true, schema, wildcard_matching)
+            .expect("errors ignored")
     }
 
     fn new_inner(
@@ -57,6 +87,7 @@ impl Graph {
         sinks: &IndexMap<ComponentKey, SinkOuter<OutputId>>,
         ignore_errors: bool,
         schema: schema::Options,
+        wildcard_matching: WildcardMatching,
     ) -> Result<Self, Vec<String>> {
         let mut graph = Graph::default();
         let mut errors = Vec::new();
@@ -85,7 +116,7 @@ impl Graph {
             );
         }
 
-        for (id, config) in sinks.iter() {
+        for (id, config) in sinks {
             graph.nodes.insert(
                 id.clone(),
                 Node::Sink {
@@ -100,15 +131,15 @@ impl Graph {
 
         for (id, config) in transforms.iter() {
             for input in config.inputs.iter() {
-                if let Err(e) = graph.add_input(input, id, &available_inputs) {
+                if let Err(e) = graph.add_input(input, id, &available_inputs, wildcard_matching) {
                     errors.push(e);
                 }
             }
         }
 
-        for (id, config) in sinks.iter() {
+        for (id, config) in sinks {
             for input in config.inputs.iter() {
-                if let Err(e) = graph.add_input(input, id, &available_inputs) {
+                if let Err(e) = graph.add_input(input, id, &available_inputs, wildcard_matching) {
                     errors.push(e);
                 }
             }
@@ -126,6 +157,7 @@ impl Graph {
         from: &str,
         to: &ComponentKey,
         available_inputs: &HashMap<String, OutputId>,
+        wildcard_matching: WildcardMatching,
     ) -> Result<(), String> {
         if let Some(output_id) = available_inputs.get(from) {
             self.edges.push(Edge {
@@ -139,9 +171,28 @@ impl Graph {
                 Some(Node::Sink { .. }) => "sink",
                 _ => panic!("only transforms and sinks have inputs"),
             };
+            // allow empty result if relaxed wildcard matching is enabled
+            match wildcard_matching {
+                WildcardMatching::Relaxed => {
+                    // using value != glob::Pattern::escape(value) to check if value is a glob
+                    // TODO: replace with proper check when https://github.com/rust-lang/glob/issues/72 is resolved
+                    if from != glob::Pattern::escape(from) {
+                        info!("Input \"{from}\" for {output_type} \"{to}\" didn't match any components, but this was ignored because `relaxed_wildcard_matching` is enabled.");
+                        return Ok(());
+                    }
+                }
+                WildcardMatching::Strict => {}
+            }
+            info!(
+                "Available components:\n{}",
+                self.nodes
+                    .iter()
+                    .map(|(key, node)| format!("\"{key}\":\n {node}"))
+                    .collect::<Vec<_>>()
+                    .join("\n")
+            );
             Err(format!(
-                "Input \"{}\" for {} \"{}\" doesn't match any components.",
-                from, output_type, to
+                "Input \"{from}\" for {output_type} \"{to}\" doesn't match any components.",
             ))
         }
     }
@@ -302,7 +353,7 @@ impl Graph {
 
         for id in self.valid_inputs() {
             if let Some(_other) = mapped.insert(id.to_string(), id.clone()) {
-                errors.insert(format!("Input specifier {} is ambiguous", id));
+                errors.insert(format!("Input specifier {id} is ambiguous"));
             }
         }
 
@@ -379,7 +430,7 @@ mod test {
                     outputs: vec![match ty {
                         DataType::Metric => SourceOutput::new_metrics(),
                         DataType::Trace => SourceOutput::new_traces(),
-                        _ => SourceOutput::new_logs(ty, Definition::any()),
+                        _ => SourceOutput::new_maybe_logs(ty, Definition::any()),
                     }],
                 },
             );
@@ -438,9 +489,14 @@ mod test {
             }
         }
 
-        fn test_add_input(&mut self, node: &str, input: &str) -> Result<(), String> {
+        fn test_add_input(
+            &mut self,
+            node: &str,
+            input: &str,
+            wildcard_matching: WildcardMatching,
+        ) -> Result<(), String> {
             let available_inputs = self.input_map().unwrap();
-            self.add_input(input, &node.into(), &available_inputs)
+            self.add_input(input, &node.into(), &available_inputs, wildcard_matching)
         }
     }
 
@@ -510,7 +566,7 @@ mod test {
 
         assert_eq!(
Err(vec![ - "Data type mismatch between in (Log) and out (Metric)".into() + "Data type mismatch between in ([\"Log\"]) and out ([\"Metric\"])".into() ]), graph.typecheck() ); @@ -523,7 +579,7 @@ mod test { graph.add_source("metric_source", DataType::Metric); graph.add_sink( "any_sink", - DataType::all(), + DataType::all_bits(), vec!["log_source", "metric_source"], ); @@ -533,16 +589,16 @@ mod test { #[test] fn allows_any_into_log_or_metric() { let mut graph = Graph::default(); - graph.add_source("any_source", DataType::all()); + graph.add_source("any_source", DataType::all_bits()); graph.add_transform( "log_to_any", DataType::Log, - DataType::all(), + DataType::all_bits(), vec!["any_source"], ); graph.add_transform( "any_to_log", - DataType::all(), + DataType::all_bits(), DataType::Log, vec!["any_source"], ); @@ -579,19 +635,19 @@ mod test { ); graph.add_transform( "any_to_any", - DataType::all(), - DataType::all(), + DataType::all_bits(), + DataType::all_bits(), vec!["log_to_log", "metric_to_metric"], ); graph.add_transform( "any_to_log", - DataType::all(), + DataType::all_bits(), DataType::Log, vec!["any_to_any"], ); graph.add_transform( "any_to_metric", - DataType::all(), + DataType::all_bits(), DataType::Metric, vec!["any_to_any"], ); @@ -621,14 +677,22 @@ mod test { // make sure we're good with dotted paths assert_eq!( Ok(()), - graph.test_add_input("errored_log_sink", "log_to_log.errors") + graph.test_add_input( + "errored_log_sink", + "log_to_log.errors", + WildcardMatching::Strict + ) ); // make sure that we're not cool with an unknown dotted path let expected = "Input \"log_to_log.not_errors\" for sink \"bad_log_sink\" doesn't match any components.".to_string(); assert_eq!( Err(expected), - graph.test_add_input("bad_log_sink", "log_to_log.not_errors") + graph.test_add_input( + "bad_log_sink", + "log_to_log.not_errors", + WildcardMatching::Strict + ) ); } @@ -639,26 +703,32 @@ mod test { graph.nodes.insert( ComponentKey::from("foo.bar"), Node::Source { - outputs: vec![SourceOutput::new_logs(DataType::all(), Definition::any())], + outputs: vec![SourceOutput::new_maybe_logs( + DataType::all_bits(), + Definition::any(), + )], }, ); graph.nodes.insert( ComponentKey::from("foo.bar"), Node::Source { - outputs: vec![SourceOutput::new_logs(DataType::all(), Definition::any())], + outputs: vec![SourceOutput::new_maybe_logs( + DataType::all_bits(), + Definition::any(), + )], }, ); graph.nodes.insert( ComponentKey::from("foo"), Node::Transform { - in_ty: DataType::all(), + in_ty: DataType::all_bits(), outputs: vec![ TransformOutput::new( - DataType::all(), + DataType::all_bits(), [("test".into(), Definition::default_legacy_namespace())].into(), ), TransformOutput::new( - DataType::all(), + DataType::all_bits(), [("test".into(), Definition::default_legacy_namespace())].into(), ) .with_port("bar"), @@ -670,20 +740,23 @@ mod test { graph.nodes.insert( ComponentKey::from("baz.errors"), Node::Source { - outputs: vec![SourceOutput::new_logs(DataType::all(), Definition::any())], + outputs: vec![SourceOutput::new_maybe_logs( + DataType::all_bits(), + Definition::any(), + )], }, ); graph.nodes.insert( ComponentKey::from("baz"), Node::Transform { - in_ty: DataType::all(), + in_ty: DataType::all_bits(), outputs: vec![ TransformOutput::new( - DataType::all(), + DataType::all_bits(), [("test".into(), Definition::default_legacy_namespace())].into(), ), TransformOutput::new( - DataType::all(), + DataType::all_bits(), [("test".into(), Definition::default_legacy_namespace())].into(), ) .with_port("errors"), @@ 
-702,6 +775,40 @@ mod test {
         );
     }
 
+    #[test]
+    fn wildcard_matching() {
+        let mut graph = Graph::default();
+        graph.add_source("log_source", DataType::Log);
+
+        // don't add inputs to these yet since they're not validated via these helpers
+        graph.add_sink("sink", DataType::Log, vec![]);
+
+        // make sure we're not good with non-existing inputs with relaxed wildcard matching disabled
+        let wildcard_matching = WildcardMatching::Strict;
+        let expected =
+            "Input \"bad_source-*\" for sink \"sink\" doesn't match any components.".to_string();
+        assert_eq!(
+            Err(expected),
+            graph.test_add_input("sink", "bad_source-*", wildcard_matching)
+        );
+
+        // make sure we're good with non-existing inputs with relaxed wildcard matching enabled
+        let wildcard_matching = WildcardMatching::Relaxed;
+        assert_eq!(
+            Ok(()),
+            graph.test_add_input("sink", "bad_source-*", wildcard_matching)
+        );
+
+        // make sure we're not good with non-existing inputs that are not wildcards even when relaxed wildcard matching is enabled
+        let wildcard_matching = WildcardMatching::Relaxed;
+        let expected =
+            "Input \"bad_source-1\" for sink \"sink\" doesn't match any components.".to_string();
+        assert_eq!(
+            Err(expected),
+            graph.test_add_input("sink", "bad_source-1", wildcard_matching)
+        );
+    }
+
     #[test]
     fn paths_to_sink_simple() {
         let mut graph = Graph::default();
diff --git a/src/config/loading/config_builder.rs b/src/config/loading/config_builder.rs
index eb780ddd885e6..4ef0010d6fb20 100644
--- a/src/config/loading/config_builder.rs
+++ b/src/config/loading/config_builder.rs
@@ -33,14 +33,14 @@ impl ConfigBuilderLoader {
 
 impl Process for ConfigBuilderLoader {
     /// Prepares input for a `ConfigBuilder` by interpolating environment variables.
-    fn prepare<R: Read>(&mut self, input: R) -> Result<(String, Vec<String>), Vec<String>> {
-        let (prepared_input, warnings) = prepare_input(input)?;
+    fn prepare<R: Read>(&mut self, input: R) -> Result<String, Vec<String>> {
+        let prepared_input = prepare_input(input)?;
         let prepared_input = self
             .secrets
             .as_ref()
             .map(|s| secret::interpolate(&prepared_input, s))
             .unwrap_or(Ok(prepared_input))?;
-        Ok((prepared_input, warnings))
+        Ok(prepared_input)
     }
 
     /// Merge a TOML `Table` with a `ConfigBuilder`. Component types extend specific keys.
@@ -63,7 +63,7 @@ impl Process for ConfigBuilderLoader {
             }
             Some(ComponentHint::EnrichmentTable) => {
                 self.builder.enrichment_tables.extend(deserialize_table::<
-                    IndexMap<ComponentKey, EnrichmentTableOuter>,
+                    IndexMap<ComponentKey, EnrichmentTableOuter<String>>,
                 >(table)?);
             }
             Some(ComponentHint::Test) => {
diff --git a/src/config/loading/loader.rs b/src/config/loading/loader.rs
index b418afa6c0e21..0fcfeb2545c48 100644
--- a/src/config/loading/loader.rs
+++ b/src/config/loading/loader.rs
@@ -54,20 +54,16 @@ pub(super) mod process {
     pub trait Process {
         /// Prepares input for serialization. This can be a useful step to interpolate
         /// environment variables or perform some other pre-processing on the input.
-        fn prepare<R: Read>(&mut self, input: R) -> Result<(String, Vec<String>), Vec<String>>;
+        fn prepare<R: Read>(&mut self, input: R) -> Result<String, Vec<String>>;
 
         /// Calls into the `prepare` method, and deserializes a `Read` to a `T`.
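+        /// `T` here is loader-specific: for example, a TOML `Table` for the
+        /// `SourceLoader`, or a `ConfigBuilder` for the `ConfigBuilderLoader`.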
-        fn load<R: Read, T>(
-            &mut self,
-            input: R,
-            format: Format,
-        ) -> Result<(T, Vec<String>), Vec<String>>
+        fn load<R: Read, T>(&mut self, input: R, format: Format) -> Result<T, Vec<String>>
         where
             T: serde::de::DeserializeOwned,
         {
-            let (value, warnings) = self.prepare(input)?;
+            let value = self.prepare(input)?;
 
-            format::deserialize(&value, format).map(|builder| (builder, warnings))
+            format::deserialize(&value, format)
         }
 
         /// Helper method used by other methods to recursively handle file/dir loading, merging
@@ -77,9 +73,8 @@ pub(super) mod process {
             path: &Path,
             result: &mut Table,
             recurse: bool,
-        ) -> Result<Vec<String>, Vec<String>> {
+        ) -> Result<(), Vec<String>> {
             let mut errors = Vec::new();
-            let mut warnings = Vec::new();
             let readdir = read_dir(path)?;
 
             let mut files = Vec::new();
@@ -105,8 +100,7 @@ pub(super) mod process {
                     }
                     Err(err) => {
                         errors.push(format!(
-                            "Could not read entry in config dir: {:?}, {}.",
-                            path, err
+                            "Could not read entry in config dir: {path:?}, {err}."
                         ));
                     }
                };
@@ -126,11 +120,9 @@ pub(super) mod process {
                 };
 
                 match loaded {
-                    Ok(Some((name, inner, warns))) => {
+                    Ok(Some((name, inner))) => {
                         if let Err(errs) = merge_with_value(result, name, Value::Table(inner)) {
                             errors.extend(errs);
-                        } else {
-                            warnings.extend(warns);
                         }
                     }
                     Ok(None) => {}
@@ -146,9 +138,8 @@ pub(super) mod process {
                 if let Ok(name) = component_name(&entry) {
                     if !result.contains_key(&name) {
                         match self.load_dir(&entry, true) {
-                            Ok((table, warns)) => {
+                            Ok(table) => {
                                 result.insert(name, Value::Table(table));
-                                warnings.extend(warns);
                             }
                             Err(errs) => {
                                 errors.extend(errs);
@@ -160,7 +151,7 @@ pub(super) mod process {
             }
 
             if errors.is_empty() {
-                Ok(warnings)
+                Ok(())
             } else {
                 Err(errors)
             }
@@ -171,12 +162,10 @@ pub(super) mod process {
             &mut self,
             path: &Path,
             format: Format,
-        ) -> Result<Option<(String, Table, Vec<String>)>, Vec<String>> {
-            if let (Ok(name), Some(file)) = (component_name(path), open_file(path)) {
-                self.load(file, format)
-                    .map(|(value, warnings)| Some((name, value, warnings)))
-            } else {
-                Ok(None)
+        ) -> Result<Option<(String, Table)>, Vec<String>> {
+            match (component_name(path), open_file(path)) {
+                (Ok(name), Some(file)) => self.load(file, format).map(|value| Some((name, value))),
+                _ => Ok(None),
             }
         }
 
@@ -186,14 +175,14 @@ pub(super) mod process {
             &mut self,
             path: &Path,
             format: Format,
-        ) -> Result<Option<(String, Table, Vec<String>)>, Vec<String>> {
-            if let Some((name, mut table, mut warnings)) = self.load_file(path, format)? {
+        ) -> Result<Option<(String, Table)>, Vec<String>> {
+            if let Some((name, mut table)) = self.load_file(path, format)? {
                 if let Some(subdir) = path.parent().map(|p| p.join(&name)) {
                     if subdir.is_dir() && subdir.exists() {
-                        warnings.extend(self.load_dir_into(&subdir, &mut table, true)?);
+                        self.load_dir_into(&subdir, &mut table, true)?;
                     }
                 }
-                Ok(Some((name, table, warnings)))
+                Ok(Some((name, table)))
             } else {
                 Ok(None)
             }
@@ -201,14 +190,10 @@ pub(super) mod process {
 
         /// Loads a directory (optionally, recursively), returning a TOML `Table`. This will
         /// create an initial `Table` and pass it into `load_dir_into` for recursion handling.
-        fn load_dir(
-            &mut self,
-            path: &Path,
-            recurse: bool,
-        ) -> Result<(Table, Vec<String>), Vec<String>> {
+        fn load_dir(&mut self, path: &Path, recurse: bool) -> Result<Table, Vec<String>> {
             let mut result = Table::new();
-            let warnings = self.load_dir_into(path, &mut result, recurse)?;
-            Ok((result, warnings))
+            self.load_dir_into(path, &mut result, recurse)?;
+            Ok(result)
         }
 
         /// Merge a provided TOML `Table` in an implementation-specific way. Contains an
@@ -229,18 +214,18 @@ where
     /// Deserializes a file with the provided format, and makes the result available via `take`.
     /// Returns a vector of non-fatal warnings on success, or a vector of error strings on failure.
-    fn load_from_file(&mut self, path: &Path, format: Format) -> Result<Vec<String>, Vec<String>> {
-        if let Some((_, table, warnings)) = self.load_file(path, format)? {
+    fn load_from_file(&mut self, path: &Path, format: Format) -> Result<(), Vec<String>> {
+        if let Some((_, table)) = self.load_file(path, format)? {
             self.merge(table, None)?;
-            Ok(warnings)
+            Ok(())
         } else {
-            Ok(vec![])
+            Ok(())
        }
     }
 
     /// Deserializes a dir with the provided format, and makes the result available via `take`.
     /// Returns a vector of non-fatal warnings on success, or a vector of error strings on failure.
-    fn load_from_dir(&mut self, path: &Path) -> Result<Vec<String>, Vec<String>> {
+    fn load_from_dir(&mut self, path: &Path) -> Result<(), Vec<String>> {
         // Iterator containing component-specific sub-folders to attempt traversing into.
         let hints = [
             ComponentHint::Source,
@@ -257,7 +242,7 @@ where
         // Get files from the root of the folder. These represent top-level config settings,
         // and need to be merged down first to represent a more 'complete' config.
         let mut root = Table::new();
-        let (table, mut warnings) = self.load_dir(path, false)?;
+        let table = self.load_dir(path, false)?;
 
         // Discard the named part of the path, since these don't form any component names.
         for (_, value) in table {
@@ -277,16 +262,13 @@ where
             if path.exists() && path.is_dir() {
                 // Transforms are treated differently from other component types; they can be
                 // arbitrarily nested.
-                let (table, warns) =
-                    self.load_dir(&path, matches!(hint, ComponentHint::Transform))?;
+                let table = self.load_dir(&path, matches!(hint, ComponentHint::Transform))?;
 
                 self.merge(table, Some(hint))?;
-
-                warnings.extend(warns);
             }
         }
 
-        Ok(warnings)
+        Ok(())
     }
 }
diff --git a/src/config/loading/mod.rs b/src/config/loading/mod.rs
index 40656fd29c84e..a4def7e19715d 100644
--- a/src/config/loading/mod.rs
+++ b/src/config/loading/mod.rs
@@ -8,7 +8,6 @@ use std::{
     fmt::Debug,
     fs::{File, ReadDir},
     path::{Path, PathBuf},
-    sync::atomic::{AtomicBool, Ordering},
     sync::Mutex,
 };
 
@@ -27,18 +26,6 @@ use crate::{config::ProviderConfig, signal};
 
 pub static CONFIG_PATHS: Mutex<Vec<ConfigPath>> = Mutex::new(Vec::new());
 
-// Technically, this global should be a parameter to the `config::vars::interpolate` function, which
-// is passed in from its caller `prepare_input` below, etc. However:
-//
-// 1. That ended up needing to have the parameter added to literally dozens of functions, as
-//    `prepare_input` has long chains of callers.
-//
-// 2. This variable is only ever set in one place, at program global startup from the command line.
-//
-// 3. This setting is intended to be transitional, anyways, and is marked as deprecated to be
-//    removed in a future version after strict mode becomes the default.
-pub static STRICT_ENV_VARS: AtomicBool = AtomicBool::new(false);
-
 pub(super) fn read_dir<P: AsRef<Path> + Debug>(path: P) -> Result<ReadDir, Vec<String>> {
     path.as_ref()
         .read_dir()
@@ -123,16 +110,20 @@ pub fn process_paths(config_paths: &[ConfigPath]) -> Option<Vec<ConfigPath>> {
     paths.sort();
     paths.dedup();
     // Ignore poison error and let the current main thread continue running to do the cleanup.
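+    // `clone_from` re-uses the allocation already behind the global where
+    // possible, instead of dropping it and cloning into a fresh `Vec`.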
- drop(CONFIG_PATHS.lock().map(|mut guard| *guard = paths.clone())); + drop( + CONFIG_PATHS + .lock() + .map(|mut guard| guard.clone_from(&paths)), + ); Some(paths) } pub fn load_from_paths(config_paths: &[ConfigPath]) -> Result> { - let (builder, load_warnings) = load_builder_from_paths(config_paths)?; + let builder = load_builder_from_paths(config_paths)?; let (config, build_warnings) = builder.build_with_warnings()?; - for warning in load_warnings.into_iter().chain(build_warnings) { + for warning in build_warnings { warn!("{}", warning); } @@ -148,13 +139,13 @@ pub async fn load_from_paths_with_provider_and_secrets( allow_empty: bool, ) -> Result> { // Load secret backends first - let (mut secrets_backends_loader, secrets_warning) = - load_secret_backends_from_paths(config_paths)?; + let mut secrets_backends_loader = load_secret_backends_from_paths(config_paths)?; // And then, if needed, retrieve secrets from configured backends - let (mut builder, load_warnings) = if secrets_backends_loader.has_secrets_to_retrieve() { + let mut builder = if secrets_backends_loader.has_secrets_to_retrieve() { debug!(message = "Secret placeholders found, retrieving secrets from configured backends."); let resolved_secrets = secrets_backends_loader .retrieve(&mut signal_handler.subscribe()) + .await .map_err(|e| vec![e])?; load_builder_from_paths_with_secrets(config_paths, resolved_secrets)? } else { @@ -177,11 +168,7 @@ pub async fn load_from_paths_with_provider_and_secrets( validation::check_buffer_preconditions(&new_config).await?; - for warning in secrets_warning - .into_iter() - .chain(load_warnings) - .chain(build_warnings) - { + for warning in build_warnings { warn!("{}", warning); } @@ -189,15 +176,11 @@ pub async fn load_from_paths_with_provider_and_secrets( } /// Iterators over `ConfigPaths`, and processes a file/dir according to a provided `Loader`. -fn loader_from_paths( - mut loader: L, - config_paths: &[ConfigPath], -) -> Result<(T, Vec), Vec> +fn loader_from_paths(mut loader: L, config_paths: &[ConfigPath]) -> Result> where T: serde::de::DeserializeOwned, L: Loader + Process, { - let mut warnings = Vec::new(); let mut errors = Vec::new(); for config_path in config_paths { @@ -209,13 +192,13 @@ where .or_else(move || Format::from_path(&path).ok()) .unwrap_or_default(), ) { - Ok(warns) => warnings.extend(warns), + Ok(()) => {} Err(errs) => errors.extend(errs), }; } ConfigPath::Dir(path) => { match loader.load_from_dir(path) { - Ok(warns) => warnings.extend(warns), + Ok(()) => {} Err(errs) => errors.extend(errs), }; } @@ -223,16 +206,14 @@ where } if errors.is_empty() { - Ok((loader.take(), warnings)) + Ok(loader.take()) } else { Err(errors) } } /// Uses `ConfigBuilderLoader` to process `ConfigPaths`, deserializing to a `ConfigBuilder`. -pub fn load_builder_from_paths( - config_paths: &[ConfigPath], -) -> Result<(ConfigBuilder, Vec), Vec> { +pub fn load_builder_from_paths(config_paths: &[ConfigPath]) -> Result> { loader_from_paths(ConfigBuilderLoader::new(), config_paths) } @@ -240,29 +221,29 @@ pub fn load_builder_from_paths( pub fn load_builder_from_paths_with_secrets( config_paths: &[ConfigPath], secrets: HashMap, -) -> Result<(ConfigBuilder, Vec), Vec> { +) -> Result> { loader_from_paths(ConfigBuilderLoader::with_secrets(secrets), config_paths) } /// Uses `SourceLoader` to process `ConfigPaths`, deserializing to a toml `SourceMap`. 
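The net effect of the hunks above shows up at call sites: loading either succeeds or fails with hard errors, and `build_with_warnings` is the only remaining source of warnings. A minimal caller-side sketch (illustrative only, not part of the patch; assumes the re-exported `config` API):

    use crate::config::{load_builder_from_paths, Config, ConfigPath};

    fn load_config(paths: &[ConfigPath]) -> Result<Config, Vec<String>> {
        // No more interpolation warnings: missing env vars are now hard errors.
        let builder = load_builder_from_paths(paths)?;
        // Build-time warnings are the single remaining warning channel.
        let (config, build_warnings) = builder.build_with_warnings()?;
        for warning in build_warnings {
            warn!("{}", warning);
        }
        Ok(config)
    }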
pub fn load_source_from_paths( config_paths: &[ConfigPath], -) -> Result<(toml::value::Table, Vec), Vec> { +) -> Result> { loader_from_paths(SourceLoader::new(), config_paths) } /// Uses `SecretBackendLoader` to process `ConfigPaths`, deserializing to a `SecretBackends`. pub fn load_secret_backends_from_paths( config_paths: &[ConfigPath], -) -> Result<(SecretBackendLoader, Vec), Vec> { +) -> Result> { loader_from_paths(SecretBackendLoader::new(), config_paths) } pub fn load_from_str(input: &str, format: Format) -> Result> { - let (builder, load_warnings) = load_from_inputs(std::iter::once((input.as_bytes(), format)))?; + let builder = load_from_inputs(std::iter::once((input.as_bytes(), format)))?; let (config, build_warnings) = builder.build_with_warnings()?; - for warning in load_warnings.into_iter().chain(build_warnings) { + for warning in build_warnings { warn!("{}", warning); } @@ -271,29 +252,25 @@ pub fn load_from_str(input: &str, format: Format) -> Result> fn load_from_inputs( inputs: impl IntoIterator, -) -> Result<(ConfigBuilder, Vec), Vec> { +) -> Result> { let mut config = Config::builder(); let mut errors = Vec::new(); - let mut warnings = Vec::new(); for (input, format) in inputs { - if let Err(errs) = load(input, format).and_then(|(n, warn)| { - warnings.extend(warn); - config.append(n) - }) { + if let Err(errs) = load(input, format).and_then(|n| config.append(n)) { // TODO: add back paths errors.extend(errs.iter().map(|e| e.to_string())); } } if errors.is_empty() { - Ok((config, warnings)) + Ok(config) } else { Err(errors) } } -pub fn prepare_input(mut input: R) -> Result<(String, Vec), Vec> { +pub fn prepare_input(mut input: R) -> Result> { let mut source_string = String::new(); input .read_to_string(&mut source_string) @@ -305,20 +282,16 @@ pub fn prepare_input(mut input: R) -> Result<(String, Vec(input: R, format: Format) -> Result<(T, Vec), Vec> +pub fn load(input: R, format: Format) -> Result> where T: serde::de::DeserializeOwned, { - let (with_vars, warnings) = prepare_input(input)?; + let with_vars = prepare_input(input)?; - format::deserialize(&with_vars, format).map(|builder| (builder, warnings)) + format::deserialize(&with_vars, format) } #[cfg(not(windows))] @@ -345,7 +318,6 @@ fn default_config_paths() -> Vec { #[cfg(all( test, feature = "sinks-elasticsearch", - feature = "transforms-pipelines", feature = "transforms-sample", feature = "sources-demo_logs", feature = "sinks-console" @@ -363,8 +335,7 @@ mod tests { .join("namespacing") .join("success"); let configs = vec![ConfigPath::Dir(path)]; - let (builder, warnings) = load_builder_from_paths(&configs).unwrap(); - assert!(warnings.is_empty()); + let builder = load_builder_from_paths(&configs).unwrap(); assert!(builder .transforms .contains_key(&ComponentKey::from("apache_parser"))); @@ -384,8 +355,7 @@ mod tests { .join("namespacing") .join("ignore-invalid"); let configs = vec![ConfigPath::Dir(path)]; - let (_, warns) = load_builder_from_paths(&configs).unwrap(); - assert!(warns.is_empty()); + load_builder_from_paths(&configs).unwrap(); } #[test] @@ -395,8 +365,7 @@ mod tests { .join("config-dir") .join("ignore-unknown"); let configs = vec![ConfigPath::Dir(path)]; - let (_, warnings) = load_builder_from_paths(&configs).unwrap(); - assert!(warnings.is_empty()); + load_builder_from_paths(&configs).unwrap(); } #[test] @@ -406,8 +375,7 @@ mod tests { .join("config-dir") .join("globals"); let configs = vec![ConfigPath::Dir(path)]; - let (_, warnings) = load_builder_from_paths(&configs).unwrap(); - 
assert!(warnings.is_empty()); + load_builder_from_paths(&configs).unwrap(); } #[test] @@ -417,7 +385,6 @@ mod tests { .join("config-dir") .join("globals-duplicate"); let configs = vec![ConfigPath::Dir(path)]; - let (_, warnings) = load_builder_from_paths(&configs).unwrap(); - assert!(warnings.is_empty()); + load_builder_from_paths(&configs).unwrap(); } } diff --git a/src/config/loading/secret.rs b/src/config/loading/secret.rs index 909fa30b13f90..c3a9cf8f86bb8 100644 --- a/src/config/loading/secret.rs +++ b/src/config/loading/secret.rs @@ -1,10 +1,11 @@ use std::{ collections::{HashMap, HashSet}, io::Read, + sync::LazyLock, }; +use futures::TryFutureExt; use indexmap::IndexMap; -use once_cell::sync::Lazy; use regex::{Captures, Regex}; use serde::{Deserialize, Serialize}; use toml::value::Table; @@ -26,8 +27,8 @@ use crate::{ // - "SECRET[backend..secret.name]" will match and capture "backend" and ".secret.name" // - "SECRET[secret_name]" will not match // - "SECRET[.secret.name]" will not match -pub static COLLECTOR: Lazy = - Lazy::new(|| Regex::new(r"SECRET\[([[:word:]]+)\.([[:word:].]+)\]").unwrap()); +pub static COLLECTOR: LazyLock = + LazyLock::new(|| Regex::new(r"SECRET\[([[:word:]]+)\.([[:word:].-]+)\]").unwrap()); /// Helper type for specifically deserializing secrets backends. #[derive(Debug, Default, Deserialize, Serialize)] @@ -51,31 +52,33 @@ impl SecretBackendLoader { } } - pub(crate) fn retrieve( + pub(crate) async fn retrieve( &mut self, signal_rx: &mut signal::SignalRx, ) -> Result, String> { - let secrets = self.secret_keys.iter().flat_map(|(backend_name, keys)| { - match self.backends.get_mut(&ComponentKey::from(backend_name.clone())) { - None => { - vec![Err(format!("Backend \"{}\" is required for secret retrieval but was not found in config.", backend_name))] - }, - Some(backend) => { - debug!(message = "Retrieving secret from a backend.", backend = ?backend_name); - match backend.retrieve(keys.clone(), signal_rx) { - Err(e) => { - vec![Err(format!("Error while retrieving secret from backend \"{}\": {}.", backend_name, e))] - }, - Ok(s) => { - s.into_iter().map(|(k, v)| { - trace!(message = "Successfully retrieved a secret.", backend = ?backend_name, secret_key = ?k); - Ok((format!("{}.{}", backend_name, k), v)) - }).collect::>>() - } - } - }, + let mut secrets: HashMap = HashMap::new(); + + for (backend_name, keys) in &self.secret_keys { + let backend = self.backends + .get_mut(&ComponentKey::from(backend_name.clone())) + .ok_or_else(|| { + format!("Backend \"{backend_name}\" is required for secret retrieval but was not found in config.") + })?; + + debug!(message = "Retrieving secrets from a backend.", backend = ?backend_name, keys = ?keys); + let backend_secrets = backend + .retrieve(keys.clone(), signal_rx) + .map_err(|e| { + format!("Error while retrieving secret from backend \"{backend_name}\": {e}.",) + }) + .await?; + + for (k, v) in backend_secrets { + trace!(message = "Successfully retrieved a secret.", backend = ?backend_name, key = ?k); + secrets.insert(format!("{backend_name}.{k}"), v); } - }).collect::, String>>()?; + } + Ok(secrets) } @@ -85,11 +88,11 @@ impl SecretBackendLoader { } impl Process for SecretBackendLoader { - fn prepare(&mut self, input: R) -> Result<(String, Vec), Vec> { - let (config_string, warnings) = prepare_input(input)?; + fn prepare(&mut self, input: R) -> Result> { + let config_string = prepare_input(input)?; // Collect secret placeholders just after env var processing collect_secret_keys(&config_string, &mut self.secret_keys); - 
Ok((config_string, warnings)) + Ok(config_string) } fn merge(&mut self, table: Table, _: Option) -> Result<(), Vec> { @@ -196,8 +199,9 @@ mod tests { fn collection() { let mut keys = HashMap::new(); collect_secret_keys( - indoc! {r#" + indoc! {r" SECRET[first_backend.secret_key] + SECRET[first_backend.secret-key] SECRET[first_backend.another_secret_key] SECRET[second_backend.secret_key] SECRET[second_backend.secret.key] @@ -205,7 +209,7 @@ mod tests { SECRET[first_backend...an_extra_secret_key] SECRET[non_matching_syntax] SECRET[.non.matching.syntax] - "#}, + "}, &mut keys, ); assert_eq!(keys.len(), 2); @@ -213,8 +217,9 @@ mod tests { assert!(keys.contains_key("second_backend")); let first_backend_keys = keys.get("first_backend").unwrap(); - assert_eq!(first_backend_keys.len(), 4); + assert_eq!(first_backend_keys.len(), 5); assert!(first_backend_keys.contains("secret_key")); + assert!(first_backend_keys.contains("secret-key")); assert!(first_backend_keys.contains("another_secret_key")); assert!(first_backend_keys.contains("a_third.secret_key")); assert!(first_backend_keys.contains("..an_extra_secret_key")); @@ -229,10 +234,10 @@ mod tests { fn collection_duplicates() { let mut keys = HashMap::new(); collect_secret_keys( - indoc! {r#" + indoc! {r" SECRET[first_backend.secret_key] SECRET[first_backend.secret_key] - "#}, + "}, &mut keys, ); diff --git a/src/config/loading/secret_backend_example.rs b/src/config/loading/secret_backend_example.rs index 2466d7da387f7..4573df934fb3e 100644 --- a/src/config/loading/secret_backend_example.rs +++ b/src/config/loading/secret_backend_example.rs @@ -33,7 +33,7 @@ async fn main() { ( secret.clone(), ExecResponse { - value: format!("{}.retrieved", secret), + value: format!("{secret}.retrieved"), error: None, }, ) diff --git a/src/config/loading/source.rs b/src/config/loading/source.rs index f3efacec65e5a..6230c65b9edb8 100644 --- a/src/config/loading/source.rs +++ b/src/config/loading/source.rs @@ -18,13 +18,13 @@ impl SourceLoader { impl Process for SourceLoader { /// Prepares input by simply reading bytes to a string. Unlike other loaders, there's no /// interpolation of environment variables. This is on purpose to preserve the original config. - fn prepare(&mut self, mut input: R) -> Result<(String, Vec), Vec> { + fn prepare(&mut self, mut input: R) -> Result> { let mut source_string = String::new(); input .read_to_string(&mut source_string) .map_err(|e| vec![e.to_string()])?; - Ok((source_string, vec![])) + Ok(source_string) } /// Merge values by combining with the internal TOML `Table`. 
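The widened `COLLECTOR` character class in `secret.rs` above now also accepts `-` inside the key segment. A standalone sketch of the matching behavior (illustrative; reuses the exact pattern literal from the patch):

    use regex::Regex;

    fn main() {
        let collector = Regex::new(r"SECRET\[([[:word:]]+)\.([[:word:].-]+)\]").unwrap();
        for input in [
            "SECRET[first_backend.secret_key]", // matches: backend "first_backend", key "secret_key"
            "SECRET[first_backend.secret-key]", // now matches, thanks to `-` in the class
            "SECRET[non_matching_syntax]",      // no `.` separator, so no match
        ] {
            match collector.captures(input) {
                Some(caps) => println!("backend={} key={}", &caps[1], &caps[2]),
                None => println!("no match: {input}"),
            }
        }
    }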
diff --git a/src/config/mod.rs b/src/config/mod.rs index eefceec4bb1a4..4f17de80f59b6 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -2,36 +2,41 @@ use std::{ collections::{HashMap, HashSet}, fmt::{self, Display, Formatter}, + fs, hash::Hash, net::SocketAddr, path::PathBuf, time::Duration, }; +use crate::{ + conditions, + event::{Metric, Value}, + secrets::SecretBackends, + serde::OneOrMany, +}; + use indexmap::IndexMap; use serde::Serialize; + +use vector_config::configurable_component; pub use vector_lib::config::{ AcknowledgementsConfig, DataType, GlobalOptions, Input, LogNamespace, - SourceAcknowledgementsConfig, SourceOutput, TransformOutput, + SourceAcknowledgementsConfig, SourceOutput, TransformOutput, WildcardMatching, }; pub use vector_lib::configurable::component::{ GenerateConfig, SinkDescription, TransformDescription, }; -use vector_lib::configurable::configurable_component; - -use crate::{conditions, event::Metric, secrets::SecretBackends, serde::OneOrMany}; pub mod api; mod builder; mod cmd; mod compiler; mod diff; +pub mod dot_graph; mod enrichment_table; -#[cfg(feature = "enterprise")] -pub mod enterprise; pub mod format; mod graph; -mod id; mod loading; pub mod provider; pub mod schema; @@ -49,11 +54,10 @@ pub use cmd::{cmd, Opts}; pub use diff::ConfigDiff; pub use enrichment_table::{EnrichmentTableConfig, EnrichmentTableOuter}; pub use format::{Format, FormatHint}; -pub use id::{ComponentKey, Inputs}; pub use loading::{ load, load_builder_from_paths, load_from_paths, load_from_paths_with_provider_and_secrets, load_from_str, load_source_from_paths, merge_path_lists, process_paths, COLLECTOR, - CONFIG_PATHS, STRICT_ENV_VARS, + CONFIG_PATHS, }; pub use provider::ProviderConfig; pub use secret::SecretBackend; @@ -65,18 +69,39 @@ pub use transform::{ pub use unit_test::{build_unit_tests, build_unit_tests_main, UnitTestResult}; pub use validation::warnings; pub use vars::{interpolate, ENVIRONMENT_VARIABLE_INTERPOLATION_REGEX}; -pub use vector_lib::config::{ - init_telemetry, log_schema, proxy::ProxyConfig, telemetry, LogSchema, OutputId, +pub use vector_lib::{ + config::{ + init_log_schema, init_telemetry, log_schema, proxy::ProxyConfig, telemetry, ComponentKey, + LogSchema, OutputId, + }, + id::Inputs, }; -/// Loads Log Schema from configurations and sets global schema. -/// Once this is done, configurations can be correctly loaded using -/// configured log schema defaults. -/// If deny is set, will panic if schema has already been set. 
-pub fn init_log_schema(config_paths: &[ConfigPath], deny_if_set: bool) -> Result<(), Vec> { - let (builder, _) = load_builder_from_paths(config_paths)?; - vector_lib::config::init_log_schema(builder.global.log_schema, deny_if_set); - Ok(()) +#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)] +pub struct ComponentConfig { + pub config_paths: Vec, + pub component_key: ComponentKey, +} + +impl ComponentConfig { + pub fn new(config_paths: Vec, component_key: ComponentKey) -> Self { + let canonicalized_paths = config_paths + .into_iter() + .filter_map(|p| fs::canonicalize(p).ok()) + .collect(); + + Self { + config_paths: canonicalized_paths, + component_key, + } + } + + pub fn contains(&self, config_paths: &[PathBuf]) -> Option { + if config_paths.iter().any(|p| self.config_paths.contains(p)) { + return Some(self.component_key.clone()); + } + None + } } #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)] @@ -108,15 +133,12 @@ pub struct Config { #[cfg(feature = "api")] pub api: api::Options, pub schema: schema::Options, - pub hash: Option, - #[cfg(feature = "enterprise")] - pub enterprise: Option, pub global: GlobalOptions, pub healthchecks: HealthcheckOptions, sources: IndexMap, sinks: IndexMap>, transforms: IndexMap>, - pub enrichment_tables: IndexMap, + pub enrichment_tables: IndexMap>, tests: Vec, secret: IndexMap, pub graceful_shutdown_duration: Option, @@ -155,11 +177,22 @@ impl Config { self.sinks.get(id) } + pub fn enrichment_tables( + &self, + ) -> impl Iterator)> { + self.enrichment_tables.iter() + } + + pub fn enrichment_table(&self, id: &ComponentKey) -> Option<&EnrichmentTableOuter> { + self.enrichment_tables.get(id) + } + pub fn inputs_for_node(&self, id: &ComponentKey) -> Option<&[OutputId]> { self.transforms .get(id) .map(|t| &t.inputs[..]) .or_else(|| self.sinks.get(id).map(|s| &s.inputs[..])) + .or_else(|| self.enrichment_tables.get(id).map(|s| &s.inputs[..])) } pub fn propagate_acknowledgements(&mut self) -> Result<(), Vec> { @@ -232,7 +265,7 @@ impl HealthcheckOptions { } } - fn merge(&mut self, other: Self) { + const fn merge(&mut self, other: Self) { self.enabled &= other.enabled; self.require_healthy |= other.require_healthy; } @@ -328,10 +361,10 @@ impl Display for Protocol { impl Display for Resource { fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), fmt::Error> { match self { - Resource::Port(address, protocol) => write!(fmt, "{} {}", protocol, address), + Resource::Port(address, protocol) => write!(fmt, "{protocol} {address}"), Resource::SystemFdOffset(offset) => write!(fmt, "systemd {}th socket", offset + 1), - Resource::Fd(fd) => write!(fmt, "file descriptor: {}", fd), - Resource::DiskBuffer(name) => write!(fmt, "disk buffer {:?}", name), + Resource::Fd(fd) => write!(fmt, "file descriptor: {fd}"), + Resource::DiskBuffer(name) => write!(fmt, "disk buffer {name:?}"), } } } @@ -393,8 +426,7 @@ impl TestDefinition { outputs.push(output_id.clone()); } else { errors.push(format!( - r#"Invalid extract_from target in test '{}': '{}' does not exist"#, - name, from + r#"Invalid extract_from target in test '{name}': '{from}' does not exist"# )); } } @@ -416,8 +448,7 @@ impl TestDefinition { Some(output_id.clone()) } else { errors.push(format!( - r#"Invalid no_outputs_from target in test '{}': '{}' does not exist"#, - name, o + r#"Invalid no_outputs_from target in test '{name}': '{o}' does not exist"# )); None } @@ -474,24 +505,6 @@ impl TestDefinition { } } -/// Value for a log field. 
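The `ComponentConfig` type added above maps canonicalized file paths to a component key so that file changes can later be attributed to a single component. A minimal sketch of the intended lookup (illustrative path; `new` runs `fs::canonicalize`, so only paths that exist on disk survive into `config_paths`):

    use std::path::PathBuf;

    fn changed_component(changed: &[PathBuf]) -> Option<ComponentKey> {
        let component = ComponentConfig::new(
            vec![PathBuf::from("/etc/vector/tls.crt")], // assumed to exist on disk
            ComponentKey::from("http"),
        );
        // Some("http") if any changed path belongs to this component, else None.
        component.contains(changed)
    }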
-#[configurable_component] -#[derive(Clone, Debug)] -#[serde(untagged)] -pub enum TestInputValue { - /// A string. - String(String), - - /// An integer. - Integer(i64), - - /// A floating-point number. - Float(f64), - - /// A boolean. - Boolean(bool), -} - /// A unit test input. /// /// An input describes not only the type of event to insert, but also which transform within the @@ -523,7 +536,7 @@ pub struct TestInput { /// The set of log fields to use when creating a log input event. /// /// Only relevant when `type` is `log`. - pub log_fields: Option>, + pub log_fields: Option>, /// The metric to use as an input event. /// @@ -699,22 +712,27 @@ mod tests { } #[tokio::test] - #[cfg(unix)] + #[cfg(all(unix, feature = "sources-file_descriptor"))] async fn no_conflict_fd_resources() { + use crate::sources::file_descriptors::file_descriptor::null_fd; + let fd1 = null_fd().unwrap(); + let fd2 = null_fd().unwrap(); let result = load( - r#" + &format!( + r#" [sources.file_descriptor1] type = "file_descriptor" - fd = 10 + fd = {fd1} [sources.file_descriptor2] type = "file_descriptor" - fd = 20 + fd = {fd2} [sinks.out] type = "test_basic" inputs = ["file_descriptor1", "file_descriptor2"] - "#, + "# + ), Format::Toml, ) .await; @@ -1100,128 +1118,6 @@ mod tests { assert_eq!(source.proxy.https, None); assert!(source.proxy.no_proxy.matches("localhost")); } - - #[test] - #[cfg(feature = "enterprise")] - fn order_independent_sha256_hashes() { - let config1: ConfigBuilder = format::deserialize( - indoc! {r#" - data_dir = "/tmp" - - [api] - enabled = true - - [sources.file] - type = "file" - ignore_older_secs = 600 - include = ["/var/log/**/*.log"] - read_from = "beginning" - - [sources.internal_metrics] - type = "internal_metrics" - namespace = "pipelines" - - [transforms.filter] - type = "filter" - inputs = ["internal_metrics"] - condition = """ - .name == "component_received_bytes_total" - """ - - [sinks.out] - type = "console" - inputs = ["filter"] - target = "stdout" - encoding.codec = "json" - "#}, - Format::Toml, - ) - .unwrap(); - - let config2: ConfigBuilder = format::deserialize( - indoc! {r#" - data_dir = "/tmp" - - [sources.internal_metrics] - type = "internal_metrics" - namespace = "pipelines" - - [sources.file] - type = "file" - ignore_older_secs = 600 - include = ["/var/log/**/*.log"] - read_from = "beginning" - - [transforms.filter] - type = "filter" - inputs = ["internal_metrics"] - condition = """ - .name == "component_received_bytes_total" - """ - - [sinks.out] - type = "console" - inputs = ["filter"] - target = "stdout" - encoding.codec = "json" - - [api] - enabled = true - "#}, - Format::Toml, - ) - .unwrap(); - - assert_eq!(config1.sha256_hash(), config2.sha256_hash()) - } - - #[test] - #[cfg(feature = "enterprise")] - fn enterprise_tags_ignored_sha256_hashes() { - let config1: ConfigBuilder = format::deserialize( - indoc! {r#" - [enterprise] - api_key = "api_key" - configuration_key = "configuration_key" - - [enterprise.tags] - tag = "value" - - [sources.internal_metrics] - type = "internal_metrics" - - [sinks.datadog_metrics] - type = "datadog_metrics" - inputs = ["*"] - default_api_key = "default_api_key" - "#}, - Format::Toml, - ) - .unwrap(); - - let config2: ConfigBuilder = format::deserialize( - indoc! 
{r#" - [enterprise] - api_key = "api_key" - configuration_key = "configuration_key" - - [enterprise.tags] - another_tag = "another value" - - [sources.internal_metrics] - type = "internal_metrics" - - [sinks.datadog_metrics] - type = "datadog_metrics" - inputs = ["*"] - default_api_key = "default_api_key" - "#}, - Format::Toml, - ) - .unwrap(); - - assert_eq!(config1.sha256_hash(), config2.sha256_hash()) - } } #[cfg(all(test, feature = "sources-file", feature = "sinks-file"))] @@ -1478,9 +1374,9 @@ mod resource_config_tests { let json = serde_json::to_string_pretty(&schema) .expect("rendering root schema to JSON should not fail"); - println!("{}", json); + println!("{json}"); } - Err(e) => eprintln!("error while generating schema: {:?}", e), + Err(e) => eprintln!("error while generating schema: {e:?}"), } } } diff --git a/src/config/provider.rs b/src/config/provider.rs index f2ae9595ee2c4..c2d7fbd462e62 100644 --- a/src/config/provider.rs +++ b/src/config/provider.rs @@ -1,11 +1,9 @@ -use async_trait::async_trait; use enum_dispatch::enum_dispatch; use vector_lib::configurable::NamedComponent; use crate::{providers::BuildResult, signal}; /// Generalized interface for constructing a configuration from a provider. -#[async_trait] #[enum_dispatch] pub trait ProviderConfig: NamedComponent + core::fmt::Debug + Send + Sync { /// Builds a configuration. diff --git a/src/config/schema.rs b/src/config/schema.rs index 2245e84e758f3..bed2c0f532b64 100644 --- a/src/config/schema.rs +++ b/src/config/schema.rs @@ -147,13 +147,12 @@ mod test { let mut errors = vec![]; a.append(b, &mut errors); if errors.is_empty() { - assert_eq!(Some(a), expected, "result mismatch: {}", test); + assert_eq!(Some(a), expected, "result mismatch: {test}"); } else { assert_eq!( errors.is_empty(), expected.is_some(), - "error mismatch: {}", - test + "error mismatch: {test}" ); } } diff --git a/src/config/secret.rs b/src/config/secret.rs index 7bf7d66fd4d42..90bc331ec0bd9 100644 --- a/src/config/secret.rs +++ b/src/config/secret.rs @@ -8,7 +8,7 @@ use crate::signal; /// Generalized interface to a secret backend. 
#[enum_dispatch] pub trait SecretBackend: NamedComponent + core::fmt::Debug + Send + Sync { - fn retrieve( + async fn retrieve( &mut self, secret_keys: HashSet, signal_rx: &mut signal::SignalRx, diff --git a/src/config/sink.rs b/src/config/sink.rs index 77ab8e1e8e933..b4b6ac23c085c 100644 --- a/src/config/sink.rs +++ b/src/config/sink.rs @@ -1,8 +1,11 @@ use std::cell::RefCell; +use std::time::Duration; use async_trait::async_trait; use dyn_clone::DynClone; use serde::Serialize; +use serde_with::serde_as; +use std::path::PathBuf; use vector_lib::buffers::{BufferConfig, BufferType}; use vector_lib::configurable::attributes::CustomAttribute; use vector_lib::configurable::schema::{SchemaGenerator, SchemaObject}; @@ -11,10 +14,11 @@ use vector_lib::configurable::{ }; use vector_lib::{ config::{AcknowledgementsConfig, GlobalOptions, Input}, + id::Inputs, sink::VectorSink, }; -use super::{id::Inputs, schema, ComponentKey, ProxyConfig, Resource}; +use super::{dot_graph::GraphConfig, schema, ComponentKey, ProxyConfig, Resource}; use crate::extra_context::ExtraContext; use crate::sinks::{util::UriSerde, Healthcheck}; @@ -33,8 +37,10 @@ impl Configurable for BoxedSink { metadata } - fn generate_schema(gen: &RefCell) -> Result { - vector_lib::configurable::component::SinkDescription::generate_schemas(gen) + fn generate_schema( + generator: &RefCell, + ) -> Result { + vector_lib::configurable::component::SinkDescription::generate_schemas(generator) } } @@ -52,6 +58,10 @@ pub struct SinkOuter where T: Configurable + Serialize + 'static, { + #[configurable(derived)] + #[serde(default, skip_serializing_if = "vector_lib::serde::is_default")] + pub graph: GraphConfig, + #[configurable(derived)] pub inputs: Inputs, @@ -60,11 +70,11 @@ where /// This must be a valid URI, which requires at least the scheme and host. All other /// components -- port, path, etc -- are allowed as well. #[configurable(deprecated, metadata(docs::hidden), validation(format = "uri"))] - healthcheck_uri: Option, + pub healthcheck_uri: Option, #[configurable(derived, metadata(docs::advanced))] #[serde(default, deserialize_with = "crate::serde::bool_or_struct")] - healthcheck: SinkHealthcheckOptions, + pub healthcheck: SinkHealthcheckOptions, #[configurable(derived)] #[serde(default, skip_serializing_if = "vector_lib::serde::is_default")] @@ -72,7 +82,7 @@ where #[configurable(derived)] #[serde(default, skip_serializing_if = "vector_lib::serde::is_default")] - proxy: ProxyConfig, + pub proxy: ProxyConfig, #[serde(flatten)] #[configurable(metadata(docs::hidden))] @@ -95,6 +105,7 @@ where healthcheck_uri: None, inner: inner.into(), proxy: Default::default(), + graph: Default::default(), } } @@ -151,11 +162,13 @@ where healthcheck: self.healthcheck, healthcheck_uri: self.healthcheck_uri, proxy: self.proxy, + graph: self.graph, } } } /// Healthcheck configuration. +#[serde_as] #[configurable_component] #[derive(Clone, Debug)] #[serde(default)] @@ -163,6 +176,14 @@ pub struct SinkHealthcheckOptions { /// Whether or not to check the health of the sink when Vector starts up. pub enabled: bool, + /// Timeout duration for healthcheck in seconds. + #[serde_as(as = "serde_with::DurationSecondsWithFrac")] + #[serde( + default = "default_healthcheck_timeout", + skip_serializing_if = "is_default_healthcheck_timeout" + )] + pub timeout: Duration, + /// The full URI to make HTTP healthcheck requests to. /// /// This must be a valid URI, which requires at least the scheme and host. 
All other @@ -171,26 +192,38 @@ pub struct SinkHealthcheckOptions { pub uri: Option, } +const fn default_healthcheck_timeout() -> Duration { + Duration::from_secs(10) +} + +fn is_default_healthcheck_timeout(timeout: &Duration) -> bool { + timeout == &default_healthcheck_timeout() +} + impl Default for SinkHealthcheckOptions { fn default() -> Self { Self { enabled: true, uri: None, + timeout: default_healthcheck_timeout(), } } } impl From for SinkHealthcheckOptions { fn from(enabled: bool) -> Self { - Self { enabled, uri: None } + Self { + enabled, + ..Default::default() + } } } impl From for SinkHealthcheckOptions { fn from(uri: UriSerde) -> Self { Self { - enabled: true, uri: Some(uri), + ..Default::default() } } } @@ -213,6 +246,11 @@ pub trait SinkConfig: DynClone + NamedComponent + core::fmt::Debug + Send + Sync /// Gets the input configuration for this sink. fn input(&self) -> Input; + /// Gets the files to watch to trigger reload + fn files_to_watch(&self) -> Vec<&PathBuf> { + Vec::new() + } + /// Gets the list of resources, if any, used by this sink. /// /// Resources represent dependencies -- network ports, file descriptors, and so on -- that @@ -230,10 +268,11 @@ pub trait SinkConfig: DynClone + NamedComponent + core::fmt::Debug + Send + Sync dyn_clone::clone_trait_object!(SinkConfig); -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct SinkContext { pub healthcheck: SinkHealthcheckOptions, pub globals: GlobalOptions, + pub enrichment_tables: vector_lib::enrichment::TableRegistry, pub proxy: ProxyConfig, pub schema: schema::Options, pub app_name: String, @@ -249,6 +288,7 @@ impl Default for SinkContext { Self { healthcheck: Default::default(), globals: Default::default(), + enrichment_tables: Default::default(), proxy: Default::default(), schema: Default::default(), app_name: crate::get_app_name().to_string(), diff --git a/src/config/source.rs b/src/config/source.rs index 20b2f227e7198..2c540eeea8a18 100644 --- a/src/config/source.rs +++ b/src/config/source.rs @@ -3,11 +3,10 @@ use std::collections::HashMap; use async_trait::async_trait; use dyn_clone::DynClone; -use vector_lib::configurable::attributes::CustomAttribute; -use vector_lib::configurable::schema::{SchemaGenerator, SchemaObject}; -use vector_lib::configurable::{ - configurable_component, Configurable, GenerateError, Metadata, NamedComponent, -}; +use vector_config::{Configurable, GenerateError, Metadata, NamedComponent}; +use vector_config_common::attributes::CustomAttribute; +use vector_config_common::schema::{SchemaGenerator, SchemaObject}; +use vector_config_macros::configurable_component; use vector_lib::{ config::{ AcknowledgementsConfig, GlobalOptions, LogNamespace, SourceAcknowledgementsConfig, @@ -16,7 +15,7 @@ use vector_lib::{ source::Source, }; -use super::{schema, ComponentKey, ProxyConfig, Resource}; +use super::{dot_graph::GraphConfig, schema, ComponentKey, ProxyConfig, Resource}; use crate::{extra_context::ExtraContext, shutdown::ShutdownSignal, SourceSender}; pub type BoxedSource = Box; @@ -34,8 +33,10 @@ impl Configurable for BoxedSource { metadata } - fn generate_schema(gen: &RefCell) -> Result { - vector_lib::configurable::component::SourceDescription::generate_schemas(gen) + fn generate_schema( + generator: &RefCell, + ) -> Result { + vector_lib::configurable::component::SourceDescription::generate_schemas(generator) } } @@ -54,6 +55,10 @@ pub struct SourceOuter { #[serde(default, skip_serializing_if = "vector_lib::serde::is_default")] pub proxy: ProxyConfig, + #[configurable(derived)] + 
#[serde(default, skip_serializing_if = "vector_lib::serde::is_default")] + pub graph: GraphConfig, + #[serde(default, skip)] pub sink_acknowledgements: bool, @@ -66,6 +71,7 @@ impl SourceOuter { pub(crate) fn new>(inner: I) -> Self { Self { proxy: Default::default(), + graph: Default::default(), sink_acknowledgements: false, inner: inner.into(), } @@ -120,6 +126,7 @@ dyn_clone::clone_trait_object!(SourceConfig); pub struct SourceContext { pub key: ComponentKey, pub globals: GlobalOptions, + pub enrichment_tables: vector_lib::enrichment::TableRegistry, pub shutdown: ShutdownSignal, pub out: SourceSender, pub proxy: ProxyConfig, @@ -149,6 +156,7 @@ impl SourceContext { Self { key: key.clone(), globals: GlobalOptions::default(), + enrichment_tables: Default::default(), shutdown: shutdown_signal, out, proxy: Default::default(), @@ -169,6 +177,7 @@ impl SourceContext { Self { key: ComponentKey::from("default"), globals: GlobalOptions::default(), + enrichment_tables: Default::default(), shutdown: ShutdownSignal::noop(), out, proxy: Default::default(), diff --git a/src/config/transform.rs b/src/config/transform.rs index c7315dc80e630..5471c41b1429e 100644 --- a/src/config/transform.rs +++ b/src/config/transform.rs @@ -1,5 +1,6 @@ use std::cell::RefCell; use std::collections::{HashMap, HashSet}; +use std::path::PathBuf; use async_trait::async_trait; use dyn_clone::DynClone; @@ -12,13 +13,15 @@ use vector_lib::configurable::{ }; use vector_lib::{ config::{GlobalOptions, Input, LogNamespace, TransformOutput}, + id::Inputs, schema, transform::Transform, }; +use super::dot_graph::GraphConfig; use super::schema::Options as SchemaOptions; +use super::ComponentKey; use super::OutputId; -use super::{id::Inputs, ComponentKey}; use crate::extra_context::ExtraContext; pub type BoxedTransform = Box; @@ -36,8 +39,10 @@ impl Configurable for BoxedTransform { metadata } - fn generate_schema(gen: &RefCell) -> Result { - vector_lib::configurable::component::TransformDescription::generate_schemas(gen) + fn generate_schema( + generator: &RefCell, + ) -> Result { + vector_lib::configurable::component::TransformDescription::generate_schemas(generator) } } @@ -55,6 +60,10 @@ pub struct TransformOuter where T: Configurable + Serialize + 'static, { + #[configurable(derived)] + #[serde(default, skip_serializing_if = "vector_lib::serde::is_default")] + pub graph: GraphConfig, + #[configurable(derived)] pub inputs: Inputs, @@ -74,7 +83,11 @@ where { let inputs = Inputs::from_iter(inputs); let inner = inner.into(); - TransformOuter { inputs, inner } + TransformOuter { + inputs, + inner, + graph: Default::default(), + } } pub(super) fn map_inputs(self, f: impl Fn(&T) -> U) -> TransformOuter @@ -93,6 +106,7 @@ where TransformOuter { inputs: Inputs::from_iter(inputs), inner: self.inner, + graph: self.graph, } } } @@ -152,7 +166,7 @@ impl TransformContext { } } - #[cfg(any(test, feature = "test"))] + #[cfg(test)] pub fn new_test( schema_definitions: HashMap, HashMap>, ) -> Self { @@ -242,6 +256,11 @@ pub trait TransformConfig: DynClone + NamedComponent + core::fmt::Debug + Send + fn nestable(&self, _parents: &HashSet<&'static str>) -> bool { true } + + /// Gets the files to watch to trigger reload + fn files_to_watch(&self) -> Vec<&PathBuf> { + Vec::new() + } } dyn_clone::clone_trait_object!(TransformConfig); diff --git a/src/config/unit_test/mod.rs b/src/config/unit_test/mod.rs index 8a00373970032..cb967a4de5460 100644 --- a/src/config/unit_test/mod.rs +++ b/src/config/unit_test/mod.rs @@ -1,4 +1,13 @@ -#[cfg(all(test, feature 
= "vector-unit-test-tests"))] +// should match vector-unit-test-tests feature +#[cfg(all( + test, + feature = "sources-demo_logs", + feature = "transforms-remap", + feature = "transforms-route", + feature = "transforms-filter", + feature = "transforms-reduce", + feature = "sinks-console" +))] mod tests; mod unit_test_components; @@ -9,7 +18,6 @@ use std::{ use futures_util::{stream::FuturesUnordered, StreamExt}; use indexmap::IndexMap; -use ordered_float::NotNan; use tokio::sync::{ oneshot::{self, Receiver}, Mutex, @@ -30,9 +38,9 @@ use crate::{ conditions::Condition, config::{ self, loading, ComponentKey, Config, ConfigBuilder, ConfigPath, SinkOuter, SourceOuter, - TestDefinition, TestInput, TestInputValue, TestOutput, + TestDefinition, TestInput, TestOutput, }, - event::{Event, EventMetadata, LogEvent, Value}, + event::{Event, EventMetadata, LogEvent}, signal, topology::{builder::TopologyPieces, RunningTopology}, }; @@ -74,15 +82,29 @@ impl UnitTest { } } +/// Loads Log Schema from configurations and sets global schema. +/// Once this is done, configurations can be correctly loaded using +/// configured log schema defaults. +/// If deny is set, will panic if schema has already been set. +fn init_log_schema_from_paths( + config_paths: &[ConfigPath], + deny_if_set: bool, +) -> Result<(), Vec> { + let builder = config::loading::load_builder_from_paths(config_paths)?; + vector_lib::config::init_log_schema(builder.global.log_schema, deny_if_set); + Ok(()) +} + pub async fn build_unit_tests_main( paths: &[ConfigPath], signal_handler: &mut signal::SignalHandler, ) -> Result, Vec> { - config::init_log_schema(paths, false)?; - let (mut secrets_backends_loader, _) = loading::load_secret_backends_from_paths(paths)?; - let (config_builder, _) = if secrets_backends_loader.has_secrets_to_retrieve() { + init_log_schema_from_paths(paths, false)?; + let mut secrets_backends_loader = loading::load_secret_backends_from_paths(paths)?; + let config_builder = if secrets_backends_loader.has_secrets_to_retrieve() { let resolved_secrets = secrets_backends_loader .retrieve(&mut signal_handler.subscribe()) + .await .map_err(|e| vec![e])?; loading::load_builder_from_paths_with_secrets(paths, resolved_secrets)? 
} else { @@ -117,7 +139,7 @@ pub async fn build_unit_tests( let mut test_error = errors.join("\n"); // Indent all line breaks test_error = test_error.replace('\n', "\n "); - test_error.insert_str(0, &format!("Failed to build test '{}':\n ", test_name)); + test_error.insert_str(0, &format!("Failed to build test '{test_name}':\n ")); build_errors.push(test_error); } } @@ -363,6 +385,10 @@ async fn build_unit_test( &transform_only_config.transforms, &transform_only_config.sinks, transform_only_config.schema, + transform_only_config + .global + .wildcard_matching + .unwrap_or_default(), ); let test = test.resolve_outputs(&transform_only_graph)?; @@ -379,6 +405,7 @@ async fn build_unit_test( &config_builder.transforms, &config_builder.sinks, config_builder.schema, + config_builder.global.wildcard_matching.unwrap_or_default(), ); let mut valid_components = get_relevant_test_components( @@ -410,6 +437,7 @@ async fn build_unit_test( &config_builder.transforms, &config_builder.sinks, config_builder.schema, + config_builder.global.wildcard_matching.unwrap_or_default(), ); let valid_inputs = graph.input_map()?; for (_, transform) in config_builder.transforms.iter_mut() { @@ -540,8 +568,7 @@ fn build_outputs( match condition.build(&Default::default()) { Ok(condition) => conditions.push(condition), Err(error) => errors.push(format!( - "failed to create test condition '{}': {}", - index, error + "failed to create test condition '{index}': {error}" )), } } @@ -599,16 +626,8 @@ fn build_input_event(input: &TestInput) -> Result { if let Some(log_fields) = &input.log_fields { let mut event = LogEvent::from_str_legacy(""); for (path, value) in log_fields { - let value: Value = match value { - TestInputValue::String(s) => Value::from(s.to_owned()), - TestInputValue::Boolean(b) => Value::from(*b), - TestInputValue::Integer(i) => Value::from(*i), - TestInputValue::Float(f) => Value::from( - NotNan::new(*f).map_err(|_| "NaN value not supported".to_string())?, - ), - }; event - .parse_path_and_insert(path, value) + .parse_path_and_insert(path, value.clone()) .map_err(|e| e.to_string())?; } Ok(event.into()) diff --git a/src/config/unit_test/tests.rs b/src/config/unit_test/tests.rs index 225397c3ce901..87a8e2d7c770f 100644 --- a/src/config/unit_test/tests.rs +++ b/src/config/unit_test/tests.rs @@ -5,6 +5,8 @@ use crate::config::ConfigBuilder; #[tokio::test] async fn parse_no_input() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.bar] inputs = ["foo"] @@ -31,9 +33,9 @@ async fn parse_no_input() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! {r" Failed to build test 'broken test': - inputs[0]: unable to locate target transform 'foo'"#} + inputs[0]: unable to locate target transform 'foo'"} .to_owned(),] ); @@ -67,15 +69,17 @@ async fn parse_no_input() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! {r" Failed to build test 'broken test': - inputs[1]: unable to locate target transform 'foo'"#} + inputs[1]: unable to locate target transform 'foo'"} .to_owned(),] ); } #[tokio::test] async fn parse_no_test_input() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.bar] inputs = ["foo"] @@ -98,15 +102,17 @@ async fn parse_no_test_input() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! 
{r" Failed to build test 'broken test': - must specify at least one input."#} + must specify at least one input."} .to_owned(),] ); } #[tokio::test] async fn parse_no_outputs() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = ["ignored"] @@ -127,15 +133,17 @@ async fn parse_no_outputs() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! {r" Failed to build test 'broken test': - unit test must contain at least one of `outputs` or `no_outputs_from`."#} + unit test must contain at least one of `outputs` or `no_outputs_from`."} .to_owned(),] ); } #[tokio::test] async fn parse_invalid_output_targets() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.bar] inputs = ["foo"] @@ -162,9 +170,9 @@ async fn parse_invalid_output_targets() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! {r" Failed to build test 'broken test': - Invalid extract_from target in test 'broken test': 'nonexistent' does not exist"#} + Invalid extract_from target in test 'broken test': 'nonexistent' does not exist"} .to_owned(),] ); @@ -189,15 +197,17 @@ async fn parse_invalid_output_targets() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! {r" Failed to build test 'broken test': - Invalid no_outputs_from target in test 'broken test': 'nonexistent' does not exist"#} + Invalid no_outputs_from target in test 'broken test': 'nonexistent' does not exist"} .to_owned(),] ); } #[tokio::test] async fn parse_broken_topology() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = ["something"] @@ -299,6 +309,8 @@ async fn parse_broken_topology() { #[tokio::test] async fn parse_bad_input_event() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = ["ignored"] @@ -326,15 +338,17 @@ async fn parse_bad_input_event() { let errs = build_unit_tests(config).await.err().unwrap(); assert_eq!( errs, - vec![indoc! {r#" + vec![indoc! {r" Failed to build test 'broken test': - unrecognized input type 'nah', expected one of: 'raw', 'log' or 'metric'"#} + unrecognized input type 'nah', expected one of: 'raw', 'log' or 'metric'"} .to_owned(),] ); } #[tokio::test] async fn test_success_multi_inputs() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = ["ignored"] @@ -347,7 +361,7 @@ async fn test_success_multi_inputs() { inputs = ["ignored"] type = "remap" source = ''' - .new_field_two = "string value" + .new_field_two = "second string value" ''' [transforms.bar] @@ -437,6 +451,8 @@ async fn test_success_multi_inputs() { #[tokio::test] async fn test_success() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = ["ignored"] @@ -504,6 +520,8 @@ async fn test_success() { #[tokio::test] async fn test_route() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = ["ignored"] @@ -566,6 +584,8 @@ async fn test_route() { #[tokio::test] async fn test_fail_no_outputs() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! 
{r#" [transforms.foo] inputs = [ "TODO" ] @@ -599,6 +619,8 @@ async fn test_fail_no_outputs() { #[tokio::test] async fn test_fail_two_output_events() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = [ "TODO" ] @@ -678,6 +700,8 @@ async fn test_fail_two_output_events() { #[tokio::test] async fn test_no_outputs_from() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.foo] inputs = [ "ignored" ] @@ -715,6 +739,8 @@ async fn test_no_outputs_from() { #[tokio::test] async fn test_no_outputs_from_chained() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! { r#" [transforms.foo] inputs = [ "ignored" ] @@ -759,6 +785,8 @@ async fn test_no_outputs_from_chained() { #[tokio::test] async fn test_log_input() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! { r#" [transforms.foo] inputs = ["ignored"] @@ -777,6 +805,9 @@ async fn test_log_input() { message = "this is the message" int_val = 5 bool_val = true + arr_val = [1, 2, "hi", false] + obj_val = { a = true, b = "b", c = 5 } + [[tests.outputs]] extract_from = "foo" @@ -788,6 +819,10 @@ async fn test_log_input() { assert_eq!(.message, "this is the message") assert!(.bool_val) assert_eq!(.int_val, 5) + assert_eq!(.arr_val, [1, 2, "hi", false]) + assert!(.obj_val.a) + assert_eq!(.obj_val.b, "b") + assert_eq!(.obj_val.c, 5) """ "#}) .unwrap(); @@ -798,6 +833,8 @@ async fn test_log_input() { #[tokio::test] async fn test_metric_input() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! { r#" [transforms.foo] inputs = ["ignored"] @@ -837,6 +874,8 @@ async fn test_metric_input() { #[tokio::test] async fn test_success_over_gap() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! { r#" [transforms.foo] inputs = ["ignored"] @@ -885,6 +924,8 @@ async fn test_success_over_gap() { #[tokio::test] async fn test_success_tree() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! { r#" [transforms.ignored] inputs = ["also_ignored"] @@ -949,6 +990,8 @@ async fn test_success_tree() { #[tokio::test] async fn test_fails() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! { r#" [transforms.foo] inputs = ["ignored"] @@ -1033,6 +1076,8 @@ async fn test_fails() { #[tokio::test] async fn test_dropped_branch() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.droptest] type = "remap" @@ -1117,6 +1162,8 @@ async fn test_dropped_branch() { #[tokio::test] async fn test_task_transform() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! {r#" [transforms.ingress1] type = "remap" @@ -1198,6 +1245,8 @@ async fn test_task_transform() { #[tokio::test] async fn test_glob_input() { + crate::test_util::trace_init(); + let config: ConfigBuilder = toml::from_str(indoc! 
{r#" [transforms.ingress1] type = "remap" diff --git a/src/config/unit_test/unit_test_components.rs b/src/config/unit_test/unit_test_components.rs index fcffaccea6ea0..8daf82b5029d3 100644 --- a/src/config/unit_test/unit_test_components.rs +++ b/src/config/unit_test/unit_test_components.rs @@ -46,8 +46,8 @@ impl SourceConfig for UnitTestSourceConfig { } fn outputs(&self, _global_log_namespace: LogNamespace) -> Vec { - vec![SourceOutput::new_logs( - DataType::all(), + vec![SourceOutput::new_maybe_logs( + DataType::all_bits(), schema::Definition::default_legacy_namespace(), )] } @@ -103,8 +103,8 @@ impl SourceConfig for UnitTestStreamSourceConfig { } fn outputs(&self, _global_log_namespace: LogNamespace) -> Vec { - vec![SourceOutput::new_logs( - DataType::all(), + vec![SourceOutput::new_maybe_logs( + DataType::all_bits(), schema::Definition::default_legacy_namespace(), )] } @@ -220,8 +220,7 @@ impl StreamSink for UnitTestSink { break; } Err(error) => { - condition_errors - .push(format!(" condition[{}]: {}", j, error)); + condition_errors.push(format!(" condition[{j}]: {error}")); } } } diff --git a/src/config/validation.rs b/src/config/validation.rs index 56423e6aa742a..6cae1ecfbf19a 100644 --- a/src/config/validation.rs +++ b/src/config/validation.rs @@ -140,10 +140,7 @@ pub fn check_resources(config: &ConfigBuilder) -> Result<(), Vec> { Err(conflicting_components .into_iter() .map(|(resource, components)| { - format!( - "Resource `{}` is claimed by multiple components: {:?}", - resource, components - ) + format!("Resource `{resource}` is claimed by multiple components: {components:?}") }) .collect()) } @@ -325,20 +322,29 @@ async fn process_partitions(partitions: Vec) -> heim::Result Vec { let mut warnings = vec![]; - let source_ids = config.sources.iter().flat_map(|(key, source)| { - source - .inner - .outputs(config.schema.log_namespace()) - .iter() - .map(|output| { - if let Some(port) = &output.port { - ("source", OutputId::from((key, port.clone()))) - } else { - ("source", OutputId::from(key)) - } - }) - .collect::>() - }); + let table_sources = config + .enrichment_tables + .iter() + .filter_map(|(key, table)| table.as_source(key)) + .collect::>(); + let source_ids = config + .sources + .iter() + .chain(table_sources.iter().map(|(k, s)| (k, s))) + .flat_map(|(key, source)| { + source + .inner + .outputs(config.schema.log_namespace()) + .iter() + .map(|output| { + if let Some(port) = &output.port { + ("source", OutputId::from((key, port.clone()))) + } else { + ("source", OutputId::from(key)) + } + }) + .collect::>() + }); let transform_ids = config.transforms.iter().flat_map(|(key, transform)| { get_transform_output_ids( transform.inner.as_ref(), @@ -349,6 +355,11 @@ pub fn warnings(config: &Config) -> Vec { .collect::>() }); + let table_sinks = config + .enrichment_tables + .iter() + .filter_map(|(key, table)| table.as_sink(key)) + .collect::>(); for (input_type, id) in transform_ids.chain(source_ids) { if !config .transforms @@ -358,6 +369,9 @@ pub fn warnings(config: &Config) -> Vec { .sinks .iter() .any(|(_, sink)| sink.inputs.contains(&id)) + && !table_sinks + .iter() + .any(|(_, sink)| sink.inputs.contains(&id)) { warnings.push(format!( "{} \"{}\" has no consumers", diff --git a/src/config/vars.rs b/src/config/vars.rs index 9f4a14ca49611..63d6e08b2db52 100644 --- a/src/config/vars.rs +++ b/src/config/vars.rs @@ -1,6 +1,5 @@ -use std::collections::HashMap; +use std::{collections::HashMap, sync::LazyLock}; -use once_cell::sync::Lazy; use regex::{Captures, Regex}; // Environment 
variable names can have any characters from the Portable Character Set other @@ -10,7 +9,7 @@ use regex::{Captures, Regex}; // variable names when they come from a Java properties file. // // https://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap08.html -pub static ENVIRONMENT_VARIABLE_INTERPOLATION_REGEX: Lazy = Lazy::new(|| { +pub static ENVIRONMENT_VARIABLE_INTERPOLATION_REGEX: LazyLock = LazyLock::new(|| { Regex::new( r"(?x) \$\$| @@ -20,14 +19,9 @@ pub static ENVIRONMENT_VARIABLE_INTERPOLATION_REGEX: Lazy = Lazy::new(|| .unwrap() }); -/// (result, warnings) -pub fn interpolate( - input: &str, - vars: &HashMap, - strict_vars: bool, -) -> Result<(String, Vec), Vec> { +/// Result +pub fn interpolate(input: &str, vars: &HashMap) -> Result> { let mut errors = Vec::new(); - let mut warnings = Vec::new(); let interpolated = ENVIRONMENT_VARIABLE_INTERPOLATION_REGEX .replace_all(input, |caps: &Captures<'_>| { @@ -59,17 +53,11 @@ pub fn interpolate( )); "" }), - _ => val.unwrap_or_else(|| if strict_vars { + _ => val.unwrap_or_else(|| { errors.push(format!( "Missing environment variable in config. name = {name:?}", )); "" - } else { - warnings - .push(format!( - "Unknown environment variable in config. This is DEPRECATED and will become an error in future versions. name = {name:?}", - )); - "" }), } }) @@ -79,7 +67,7 @@ pub fn interpolate( .into_owned(); if errors.is_empty() { - Ok((interpolated, warnings)) + Ok(interpolated) } else { Err(errors) } @@ -100,53 +88,34 @@ mod test { .into_iter() .collect(); - assert_eq!("dogs", interpolate("$FOO", &vars, true).unwrap().0); - assert_eq!("dogs", interpolate("${FOO}", &vars, true).unwrap().0); - assert_eq!("cats", interpolate("${FOOBAR}", &vars, true).unwrap().0); - assert_eq!("xcatsy", interpolate("x${FOOBAR}y", &vars, true).unwrap().0); - assert_eq!("x", interpolate("x$FOOBARy", &vars, false).unwrap().0); - assert!(interpolate("x$FOOBARy", &vars, true).is_err()); - assert_eq!("$ x", interpolate("$ x", &vars, false).unwrap().0); - assert_eq!("$ x", interpolate("$ x", &vars, true).unwrap().0); - assert_eq!("$FOO", interpolate("$$FOO", &vars, true).unwrap().0); - assert_eq!("dogs=bar", interpolate("$FOO=bar", &vars, true).unwrap().0); - assert_eq!("", interpolate("$NOT_FOO", &vars, false).unwrap().0); - assert!(interpolate("$NOT_FOO", &vars, true).is_err()); - assert_eq!("-FOO", interpolate("$NOT-FOO", &vars, false).unwrap().0); - assert!(interpolate("$NOT-FOO", &vars, true).is_err()); - assert_eq!("turtles", interpolate("$FOO.BAR", &vars, true).unwrap().0); - assert_eq!("${FOO x", interpolate("${FOO x", &vars, true).unwrap().0); - assert_eq!("${}", interpolate("${}", &vars, true).unwrap().0); - assert_eq!("dogs", interpolate("${FOO:-cats}", &vars, true).unwrap().0); - assert_eq!( - "dogcats", - interpolate("${NOT:-dogcats}", &vars, true).unwrap().0 - ); + assert_eq!("dogs", interpolate("$FOO", &vars).unwrap()); + assert_eq!("dogs", interpolate("${FOO}", &vars).unwrap()); + assert_eq!("cats", interpolate("${FOOBAR}", &vars).unwrap()); + assert_eq!("xcatsy", interpolate("x${FOOBAR}y", &vars).unwrap()); + assert!(interpolate("x$FOOBARy", &vars).is_err()); + assert_eq!("$ x", interpolate("$ x", &vars).unwrap()); + assert_eq!("$FOO", interpolate("$$FOO", &vars).unwrap()); + assert_eq!("dogs=bar", interpolate("$FOO=bar", &vars).unwrap()); + assert!(interpolate("$NOT_FOO", &vars).is_err()); + assert!(interpolate("$NOT-FOO", &vars).is_err()); + assert_eq!("turtles", interpolate("$FOO.BAR", &vars).unwrap()); + assert_eq!("${FOO x", 
interpolate("${FOO x", &vars).unwrap()); + assert_eq!("${}", interpolate("${}", &vars).unwrap()); + assert_eq!("dogs", interpolate("${FOO:-cats}", &vars).unwrap()); + assert_eq!("dogcats", interpolate("${NOT:-dogcats}", &vars).unwrap()); assert_eq!( "dogs and cats", - interpolate("${NOT:-dogs and cats}", &vars, true).unwrap().0 - ); - assert_eq!( - "${:-cats}", - interpolate("${:-cats}", &vars, true).unwrap().0 - ); - assert_eq!("", interpolate("${NOT:-}", &vars, true).unwrap().0); - assert_eq!("cats", interpolate("${NOT-cats}", &vars, true).unwrap().0); - assert_eq!("", interpolate("${EMPTY-cats}", &vars, true).unwrap().0); - assert_eq!( - "dogs", - interpolate("${FOO:?error cats}", &vars, true).unwrap().0 - ); - assert_eq!( - "dogs", - interpolate("${FOO?error cats}", &vars, true).unwrap().0 - ); - assert_eq!( - "", - interpolate("${EMPTY?error cats}", &vars, true).unwrap().0 + interpolate("${NOT:-dogs and cats}", &vars).unwrap() ); - assert!(interpolate("${NOT:?error cats}", &vars, true).is_err()); - assert!(interpolate("${NOT?error cats}", &vars, true).is_err()); - assert!(interpolate("${EMPTY:?error cats}", &vars, true).is_err()); + assert_eq!("${:-cats}", interpolate("${:-cats}", &vars).unwrap()); + assert_eq!("", interpolate("${NOT:-}", &vars).unwrap()); + assert_eq!("cats", interpolate("${NOT-cats}", &vars).unwrap()); + assert_eq!("", interpolate("${EMPTY-cats}", &vars).unwrap()); + assert_eq!("dogs", interpolate("${FOO:?error cats}", &vars).unwrap()); + assert_eq!("dogs", interpolate("${FOO?error cats}", &vars).unwrap()); + assert_eq!("", interpolate("${EMPTY?error cats}", &vars).unwrap()); + assert!(interpolate("${NOT:?error cats}", &vars).is_err()); + assert!(interpolate("${NOT?error cats}", &vars).is_err()); + assert!(interpolate("${EMPTY:?error cats}", &vars).is_err()); } } diff --git a/src/config/watcher.rs b/src/config/watcher.rs index bff6fbe7965fe..245a8a12c7448 100644 --- a/src/config/watcher.rs +++ b/src/config/watcher.rs @@ -1,12 +1,15 @@ -use std::{path::PathBuf, time::Duration}; -#[cfg(unix)] +use crate::config::ComponentConfig; +use std::collections::HashSet; +use std::{ + path::{Path, PathBuf}, + time::Duration, +}; use std::{ sync::mpsc::{channel, Receiver}, thread, }; -#[cfg(unix)] -use notify::{recommended_watcher, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; +use notify::{recommended_watcher, EventKind, RecursiveMode}; use crate::Error; @@ -16,28 +19,73 @@ use crate::Error; /// But, config and topology reload logic can handle: /// - Invalid config, caused either by user or by data race. /// - Frequent changes, caused by user/editor modifying/saving file in small chunks. -/// so we can use smaller, more responsive delay. -#[cfg(unix)] +/// so we can use smaller, more responsive delay. const CONFIG_WATCH_DELAY: std::time::Duration = std::time::Duration::from_secs(1); -#[cfg(unix)] const RETRY_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10); -/// Triggers SIGHUP when file on config_path changes. +/// Refer to [`crate::cli::WatchConfigMethod`] for details. +pub enum WatcherConfig { + /// Recommended watcher for the current OS. + RecommendedWatcher, + /// A poll-based watcher that checks for file changes at regular intervals. + PollWatcher(u64), +} + +enum Watcher { + /// recommended watcher for os, usually inotify for linux based systems + RecommendedWatcher(notify::RecommendedWatcher), + /// poll based watcher. for watching files from NFS. 
+    PollWatcher(notify::PollWatcher),
+}
+
+impl Watcher {
+    fn add_paths(&mut self, config_paths: &[PathBuf]) -> Result<(), Error> {
+        for path in config_paths {
+            self.watch(path, RecursiveMode::Recursive)?;
+        }
+        Ok(())
+    }
+
+    fn watch(&mut self, path: &Path, recursive_mode: RecursiveMode) -> Result<(), Error> {
+        use notify::Watcher as NotifyWatcher;
+        match self {
+            Watcher::RecommendedWatcher(watcher) => {
+                watcher.watch(path, recursive_mode)?;
+            }
+            Watcher::PollWatcher(watcher) => {
+                watcher.watch(path, recursive_mode)?;
+            }
+        }
+        Ok(())
+    }
+}
+
+/// Sends a `ReloadFromDisk` signal on `config_path` changes.
 /// Accumulates file changes until no change for given duration has occurred.
 /// Has best effort guarantee of detecting all file changes from the end of
 /// this function until the main thread stops.
-#[cfg(unix)]
 pub fn spawn_thread<'a>(
+    watcher_conf: WatcherConfig,
+    signal_tx: crate::signal::SignalTx,
     config_paths: impl IntoIterator<Item = &'a PathBuf> + 'a,
+    component_configs: Vec<ComponentConfig>,
     delay: impl Into<Option<Duration>>,
 ) -> Result<(), Error> {
-    let config_paths: Vec<_> = config_paths.into_iter().cloned().collect();
+    let mut config_paths: Vec<_> = config_paths.into_iter().cloned().collect();
+    let mut component_config_paths: Vec<_> = component_configs
+        .clone()
+        .into_iter()
+        .flat_map(|p| p.config_paths.clone())
+        .collect();
+
+    config_paths.append(&mut component_config_paths);
+
     let delay = delay.into().unwrap_or(CONFIG_WATCH_DELAY);
 
     // Create watcher now so not to miss any changes happening between
     // returning from this function and the thread starting.
-    let mut watcher = Some(create_watcher(&config_paths)?);
+    let mut watcher = Some(create_watcher(&watcher_conf, &config_paths)?);
 
     info!("Watching configuration files.");
 
@@ -55,9 +103,15 @@ pub fn spawn_thread<'a>(
             debug!(message = "Consumed file change events for delay.", delay = ?delay);
 
+            let component_keys: HashSet<_> = component_configs
+                .clone()
+                .into_iter()
+                .flat_map(|p| p.contains(&event.paths))
+                .collect();
+
             // We need to read paths to resolve any inode changes that may have happened.
             // And we need to do it before raising sighup to avoid missing any change.
-            if let Err(error) = add_paths(&mut watcher, &config_paths) {
+            if let Err(error) = watcher.add_paths(&config_paths) {
                 error!(message = "Failed to read files to watch.", %error);
                 break;
             }
@@ -65,7 +119,17 @@ pub fn spawn_thread<'a>(
             debug!(message = "Reloaded paths.");
 
             info!("Configuration file changed.");
-            raise_sighup();
+            if !component_keys.is_empty() {
+                info!("Component {:?} configuration changed.", component_keys);
+                _ = signal_tx.send(crate::signal::SignalTo::ReloadComponents(component_keys)).map_err(|error| {
+                    error!(message = "Unable to reload component configuration. Restart Vector to reload it.", cause = %error)
+                });
+            } else {
+                _ = signal_tx.send(crate::signal::SignalTo::ReloadFromDisk)
+                    .map_err(|error| {
+                        error!(message = "Unable to reload configuration file. Restart Vector to reload it.", cause = %error)
+                    });
+            }
         } else {
             debug!(message = "Ignoring event.", event = ?event)
         }
@@ -74,7 +138,7 @@ pub fn spawn_thread<'a>(
 
             thread::sleep(RETRY_TIMEOUT);
 
-            watcher = create_watcher(&config_paths)
+            watcher = create_watcher(&watcher_conf, &config_paths)
                 .map_err(|error| error!(message = "Failed to create file watcher.", %error))
                 .ok();
 
@@ -83,73 +147,122 @@ pub fn spawn_thread<'a>(
             // so for a good measure raise SIGHUP and let reload logic
             // determine if anything changed.
info!("Speculating that configuration files have changed."); - raise_sighup(); + _ = signal_tx.send(crate::signal::SignalTo::ReloadFromDisk).map_err(|error| { + error!(message = "Unable to reload configuration file. Restart Vector to reload it.", cause = %error) + }); } }); Ok(()) } -#[cfg(windows)] -/// Errors on Windows. -pub fn spawn_thread<'a>( - _config_paths: impl IntoIterator + 'a, - _delay: impl Into>, -) -> Result<(), Error> { - Err("Reloading config on Windows isn't currently supported. Related issue https://github.com/vectordotdev/vector/issues/938 .".into()) -} - -#[cfg(unix)] -fn raise_sighup() { - use nix::sys::signal; - _ = signal::raise(signal::Signal::SIGHUP).map_err(|error| { - error!(message = "Unable to reload configuration file. Restart Vector to reload it.", cause = %error) - }); -} - -#[cfg(unix)] fn create_watcher( + watcher_conf: &WatcherConfig, config_paths: &[PathBuf], -) -> Result< - ( - RecommendedWatcher, - Receiver>, - ), - Error, -> { +) -> Result<(Watcher, Receiver>), Error> { info!("Creating configuration file watcher."); + let (sender, receiver) = channel(); - let mut watcher = recommended_watcher(sender)?; - add_paths(&mut watcher, config_paths)?; + let mut watcher = match watcher_conf { + WatcherConfig::RecommendedWatcher => { + let recommended_watcher = recommended_watcher(sender)?; + Watcher::RecommendedWatcher(recommended_watcher) + } + WatcherConfig::PollWatcher(interval) => { + let config = + notify::Config::default().with_poll_interval(Duration::from_secs(*interval)); + let poll_watcher = notify::PollWatcher::new(sender, config)?; + Watcher::PollWatcher(poll_watcher) + } + }; + watcher.add_paths(config_paths)?; Ok((watcher, receiver)) } -#[cfg(unix)] -fn add_paths(watcher: &mut RecommendedWatcher, config_paths: &[PathBuf]) -> Result<(), Error> { - for path in config_paths { - watcher.watch(path, RecursiveMode::NonRecursive)?; - } - Ok(()) -} - #[cfg(all(test, unix, not(target_os = "macos")))] // https://github.com/vectordotdev/vector/issues/5000 mod tests { + use super::*; + use crate::{ + config::ComponentKey, + signal::SignalRx, + test_util::{temp_dir, temp_file, trace_init}, + }; use std::{fs::File, io::Write, time::Duration}; + use tokio::sync::broadcast; + + async fn test_signal( + file: &mut File, + expected_signal: crate::signal::SignalTo, + timeout: Duration, + mut receiver: SignalRx, + ) -> bool { + file.write_all(&[0]).unwrap(); + file.sync_all().unwrap(); - use tokio::signal::unix::{signal, SignalKind}; + match tokio::time::timeout(timeout, receiver.recv()).await { + Ok(Ok(signal)) => signal == expected_signal, + _ => false, + } + } - use super::*; - use crate::test_util::{temp_dir, temp_file, trace_init}; + #[tokio::test] + async fn component_update() { + trace_init(); - async fn test(file: &mut File, timeout: Duration) -> bool { - let mut signal = signal(SignalKind::hangup()).expect("Signal handlers should not panic."); + let delay = Duration::from_secs(3); + let dir = temp_dir().to_path_buf(); + let watcher_conf = WatcherConfig::RecommendedWatcher; + let component_file_path = vec![dir.join("tls.cert"), dir.join("tls.key")]; + let http_component = ComponentKey::from("http"); - file.write_all(&[0]).unwrap(); - file.sync_all().unwrap(); + std::fs::create_dir(&dir).unwrap(); - tokio::time::timeout(timeout, signal.recv()).await.is_ok() - } + let mut component_files: Vec = component_file_path + .iter() + .map(|file| File::create(file).unwrap()) + .collect(); + let component_config = + 
ComponentConfig::new(component_file_path.clone(), http_component.clone()); + + let (signal_tx, signal_rx) = broadcast::channel(128); + spawn_thread( + watcher_conf, + signal_tx, + &[dir], + vec![component_config], + delay, + ) + .unwrap(); + + let signal_rx = signal_rx.resubscribe(); + let signal_rx2 = signal_rx.resubscribe(); + + if !test_signal( + &mut component_files[0], + crate::signal::SignalTo::ReloadComponents(HashSet::from_iter(vec![ + http_component.clone() + ])), + delay * 5, + signal_rx, + ) + .await + { + panic!("Test timed out"); + } + if !test_signal( + &mut component_files[1], + crate::signal::SignalTo::ReloadComponents(HashSet::from_iter(vec![ + http_component.clone() + ])), + delay * 5, + signal_rx2, + ) + .await + { + panic!("Test timed out"); + } + } #[tokio::test] async fn file_directory_update() { trace_init(); @@ -157,13 +270,22 @@ mod tests { let delay = Duration::from_secs(3); let dir = temp_dir().to_path_buf(); let file_path = dir.join("vector.toml"); + let watcher_conf = WatcherConfig::RecommendedWatcher; std::fs::create_dir(&dir).unwrap(); let mut file = File::create(&file_path).unwrap(); - spawn_thread(&[dir], delay).unwrap(); - - if !test(&mut file, delay * 5).await { + let (signal_tx, signal_rx) = broadcast::channel(128); + spawn_thread(watcher_conf, signal_tx, &[dir], vec![], delay).unwrap(); + + if !test_signal( + &mut file, + crate::signal::SignalTo::ReloadFromDisk, + delay * 5, + signal_rx, + ) + .await + { panic!("Test timed out"); } } @@ -175,15 +297,25 @@ mod tests { let delay = Duration::from_secs(3); let file_path = temp_file(); let mut file = File::create(&file_path).unwrap(); - - spawn_thread(&[file_path], delay).unwrap(); - - if !test(&mut file, delay * 5).await { + let watcher_conf = WatcherConfig::RecommendedWatcher; + + let (signal_tx, signal_rx) = broadcast::channel(128); + spawn_thread(watcher_conf, signal_tx, &[file_path], vec![], delay).unwrap(); + + if !test_signal( + &mut file, + crate::signal::SignalTo::ReloadFromDisk, + delay * 5, + signal_rx, + ) + .await + { panic!("Test timed out"); } } #[tokio::test] + #[cfg(unix)] async fn sym_file_update() { trace_init(); @@ -193,9 +325,47 @@ mod tests { let mut file = File::create(&file_path).unwrap(); std::os::unix::fs::symlink(&file_path, &sym_file).unwrap(); - spawn_thread(&[sym_file], delay).unwrap(); + let watcher_conf = WatcherConfig::RecommendedWatcher; + + let (signal_tx, signal_rx) = broadcast::channel(128); + spawn_thread(watcher_conf, signal_tx, &[sym_file], vec![], delay).unwrap(); + + if !test_signal( + &mut file, + crate::signal::SignalTo::ReloadFromDisk, + delay * 5, + signal_rx, + ) + .await + { + panic!("Test timed out"); + } + } + + #[tokio::test] + async fn recursive_directory_file_update() { + trace_init(); + + let delay = Duration::from_secs(3); + let dir = temp_dir().to_path_buf(); + let sub_dir = dir.join("sources"); + let file_path = sub_dir.join("input.toml"); + let watcher_conf = WatcherConfig::RecommendedWatcher; + + std::fs::create_dir_all(&sub_dir).unwrap(); + let mut file = File::create(&file_path).unwrap(); - if !test(&mut file, delay * 5).await { + let (signal_tx, signal_rx) = broadcast::channel(128); + spawn_thread(watcher_conf, signal_tx, &[sub_dir], vec![], delay).unwrap(); + + if !test_signal( + &mut file, + crate::signal::SignalTo::ReloadFromDisk, + delay * 5, + signal_rx, + ) + .await + { panic!("Test timed out"); } } diff --git a/src/convert_config.rs b/src/convert_config.rs index d81b998c5ee1f..526838b06cd43 100644 --- a/src/convert_config.rs +++ 
b/src/convert_config.rs @@ -57,7 +57,7 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { return exitcode::SOFTWARE; } - return if opts.input_path.is_file() && opts.output_path.extension().is_some() { + if opts.input_path.is_file() && opts.output_path.extension().is_some() { if let Some(base_dir) = opts.output_path.parent() { if !base_dir.exists() { fs::create_dir_all(base_dir).unwrap_or_else(|_| { @@ -96,7 +96,7 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { exitcode::SOFTWARE } } - }; + } } fn convert_config( @@ -271,7 +271,7 @@ mod tests { let input_path = test_data_dir(); let output_dir = tempdir() .expect("Unable to create tempdir for config") - .into_path(); + .keep(); walk_dir_and_convert(&input_path, &output_dir, Format::Yaml).unwrap(); let mut count: usize = 0; @@ -283,7 +283,7 @@ mod tests { let extension = path.extension().unwrap().to_str().unwrap(); if extension == Format::Yaml.to_string() { // Note that here we read the converted string directly. - let converted_config = fs::read_to_string(&output_dir.join(&path)).unwrap(); + let converted_config = fs::read_to_string(output_dir.join(&path)).unwrap(); assert_eq!(converted_config, original_config); count += 1; } diff --git a/src/docker.rs b/src/docker.rs index 58e9c1819f7a8..24a1fc429748e 100644 --- a/src/docker.rs +++ b/src/docker.rs @@ -2,10 +2,13 @@ use std::{collections::HashMap, env, path::PathBuf}; use bollard::{ - container::{Config, CreateContainerOptions}, errors::Error as DockerError, - image::{CreateImageOptions, ListImagesOptions}, models::HostConfig, + query_parameters::{ + CreateContainerOptionsBuilder, CreateImageOptionsBuilder, ListImagesOptionsBuilder, + RemoveContainerOptions, StartContainerOptions, StopContainerOptions, + }, + secret::ContainerCreateBody, Docker, API_DEFAULT_VERSION, }; use futures::StreamExt; @@ -45,7 +48,7 @@ pub fn docker(host: Option, tls: Option) -> crate::Resu let host = host.or_else(|| env::var("DOCKER_HOST").ok()); match host { - None => Docker::connect_with_local_defaults().map_err(Into::into), + None => Docker::connect_with_defaults().map_err(Into::into), Some(host) => { let scheme = host .parse::() @@ -74,19 +77,9 @@ pub fn docker(host: Option, tls: Option) -> crate::Resu .map_err(Into::into) } Some("unix") | Some("npipe") | None => { - // TODO: Use `connect_with_local` on all platforms. - // - // Named pipes are currently disabled in Tokio. 
Tracking issue: - // https://github.com/fussybeaver/bollard/pull/138 - if cfg!(windows) { - warn!("Named pipes are currently not available on Windows, trying to connecting to Docker with default HTTP settings instead."); - Docker::connect_with_http_defaults().map_err(Into::into) - } else { - Docker::connect_with_local(&host, DEFAULT_TIMEOUT, API_DEFAULT_VERSION) - .map_err(Into::into) - } + Docker::connect_with_defaults().map_err(Into::into) } - Some(scheme) => Err(format!("Unknown scheme: {}", scheme).into()), + Some(scheme) => Err(format!("Unknown scheme: {scheme}").into()), } } } @@ -120,26 +113,24 @@ async fn pull_image(docker: &Docker, image: &str, tag: &str) { vec![format!("{}:{}", image, tag)], ); - let options = Some(ListImagesOptions { - filters, - ..Default::default() - }); + let options = Some(ListImagesOptionsBuilder::new().filters(&filters).build()); let images = docker.list_images(options).await.unwrap(); if images.is_empty() { // If not found, pull it - let options = Some(CreateImageOptions { - from_image: image, - tag, - ..Default::default() - }); + let options = Some( + CreateImageOptionsBuilder::new() + .from_image(image) + .tag(tag) + .build(), + ); docker .create_image(options, None, None) .for_each(|item| async move { let info = item.unwrap(); if let Some(error) = info.error { - panic!("{:?}", error); + panic!("{error:?}"); } }) .await @@ -150,7 +141,7 @@ async fn remove_container(docker: &Docker, id: &str) { trace!("Stopping container."); _ = docker - .stop_container(id, None) + .stop_container(id, None::) .await .map_err(|e| error!(%e)); @@ -158,7 +149,7 @@ async fn remove_container(docker: &Docker, id: &str) { // Don't panic, as this is unrelated to the test _ = docker - .remove_container(id, None) + .remove_container(id, None::) .await .map_err(|e| error!(%e)); } @@ -181,7 +172,7 @@ impl Container { } pub fn bind(mut self, src: impl std::fmt::Display, dst: &str) -> Self { - let bind = format!("{}:{}", src, dst); + let bind = format!("{src}:{dst}"); self.binds.get_or_insert_with(Vec::new).push(bind); self } @@ -196,12 +187,11 @@ impl Container { pull_image(&docker, self.image, self.tag).await; - let options = Some(CreateContainerOptions { - name: format!("vector_test_{}", uuid::Uuid::new_v4()), - platform: None, - }); + let options = CreateContainerOptionsBuilder::new() + .name(&format!("vector_test_{}", uuid::Uuid::new_v4())) + .build(); - let config = Config { + let config = ContainerCreateBody { image: Some(format!("{}:{}", &self.image, &self.tag)), cmd: self.cmd, host_config: Some(HostConfig { @@ -213,10 +203,13 @@ impl Container { ..Default::default() }; - let container = docker.create_container(options, config).await.unwrap(); + let container = docker + .create_container(Some(options), config) + .await + .unwrap(); docker - .start_container::(&container.id, None) + .start_container(&container.id, None::) .await .unwrap(); diff --git a/src/encoding_transcode.rs b/src/encoding_transcode.rs index 5cc1c6882bdb5..92a9c88b6e58b 100644 --- a/src/encoding_transcode.rs +++ b/src/encoding_transcode.rs @@ -84,10 +84,7 @@ impl Decoder { // processing, we handle it centrally here. Also, the BOM does not serve // any more use for us, since the source encoding is already pre-identified // as part of decoder initialization. 
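// Editor's note: the hunk below replaces a `map_or` with slice-pattern equality. A
// minimal, standalone equivalence sketch (the payload bytes are hypothetical):
//
//     const BOM_UTF8: &[u8] = b"\xef\xbb\xbf";
//     const BOM_UTF8_LEN: usize = BOM_UTF8.len();
//     let output: &[u8] = b"\xef\xbb\xbfpayload";
//     // `get(..len)` yields `None` for buffers shorter than the BOM, so the comparison
//     // is false in exactly the cases the old `map_or(false, ...)` guarded against.
//     assert!(output.get(..BOM_UTF8_LEN) == Some(BOM_UTF8));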
- if output - .get(..BOM_UTF8_LEN) - .map_or(false, |start| start == BOM_UTF8) - { + if output.get(..BOM_UTF8_LEN) == Some(BOM_UTF8) { emit!(DecoderBomRemoval { from_encoding: self.inner.encoding().name() }); @@ -293,10 +290,7 @@ mod tests { assert_eq!( d.decode_to_utf8(Bytes::from(problematic_input)), - Bytes::from(format!( - "{}{}123", - REPLACEMENT_CHARACTER, REPLACEMENT_CHARACTER - )) + Bytes::from(format!("{REPLACEMENT_CHARACTER}{REPLACEMENT_CHARACTER}123")) ); } diff --git a/src/enrichment_tables/file.rs b/src/enrichment_tables/file.rs index 5f1e63cce1214..ec234ea10e16c 100644 --- a/src/enrichment_tables/file.rs +++ b/src/enrichment_tables/file.rs @@ -14,7 +14,8 @@ use crate::config::EnrichmentTableConfig; #[configurable_component] #[derive(Clone, Debug, Eq, PartialEq)] #[serde(tag = "type", rename_all = "snake_case")] -enum Encoding { +#[configurable(metadata(docs::enum_tag_description = "File encoding type."))] +pub enum Encoding { /// Decodes the file as a [CSV][csv] (comma-separated values) file. /// /// [csv]: https://wikipedia.org/wiki/Comma-separated_values @@ -46,24 +47,26 @@ impl Default for Encoding { /// File-specific settings. #[configurable_component] #[derive(Clone, Debug, Default, Eq, PartialEq)] -struct FileSettings { +pub struct FileSettings { /// The path of the enrichment table file. /// /// Currently, only [CSV][csv] files are supported. /// /// [csv]: https://en.wikipedia.org/wiki/Comma-separated_values - path: PathBuf, + pub path: PathBuf, + /// File encoding configuration. #[configurable(derived)] - encoding: Encoding, + pub encoding: Encoding, } /// Configuration for the `file` enrichment table. #[configurable_component(enrichment_table("file"))] #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct FileConfig { + /// File-specific settings. #[configurable(derived)] - file: FileSettings, + pub file: FileSettings, /// Key/value pairs representing mapped log field names and types. /// @@ -74,7 +77,7 @@ pub struct FileConfig { /// 1. One of the built-in-formats listed in the `Timestamp Formats` table below. /// 2. The [time format specifiers][chrono_fmt] from Rust’s `chrono` library. /// - /// ### Types + /// Types /// /// - **`bool`** /// - **`string`** @@ -83,7 +86,7 @@ pub struct FileConfig { /// - **`date`** /// - **`timestamp`** (see the table below for formats) /// - /// ### Timestamp Formats + /// Timestamp Formats /// /// | Format | Description | Example | /// |----------------------|----------------------------------------------------------------------------------|----------------------------------| @@ -109,7 +112,10 @@ pub struct FileConfig { /// [rfc3339]: https://tools.ietf.org/html/rfc3339 /// [chrono_fmt]: https://docs.rs/chrono/latest/chrono/format/strftime/index.html#specifiers #[serde(default)] - schema: HashMap, + #[configurable(metadata( + docs::additional_props_description = "Represents mapped log field names and types." + ))] + pub schema: HashMap, } const fn default_delimiter() -> char { @@ -137,10 +143,7 @@ impl FileConfig { .from_utc_datetime( &chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d") .map_err(|_| { - format!( - "unable to parse date {} found in row {}", - value, row - ) + format!("unable to parse date {value} found in row {row}") })? 
                                .and_hms_opt(0, 0, 0)
                                .expect("invalid timestamp"),
@@ -153,10 +156,7 @@
                             .from_utc_datetime(
                                 &chrono::NaiveDate::parse_from_str(value, format)
                                     .map_err(|_| {
-                                        format!(
-                                            "unable to parse date {} found in row {}",
-                                            value, row
-                                        )
+                                        format!("unable to parse date {value} found in row {row}")
                                     })?
                                     .and_hms_opt(0, 0, 0)
                                     .expect("invalid timestamp"),
@@ -168,9 +168,7 @@
                         Conversion::parse(format, timezone).map_err(|err| err.to_string())?;
                     conversion
                         .convert(Bytes::copy_from_slice(value.as_bytes()))
-                        .map_err(|_| {
-                            format!("unable to parse {} found in row {}", value, row)
-                        })?
+                        .map_err(|_| format!("unable to parse {value} found in row {row}"))?
                 }
             }
        })
@@ -178,10 +176,8 @@ impl FileConfig {
     }
 
-    fn load_file(
-        &self,
-        timezone: TimeZone,
-    ) -> crate::Result<(Vec<String>, Vec<Vec<Value>>, SystemTime)> {
+    /// Load the configured file into memory. Required to create a new file enrichment table.
+    pub fn load_file(&self, timezone: TimeZone) -> crate::Result<FileData> {
         let Encoding::Csv {
             include_headers,
             delimiter,
@@ -192,6 +188,7 @@ impl FileConfig {
             .delimiter(delimiter as u8)
             .from_path(&self.file.path)?;
 
+        let first_row = reader.records().next();
         let headers = if include_headers {
            reader
                .headers()?
@@ -201,14 +198,15 @@ impl FileConfig {
         } else {
             // If there are no headers in the datafile we make headers as the numerical index of
             // the column.
-            match reader.records().next() {
-                Some(Ok(row)) => (0..row.len()).map(|idx| idx.to_string()).collect(),
+            match first_row {
+                Some(Ok(ref row)) => (0..row.len()).map(|idx| idx.to_string()).collect(),
                 _ => Vec::new(),
             }
         };
 
-        let data = reader
-            .records()
+        let data = first_row
+            .into_iter()
+            .chain(reader.records())
             .map(|row| {
                 Ok(row?
                     .iter()
@@ -224,26 +222,40 @@ impl FileConfig {
             headers
         );
 
-        let modified = fs::metadata(&self.file.path)?.modified()?;
+        let file = reader.into_inner();
 
-        Ok((headers, data, modified))
+        Ok(FileData {
+            headers,
+            data,
+            modified: file.metadata()?.modified()?,
+        })
     }
 }
 
-#[async_trait::async_trait]
 impl EnrichmentTableConfig for FileConfig {
     async fn build(
         &self,
         globals: &crate::config::GlobalOptions,
     ) -> crate::Result<Box<dyn Table + Send + Sync>> {
-        let (headers, data, modified) = self.load_file(globals.timezone())?;
-
-        Ok(Box::new(File::new(self.clone(), modified, data, headers)))
+        Ok(Box::new(File::new(
+            self.clone(),
+            self.load_file(globals.timezone())?,
+        )))
     }
 }
 
 impl_generate_config_from_default!(FileConfig);
 
+/// The data resulting from loading a configured file.
+pub struct FileData {
+    /// The ordered set of headers of the data columns.
+    pub headers: Vec<String>,
+    /// The data contained in the file.
+    pub data: Vec<Vec<Value>>,
+    /// The last modified time of the file.
+    pub modified: SystemTime,
+}
+
 /// A struct that implements [vector_lib::enrichment::Table] to handle loading enrichment data from a CSV file.
 #[derive(Clone)]
 pub struct File {
@@ -260,17 +272,12 @@ pub struct File {
 
 impl File {
     /// Creates a new [File] based on the provided config.
-    pub fn new(
-        config: FileConfig,
-        last_modified: SystemTime,
-        data: Vec<Vec<Value>>,
-        headers: Vec<String>,
-    ) -> Self {
+    pub fn new(config: FileConfig, data: FileData) -> Self {
         Self {
             config,
-            last_modified,
-            data,
-            headers,
+            last_modified: data.modified,
+            data: data.data,
+            headers: data.headers,
             indexes: Vec::new(),
         }
     }
@@ -280,20 +287,60 @@ impl File {
     }
 
     /// Does the given row match all the conditions specified?
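// Editor's note on the hunks below: `row_equals` (and the `Table` trait methods further
// down) gain an optional `wildcard` argument. When the condition's exact value does not
// match, a row whose column equals the wildcard value still qualifies. A minimal usage
// sketch, assuming the `File` table and `Condition` type from this file (the literal
// values are illustrative):
//
//     let wildcard = Value::from("fallback");
//     let condition = Condition::Equals { field: "field1", value: Value::from("no-match") };
//     // Matches rows where `field1 == "no-match"` OR `field1 == "fallback"`.
//     let row = file.find_table_row(Case::Sensitive, &[condition], None, Some(&wildcard), None);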
- fn row_equals(&self, case: Case, condition: &[Condition], row: &[Value]) -> bool { + fn row_equals( + &self, + case: Case, + condition: &[Condition], + row: &[Value], + wildcard: Option<&Value>, + ) -> bool { condition.iter().all(|condition| match condition { Condition::Equals { field, value } => match self.column_index(field) { None => false, - Some(idx) => match (case, &row[idx], value) { - (Case::Insensitive, Value::Bytes(bytes1), Value::Bytes(bytes2)) => { - match (std::str::from_utf8(bytes1), std::str::from_utf8(bytes2)) { - (Ok(s1), Ok(s2)) => s1.to_lowercase() == s2.to_lowercase(), - (Err(_), Err(_)) => bytes1 == bytes2, - _ => false, + Some(idx) => { + let current_row_value = &row[idx]; + + // Helper closure for comparing current_row_value with another value, + // respecting the specified case for Value::Bytes. + let compare_values = |val_to_compare: &Value| -> bool { + match (case, current_row_value, val_to_compare) { + ( + Case::Insensitive, + Value::Bytes(bytes_row), + Value::Bytes(bytes_cmp), + ) => { + // Perform case-insensitive comparison for byte strings. + // If both are valid UTF-8, compare their lowercase versions. + // If both are non-UTF-8 bytes, compare them directly. + // If one is UTF-8 and the other is not, they are considered not equal. + match ( + std::str::from_utf8(bytes_row), + std::str::from_utf8(bytes_cmp), + ) { + (Ok(s_row), Ok(s_cmp)) => { + s_row.to_lowercase() == s_cmp.to_lowercase() + } + (Err(_), Err(_)) => bytes_row == bytes_cmp, + _ => false, + } + } + // For Case::Sensitive, or for Case::Insensitive with non-Bytes types, + // perform a direct equality check. + _ => current_row_value == val_to_compare, } + }; + + // First, check if the row value matches the condition's value. + if compare_values(value) { + true + } else if let Some(wc_val) = wildcard { + // If not, and a wildcard is provided, check if the row value matches the wildcard. + compare_values(wc_val) + } else { + // Otherwise, no match. + false } - (_, value1, value2) => value1 == value2, - }, + } }, Condition::BetweenDates { field, from, to } => match self.column_index(field) { None => false, @@ -302,6 +349,20 @@ impl File { _ => false, }, }, + Condition::FromDate { field, from } => match self.column_index(field) { + None => false, + Some(idx) => match row[idx] { + Value::Timestamp(date) => from <= &date, + _ => false, + }, + }, + Condition::ToDate { field, to } => match self.column_index(field) { + None => false, + Some(idx) => match row[idx] { + Value::Timestamp(date) => &date <= to, + _ => false, + }, + }, }) } @@ -347,7 +408,7 @@ impl File { }) .collect::>() .join(", "); - Err(format!("field(s) '{}' missing from dataset", missing)) + Err(format!("field(s) '{missing}' missing from dataset")) } else { Ok(normalized) } @@ -393,12 +454,13 @@ impl File { case: Case, condition: &'a [Condition<'a>], select: Option<&'a [String]>, + wildcard: Option<&'a Value>, ) -> impl Iterator + 'a where I: Iterator> + 'a, { data.filter_map(move |row| { - if self.row_equals(case, condition, row) { + if self.row_equals(case, condition, row, wildcard) { Some(self.add_columns(select, row)) } else { None @@ -430,6 +492,32 @@ impl File { let IndexHandle(handle) = handle; Ok(self.indexes[handle].2.get(&key)) } + + fn indexed_with_wildcard<'a>( + &'a self, + case: Case, + wildcard: &'a Value, + condition: &'a [Condition<'a>], + handle: IndexHandle, + ) -> Result>, String> { + if let Some(result) = self.indexed(case, condition, handle)? 
{ + return Ok(Some(result)); + } + + // If lookup fails and a wildcard is provided, compute hash for the wildcard + let mut wildcard_hash = seahash::SeaHasher::default(); + for header in self.headers.iter() { + if condition.iter().any( + |condition| matches!(condition, Condition::Equals { field, .. } if field == header), + ) { + hash_value(&mut wildcard_hash, case, wildcard)?; + } + } + + let wildcard_key = wildcard_hash.finish(); + let IndexHandle(handle) = handle; + Ok(self.indexes[handle].2.get(&wildcard_key)) + } } /// Adds the bytes from the given value to the hash. @@ -477,22 +565,26 @@ impl Table for File { case: Case, condition: &'a [Condition<'a>], select: Option<&'a [String]>, + wildcard: Option<&Value>, index: Option, ) -> Result { match index { None => { // No index has been passed so we need to do a Sequential Scan. - single_or_err(self.sequential(self.data.iter(), case, condition, select)) + single_or_err(self.sequential(self.data.iter(), case, condition, select, wildcard)) } Some(handle) => { - let result = self - .indexed(case, condition, handle)? - .ok_or_else(|| "no rows found in index".to_string())? - .iter() - .map(|idx| &self.data[*idx]); + let result = if let Some(wildcard) = wildcard { + self.indexed_with_wildcard(case, wildcard, condition, handle)? + } else { + self.indexed(case, condition, handle)? + } + .ok_or_else(|| "no rows found in index".to_string())? + .iter() + .map(|idx| &self.data[*idx]); // Perform a sequential scan over the indexed result. - single_or_err(self.sequential(result, case, condition, select)) + single_or_err(self.sequential(result, case, condition, select, wildcard)) } } } @@ -502,25 +594,33 @@ impl Table for File { case: Case, condition: &'a [Condition<'a>], select: Option<&'a [String]>, + wildcard: Option<&Value>, index: Option, ) -> Result, String> { match index { None => { // No index has been passed so we need to do a Sequential Scan. Ok(self - .sequential(self.data.iter(), case, condition, select) + .sequential(self.data.iter(), case, condition, select, wildcard) .collect()) } Some(handle) => { // Perform a sequential scan over the indexed result. + let indexed_result = if let Some(wildcard) = wildcard { + self.indexed_with_wildcard(case, wildcard, condition, handle)? + } else { + self.indexed(case, condition, handle)? + }; + Ok(self .sequential( - self.indexed(case, condition, handle)? 
+ indexed_result .iter() .flat_map(|results| results.iter().map(|idx| &self.data[*idx])), case, condition, select, + wildcard, ) .collect()) } @@ -589,6 +689,64 @@ mod tests { use super::*; + #[test] + fn parse_file_with_headers() { + let dir = tempfile::tempdir().expect("Unable to create tempdir for enrichment table"); + let path = dir.path().join("table.csv"); + fs::write(path.clone(), "foo,bar\na,1\nb,2").expect("Failed to write enrichment table"); + + let config = FileConfig { + file: FileSettings { + path, + encoding: Encoding::Csv { + include_headers: true, + delimiter: default_delimiter(), + }, + }, + schema: HashMap::new(), + }; + let data = config + .load_file(Default::default()) + .expect("Failed to parse csv"); + assert_eq!(vec!["foo".to_string(), "bar".to_string()], data.headers); + assert_eq!( + vec![ + vec![Value::from("a"), Value::from("1")], + vec![Value::from("b"), Value::from("2")], + ], + data.data + ); + } + + #[test] + fn parse_file_no_headers() { + let dir = tempfile::tempdir().expect("Unable to create tempdir for enrichment table"); + let path = dir.path().join("table.csv"); + fs::write(path.clone(), "a,1\nb,2").expect("Failed to write enrichment table"); + + let config = FileConfig { + file: FileSettings { + path, + encoding: Encoding::Csv { + include_headers: false, + delimiter: default_delimiter(), + }, + }, + schema: HashMap::new(), + }; + let data = config + .load_file(Default::default()) + .expect("Failed to parse csv"); + assert_eq!(vec!["0".to_string(), "1".to_string()], data.headers); + assert_eq!( + vec![ + vec![Value::from("a"), Value::from("1")], + vec![Value::from("b"), Value::from("2")], + ], + data.data + ); + } + #[test] fn parse_column() { let mut schema = HashMap::new(); @@ -697,12 +855,14 @@ mod tests { fn finds_row() { let file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into()], - vec!["zirp".into(), "zurp".into()], - ], - vec!["field1".to_string(), "field2".to_string()], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let condition = Condition::Equals { @@ -715,7 +875,37 @@ mod tests { ("field1".into(), Value::from("zirp")), ("field2".into(), Value::from("zurp")), ])), - file.find_table_row(Case::Sensitive, &[condition], None, None) + file.find_table_row(Case::Sensitive, &[condition], None, None, None) + ); + } + + #[test] + fn finds_row_with_wildcard() { + let file = File::new( + Default::default(), + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, + ); + + let wildcard = Value::from("zirp"); + + let condition = Condition::Equals { + field: "field1", + value: Value::from("nonexistent"), + }; + + assert_eq!( + Ok(ObjectMap::from([ + ("field1".into(), Value::from("zirp")), + ("field2".into(), Value::from("zurp")), + ])), + file.find_table_row(Case::Sensitive, &[condition], None, Some(&wildcard), None) ); } @@ -723,13 +913,15 @@ mod tests { fn duplicate_indexes() { let mut file = File::new( Default::default(), - SystemTime::now(), - Vec::new(), - vec![ - "field1".to_string(), - "field2".to_string(), - "field3".to_string(), - ], + FileData { + modified: SystemTime::now(), + data: Vec::new(), + headers: vec![ + "field1".to_string(), + "field2".to_string(), + "field3".to_string(), + ], + }, ); let 
handle1 = file.add_index(Case::Sensitive, &["field2", "field3"]); @@ -743,13 +935,15 @@ mod tests { fn errors_on_missing_columns() { let mut file = File::new( Default::default(), - SystemTime::now(), - Vec::new(), - vec![ - "field1".to_string(), - "field2".to_string(), - "field3".to_string(), - ], + FileData { + modified: SystemTime::now(), + data: Vec::new(), + headers: vec![ + "field1".to_string(), + "field2".to_string(), + "field3".to_string(), + ], + }, ); let error = file.add_index(Case::Sensitive, &["apples", "field2", "bananas"]); @@ -763,12 +957,14 @@ mod tests { fn finds_row_with_index() { let mut file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into()], - vec!["zirp".into(), "zurp".into()], - ], - vec!["field1".to_string(), "field2".to_string()], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); @@ -783,7 +979,44 @@ mod tests { ("field1".into(), Value::from("zirp")), ("field2".into(), Value::from("zurp")), ])), - file.find_table_row(Case::Sensitive, &[condition], None, Some(handle)) + file.find_table_row(Case::Sensitive, &[condition], None, None, Some(handle)) + ); + } + + #[test] + fn finds_row_with_index_case_sensitive_and_wildcard() { + let mut file = File::new( + Default::default(), + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, + ); + + let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); + let wildcard = Value::from("zirp"); + + let condition = Condition::Equals { + field: "field1", + value: Value::from("nonexistent"), + }; + + assert_eq!( + Ok(ObjectMap::from([ + ("field1".into(), Value::from("zirp")), + ("field2".into(), Value::from("zurp")), + ])), + file.find_table_row( + Case::Sensitive, + &[condition], + None, + Some(&wildcard), + Some(handle) + ) ); } @@ -791,13 +1024,15 @@ mod tests { fn finds_rows_with_index_case_sensitive() { let mut file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into()], - vec!["zirp".into(), "zurp".into()], - vec!["zip".into(), "zoop".into()], - ], - vec!["field1".to_string(), "field2".to_string()], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + vec!["zip".into(), "zoop".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); @@ -820,6 +1055,7 @@ mod tests { value: Value::from("zip"), }], None, + None, Some(handle) ) ); @@ -833,6 +1069,7 @@ mod tests { value: Value::from("ZiP"), }], None, + None, Some(handle) ) ); @@ -842,17 +1079,19 @@ mod tests { fn selects_columns() { let mut file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into(), "zoop".into()], - vec!["zirp".into(), "zurp".into(), "zork".into()], - vec!["zip".into(), "zoop".into(), "zibble".into()], - ], - vec![ - "field1".to_string(), - "field2".to_string(), - "field3".to_string(), - ], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into(), "zoop".into()], + vec!["zirp".into(), "zurp".into(), "zork".into()], + vec!["zip".into(), "zoop".into(), 
"zibble".into()], + ], + headers: vec![ + "field1".to_string(), + "field2".to_string(), + "field3".to_string(), + ], + }, ); let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); @@ -877,6 +1116,7 @@ mod tests { Case::Sensitive, &[condition], Some(&["field1".to_string(), "field3".to_string()]), + None, Some(handle) ) ); @@ -886,13 +1126,15 @@ mod tests { fn finds_rows_with_index_case_insensitive() { let mut file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into()], - vec!["zirp".into(), "zurp".into()], - vec!["zip".into(), "zoop".into()], - ], - vec!["field1".to_string(), "field2".to_string()], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + vec!["zip".into(), "zoop".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let handle = file.add_index(Case::Insensitive, &["field1"]).unwrap(); @@ -915,6 +1157,7 @@ mod tests { value: Value::from("zip"), }], None, + None, Some(handle) ) ); @@ -937,37 +1180,171 @@ mod tests { value: Value::from("ZiP"), }], None, + None, Some(handle) ) ); } #[test] - fn finds_row_with_dates() { + fn finds_rows_with_index_case_insensitive_and_wildcard() { let mut file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec![ - "zip".into(), - Value::Timestamp( - chrono::Utc - .with_ymd_and_hms(2015, 12, 7, 0, 0, 0) - .single() - .expect("invalid timestamp"), - ), + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + vec!["zip".into(), "zoop".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, + ); + + let handle = file.add_index(Case::Insensitive, &["field1"]).unwrap(); + + assert_eq!( + Ok(vec![ + ObjectMap::from([ + ("field1".into(), Value::from("zip")), + ("field2".into(), Value::from("zup")), + ]), + ObjectMap::from([ + ("field1".into(), Value::from("zip")), + ("field2".into(), Value::from("zoop")), + ]), + ]), + file.find_table_rows( + Case::Insensitive, + &[Condition::Equals { + field: "field1", + value: Value::from("nonexistent"), + }], + None, + Some(&Value::from("zip")), + Some(handle) + ) + ); + + assert_eq!( + Ok(vec![ + ObjectMap::from([ + ("field1".into(), Value::from("zip")), + ("field2".into(), Value::from("zup")), + ]), + ObjectMap::from([ + ("field1".into(), Value::from("zip")), + ("field2".into(), Value::from("zoop")), + ]), + ]), + file.find_table_rows( + Case::Insensitive, + &[Condition::Equals { + field: "field1", + value: Value::from("ZiP"), + }], + None, + Some(&Value::from("ZiP")), + Some(handle) + ) + ); + } + + #[test] + fn finds_row_between_dates() { + let mut file = File::new( + Default::default(), + FileData { + modified: SystemTime::now(), + data: vec![ + vec![ + "zip".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2015, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp"), + ), + ], + vec![ + "zip".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2016, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp"), + ), + ], ], - vec![ - "zip".into(), + headers: vec!["field1".to_string(), "field2".to_string()], + }, + ); + + let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); + + let conditions = [ + Condition::Equals { + field: "field1", + value: "zip".into(), + }, + Condition::BetweenDates { + field: "field2", + from: chrono::Utc + .with_ymd_and_hms(2016, 1, 1, 0, 0, 0) + .single() + .expect("invalid 
timestamp"), + to: chrono::Utc + .with_ymd_and_hms(2017, 1, 1, 0, 0, 0) + .single() + .expect("invalid timestamp"), + }, + ]; + + assert_eq!( + Ok(ObjectMap::from([ + ("field1".into(), Value::from("zip")), + ( + "field2".into(), Value::Timestamp( chrono::Utc .with_ymd_and_hms(2016, 12, 7, 0, 0, 0) .single() - .expect("invalid timestamp"), - ), + .expect("invalid timestamp") + ) + ) + ])), + file.find_table_row(Case::Sensitive, &conditions, None, None, Some(handle)) + ); + } + + #[test] + fn finds_row_from_date() { + let mut file = File::new( + Default::default(), + FileData { + modified: SystemTime::now(), + data: vec![ + vec![ + "zip".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2015, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp"), + ), + ], + vec![ + "zip".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2016, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp"), + ), + ], ], - ], - vec!["field1".to_string(), "field2".to_string()], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); @@ -977,14 +1354,73 @@ mod tests { field: "field1", value: "zip".into(), }, - Condition::BetweenDates { + Condition::FromDate { field: "field2", from: chrono::Utc .with_ymd_and_hms(2016, 1, 1, 0, 0, 0) .single() .expect("invalid timestamp"), + }, + ]; + + assert_eq!( + Ok(ObjectMap::from([ + ("field1".into(), Value::from("zip")), + ( + "field2".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2016, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp") + ) + ) + ])), + file.find_table_row(Case::Sensitive, &conditions, None, None, Some(handle)) + ); + } + + #[test] + fn finds_row_to_date() { + let mut file = File::new( + Default::default(), + FileData { + modified: SystemTime::now(), + data: vec![ + vec![ + "zip".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2015, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp"), + ), + ], + vec![ + "zip".into(), + Value::Timestamp( + chrono::Utc + .with_ymd_and_hms(2016, 12, 7, 0, 0, 0) + .single() + .expect("invalid timestamp"), + ), + ], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, + ); + + let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); + + let conditions = [ + Condition::Equals { + field: "field1", + value: "zip".into(), + }, + Condition::ToDate { + field: "field2", to: chrono::Utc - .with_ymd_and_hms(2017, 1, 1, 0, 0, 0) + .with_ymd_and_hms(2016, 1, 1, 0, 0, 0) .single() .expect("invalid timestamp"), }, @@ -997,13 +1433,13 @@ mod tests { "field2".into(), Value::Timestamp( chrono::Utc - .with_ymd_and_hms(2016, 12, 7, 0, 0, 0) + .with_ymd_and_hms(2015, 12, 7, 0, 0, 0) .single() .expect("invalid timestamp") ) ) ])), - file.find_table_row(Case::Sensitive, &conditions, None, Some(handle)) + file.find_table_row(Case::Sensitive, &conditions, None, None, Some(handle)) ); } @@ -1011,12 +1447,14 @@ mod tests { fn doesnt_find_row() { let file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into()], - vec!["zirp".into(), "zurp".into()], - ], - vec!["field1".to_string(), "field2".to_string()], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let condition = Condition::Equals { @@ -1026,7 +1464,7 @@ mod tests { assert_eq!( Err("no rows found".to_string()), - 
file.find_table_row(Case::Sensitive, &[condition], None, None) + file.find_table_row(Case::Sensitive, &[condition], None, None, None) ); } @@ -1034,15 +1472,45 @@ mod tests { fn doesnt_find_row_with_index() { let mut file = File::new( Default::default(), - SystemTime::now(), - vec![ - vec!["zip".into(), "zup".into()], - vec!["zirp".into(), "zurp".into()], - ], - vec!["field1".to_string(), "field2".to_string()], + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, + ); + + let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); + + let condition = Condition::Equals { + field: "field1", + value: Value::from("zorp"), + }; + + assert_eq!( + Err("no rows found in index".to_string()), + file.find_table_row(Case::Sensitive, &[condition], None, None, Some(handle)) + ); + } + + #[test] + fn doesnt_find_row_with_index_and_wildcard() { + let mut file = File::new( + Default::default(), + FileData { + modified: SystemTime::now(), + data: vec![ + vec!["zip".into(), "zup".into()], + vec!["zirp".into(), "zurp".into()], + ], + headers: vec!["field1".to_string(), "field2".to_string()], + }, ); let handle = file.add_index(Case::Sensitive, &["field1"]).unwrap(); + let wildcard = Value::from("nonexistent"); let condition = Condition::Equals { field: "field1", @@ -1051,7 +1519,13 @@ mod tests { assert_eq!( Err("no rows found in index".to_string()), - file.find_table_row(Case::Sensitive, &[condition], None, Some(handle)) + file.find_table_row( + Case::Sensitive, + &[condition], + None, + Some(&wildcard), + Some(handle) + ) ); } } diff --git a/src/enrichment_tables/geoip.rs b/src/enrichment_tables/geoip.rs index 64c77fb75a159..fd3dbcf899ab3 100644 --- a/src/enrichment_tables/geoip.rs +++ b/src/enrichment_tables/geoip.rs @@ -7,8 +7,8 @@ use std::{collections::BTreeMap, fs, net::IpAddr, sync::Arc, time::SystemTime}; use maxminddb::{ - geoip2::{City, ConnectionType, Isp}, - MaxMindDBError, Reader, + geoip2::{AnonymousIp, City, ConnectionType, Isp}, + Reader, }; use ordered_float::NotNan; use vector_lib::configurable::configurable_component; @@ -18,8 +18,7 @@ use vrl::value::{ObjectMap, Value}; use crate::config::{EnrichmentTableConfig, GenerateConfig}; // MaxMind GeoIP database files have a type field we can use to recognize specific -// products. If we encounter one of these two types, we look for ASN/ISP information; -// otherwise we expect to be working with a City database. +// products. If it is an unknown type, an error will be returned. #[derive(Copy, Clone, Debug)] #[allow(missing_docs)] pub enum DatabaseKind { @@ -27,15 +26,20 @@ pub enum DatabaseKind { Isp, ConnectionType, City, + AnonymousIp, } -impl From<&str> for DatabaseKind { - fn from(v: &str) -> Self { - match v { - "GeoLite2-ASN" => Self::Asn, - "GeoIP2-ISP" => Self::Isp, - "GeoIP2-Connection-Type" => Self::ConnectionType, - _ => Self::City, +impl TryFrom<&str> for DatabaseKind { + type Error = (); + + fn try_from(value: &str) -> Result { + match value { + "GeoLite2-ASN" => Ok(Self::Asn), + "GeoIP2-ISP" => Ok(Self::Isp), + "GeoIP2-Connection-Type" => Ok(Self::ConnectionType), + "GeoIP2-City" | "GeoLite2-City" => Ok(Self::City), + "GeoIP2-Anonymous-IP" => Ok(Self::AnonymousIp), + _ => Err(()), } } } @@ -48,6 +52,7 @@ pub struct GeoipConfig { /// (**GeoLite2-City.mmdb**). /// /// Other databases, such as the country database, are not supported. 
+ /// `mmdb` enrichment table can be used for other databases. /// /// [geoip2]: https://dev.maxmind.com/geoip/geoip2/downloadable /// [geolite2]: https://dev.maxmind.com/geoip/geoip2/geolite2/#Download_Access @@ -89,7 +94,6 @@ impl GenerateConfig for GeoipConfig { } } -#[async_trait::async_trait] impl EnrichmentTableConfig for GeoipConfig { async fn build( &self, @@ -112,7 +116,13 @@ impl Geoip { /// Creates a new GeoIP struct from the provided config. pub fn new(config: GeoipConfig) -> crate::Result { let dbreader = Arc::new(Reader::open_readfile(config.path.clone())?); - let dbkind = DatabaseKind::from(dbreader.metadata.database_type.as_str()); + let dbkind = + DatabaseKind::try_from(dbreader.metadata.database_type.as_str()).map_err(|_| { + format!( + "Unsupported MMDB database type ({}). Use `mmdb` enrichment table instead.", + dbreader.metadata.database_type + ) + })?; // Check if we can read database with dummy Ip. let ip = IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED); @@ -120,10 +130,11 @@ impl Geoip { DatabaseKind::Asn | DatabaseKind::Isp => dbreader.lookup::(ip).map(|_| ()), DatabaseKind::ConnectionType => dbreader.lookup::(ip).map(|_| ()), DatabaseKind::City => dbreader.lookup::(ip).map(|_| ()), + DatabaseKind::AnonymousIp => dbreader.lookup::(ip).map(|_| ()), }; match result { - Ok(_) | Err(MaxMindDBError::AddressNotFoundError(_)) => Ok(Geoip { + Ok(_) => Ok(Geoip { last_modified: fs::metadata(&config.path)?.modified()?, dbreader, dbkind, @@ -145,14 +156,14 @@ impl Geoip { }; macro_rules! add_field { - ($k:expr, $v:expr) => { + ($k:expr_2021, $v:expr_2021) => { add_field($k, $v.map(Into::into)) }; } match self.dbkind { DatabaseKind::Asn | DatabaseKind::Isp => { - let data = self.dbreader.lookup::(ip).ok()?; + let data = self.dbreader.lookup::(ip).ok()??; add_field!("autonomous_system_number", data.autonomous_system_number); add_field!( @@ -163,7 +174,7 @@ impl Geoip { add_field!("organization", data.organization); } DatabaseKind::City => { - let data = self.dbreader.lookup::(ip).ok()?; + let data = self.dbreader.lookup::(ip).ok()??; add_field!( "city_name", @@ -213,10 +224,20 @@ impl Geoip { add_field!("postal_code", data.postal.and_then(|p| p.code)); } DatabaseKind::ConnectionType => { - let data = self.dbreader.lookup::(ip).ok()?; + let data = self.dbreader.lookup::(ip).ok()??; add_field!("connection_type", data.connection_type); } + DatabaseKind::AnonymousIp => { + let data = self.dbreader.lookup::(ip).ok()??; + + add_field!("is_anonymous", data.is_anonymous); + add_field!("is_anonymous_vpn", data.is_anonymous_vpn); + add_field!("is_hosting_provider", data.is_hosting_provider); + add_field!("is_public_proxy", data.is_public_proxy); + add_field!("is_residential_proxy", data.is_residential_proxy); + add_field!("is_tor_exit_node", data.is_tor_exit_node); + } } Some(map) @@ -243,9 +264,10 @@ impl Table for Geoip { case: Case, condition: &'a [Condition<'a>], select: Option<&[String]>, + wildcard: Option<&Value>, index: Option, ) -> Result { - let mut rows = self.find_table_rows(case, condition, select, index)?; + let mut rows = self.find_table_rows(case, condition, select, wildcard, index)?; match rows.pop() { Some(row) if rows.is_empty() => Ok(row), @@ -262,6 +284,7 @@ impl Table for Geoip { _: Case, condition: &'a [Condition<'a>], select: Option<&[String]>, + _wildcard: Option<&Value>, _: Option, ) -> Result, String> { match condition.first() { @@ -444,6 +467,37 @@ mod tests { assert!(values.is_none()); } + #[test] + fn custom_mmdb_type_error() { + let result = 
Geoip::new(GeoipConfig { + path: "tests/data/custom-type.mmdb".to_string(), + locale: default_locale(), + }); + + assert!(result.is_err()); + } + #[test] + fn anonymous_ip_lookup() { + let values = find("101.99.92.179", "tests/data/GeoIP2-Anonymous-IP-Test.mmdb").unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("is_anonymous".into(), true.into()); + expected.insert("is_anonymous_vpn".into(), true.into()); + expected.insert("is_hosting_provider".into(), true.into()); + expected.insert("is_tor_exit_node".into(), true.into()); + expected.insert("is_public_proxy".into(), Value::Null); + expected.insert("is_residential_proxy".into(), Value::Null); + + assert_eq!(values, expected); + } + + #[test] + fn anonymous_ip_lookup_no_results() { + let values = find("10.1.12.1", "tests/data/GeoIP2-Anonymous-IP-Test.mmdb"); + + assert!(values.is_none()); + } + fn find(ip: &str, database: &str) -> Option { find_select(ip, database, None) } @@ -462,6 +516,7 @@ mod tests { }], select, None, + None, ) .unwrap() .pop() diff --git a/src/enrichment_tables/memory/config.rs b/src/enrichment_tables/memory/config.rs new file mode 100644 index 0000000000000..b2fed6384f5c6 --- /dev/null +++ b/src/enrichment_tables/memory/config.rs @@ -0,0 +1,203 @@ +use std::num::NonZeroU64; +use std::sync::Arc; + +use crate::sinks::Healthcheck; +use crate::sources::Source; +use crate::{config::SinkContext, enrichment_tables::memory::Memory}; +use async_trait::async_trait; +use futures::{future, FutureExt}; +use tokio::sync::Mutex; +use vector_lib::config::{AcknowledgementsConfig, DataType, Input, LogNamespace}; +use vector_lib::enrichment::Table; +use vector_lib::id::ComponentKey; +use vector_lib::schema::{self}; +use vector_lib::{configurable::configurable_component, sink::VectorSink}; +use vrl::path::OwnedTargetPath; +use vrl::value::Kind; + +use crate::config::{EnrichmentTableConfig, SinkConfig, SourceConfig, SourceContext, SourceOutput}; + +use super::internal_events::InternalMetricsConfig; +use super::source::MemorySourceConfig; + +/// Configuration for the `memory` enrichment table. +#[configurable_component(enrichment_table("memory"))] +#[derive(Clone)] +pub struct MemoryConfig { + /// TTL (time-to-live in seconds) is used to limit the lifetime of data stored in the cache. + /// When TTL expires, data behind a specific key in the cache is removed. + /// TTL is reset when the key is replaced. + #[serde(default = "default_ttl")] + pub ttl: u64, + /// The scan interval used to look for expired records. This is provided + /// as an optimization to ensure that TTL is updated, but without doing + /// too many cache scans. + #[serde(default = "default_scan_interval")] + pub scan_interval: NonZeroU64, + /// The interval used for making writes visible in the table. + /// Longer intervals might get better performance, + /// but there is a longer delay before the data is visible in the table. + /// Since every TTL scan makes its changes visible, only use this value + /// if it is shorter than the `scan_interval`. + /// + /// By default, all writes are made visible immediately. + #[serde(skip_serializing_if = "vector_lib::serde::is_default")] + pub flush_interval: Option, + /// Maximum size of the table in bytes. All insertions that make + /// this table bigger than the maximum size are rejected. + /// + /// By default, there is no size limit. + #[serde(skip_serializing_if = "vector_lib::serde::is_default")] + pub max_byte_size: Option, + /// The namespace to use for logs. This overrides the global setting. 
+    #[configurable(metadata(docs::hidden))]
+    #[serde(default)]
+    pub log_namespace: Option<bool>,
+    /// Configuration of internal metrics.
+    #[configurable(derived)]
+    #[serde(default)]
+    pub internal_metrics: InternalMetricsConfig,
+    /// Configuration for source functionality.
+    #[configurable(derived)]
+    #[serde(skip_serializing_if = "vector_lib::serde::is_default")]
+    pub source_config: Option<MemorySourceConfig>,
+
+    #[serde(skip)]
+    memory: Arc<Mutex<Option<Box<Memory>>>>,
+}
+
+impl PartialEq for MemoryConfig {
+    fn eq(&self, other: &Self) -> bool {
+        self.ttl == other.ttl
+            && self.scan_interval == other.scan_interval
+            && self.flush_interval == other.flush_interval
+    }
+}
+impl Eq for MemoryConfig {}
+
+impl Default for MemoryConfig {
+    fn default() -> Self {
+        Self {
+            ttl: default_ttl(),
+            scan_interval: default_scan_interval(),
+            flush_interval: None,
+            memory: Arc::new(Mutex::new(None)),
+            max_byte_size: None,
+            log_namespace: None,
+            source_config: None,
+            internal_metrics: InternalMetricsConfig::default(),
+        }
+    }
+}
+
+const fn default_ttl() -> u64 {
+    600
+}
+
+const fn default_scan_interval() -> NonZeroU64 {
+    unsafe { NonZeroU64::new_unchecked(30) }
+}
+
+impl MemoryConfig {
+    pub(super) async fn get_or_build_memory(&self) -> Memory {
+        let mut boxed_memory = self.memory.lock().await;
+        *boxed_memory
+            .get_or_insert_with(|| Box::new(Memory::new(self.clone())))
+            .clone()
+    }
+}
+
+impl EnrichmentTableConfig for MemoryConfig {
+    async fn build(
+        &self,
+        _globals: &crate::config::GlobalOptions,
+    ) -> crate::Result<Box<dyn Table + Send + Sync>> {
+        Ok(Box::new(self.get_or_build_memory().await))
+    }
+
+    fn sink_config(
+        &self,
+        default_key: &ComponentKey,
+    ) -> Option<(ComponentKey, Box<dyn SinkConfig>)> {
+        Some((default_key.clone(), Box::new(self.clone())))
+    }
+
+    fn source_config(
+        &self,
+        _default_key: &ComponentKey,
+    ) -> Option<(ComponentKey, Box<dyn SourceConfig>)> {
+        let Some(source_config) = &self.source_config else {
+            return None;
+        };
+        Some((
+            source_config.source_key.clone().into(),
+            Box::new(self.clone()),
+        ))
+    }
+}
+
+#[async_trait]
+#[typetag::serde(name = "memory_enrichment_table")]
+impl SinkConfig for MemoryConfig {
+    async fn build(&self, _cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> {
+        let sink = VectorSink::from_event_streamsink(self.get_or_build_memory().await);
+
+        Ok((sink, future::ok(()).boxed()))
+    }
+
+    fn input(&self) -> Input {
+        Input::log()
+    }
+
+    fn acknowledgements(&self) -> &AcknowledgementsConfig {
+        &AcknowledgementsConfig::DEFAULT
+    }
+}
+
+#[async_trait]
+#[typetag::serde(name = "memory_enrichment_table")]
+impl SourceConfig for MemoryConfig {
+    async fn build(&self, cx: SourceContext) -> crate::Result<Source> {
+        let memory = self.get_or_build_memory().await;
+
+        let log_namespace = cx.log_namespace(self.log_namespace);
+
+        Ok(Box::pin(
+            memory.as_source(cx.shutdown, cx.out, log_namespace).run(),
+        ))
+    }
+
+    fn outputs(&self, global_log_namespace: LogNamespace) -> Vec<SourceOutput> {
+        let log_namespace = global_log_namespace.merge(self.log_namespace);
+        let schema_definition = match log_namespace {
+            LogNamespace::Legacy => schema::Definition::default_legacy_namespace(),
+            LogNamespace::Vector => {
+                schema::Definition::new_with_default_metadata(Kind::any_object(), [log_namespace])
+                    .with_meaning(OwnedTargetPath::event_root(), "message")
+            }
+        }
+        .with_standard_vector_source_metadata();
+
+        vec![SourceOutput::new_maybe_logs(
+            DataType::Log,
+            schema_definition,
+        )]
+    }
+
+    fn can_acknowledge(&self) -> bool {
+        false
+    }
+}
+
+impl std::fmt::Debug for MemoryConfig {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) ->
std::fmt::Result { + f.debug_struct("MemoryConfig") + .field("ttl", &self.ttl) + .field("scan_interval", &self.scan_interval) + .field("flush_interval", &self.flush_interval) + .field("max_byte_size", &self.max_byte_size) + .finish() + } +} + +impl_generate_config_from_default!(MemoryConfig); diff --git a/src/enrichment_tables/memory/internal_events.rs b/src/enrichment_tables/memory/internal_events.rs new file mode 100644 index 0000000000000..7a954389388d4 --- /dev/null +++ b/src/enrichment_tables/memory/internal_events.rs @@ -0,0 +1,154 @@ +use metrics::{counter, gauge}; +use vector_lib::configurable::configurable_component; +use vector_lib::internal_event::InternalEvent; + +/// Configuration of internal metrics for enrichment memory table. +#[configurable_component] +#[derive(Clone, Debug, PartialEq, Eq, Default)] +#[serde(deny_unknown_fields)] +pub struct InternalMetricsConfig { + /// Determines whether to include the key tag on internal metrics. + /// + /// This is useful for distinguishing between different keys while monitoring. However, the tag's + /// cardinality is unbounded. + #[serde(default = "crate::serde::default_false")] + pub include_key_tag: bool, +} + +#[derive(Debug)] +pub(crate) struct MemoryEnrichmentTableRead<'a> { + pub key: &'a str, + pub include_key_metric_tag: bool, +} + +impl InternalEvent for MemoryEnrichmentTableRead<'_> { + fn emit(self) { + if self.include_key_metric_tag { + counter!( + "memory_enrichment_table_reads_total", + "key" => self.key.to_owned() + ) + .increment(1); + } else { + counter!("memory_enrichment_table_reads_total",).increment(1); + } + } + + fn name(&self) -> Option<&'static str> { + Some("MemoryEnrichmentTableRead") + } +} + +#[derive(Debug)] +pub(crate) struct MemoryEnrichmentTableInserted<'a> { + pub key: &'a str, + pub include_key_metric_tag: bool, +} + +impl InternalEvent for MemoryEnrichmentTableInserted<'_> { + fn emit(self) { + if self.include_key_metric_tag { + counter!( + "memory_enrichment_table_insertions_total", + "key" => self.key.to_owned() + ) + .increment(1); + } else { + counter!("memory_enrichment_table_insertions_total",).increment(1); + } + } + + fn name(&self) -> Option<&'static str> { + Some("MemoryEnrichmentTableInserted") + } +} + +#[derive(Debug)] +pub(crate) struct MemoryEnrichmentTableFlushed { + pub new_objects_count: usize, + pub new_byte_size: usize, +} + +impl InternalEvent for MemoryEnrichmentTableFlushed { + fn emit(self) { + counter!("memory_enrichment_table_flushes_total",).increment(1); + gauge!("memory_enrichment_table_objects_count",).set(self.new_objects_count as f64); + gauge!("memory_enrichment_table_byte_size",).set(self.new_byte_size as f64); + } + + fn name(&self) -> Option<&'static str> { + Some("MemoryEnrichmentTableFlushed") + } +} + +#[derive(Debug)] +pub(crate) struct MemoryEnrichmentTableTtlExpired<'a> { + pub key: &'a str, + pub include_key_metric_tag: bool, +} + +impl InternalEvent for MemoryEnrichmentTableTtlExpired<'_> { + fn emit(self) { + if self.include_key_metric_tag { + counter!( + "memory_enrichment_table_ttl_expirations", + "key" => self.key.to_owned() + ) + .increment(1); + } else { + counter!("memory_enrichment_table_ttl_expirations",).increment(1); + } + } + + fn name(&self) -> Option<&'static str> { + Some("MemoryEnrichmentTableTtlExpired") + } +} + +#[derive(Debug)] +pub(crate) struct MemoryEnrichmentTableReadFailed<'a> { + pub key: &'a str, + pub include_key_metric_tag: bool, +} + +impl InternalEvent for MemoryEnrichmentTableReadFailed<'_> { + fn emit(self) { + if 
self.include_key_metric_tag {
+            counter!(
+                "memory_enrichment_table_failed_reads",
+                "key" => self.key.to_owned()
+            )
+            .increment(1);
+        } else {
+            counter!("memory_enrichment_table_failed_reads",).increment(1);
+        }
+    }
+
+    fn name(&self) -> Option<&'static str> {
+        Some("MemoryEnrichmentTableReadFailed")
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct MemoryEnrichmentTableInsertFailed<'a> {
+    pub key: &'a str,
+    pub include_key_metric_tag: bool,
+}
+
+impl InternalEvent for MemoryEnrichmentTableInsertFailed<'_> {
+    fn emit(self) {
+        if self.include_key_metric_tag {
+            counter!(
+                "memory_enrichment_table_failed_insertions",
+                "key" => self.key.to_owned()
+            )
+            .increment(1);
+        } else {
+            counter!("memory_enrichment_table_failed_insertions",).increment(1);
+        }
+    }
+
+    fn name(&self) -> Option<&'static str> {
+        Some("MemoryEnrichmentTableInsertFailed")
+    }
+}
diff --git a/src/enrichment_tables/memory/mod.rs b/src/enrichment_tables/memory/mod.rs
new file mode 100644
index 0000000000000..72b0986f9b9e9
--- /dev/null
+++ b/src/enrichment_tables/memory/mod.rs
@@ -0,0 +1,9 @@
+//! Handles enrichment tables for `type = memory`.
+
+mod config;
+mod internal_events;
+mod source;
+mod table;
+
+pub use config::*;
+pub use table::*;
diff --git a/src/enrichment_tables/memory/source.rs b/src/enrichment_tables/memory/source.rs
new file mode 100644
index 0000000000000..379ae200c00c0
--- /dev/null
+++ b/src/enrichment_tables/memory/source.rs
@@ -0,0 +1,138 @@
+use chrono::Utc;
+use futures::StreamExt;
+use std::{
+    num::NonZeroU64,
+    time::{Duration, Instant},
+};
+use tokio::time::interval;
+use tokio_stream::wrappers::IntervalStream;
+use vector_lib::{
+    config::LogNamespace,
+    configurable::configurable_component,
+    event::{Event, EventMetadata, LogEvent},
+    internal_event::{
+        ByteSize, BytesReceived, CountByteSize, EventsReceived, InternalEventHandle, Protocol,
+    },
+    shutdown::ShutdownSignal,
+    ByteSizeOf, EstimatedJsonEncodedSizeOf,
+};
+
+use crate::{internal_events::StreamClosedError, SourceSender};
+
+use super::{Memory, MemoryConfig};
+
+/// Configuration for memory enrichment table source functionality.
+#[configurable_component]
+#[derive(Clone, Debug, PartialEq, Eq)]
+#[serde(deny_unknown_fields)]
+pub struct MemorySourceConfig {
+    /// Interval (in seconds) for exporting all data from the table when used as a source.
+    pub export_interval: NonZeroU64,
+    /// Batch size for data exporting. Used to prevent exporting the entire
+    /// table at once and blocking the system.
+    ///
+    /// By default, batches are not used and the entire table is exported.
+    #[serde(skip_serializing_if = "vector_lib::serde::is_default")]
+    pub export_batch_size: Option<u64>,
+    /// If set to true, all data is removed from the cache after exporting.
+    /// Only valid when used as a source with `export_interval` > 0.
+    ///
+    /// By default, exporting does not remove data from the cache.
+    #[serde(default = "crate::serde::default_false")]
+    pub remove_after_export: bool,
+    /// Key to use for this component when used as a source. This must be different from the
+    /// component key.
+    pub source_key: String,
+}
+
+/// A struct that represents Memory when used as a source.
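+// A sketch of enabling the companion source on a memory table (the names
+// come from this patch; the interval and key values are illustrative):
+//
+//     let mut config = MemoryConfig::default();
+//     config.source_config = Some(MemorySourceConfig {
+//         export_interval: NonZeroU64::new(1).expect("nonzero"),
+//         export_batch_size: None,
+//         remove_after_export: false,
+//         source_key: "memory_export".to_string(),
+//     });
+//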
+pub(crate) struct MemorySource { + pub(super) memory: Memory, + pub(super) shutdown: ShutdownSignal, + pub(super) out: SourceSender, + pub(super) log_namespace: LogNamespace, +} + +impl MemorySource { + pub(crate) async fn run(mut self) -> Result<(), ()> { + let events_received = register!(EventsReceived); + let bytes_received = register!(BytesReceived::from(Protocol::INTERNAL)); + let source_config = self + .memory + .config + .source_config + .as_ref() + .expect("Unexpected missing source config in memory table used as a source."); + let mut interval = IntervalStream::new(interval(Duration::from_secs( + source_config.export_interval.into(), + ))) + .take_until(self.shutdown); + + while interval.next().await.is_some() { + let mut sent = 0_usize; + loop { + let mut events = Vec::new(); + { + let mut writer = self.memory.write_handle.lock().unwrap(); + if let Some(reader) = self.memory.get_read_handle().read() { + let now = Instant::now(); + let utc_now = Utc::now(); + events = reader + .iter() + .skip(if source_config.remove_after_export { + 0 + } else { + sent + }) + .take(if let Some(batch_size) = source_config.export_batch_size { + batch_size as usize + } else { + usize::MAX + }) + .filter_map(|(k, v)| { + if source_config.remove_after_export { + writer.write_handle.empty(k.clone()); + } + v.get_one().map(|v| (k, v)) + }) + .filter_map(|(k, v)| { + let mut event = Event::Log(LogEvent::from_map( + v.as_object_map(now, self.memory.config.ttl, k).ok()?, + EventMetadata::default(), + )); + let log = event.as_mut_log(); + self.log_namespace.insert_standard_vector_source_metadata( + log, + MemoryConfig::NAME, + utc_now, + ); + + Some(event) + }) + .collect::>(); + if source_config.remove_after_export { + writer.write_handle.refresh(); + } + } + } + let count = events.len(); + let byte_size = events.size_of(); + let json_size = events.estimated_json_encoded_size_of(); + bytes_received.emit(ByteSize(byte_size)); + events_received.emit(CountByteSize(count, json_size)); + if self.out.send_batch(events).await.is_err() { + emit!(StreamClosedError { count }); + } + + sent += count; + match source_config.export_batch_size { + None => break, + Some(export_batch_size) if count < export_batch_size as usize => break, + _ => {} + } + } + } + + Ok(()) + } +} diff --git a/src/enrichment_tables/memory/table.rs b/src/enrichment_tables/memory/table.rs new file mode 100644 index 0000000000000..068f6630bfb75 --- /dev/null +++ b/src/enrichment_tables/memory/table.rs @@ -0,0 +1,889 @@ +use crate::enrichment_tables::memory::internal_events::{ + MemoryEnrichmentTableFlushed, MemoryEnrichmentTableInsertFailed, MemoryEnrichmentTableInserted, + MemoryEnrichmentTableRead, MemoryEnrichmentTableReadFailed, MemoryEnrichmentTableTtlExpired, +}; +use crate::enrichment_tables::memory::MemoryConfig; +use crate::SourceSender; +use std::sync::{Arc, Mutex, MutexGuard}; +use std::time::{Duration, Instant}; + +use evmap::shallow_copy::CopyValue; +use evmap::{self}; +use evmap_derive::ShallowCopy; +use futures::StreamExt; +use thread_local::ThreadLocal; +use tokio::time::interval; +use tokio_stream::wrappers::IntervalStream; +use vector_lib::config::LogNamespace; +use vector_lib::shutdown::ShutdownSignal; +use vector_lib::{ByteSizeOf, EstimatedJsonEncodedSizeOf}; + +use async_trait::async_trait; +use bytes::Bytes; +use futures::stream::BoxStream; +use vector_lib::enrichment::{Case, Condition, IndexHandle, Table}; +use vector_lib::event::{Event, EventStatus, Finalizable}; +use vector_lib::internal_event::{ + ByteSize, BytesSent, 
CountByteSize, EventsSent, InternalEventHandle, Output, Protocol, +}; +use vector_lib::sink::StreamSink; +use vrl::value::{KeyString, ObjectMap, Value}; + +use super::source::MemorySource; + +/// Single memory entry containing the value and TTL +#[derive(Clone, Eq, PartialEq, Hash, ShallowCopy)] +pub struct MemoryEntry { + value: String, + update_time: CopyValue, +} + +impl ByteSizeOf for MemoryEntry { + fn allocated_bytes(&self) -> usize { + self.value.size_of() + } +} + +impl MemoryEntry { + pub(super) fn as_object_map( + &self, + now: Instant, + total_ttl: u64, + key: &str, + ) -> Result { + let ttl = total_ttl.saturating_sub(now.duration_since(*self.update_time).as_secs()); + Ok(ObjectMap::from([ + ( + KeyString::from("key"), + Value::Bytes(Bytes::copy_from_slice(key.as_bytes())), + ), + ( + KeyString::from("value"), + serde_json::from_str::(&self.value) + .map_err(|_| "Failed to read value from memory!")?, + ), + ( + KeyString::from("ttl"), + Value::Integer(ttl.try_into().unwrap_or(i64::MAX)), + ), + ])) + } + + fn expired(&self, now: Instant, ttl: u64) -> bool { + now.duration_since(*self.update_time).as_secs() > ttl + } +} + +#[derive(Default)] +struct MemoryMetadata { + byte_size: u64, +} + +// Used to ensure that these 2 are locked together +pub(super) struct MemoryWriter { + pub(super) write_handle: evmap::WriteHandle, + metadata: MemoryMetadata, +} + +/// A struct that implements [vector_lib::enrichment::Table] to handle loading enrichment data from a memory structure. +pub struct Memory { + pub(super) read_handle_factory: evmap::ReadHandleFactory, + pub(super) read_handle: ThreadLocal>, + pub(super) write_handle: Arc>, + pub(super) config: MemoryConfig, +} + +impl Memory { + /// Creates a new [Memory] based on the provided config. + pub fn new(config: MemoryConfig) -> Self { + let (read_handle, write_handle) = evmap::new(); + Self { + config, + read_handle_factory: read_handle.factory(), + read_handle: ThreadLocal::new(), + write_handle: Arc::new(Mutex::new(MemoryWriter { + write_handle, + metadata: MemoryMetadata::default(), + })), + } + } + + pub(super) fn get_read_handle(&self) -> &evmap::ReadHandle { + self.read_handle + .get_or(|| self.read_handle_factory.handle()) + } + + fn handle_value(&self, value: ObjectMap) { + let mut writer = self.write_handle.lock().expect("mutex poisoned"); + let now = Instant::now(); + + for (k, v) in value.into_iter() { + let new_entry_key = String::from(k); + let Ok(v) = serde_json::to_string(&v) else { + emit!(MemoryEnrichmentTableInsertFailed { + key: &new_entry_key, + include_key_metric_tag: self.config.internal_metrics.include_key_tag + }); + continue; + }; + let new_entry = MemoryEntry { + value: v, + update_time: now.into(), + }; + let new_entry_size = new_entry_key.size_of() + new_entry.size_of(); + if let Some(max_byte_size) = self.config.max_byte_size { + if writer + .metadata + .byte_size + .saturating_add(new_entry_size as u64) + > max_byte_size + { + // Reject new entries + emit!(MemoryEnrichmentTableInsertFailed { + key: &new_entry_key, + include_key_metric_tag: self.config.internal_metrics.include_key_tag + }); + continue; + } + } + writer.metadata.byte_size = writer + .metadata + .byte_size + .saturating_add(new_entry_size as u64); + emit!(MemoryEnrichmentTableInserted { + key: &new_entry_key, + include_key_metric_tag: self.config.internal_metrics.include_key_tag + }); + writer.write_handle.update(new_entry_key, new_entry); + } + + if self.config.flush_interval.is_none() { + self.flush(writer); + } + } + + fn 
scan_and_mark_for_deletion(&self, writer: &mut MutexGuard<'_, MemoryWriter>) -> bool { + let now = Instant::now(); + + let mut needs_flush = false; + // Since evmap holds 2 separate maps for the data, we are free to directly remove + // elements via the writer, while we are iterating the reader + // Refresh will happen only after we manually invoke it after iteration + if let Some(reader) = self.get_read_handle().read() { + for (k, v) in reader.iter() { + if let Some(entry) = v.get_one() { + if entry.expired(now, self.config.ttl) { + // Byte size is not reduced at this point, because the actual deletion + // will only happen at refresh time + writer.write_handle.empty(k.clone()); + emit!(MemoryEnrichmentTableTtlExpired { + key: k, + include_key_metric_tag: self.config.internal_metrics.include_key_tag + }); + needs_flush = true; + } + } + } + }; + + needs_flush + } + + fn scan(&self, mut writer: MutexGuard<'_, MemoryWriter>) { + let needs_flush = self.scan_and_mark_for_deletion(&mut writer); + if needs_flush { + self.flush(writer); + } + } + + fn flush(&self, mut writer: MutexGuard<'_, MemoryWriter>) { + writer.write_handle.refresh(); + if let Some(reader) = self.get_read_handle().read() { + let mut byte_size = 0; + for (k, v) in reader.iter() { + byte_size += k.size_of() + v.get_one().size_of(); + } + writer.metadata.byte_size = byte_size as u64; + emit!(MemoryEnrichmentTableFlushed { + new_objects_count: reader.len(), + new_byte_size: byte_size + }); + } + } + + pub(crate) fn as_source( + &self, + shutdown: ShutdownSignal, + out: SourceSender, + log_namespace: LogNamespace, + ) -> MemorySource { + MemorySource { + memory: self.clone(), + shutdown, + out, + log_namespace, + } + } +} + +impl Clone for Memory { + fn clone(&self) -> Self { + Self { + read_handle_factory: self.read_handle_factory.clone(), + read_handle: ThreadLocal::new(), + write_handle: Arc::clone(&self.write_handle), + config: self.config.clone(), + } + } +} + +impl Table for Memory { + fn find_table_row<'a>( + &self, + case: Case, + condition: &'a [Condition<'a>], + select: Option<&'a [String]>, + wildcard: Option<&Value>, + index: Option, + ) -> Result { + let mut rows = self.find_table_rows(case, condition, select, wildcard, index)?; + + match rows.pop() { + Some(row) if rows.is_empty() => Ok(row), + Some(_) => Err("More than 1 row found".to_string()), + None => Err("Key not found".to_string()), + } + } + + fn find_table_rows<'a>( + &self, + _case: Case, + condition: &'a [Condition<'a>], + _select: Option<&'a [String]>, + _wildcard: Option<&Value>, + _index: Option, + ) -> Result, String> { + match condition.first() { + Some(_) if condition.len() > 1 => Err("Only one condition is allowed".to_string()), + Some(Condition::Equals { value, .. 
}) => { + let key = value.to_string_lossy(); + match self.get_read_handle().get_one(key.as_ref()) { + Some(row) => { + emit!(MemoryEnrichmentTableRead { + key: &key, + include_key_metric_tag: self.config.internal_metrics.include_key_tag + }); + row.as_object_map(Instant::now(), self.config.ttl, &key) + .map(|r| vec![r]) + } + None => { + emit!(MemoryEnrichmentTableReadFailed { + key: &key, + include_key_metric_tag: self.config.internal_metrics.include_key_tag + }); + Ok(Default::default()) + } + } + } + Some(_) => Err("Only equality condition is allowed".to_string()), + None => Err("Key condition must be specified".to_string()), + } + } + + fn add_index(&mut self, _case: Case, fields: &[&str]) -> Result { + match fields.len() { + 0 => Err("Key field is required".to_string()), + 1 => Ok(IndexHandle(0)), + _ => Err("Only one field is allowed".to_string()), + } + } + + /// Returns a list of the field names that are in each index + fn index_fields(&self) -> Vec<(Case, Vec)> { + Vec::new() + } + + /// Doesn't need reload, data is written directly + fn needs_reload(&self) -> bool { + false + } +} + +impl std::fmt::Debug for Memory { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Memory {} row(s)", self.get_read_handle().len()) + } +} + +#[async_trait] +impl StreamSink for Memory { + async fn run(mut self: Box, mut input: BoxStream<'_, Event>) -> Result<(), ()> { + let events_sent = register!(EventsSent::from(Output(None))); + let bytes_sent = register!(BytesSent::from(Protocol("memory_enrichment_table".into(),))); + let mut flush_interval = IntervalStream::new(interval( + self.config + .flush_interval + .map(Duration::from_secs) + .unwrap_or(Duration::MAX), + )); + let mut scan_interval = IntervalStream::new(interval(Duration::from_secs( + self.config.scan_interval.into(), + ))); + + loop { + tokio::select! 
{ + event = input.next() => { + let mut event = if let Some(event) = event { + event + } else { + break; + }; + let event_byte_size = event.estimated_json_encoded_size_of(); + + let finalizers = event.take_finalizers(); + + // Panic: This sink only accepts Logs, so this should never panic + let log = event.into_log(); + + if let (Value::Object(map), _) = log.into_parts() { + self.handle_value(map) + }; + + finalizers.update_status(EventStatus::Delivered); + events_sent.emit(CountByteSize(1, event_byte_size)); + bytes_sent.emit(ByteSize(event_byte_size.get())); + } + + Some(_) = flush_interval.next() => { + let writer = self.write_handle.lock().expect("mutex poisoned"); + self.flush(writer); + } + + Some(_) = scan_interval.next() => { + let writer = self.write_handle.lock().expect("mutex poisoned"); + self.scan(writer); + } + } + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use futures::{future::ready, StreamExt}; + use futures_util::stream; + use std::{num::NonZeroU64, time::Duration}; + use tokio::time; + + use vector_lib::{ + event::{EventContainer, MetricValue}, + metrics::Controller, + sink::VectorSink, + }; + + use super::*; + use crate::{ + enrichment_tables::memory::{ + internal_events::InternalMetricsConfig, source::MemorySourceConfig, + }, + event::{Event, LogEvent}, + test_util::components::{ + run_and_assert_sink_compliance, run_and_assert_source_compliance, SINK_TAGS, + SOURCE_TAGS, + }, + }; + + fn build_memory_config(modfn: impl Fn(&mut MemoryConfig)) -> MemoryConfig { + let mut config = MemoryConfig::default(); + modfn(&mut config); + config + } + + #[test] + fn finds_row() { + let memory = Memory::new(Default::default()); + memory.handle_value(ObjectMap::from([("test_key".into(), Value::from(5))])); + + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + + assert_eq!( + Ok(ObjectMap::from([ + ("key".into(), Value::from("test_key")), + ("ttl".into(), Value::from(memory.config.ttl)), + ("value".into(), Value::from(5)), + ])), + memory.find_table_row(Case::Sensitive, &[condition], None, None, None) + ); + } + + #[test] + fn calculates_ttl() { + let ttl = 100; + let secs_to_subtract = 10; + let memory = Memory::new(build_memory_config(|c| c.ttl = ttl)); + { + let mut handle = memory.write_handle.lock().unwrap(); + handle.write_handle.update( + "test_key".to_string(), + MemoryEntry { + value: "5".to_string(), + update_time: (Instant::now() - Duration::from_secs(secs_to_subtract)).into(), + }, + ); + handle.write_handle.refresh(); + } + + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + + assert_eq!( + Ok(ObjectMap::from([ + ("key".into(), Value::from("test_key")), + ("ttl".into(), Value::from(ttl - secs_to_subtract)), + ("value".into(), Value::from(5)), + ])), + memory.find_table_row(Case::Sensitive, &[condition], None, None, None) + ); + } + + #[test] + fn removes_expired_records_on_scan_interval() { + let ttl = 100; + let memory = Memory::new(build_memory_config(|c| { + c.ttl = ttl; + })); + { + let mut handle = memory.write_handle.lock().unwrap(); + handle.write_handle.update( + "test_key".to_string(), + MemoryEntry { + value: "5".to_string(), + update_time: (Instant::now() - Duration::from_secs(ttl + 10)).into(), + }, + ); + handle.write_handle.refresh(); + } + + // Finds the value before scan + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + assert_eq!( + Ok(ObjectMap::from([ + ("key".into(), Value::from("test_key")), + ("ttl".into(), 
Value::from(0)), + ("value".into(), Value::from(5)), + ])), + memory.find_table_row(Case::Sensitive, &[condition.clone()], None, None, None) + ); + + // Force scan + let writer = memory.write_handle.lock().unwrap(); + memory.scan(writer); + + // The value is not present anymore + assert!(memory + .find_table_rows(Case::Sensitive, &[condition], None, None, None) + .unwrap() + .pop() + .is_none()); + } + + #[test] + fn does_not_show_values_before_flush_interval() { + let ttl = 100; + let memory = Memory::new(build_memory_config(|c| { + c.ttl = ttl; + c.flush_interval = Some(10); + })); + memory.handle_value(ObjectMap::from([("test_key".into(), Value::from(5))])); + + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + + assert!(memory + .find_table_rows(Case::Sensitive, &[condition], None, None, None) + .unwrap() + .pop() + .is_none()); + } + + #[test] + fn updates_ttl_on_value_replacement() { + let ttl = 100; + let memory = Memory::new(build_memory_config(|c| c.ttl = ttl)); + { + let mut handle = memory.write_handle.lock().unwrap(); + handle.write_handle.update( + "test_key".to_string(), + MemoryEntry { + value: "5".to_string(), + update_time: (Instant::now() - Duration::from_secs(ttl / 2)).into(), + }, + ); + handle.write_handle.refresh(); + } + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + + assert_eq!( + Ok(ObjectMap::from([ + ("key".into(), Value::from("test_key")), + ("ttl".into(), Value::from(ttl / 2)), + ("value".into(), Value::from(5)), + ])), + memory.find_table_row(Case::Sensitive, &[condition.clone()], None, None, None) + ); + + memory.handle_value(ObjectMap::from([("test_key".into(), Value::from(5))])); + + assert_eq!( + Ok(ObjectMap::from([ + ("key".into(), Value::from("test_key")), + ("ttl".into(), Value::from(ttl)), + ("value".into(), Value::from(5)), + ])), + memory.find_table_row(Case::Sensitive, &[condition], None, None, None) + ); + } + + #[test] + fn ignores_all_values_over_byte_size_limit() { + let memory = Memory::new(build_memory_config(|c| { + c.max_byte_size = Some(1); + })); + memory.handle_value(ObjectMap::from([("test_key".into(), Value::from(5))])); + + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + + assert!(memory + .find_table_rows(Case::Sensitive, &[condition], None, None, None) + .unwrap() + .pop() + .is_none()); + } + + #[test] + fn ignores_values_when_byte_size_limit_is_reached() { + let ttl = 100; + let memory = Memory::new(build_memory_config(|c| { + c.ttl = ttl; + c.max_byte_size = Some(150); + })); + memory.handle_value(ObjectMap::from([("test_key".into(), Value::from(5))])); + memory.handle_value(ObjectMap::from([("rejected_key".into(), Value::from(5))])); + + assert_eq!( + Ok(ObjectMap::from([ + ("key".into(), Value::from("test_key")), + ("ttl".into(), Value::from(ttl)), + ("value".into(), Value::from(5)), + ])), + memory.find_table_row( + Case::Sensitive, + &[Condition::Equals { + field: "key", + value: Value::from("test_key") + }], + None, + None, + None + ) + ); + + assert!(memory + .find_table_rows( + Case::Sensitive, + &[Condition::Equals { + field: "key", + value: Value::from("rejected_key") + }], + None, + None, + None + ) + .unwrap() + .pop() + .is_none()); + } + + #[test] + fn missing_key() { + let memory = Memory::new(Default::default()); + + let condition = Condition::Equals { + field: "key", + value: Value::from("test_key"), + }; + + assert!(memory + .find_table_rows(Case::Sensitive, &[condition], None, 
None, None) + .unwrap() + .pop() + .is_none()); + } + + #[tokio::test] + async fn sink_spec_compliance() { + let event = Event::Log(LogEvent::from(ObjectMap::from([( + "test_key".into(), + Value::from(5), + )]))); + + let memory = Memory::new(Default::default()); + + run_and_assert_sink_compliance( + VectorSink::from_event_streamsink(memory), + stream::once(ready(event)), + &SINK_TAGS, + ) + .await; + } + + #[tokio::test] + async fn flush_metrics_without_interval() { + let event = Event::Log(LogEvent::from(ObjectMap::from([( + "test_key".into(), + Value::from(5), + )]))); + + let memory = Memory::new(Default::default()); + + run_and_assert_sink_compliance( + VectorSink::from_event_streamsink(memory), + stream::once(ready(event)), + &SINK_TAGS, + ) + .await; + + let metrics = Controller::get().unwrap().capture_metrics(); + let insertions_counter = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Counter { .. }) + && m.name() == "memory_enrichment_table_insertions_total" + }) + .expect("Insertions metric is missing!"); + let MetricValue::Counter { + value: insertions_count, + } = insertions_counter.value() + else { + unreachable!(); + }; + let flushes_counter = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Counter { .. }) + && m.name() == "memory_enrichment_table_flushes_total" + }) + .expect("Flushes metric is missing!"); + let MetricValue::Counter { + value: flushes_count, + } = flushes_counter.value() + else { + unreachable!(); + }; + let object_count_gauge = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Gauge { .. }) + && m.name() == "memory_enrichment_table_objects_count" + }) + .expect("Object count metric is missing!"); + let MetricValue::Gauge { + value: object_count, + } = object_count_gauge.value() + else { + unreachable!(); + }; + let byte_size_gauge = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Gauge { .. }) + && m.name() == "memory_enrichment_table_byte_size" + }) + .expect("Byte size metric is missing!"); + assert_eq!(*insertions_count, 1.0); + assert_eq!(*flushes_count, 1.0); + assert_eq!(*object_count, 1.0); + assert!(!byte_size_gauge.is_empty()); + } + + #[tokio::test] + async fn flush_metrics_with_interval() { + let event = Event::Log(LogEvent::from(ObjectMap::from([( + "test_key".into(), + Value::from(5), + )]))); + + let memory = Memory::new(build_memory_config(|c| { + c.flush_interval = Some(1); + })); + + run_and_assert_sink_compliance( + VectorSink::from_event_streamsink(memory), + stream::iter(vec![event.clone(), event]).flat_map(|e| { + stream::once(async move { + tokio::time::sleep(Duration::from_millis(600)).await; + e + }) + }), + &SINK_TAGS, + ) + .await; + + let metrics = Controller::get().unwrap().capture_metrics(); + let insertions_counter = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Counter { .. }) + && m.name() == "memory_enrichment_table_insertions_total" + }) + .expect("Insertions metric is missing!"); + let MetricValue::Counter { + value: insertions_count, + } = insertions_counter.value() + else { + unreachable!(); + }; + let flushes_counter = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Counter { .. }) + && m.name() == "memory_enrichment_table_flushes_total" + }) + .expect("Flushes metric is missing!"); + let MetricValue::Counter { + value: flushes_count, + } = flushes_counter.value() + else { + unreachable!(); + }; + let object_count_gauge = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Gauge { .. 
}) + && m.name() == "memory_enrichment_table_objects_count" + }) + .expect("Object count metric is missing!"); + let MetricValue::Gauge { + value: object_count, + } = object_count_gauge.value() + else { + unreachable!(); + }; + let byte_size_gauge = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Gauge { .. }) + && m.name() == "memory_enrichment_table_byte_size" + }) + .expect("Byte size metric is missing!"); + + assert_eq!(*insertions_count, 2.0); + // One is done right away and the next one after the interval + assert_eq!(*flushes_count, 2.0); + assert_eq!(*object_count, 1.0); + assert!(!byte_size_gauge.is_empty()); + } + + #[tokio::test] + async fn flush_metrics_with_key() { + let event = Event::Log(LogEvent::from(ObjectMap::from([( + "test_key".into(), + Value::from(5), + )]))); + + let memory = Memory::new(build_memory_config(|c| { + c.internal_metrics = InternalMetricsConfig { + include_key_tag: true, + }; + })); + + run_and_assert_sink_compliance( + VectorSink::from_event_streamsink(memory), + stream::once(ready(event)), + &SINK_TAGS, + ) + .await; + + let metrics = Controller::get().unwrap().capture_metrics(); + let insertions_counter = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Counter { .. }) + && m.name() == "memory_enrichment_table_insertions_total" + }) + .expect("Insertions metric is missing!"); + + assert!(insertions_counter.tag_matches("key", "test_key")); + } + + #[tokio::test] + async fn flush_metrics_without_key() { + let event = Event::Log(LogEvent::from(ObjectMap::from([( + "test_key".into(), + Value::from(5), + )]))); + + let memory = Memory::new(Default::default()); + + run_and_assert_sink_compliance( + VectorSink::from_event_streamsink(memory), + stream::once(ready(event)), + &SINK_TAGS, + ) + .await; + + let metrics = Controller::get().unwrap().capture_metrics(); + let insertions_counter = metrics + .iter() + .find(|m| { + matches!(m.value(), MetricValue::Counter { .. }) + && m.name() == "memory_enrichment_table_insertions_total" + }) + .expect("Insertions metric is missing!"); + + assert!(insertions_counter.tag_value("key").is_none()); + } + + #[tokio::test] + async fn source_spec_compliance() { + let mut memory_config = MemoryConfig::default(); + memory_config.source_config = Some(MemorySourceConfig { + export_interval: NonZeroU64::try_from(1).unwrap(), + export_batch_size: None, + remove_after_export: false, + source_key: "test".to_string(), + }); + let memory = memory_config.get_or_build_memory().await; + memory.handle_value(ObjectMap::from([("test_key".into(), Value::from(5))])); + + let mut events: Vec = run_and_assert_source_compliance( + memory_config, + time::Duration::from_secs(5), + &SOURCE_TAGS, + ) + .await; + + assert!(!events.is_empty()); + let event = events.remove(0); + let log = event.as_log(); + + assert!(!log.value().is_empty()); + } +} diff --git a/src/enrichment_tables/mmdb.rs b/src/enrichment_tables/mmdb.rs new file mode 100644 index 0000000000000..0c47c1d52d4b4 --- /dev/null +++ b/src/enrichment_tables/mmdb.rs @@ -0,0 +1,280 @@ +//! Handles enrichment tables for `type = mmdb`. +//! Enrichment data is loaded from any database in [MaxMind][maxmind] format. +//! +//! 
[maxmind]: https://maxmind.com +use std::{fs, net::IpAddr, sync::Arc, time::SystemTime}; + +use maxminddb::Reader; +use vector_lib::configurable::configurable_component; +use vector_lib::enrichment::{Case, Condition, IndexHandle, Table}; +use vrl::value::{ObjectMap, Value}; + +use crate::config::{EnrichmentTableConfig, GenerateConfig}; + +/// Configuration for the `mmdb` enrichment table. +#[derive(Clone, Debug, Eq, PartialEq)] +#[configurable_component(enrichment_table("mmdb"))] +pub struct MmdbConfig { + /// Path to the [MaxMind][maxmind] database + /// + /// [maxmind]: https://maxmind.com + pub path: String, +} + +impl GenerateConfig for MmdbConfig { + fn generate_config() -> toml::Value { + toml::Value::try_from(Self { + path: "/path/to/GeoLite2-City.mmdb".to_string(), + }) + .unwrap() + } +} + +impl EnrichmentTableConfig for MmdbConfig { + async fn build( + &self, + _: &crate::config::GlobalOptions, + ) -> crate::Result> { + Ok(Box::new(Mmdb::new(self.clone())?)) + } +} + +#[derive(Clone)] +/// A struct that implements [vector_lib::enrichment::Table] to handle loading enrichment data from a MaxMind database. +pub struct Mmdb { + config: MmdbConfig, + dbreader: Arc>>, + last_modified: SystemTime, +} + +impl Mmdb { + /// Creates a new Mmdb struct from the provided config. + pub fn new(config: MmdbConfig) -> crate::Result { + let dbreader = Arc::new(Reader::open_readfile(config.path.clone())?); + + // Check if we can read database with dummy Ip. + let ip = IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED); + let result = dbreader.lookup::(ip).map(|_| ()); + + match result { + Ok(_) => Ok(Mmdb { + last_modified: fs::metadata(&config.path)?.modified()?, + dbreader, + config, + }), + Err(error) => Err(error.into()), + } + } + + fn lookup(&self, ip: IpAddr, select: Option<&[String]>) -> Option { + let data = self.dbreader.lookup::(ip).ok()??; + + if let Some(fields) = select { + let mut filtered = Value::from(ObjectMap::new()); + let mut data_value = Value::from(data); + for field in fields { + filtered.insert( + field.as_str(), + data_value + .remove(field.as_str(), false) + .unwrap_or(Value::Null), + ); + } + filtered.into_object() + } else { + Some(data) + } + } +} + +impl Table for Mmdb { + /// Search the enrichment table data with the given condition. + /// All conditions must match (AND). + /// + /// # Errors + /// Errors if no rows, or more than 1 row is found. + fn find_table_row<'a>( + &self, + case: Case, + condition: &'a [Condition<'a>], + select: Option<&[String]>, + wildcard: Option<&Value>, + index: Option, + ) -> Result { + let mut rows = self.find_table_rows(case, condition, select, wildcard, index)?; + + match rows.pop() { + Some(row) if rows.is_empty() => Ok(row), + Some(_) => Err("More than 1 row found".to_string()), + None => Err("IP not found".to_string()), + } + } + + /// Search the enrichment table data with the given condition. + /// All conditions must match (AND). + /// Can return multiple matched records + fn find_table_rows<'a>( + &self, + _: Case, + condition: &'a [Condition<'a>], + select: Option<&[String]>, + _wildcard: Option<&Value>, + _: Option, + ) -> Result, String> { + match condition.first() { + Some(_) if condition.len() > 1 => Err("Only one condition is allowed".to_string()), + Some(Condition::Equals { value, .. 
}) => { + let ip = value + .to_string_lossy() + .parse::() + .map_err(|_| "Invalid IP address".to_string())?; + Ok(self + .lookup(ip, select) + .map(|values| vec![values]) + .unwrap_or_default()) + } + Some(_) => Err("Only equality condition is allowed".to_string()), + None => Err("IP condition must be specified".to_string()), + } + } + + /// Hints to the enrichment table what data is going to be searched to allow it to index the + /// data in advance. + /// + /// # Errors + /// Errors if the fields are not in the table. + fn add_index(&mut self, _: Case, fields: &[&str]) -> Result { + match fields.len() { + 0 => Err("IP field is required".to_string()), + 1 => Ok(IndexHandle(0)), + _ => Err("Only one field is allowed".to_string()), + } + } + + /// Returns a list of the field names that are in each index + fn index_fields(&self) -> Vec<(Case, Vec)> { + Vec::new() + } + + /// Returns true if the underlying data has changed and the table needs reloading. + fn needs_reload(&self) -> bool { + matches!(fs::metadata(&self.config.path) + .and_then(|metadata| metadata.modified()), + Ok(modified) if modified > self.last_modified) + } +} + +impl std::fmt::Debug for Mmdb { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Maxmind database {})", self.config.path) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use vrl::value::Value; + + #[test] + fn city_partial_lookup() { + let values = find_select( + "2.125.160.216", + "tests/data/GeoIP2-City-Test.mmdb", + Some(&[ + "location.latitude".to_string(), + "location.longitude".to_string(), + ]), + ) + .unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert( + "location".into(), + ObjectMap::from([ + ("latitude".into(), Value::from(51.75)), + ("longitude".into(), Value::from(-1.25)), + ]) + .into(), + ); + + assert_eq!(values, expected); + } + + #[test] + fn isp_lookup() { + let values = find("208.192.1.2", "tests/data/GeoIP2-ISP-Test.mmdb").unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("autonomous_system_number".into(), 701i64.into()); + expected.insert( + "autonomous_system_organization".into(), + "MCI Communications Services, Inc. 
d/b/a Verizon Business".into(), + ); + expected.insert("isp".into(), "Verizon Business".into()); + expected.insert("organization".into(), "Verizon Business".into()); + + assert_eq!(values, expected); + } + + #[test] + fn connection_type_lookup_success() { + let values = find( + "201.243.200.1", + "tests/data/GeoIP2-Connection-Type-Test.mmdb", + ) + .unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("connection_type".into(), "Corporate".into()); + + assert_eq!(values, expected); + } + + #[test] + fn lookup_missing() { + let values = find("10.1.12.1", "tests/data/custom-type.mmdb"); + + assert!(values.is_none()); + } + + #[test] + fn custom_mmdb_type() { + let values = find("208.192.1.2", "tests/data/custom-type.mmdb").unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("hostname".into(), "custom".into()); + expected.insert( + "nested".into(), + ObjectMap::from([ + ("hostname".into(), "custom".into()), + ("original_cidr".into(), "208.192.1.2/24".into()), + ]) + .into(), + ); + + assert_eq!(values, expected); + } + + fn find(ip: &str, database: &str) -> Option { + find_select(ip, database, None) + } + + fn find_select(ip: &str, database: &str, select: Option<&[String]>) -> Option { + Mmdb::new(MmdbConfig { + path: database.to_string(), + }) + .unwrap() + .find_table_rows( + Case::Insensitive, + &[Condition::Equals { + field: "ip", + value: ip.into(), + }], + select, + None, + None, + ) + .unwrap() + .pop() + } +} diff --git a/src/enrichment_tables/mod.rs b/src/enrichment_tables/mod.rs index 15ec912d911be..d067262a1ea74 100644 --- a/src/enrichment_tables/mod.rs +++ b/src/enrichment_tables/mod.rs @@ -1,41 +1,79 @@ //! Functionality to handle enrichment tables. use enum_dispatch::enum_dispatch; -use vector_lib::configurable::{configurable_component, NamedComponent}; +use vector_lib::configurable::configurable_component; pub use vector_lib::enrichment::{Condition, IndexHandle, Table}; -use crate::config::{EnrichmentTableConfig, GlobalOptions}; +use crate::config::{ + ComponentKey, EnrichmentTableConfig, GenerateConfig, GlobalOptions, SinkConfig, SourceConfig, +}; pub mod file; +#[cfg(feature = "enrichment-tables-memory")] +pub mod memory; + #[cfg(feature = "enrichment-tables-geoip")] pub mod geoip; -/// Configurable enrichment tables. -#[configurable_component] +#[cfg(feature = "enrichment-tables-mmdb")] +pub mod mmdb; + +/// Configuration options for an [enrichment table](https://vector.dev/docs/reference/glossary/#enrichment-tables) to be used in a +/// [`remap`](https://vector.dev/docs/reference/configuration/transforms/remap/) transform. Currently supported are: +/// +/// * [CSV](https://en.wikipedia.org/wiki/Comma-separated_values) files +/// * [MaxMind](https://www.maxmind.com/en/home) databases +/// * In-memory storage +/// +/// For the lookup in the enrichment tables to be as performant as possible, the data is indexed according +/// to the fields that are used in the search. Note that indices can only be created for fields for which an +/// exact match is used in the condition. For range searches, an index isn't used and the enrichment table +/// drops back to a sequential scan of the data. A sequential scan shouldn't impact performance +/// significantly provided that there are only a few possible rows returned by the exact matches in the +/// condition. We don't recommend using a condition that uses only date range searches. 
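+/// As a sketch of the exact-match lookup path described above (the field
+/// and value here are illustrative), a caller resolves rows through the
+/// `Table` trait from this patch:
+///
+/// ```ignore
+/// let rows = table.find_table_rows(
+///     Case::Sensitive,
+///     &[Condition::Equals { field: "key", value: Value::from("some_key") }],
+///     None, // select: return all fields
+///     None, // wildcard
+///     None, // index
+/// )?;
+/// ```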
+/// +/// +#[configurable_component(global_option("enrichment_tables"))] #[derive(Clone, Debug)] #[serde(tag = "type", rename_all = "snake_case")] #[enum_dispatch(EnrichmentTableConfig)] +#[configurable(metadata( + docs::enum_tag_description = "enrichment table type", + docs::common = false, + docs::required = false, +))] pub enum EnrichmentTables { /// Exposes data from a static file as an enrichment table. File(file::FileConfig), + /// Exposes data from a memory cache as an enrichment table. The cache can be written to using + /// a sink. + #[cfg(feature = "enrichment-tables-memory")] + Memory(memory::MemoryConfig), + /// Exposes data from a [MaxMind][maxmind] [GeoIP2][geoip2] database as an enrichment table. /// /// [maxmind]: https://www.maxmind.com/ /// [geoip2]: https://www.maxmind.com/en/geoip2-databases #[cfg(feature = "enrichment-tables-geoip")] Geoip(geoip::GeoipConfig), + + /// Exposes data from a [MaxMind][maxmind] database as an enrichment table. + /// + /// [maxmind]: https://www.maxmind.com/ + #[cfg(feature = "enrichment-tables-mmdb")] + Mmdb(mmdb::MmdbConfig), } -// TODO: Use `enum_dispatch` here. -impl NamedComponent for EnrichmentTables { - fn get_component_name(&self) -> &'static str { - match self { - Self::File(config) => config.get_component_name(), - #[cfg(feature = "enrichment-tables-geoip")] - Self::Geoip(config) => config.get_component_name(), - #[allow(unreachable_patterns)] - _ => unimplemented!(), - } +impl GenerateConfig for EnrichmentTables { + fn generate_config() -> toml::Value { + toml::Value::try_from(Self::File(file::FileConfig { + file: file::FileSettings { + path: "path/to/file".into(), + encoding: file::Encoding::default(), + }, + schema: Default::default(), + })) + .unwrap() } } diff --git a/src/extra_context.rs b/src/extra_context.rs index 025093b2ad22d..b80586bc714fd 100644 --- a/src/extra_context.rs +++ b/src/extra_context.rs @@ -8,7 +8,7 @@ use std::{ /// Structure containing any extra data. /// The data is held in an [`Arc`] so is cheap to clone. 
-#[derive(Clone, Default)] +#[derive(Clone, Debug, Default)] pub struct ExtraContext(Arc>); type ContextItem = Box; diff --git a/src/gcp.rs b/src/gcp.rs index bfc486f92808a..d484a063ccd35 100644 --- a/src/gcp.rs +++ b/src/gcp.rs @@ -1,6 +1,6 @@ #![allow(missing_docs)] use std::{ - sync::{Arc, RwLock}, + sync::{Arc, LazyLock, RwLock}, time::Duration, }; @@ -13,10 +13,9 @@ use goauth::{ }; use http::{uri::PathAndQuery, Uri}; use hyper::header::AUTHORIZATION; -use once_cell::sync::Lazy; use smpl_jwt::Jwt; use snafu::{ResultExt, Snafu}; -use tokio::{sync::watch, time::Instant}; +use tokio::sync::watch; use vector_lib::configurable::configurable_component; use vector_lib::sensitive_string::SensitiveString; @@ -25,9 +24,14 @@ use crate::{config::ProxyConfig, http::HttpClient, http::HttpError}; const SERVICE_ACCOUNT_TOKEN_URL: &str = "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token"; +// See https://cloud.google.com/compute/docs/access/authenticate-workloads#applications +const METADATA_TOKEN_EXPIRY_MARGIN_SECS: u64 = 200; + +const METADATA_TOKEN_ERROR_RETRY_SECS: u64 = 2; + pub const PUBSUB_URL: &str = "https://pubsub.googleapis.com"; -pub static PUBSUB_ADDRESS: Lazy = Lazy::new(|| { +pub static PUBSUB_ADDRESS: LazyLock = LazyLock::new(|| { std::env::var("EMULATOR_ADDRESS").unwrap_or_else(|_| "http://localhost:8681".into()) }); @@ -194,19 +198,34 @@ impl GcpAuthenticator { async fn token_regenerator(self, sender: watch::Sender<()>) { match self { Self::Credentials(inner) => { - let period = - Duration::from_secs(inner.token.read().unwrap().expires_in() as u64 / 2); - let mut interval = tokio::time::interval_at(Instant::now() + period, period); + let mut expires_in = inner.token.read().unwrap().expires_in() as u64; loop { - interval.tick().await; - debug!("Renewing GCP authentication token."); + let deadline = Duration::from_secs( + expires_in + .saturating_sub(METADATA_TOKEN_EXPIRY_MARGIN_SECS) + .max(METADATA_TOKEN_ERROR_RETRY_SECS), + ); + debug!( + deadline = deadline.as_secs(), + "Sleeping before refreshing GCP authentication token.", + ); + tokio::time::sleep(deadline).await; match inner.regenerate_token().await { - Ok(()) => sender.send_replace(()), + Ok(()) => { + sender.send_replace(()); + debug!("GCP authentication token renewed."); + // Rather than an expected fresh token, the Metadata Server may return + // the same (cached) token during the last 300 seconds of its lifetime. 
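+                            // As a worked example of the arithmetic above: a
+                            // token reporting 3600 s to expiry is refreshed
+                            // after 3600 - 200 = 3400 s, while one reporting
+                            // under 202 s falls back to the 2 s retry floor.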
+ // This scenario is handled by retrying the token refresh after the + // METADATA_TOKEN_ERROR_RETRY_SECS period when a fresh token is expected + expires_in = inner.token.read().unwrap().expires_in() as u64; + } Err(error) => { error!( message = "Failed to update GCP authentication token.", %error ); + expires_in = METADATA_TOKEN_EXPIRY_MARGIN_SECS; } } } @@ -238,7 +257,7 @@ impl InnerCreds { } async fn fetch_token(creds: &Credentials, scope: &Scope) -> crate::Result { - let claims = JwtClaims::new(creds.iss(), scope, creds.token_uri(), None, None); + let claims = JwtClaims::new(creds.iss(), &[scope.clone()], creds.token_uri(), None, None); let rsa_key = creds.rsa_key().context(InvalidRsaKeySnafu)?; let jwt = Jwt::new(claims, rsa_key, None); diff --git a/src/generate.rs b/src/generate.rs index 09e3ba88d15f5..1da6c414c7640 100644 --- a/src/generate.rs +++ b/src/generate.rs @@ -11,7 +11,7 @@ use indexmap::IndexMap; use serde::Serialize; use toml::{map::Map, Value}; use vector_lib::configurable::component::{ - ExampleError, SinkDescription, SourceDescription, TransformDescription, + SinkDescription, SourceDescription, TransformDescription, }; use vector_lib::{buffers::BufferConfig, config::GlobalOptions, default_data_dir}; @@ -119,7 +119,7 @@ pub(crate) fn generate_example( ) -> Result> { let components: Vec> = opts .expression - .split(|c| c == '|' || c == '/') + .split(['|', '/']) .map(|s| { s.split(',') .map(|s| s.trim().to_string()) @@ -140,8 +140,7 @@ pub(crate) fn generate_example( let (name, source_type) = if let Some(c_index) = source_expr.find(':') { if c_index == 0 { errs.push(format!( - "failed to generate source '{}': empty name is not allowed", - source_expr + "failed to generate source '{source_expr}': empty name is not allowed" )); continue; } @@ -151,19 +150,14 @@ pub(crate) fn generate_example( chopped_expr.drain(1..).collect(), ) } else { - (format!("source{}", i), source_expr.clone()) + (format!("source{i}"), source_expr.clone()) }; source_names.push(name.clone()); let mut example = match SourceDescription::example(&source_type) { Ok(example) => example, Err(err) => { - if err != ExampleError::MissingExample { - errs.push(format!( - "failed to generate source '{}': {}", - source_type, err - )); - } + errs.push(format!("failed to generate source '{source_type}': {err}")); Value::Table(Map::new()) } }; @@ -188,8 +182,7 @@ pub(crate) fn generate_example( let (name, transform_type) = if let Some(c_index) = transform_expr.find(':') { if c_index == 0 { errs.push(format!( - "failed to generate transform '{}': empty name is not allowed", - transform_expr + "failed to generate transform '{transform_expr}': empty name is not allowed" )); continue; } @@ -199,7 +192,7 @@ pub(crate) fn generate_example( chopped_expr.drain(1..).collect(), ) } else { - (format!("transform{}", i), transform_expr.clone()) + (format!("transform{i}"), transform_expr.clone()) }; transform_names.push(name.clone()); @@ -221,12 +214,9 @@ pub(crate) fn generate_example( let mut example = match TransformDescription::example(&transform_type) { Ok(example) => example, Err(err) => { - if err != ExampleError::MissingExample { - errs.push(format!( - "failed to generate transform '{}': {}", - transform_type, err - )); - } + errs.push(format!( + "failed to generate transform '{transform_type}': {err}" + )); Value::Table(Map::new()) } }; @@ -256,8 +246,7 @@ pub(crate) fn generate_example( let (name, sink_type) = if let Some(c_index) = sink_expr.find(':') { if c_index == 0 { errs.push(format!( - "failed to generate sink 
'{}': empty name is not allowed", - sink_expr + "failed to generate sink '{sink_expr}': empty name is not allowed" )); continue; } @@ -267,15 +256,13 @@ pub(crate) fn generate_example( chopped_expr.drain(1..).collect(), ) } else { - (format!("sink{}", i), sink_expr.clone()) + (format!("sink{i}"), sink_expr.clone()) }; let mut example = match SinkDescription::example(&sink_type) { Ok(example) => example, Err(err) => { - if err != ExampleError::MissingExample { - errs.push(format!("failed to generate sink '{}': {}", sink_type, err)); - } + errs.push(format!("failed to generate sink '{sink_type}': {err}")); Value::Table(Map::new()) } }; @@ -360,7 +347,7 @@ pub fn cmd(opts: &Opts) -> exitcode::ExitCode { Ok(s) => { #[allow(clippy::print_stdout)] { - println!("{}", s); + println!("{s}"); } exitcode::OK } @@ -417,15 +404,15 @@ mod tests { #[test] fn generate_all(#[case] format: Format) { for name in SourceDescription::types() { - generate_and_deserialize(format!("{}//", name), format); + generate_and_deserialize(format!("{name}//"), format); } for name in TransformDescription::types() { - generate_and_deserialize(format!("/{}/", name), format); + generate_and_deserialize(format!("/{name}/"), format); } for name in SinkDescription::types() { - generate_and_deserialize(format!("//{}", name), format); + generate_and_deserialize(format!("//{name}"), format); } } @@ -489,6 +476,9 @@ mod tests { [sinks.sink0.encoding] codec = "json" + [sinks.sink0.encoding.json] + pretty = false + [sinks.sink0.healthcheck] enabled = true @@ -526,6 +516,9 @@ mod tests { [sinks.sink0.encoding] codec = "json" + [sinks.sink0.encoding.json] + pretty = false + [sinks.sink0.healthcheck] enabled = true @@ -557,6 +550,9 @@ mod tests { [sinks.sink0.encoding] codec = "json" + [sinks.sink0.encoding.json] + pretty = false + [sinks.sink0.healthcheck] enabled = true @@ -581,6 +577,9 @@ mod tests { [sinks.sink0.encoding] codec = "json" + [sinks.sink0.encoding.json] + pretty = false + [sinks.sink0.healthcheck] enabled = true @@ -661,18 +660,18 @@ mod tests { assert_eq!( generate_example(&opts, TransformInputsStrategy::Auto).unwrap(), - indoc::indoc! {r#" + indoc::indoc! {r" data_dir: /var/lib/vector/ sources: source0: count: 9223372036854775807 - format: json - interval: 1.0 - type: demo_logs decoding: codec: bytes + format: json framing: method: bytes + interval: 1.0 + type: demo_logs transforms: transform0: inputs: @@ -687,10 +686,12 @@ mod tests { sink0: inputs: - transform0 - target: stdout - type: console encoding: codec: json + json: + pretty: false + target: stdout + type: console healthcheck: enabled: true uri: null @@ -698,7 +699,7 @@ mod tests { type: memory max_events: 500 when_full: block - "#} + "} ); } @@ -724,15 +725,15 @@ mod tests { "sources": { "source0": { "count": 9223372036854775807, - "format": "json", - "interval": 1.0, - "type": "demo_logs", "decoding": { "codec": "bytes" }, + "format": "json", "framing": { "method": "bytes" - } + }, + "interval": 1.0, + "type": "demo_logs" } }, "transforms": { @@ -753,11 +754,14 @@ mod tests { "inputs": [ "transform0" ], - "target": "stdout", - "type": "console", "encoding": { - "codec": "json" + "codec": "json", + "json": { + "pretty": false + } }, + "target": "stdout", + "type": "console", "healthcheck": { "enabled": true, "uri": null diff --git a/src/generate_schema.rs b/src/generate_schema.rs index ab8e830d77c11..f20a71480c106 100644 --- a/src/generate_schema.rs +++ b/src/generate_schema.rs @@ -1,25 +1,52 @@ -#![allow(missing_docs)] +//! 
Vector `generate-schema` command implementation.
+
+use clap::Parser;
+use std::fs;
+use std::path::PathBuf;

 use vector_lib::configurable::schema::generate_root_schema;

 use crate::config::ConfigBuilder;

-pub fn cmd() -> exitcode::ExitCode {
+#[derive(Parser, Debug)]
+#[command(rename_all = "kebab-case")]
+/// Command line options for the `generate-schema` command.
+pub struct Opts {
+    /// File path to write the generated schema to.
+    #[arg(short, long)]
+    pub(crate) output_path: Option<PathBuf>,
+}
+
+/// Execute the `generate-schema` command.
+#[allow(clippy::print_stdout, clippy::print_stderr)]
+pub fn cmd(opts: &Opts) -> exitcode::ExitCode {
     match generate_root_schema::<ConfigBuilder>() {
         Ok(schema) => {
             let json = serde_json::to_string_pretty(&schema)
                 .expect("rendering root schema to JSON should not fail");

-            #[allow(clippy::print_stdout)]
-            {
-                println!("{}", json);
+            if let Some(output_path) = &opts.output_path {
+                if output_path.exists() {
+                    eprintln!("Error: Output file {output_path:?} already exists");
+                    return exitcode::CANTCREAT;
+                }
+
+                return match fs::write(output_path, json) {
+                    Ok(_) => {
+                        println!("Schema successfully written to {output_path:?}");
+                        exitcode::OK
+                    }
+                    Err(e) => {
+                        eprintln!("Error writing to file {output_path:?}: {e:?}");
+                        exitcode::IOERR
+                    }
+                };
+            } else {
+                println!("{json}");
             }

             exitcode::OK
         }
         Err(e) => {
-            #[allow(clippy::print_stderr)]
-            {
-                eprintln!("error while generating schema: {:?}", e);
-            }
+            eprintln!("error while generating schema: {e:?}");
             exitcode::SOFTWARE
         }
     }
 }
diff --git a/src/graph.rs b/src/graph.rs
index cf5f009b3a8f2..5d7ad0da03fca 100644
--- a/src/graph.rs
+++ b/src/graph.rs
@@ -1,7 +1,9 @@
+use std::collections::HashMap;
 use std::fmt::Write as _;
 use std::path::PathBuf;

 use clap::Parser;
+use itertools::Itertools;

 use crate::config;

@@ -45,6 +47,19 @@ pub struct Opts {
         value_delimiter(',')
     )]
     pub config_dirs: Vec<PathBuf>,
+
+    /// Set the output format.
+    ///
+    /// See https://mermaid.js.org/syntax/flowchart.html#styling-and-classes for
+    /// information on the `mermaid` format.
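+    ///
+    /// As a rough illustration for a hypothetical `in` source feeding an
+    /// `out` sink: `dot` renders the edge as `"in" -> "out"` inside a
+    /// `digraph { ... }` block, while `mermaid` renders it as `in --> out`
+    /// under a `flowchart TD;` header.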
+ #[arg(id = "format", long, default_value = "dot")] + pub format: OutputFormat, +} + +#[derive(clap::ValueEnum, Debug, Clone, Copy, PartialEq, Eq)] +pub enum OutputFormat { + Dot, + Mermaid, } impl Opts { @@ -65,6 +80,14 @@ impl Opts { } } +fn node_attributes_to_string(attributes: &HashMap, default_shape: &str) -> String { + let mut attrs = attributes.clone(); + if !attrs.contains_key("shape") { + attrs.insert("shape".to_string(), default_shape.to_string()); + } + attrs.iter().map(|(k, v)| format!("{k}=\"{v}\"")).join(" ") +} + pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { let paths = opts.paths_with_formats(); let paths = match config::process_paths(&paths) { @@ -77,20 +100,40 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { Err(errs) => { #[allow(clippy::print_stderr)] for err in errs { - eprintln!("{}", err); + eprintln!("{err}"); } return exitcode::CONFIG; } }; + let format = opts.format; + match format { + OutputFormat::Dot => render_dot(config), + OutputFormat::Mermaid => render_mermaid(config), + } +} + +fn render_dot(config: config::Config) -> exitcode::ExitCode { let mut dot = String::from("digraph {\n"); - for (id, _source) in config.sources() { - writeln!(dot, " \"{}\" [shape=trapezium]", id).expect("write to String never fails"); + for (id, source) in config.sources() { + writeln!( + dot, + " \"{}\" [{}]", + id, + node_attributes_to_string(&source.graph.node_attributes, "trapezium") + ) + .expect("write to String never fails"); } for (id, transform) in config.transforms() { - writeln!(dot, " \"{}\" [shape=diamond]", id).expect("write to String never fails"); + writeln!( + dot, + " \"{}\" [{}]", + id, + node_attributes_to_string(&transform.graph.node_attributes, "diamond") + ) + .expect("write to String never fails"); for input in transform.inputs.iter() { if let Some(port) = &input.port { @@ -101,14 +144,19 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { ) .expect("write to String never fails"); } else { - writeln!(dot, " \"{}\" -> \"{}\"", input, id) - .expect("write to String never fails"); + writeln!(dot, " \"{input}\" -> \"{id}\"").expect("write to String never fails"); } } } for (id, sink) in config.sinks() { - writeln!(dot, " \"{}\" [shape=invtrapezium]", id).expect("write to String never fails"); + writeln!( + dot, + " \"{}\" [{}]", + id, + node_attributes_to_string(&sink.graph.node_attributes, "invtrapezium") + ) + .expect("write to String never fails"); for input in &sink.inputs { if let Some(port) = &input.port { @@ -119,8 +167,7 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { ) .expect("write to String never fails"); } else { - writeln!(dot, " \"{}\" -> \"{}\"", input, id) - .expect("write to String never fails"); + writeln!(dot, " \"{input}\" -> \"{id}\"").expect("write to String never fails"); } } } @@ -129,7 +176,49 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { #[allow(clippy::print_stdout)] { - println!("{}", dot); + println!("{dot}"); + } + + exitcode::OK +} + +fn render_mermaid(config: config::Config) -> exitcode::ExitCode { + let mut mermaid = String::from("flowchart TD;\n"); + + writeln!(mermaid, "\n %% Sources").unwrap(); + for (id, _) in config.sources() { + writeln!(mermaid, " {id}[/{id}/]").unwrap(); + } + + writeln!(mermaid, "\n %% Transforms").unwrap(); + for (id, transform) in config.transforms() { + writeln!(mermaid, " {id}{{{id}}}").unwrap(); + + for input in transform.inputs.iter() { + if let Some(port) = &input.port { + writeln!(mermaid, " {0} -->|{port}| {id}", input.component).unwrap(); + } else 
{ + writeln!(mermaid, " {0} --> {id}", input.component).unwrap(); + } + } + } + + writeln!(mermaid, "\n %% Sinks").unwrap(); + for (id, sink) in config.sinks() { + writeln!(mermaid, " {id}[\\{id}\\]").unwrap(); + + for input in &sink.inputs { + if let Some(port) = &input.port { + writeln!(mermaid, " {0} -->|{port}| {id}", input.component).unwrap(); + } else { + writeln!(mermaid, " {0} --> {id}", input.component).unwrap(); + } + } + } + + #[allow(clippy::print_stdout)] + { + println!("{mermaid}"); } exitcode::OK diff --git a/src/http.rs b/src/http.rs index 2afb9abdac38c..bbd716a385817 100644 --- a/src/http.rs +++ b/src/http.rs @@ -1,11 +1,4 @@ #![allow(missing_docs)] -use std::{ - fmt, - net::SocketAddr, - task::{Context, Poll}, - time::Duration, -}; - use futures::future::BoxFuture; use headers::{Authorization, HeaderMapExt}; use http::{ @@ -22,6 +15,13 @@ use hyper_proxy::ProxyConnector; use rand::Rng; use serde_with::serde_as; use snafu::{ResultExt, Snafu}; +use std::{ + collections::HashMap, + fmt, + net::SocketAddr, + task::{Context, Poll}, + time::Duration, +}; use tokio::time::Instant; use tower::{Layer, Service}; use tower_http::{ @@ -32,6 +32,9 @@ use tracing::{Instrument, Span}; use vector_lib::configurable::configurable_component; use vector_lib::sensitive_string::SensitiveString; +#[cfg(feature = "aws-core")] +use crate::aws::AwsAuthentication; + use crate::{ config::ProxyConfig, internal_events::{http_client, HttpServerRequestReceived, HttpServerResponseSent}, @@ -102,7 +105,7 @@ where let app_name = crate::get_app_name(); let version = crate::get_version(); - let user_agent = HeaderValue::from_str(&format!("{}/{}", app_name, version)) + let user_agent = HeaderValue::from_str(&format!("{app_name}/{version}")) .expect("Invalid header value for user-agent!"); Ok(HttpClient { @@ -140,13 +143,9 @@ where // Handle the errors and extract the response. let response = response_result - .map_err(|error| { + .inspect_err(|error| { // Emit the error into the internal events system. - emit!(http_client::GotHttpWarning { - error: &error, - roundtrip - }); - error + emit!(http_client::GotHttpWarning { error, roundtrip }); }) .context(CallRequestSnafu)?; @@ -205,10 +204,10 @@ pub fn build_tls_connector( let settings = tls_settings.tls().cloned(); https.set_callback(move |c, _uri| { if let Some(settings) = &settings { - settings.apply_connect_configuration(c); + settings.apply_connect_configuration(c) + } else { + Ok(()) } - - Ok(()) }); Ok(https) } @@ -278,7 +277,7 @@ impl fmt::Debug for HttpClient { pub enum Auth { /// Basic authentication. /// - /// The username and password are concatenated and encoded via [base64][base64]. + /// The username and password are concatenated and encoded using [base64][base64]. /// /// [base64]: https://en.wikipedia.org/wiki/Base64 Basic { @@ -300,6 +299,16 @@ pub enum Auth { /// The bearer authentication token. token: SensitiveString, }, + + #[cfg(feature = "aws-core")] + /// AWS authentication. + Aws { + /// The AWS authentication configuration. + auth: AwsAuthentication, + + /// The AWS service name to use for signing. 
+ service: String, + }, } pub trait MaybeAuth: Sized { @@ -338,6 +347,8 @@ impl Auth { Ok(auth) => map.typed_insert(auth), Err(error) => error!(message = "Invalid bearer token.", token = %token, %error), }, + #[cfg(feature = "aws-core")] + _ => {} } } } @@ -352,26 +363,26 @@ pub fn get_http_scheme_from_uri(uri: &Uri) -> &'static str { // it also supports arbitrary schemes, which is where we bomb out down here, since we can't generate a static // string for an arbitrary input string... and anything other than "http" and "https" makes no sense for an HTTP // client anyways. - s => panic!("invalid URI scheme for HTTP client: {}", s), + s => panic!("invalid URI scheme for HTTP client: {s}"), }) } /// Builds a [TraceLayer] configured for a HTTP server. /// /// This layer emits HTTP specific telemetry for requests received, responses sent, and handler duration. -pub fn build_http_trace_layer( +pub fn build_http_trace_layer( span: Span, ) -> TraceLayer< SharedClassifier, - impl Fn(&Request) -> Span + Clone, - impl Fn(&Request, &Span) + Clone, - impl Fn(&Response, Duration, &Span) + Clone, + impl Fn(&Request) -> Span + Clone, + impl Fn(&Request, &Span) + Clone, + impl Fn(&Response, Duration, &Span) + Clone, (), (), (), > { TraceLayer::new_for_http() - .make_span_with(move |request: &Request| { + .make_span_with(move |request: &Request| { // This is an error span so that the labels are always present for metrics. error_span!( parent: &span, @@ -380,14 +391,12 @@ pub fn build_http_trace_layer( path = %request.uri().path(), ) }) - .on_request(Box::new(|_request: &Request, _span: &Span| { + .on_request(Box::new(|_request: &Request, _span: &Span| { emit!(HttpServerRequestReceived); })) - .on_response( - |response: &Response, latency: Duration, _span: &Span| { - emit!(HttpServerResponseSent { response, latency }); - }, - ) + .on_response(|response: &Response, latency: Duration, _span: &Span| { + emit!(HttpServerResponseSent { response, latency }); + }) .on_failure(()) .on_body_chunk(()) .on_eos(()) @@ -446,10 +455,9 @@ impl Default for KeepaliveConfig { /// /// **Notes:** /// - This is intended to be used in a Hyper server (or similar) that will automatically close -/// the connection after a response with a `Connection: close` header is sent. +/// the connection after a response with a `Connection: close` header is sent. /// - This layer assumes that it is instantiated once per connection, which is true within the -/// Hyper framework. - +/// Hyper framework. pub struct MaxConnectionAgeLayer { start_reference: Instant, max_connection_age: Duration, @@ -469,8 +477,8 @@ impl MaxConnectionAgeLayer { // Ensure the jitter_factor is between 0.0 and 1.0 let jitter_factor = jitter_factor.clamp(0.0, 1.0); // Generate a random jitter factor between `1 - jitter_factor`` and `1 + jitter_factor`. - let mut rng = rand::thread_rng(); - let random_jitter_factor = rng.gen_range(-jitter_factor..=jitter_factor) + 1.; + let mut rng = rand::rng(); + let random_jitter_factor = rng.random_range(-jitter_factor..=jitter_factor) + 1.; duration.mul_f64(random_jitter_factor) } } @@ -498,9 +506,9 @@ where /// /// **Notes:** /// - This is intended to be used in a Hyper server (or similar) that will automatically close -/// the connection after a response with a `Connection: close` header is sent. +/// the connection after a response with a `Connection: close` header is sent. /// - This service assumes that it is instantiated once per connection, which is true within the -/// Hyper framework. +/// Hyper framework. 
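The jitter arithmetic above is unchanged by the migration; only the rand entry points are renamed (`thread_rng()` becomes `rng()`, `gen_range` becomes `random_range`). A self-contained sketch of the 0.9-style calls, assuming `rand = "0.9"` (the `jittered` helper is illustrative, not part of the patch):

use rand::Rng;
use std::time::Duration;

fn jittered(duration: Duration, jitter_factor: f64) -> Duration {
    // Keep the jitter factor within [0.0, 1.0], as the method above does.
    let jitter_factor = jitter_factor.clamp(0.0, 1.0);
    // rand 0.9: `rand::rng()` replaces `rand::thread_rng()`, and
    // `random_range` replaces `gen_range`.
    let mut rng = rand::rng();
    let random_jitter_factor = rng.random_range(-jitter_factor..=jitter_factor) + 1.0;
    duration.mul_f64(random_jitter_factor)
}

fn main() {
    // A 300s base with 10% jitter lands somewhere in [270s, 330s].
    println!("{:?}", jittered(Duration::from_secs(300), 0.1));
}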
 #[derive(Clone)]
 pub struct MaxConnectionAgeService<S> {
     service: S,
@@ -561,6 +569,115 @@ where
     }
 }
 
+/// The type of a query parameter's value, which determines whether it's treated as a plain string or a VRL expression.
+#[configurable_component]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum ParamType {
+    /// The parameter value is a plain string.
+    #[default]
+    String,
+    /// The parameter value is a VRL expression that will be evaluated before each request.
+    Vrl,
+}
+
+impl ParamType {
+    fn is_default(&self) -> bool {
+        *self == Self::default()
+    }
+}
+
+/// Represents a query parameter value, which can be a simple string or a typed object
+/// indicating whether the value is a string or a VRL expression.
+#[configurable_component]
+#[derive(Clone, Debug, Eq, PartialEq)]
+#[serde(untagged)]
+pub enum ParameterValue {
+    /// A simple string value. For backwards compatibility.
+    String(String),
+    /// A value with an explicit type.
+    Typed {
+        /// The raw value of the parameter.
+        value: String,
+        /// The type of the parameter, indicating how the `value` should be treated.
+        #[serde(
+            default,
+            skip_serializing_if = "ParamType::is_default",
+            rename = "type"
+        )]
+        r#type: ParamType,
+    },
+}
+
+impl ParameterValue {
+    /// Returns true if the parameter is a VRL expression.
+    pub const fn is_vrl(&self) -> bool {
+        match self {
+            ParameterValue::String(_) => false,
+            ParameterValue::Typed { r#type, .. } => matches!(r#type, ParamType::Vrl),
+        }
+    }
+
+    /// Returns the raw string value of the parameter.
+    #[allow(clippy::missing_const_for_fn)]
+    pub fn value(&self) -> &str {
+        match self {
+            ParameterValue::String(s) => s,
+            ParameterValue::Typed { value, .. } => value,
+        }
+    }
+
+    /// Consumes the `ParameterValue` and returns the owned raw string value.
+    pub fn into_value(self) -> String {
+        match self {
+            ParameterValue::String(s) => s,
+            ParameterValue::Typed { value, .. } => value,
+        }
+    }
+}
+
+/// Configuration of the query parameter value for HTTP requests.
+#[configurable_component]
+#[derive(Clone, Debug, Eq, PartialEq)]
+#[serde(untagged)]
+#[configurable(metadata(docs::enum_tag_description = "Query parameter value"))]
+pub enum QueryParameterValue {
+    /// Query parameter with single value
+    SingleParam(ParameterValue),
+    /// Query parameter with multiple values
+    MultiParams(Vec<ParameterValue>),
+}
+
+impl QueryParameterValue {
+    /// Returns an iterator over the contained `ParameterValue`s.
+    pub fn iter(&self) -> impl Iterator<Item = &ParameterValue> {
+        match self {
+            QueryParameterValue::SingleParam(param) => std::slice::from_ref(param).iter(),
+            QueryParameterValue::MultiParams(params) => params.iter(),
+        }
+    }
+
+    /// Convert to `Vec<ParameterValue>` for owned iteration.
+    fn into_vec(self) -> Vec<ParameterValue> {
+        match self {
+            QueryParameterValue::SingleParam(param) => vec![param],
+            QueryParameterValue::MultiParams(params) => params,
+        }
+    }
+}
+
+// Implement IntoIterator for owned QueryParameterValue
+impl IntoIterator for QueryParameterValue {
+    type Item = ParameterValue;
+    type IntoIter = std::vec::IntoIter<ParameterValue>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.into_vec().into_iter()
+    }
+}
+
+pub type QueryParameters = HashMap<String, QueryParameterValue>;
+
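For reference, a minimal sketch of how the untagged `ParameterValue` shape above behaves under deserialization. Plain serde derives stand in for the `#[configurable_component]` expansion (an assumption: the macro also layers schema metadata on top), and the literal inputs are illustrative:

use serde::Deserialize;

#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
enum ParamType {
    String,
    Vrl,
}

#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
enum ParameterValue {
    String(String),
    Typed {
        value: String,
        #[serde(rename = "type")]
        r#type: ParamType,
    },
}

fn main() {
    // Bare strings keep deserializing as before (backwards compatibility)...
    let plain: ParameterValue = serde_json::from_str(r#""fixed-value""#).unwrap();
    assert_eq!(plain, ParameterValue::String("fixed-value".into()));

    // ...while typed objects opt a parameter into VRL evaluation.
    let vrl: ParameterValue =
        serde_json::from_str(r#"{"value": "now()", "type": "vrl"}"#).unwrap();
    assert!(matches!(vrl, ParameterValue::Typed { .. }));
}

 #[cfg(test)]
 mod tests {
     use std::convert::Infallible;
@@ -785,13 +902,13 @@ mod tests {
         // Responses generated before the client's max connection age has elapsed do not
         // include a `Connection: close` header in the response.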
- let req = Request::get(format!("http://{}/", addr)) + let req = Request::get(format!("http://{addr}/")) .body(Body::empty()) .unwrap(); let response = client.send(req).await.unwrap(); assert_eq!(response.headers().get("Connection"), None); - let req = Request::get(format!("http://{}/", addr)) + let req = Request::get(format!("http://{addr}/")) .body(Body::empty()) .unwrap(); let response = client.send(req).await.unwrap(); @@ -800,7 +917,7 @@ mod tests { // The first response generated after the client's max connection age has elapsed should // include the `Connection: close` header. tokio::time::sleep(Duration::from_secs(1)).await; - let req = Request::get(format!("http://{}/", addr)) + let req = Request::get(format!("http://{addr}/")) .body(Body::empty()) .unwrap(); let response = client.send(req).await.unwrap(); @@ -812,7 +929,7 @@ mod tests { // The next request should establish a new connection. // Importantly, this also confirms that each connection has its own independent // connection age timer. - let req = Request::get(format!("http://{}/", addr)) + let req = Request::get(format!("http://{addr}/")) .body(Body::empty()) .unwrap(); let response = client.send(req).await.unwrap(); diff --git a/src/internal_events/adaptive_concurrency.rs b/src/internal_events/adaptive_concurrency.rs index f40c05297ce61..e5898295ed79d 100644 --- a/src/internal_events/adaptive_concurrency.rs +++ b/src/internal_events/adaptive_concurrency.rs @@ -1,6 +1,6 @@ use std::time::Duration; -use metrics::{register_histogram, Histogram}; +use metrics::{histogram, Histogram}; #[derive(Clone, Copy)] pub struct AdaptiveConcurrencyLimitData { @@ -17,17 +17,17 @@ registered_event! { // These are histograms, as they may have a number of different // values over each reporting interval, and each of those values // is valuable for diagnosis. - limit: Histogram = register_histogram!("adaptive_concurrency_limit"), - reached_limit: Histogram = register_histogram!("adaptive_concurrency_reached_limit"), - back_pressure: Histogram = register_histogram!("adaptive_concurrency_back_pressure"), - past_rtt_mean: Histogram = register_histogram!("adaptive_concurrency_past_rtt_mean"), + limit: Histogram = histogram!("adaptive_concurrency_limit"), + reached_limit: Histogram = histogram!("adaptive_concurrency_reached_limit"), + back_pressure: Histogram = histogram!("adaptive_concurrency_back_pressure"), + past_rtt_mean: Histogram = histogram!("adaptive_concurrency_past_rtt_mean"), } fn emit(&self, data: AdaptiveConcurrencyLimitData) { self.limit.record(data.concurrency as f64); - let reached_limit = data.reached_limit.then_some(1.0).unwrap_or_default(); + let reached_limit = if data.reached_limit { 1.0 } else { Default::default() }; self.reached_limit.record(reached_limit); - let back_pressure = data.had_back_pressure.then_some(1.0).unwrap_or_default(); + let back_pressure = if data.had_back_pressure { 1.0 } else { Default::default() }; self.back_pressure.record(back_pressure); self.past_rtt_mean.record(data.past_rtt); // past_rtt_deviation is unrecorded @@ -36,7 +36,7 @@ registered_event! { registered_event! { AdaptiveConcurrencyInFlight => { - in_flight: Histogram = register_histogram!("adaptive_concurrency_in_flight"), + in_flight: Histogram = histogram!("adaptive_concurrency_in_flight"), } fn emit(&self, in_flight: u64) { @@ -46,7 +46,7 @@ registered_event! { registered_event! 
{ AdaptiveConcurrencyObservedRtt => { - observed_rtt: Histogram = register_histogram!("adaptive_concurrency_observed_rtt"), + observed_rtt: Histogram = histogram!("adaptive_concurrency_observed_rtt"), } fn emit(&self, rtt: Duration) { @@ -56,7 +56,7 @@ registered_event! { registered_event! { AdaptiveConcurrencyAveragedRtt => { - averaged_rtt: Histogram = register_histogram!("adaptive_concurrency_averaged_rtt"), + averaged_rtt: Histogram = histogram!("adaptive_concurrency_averaged_rtt"), } fn emit(&self, rtt: Duration) { diff --git a/src/internal_events/aggregate.rs b/src/internal_events/aggregate.rs index aa0d97e242521..7fc84bda29abf 100644 --- a/src/internal_events/aggregate.rs +++ b/src/internal_events/aggregate.rs @@ -6,7 +6,7 @@ pub struct AggregateEventRecorded; impl InternalEvent for AggregateEventRecorded { fn emit(self) { - counter!("aggregate_events_recorded_total", 1); + counter!("aggregate_events_recorded_total").increment(1); } } @@ -15,7 +15,7 @@ pub struct AggregateFlushed; impl InternalEvent for AggregateFlushed { fn emit(self) { - counter!("aggregate_flushes_total", 1); + counter!("aggregate_flushes_total").increment(1); } } @@ -24,6 +24,6 @@ pub struct AggregateUpdateFailed; impl InternalEvent for AggregateUpdateFailed { fn emit(self) { - counter!("aggregate_failed_updates", 1); + counter!("aggregate_failed_updates").increment(1); } } diff --git a/src/internal_events/amqp.rs b/src/internal_events/amqp.rs index fa5c46db4a18d..677ec605ea234 100644 --- a/src/internal_events/amqp.rs +++ b/src/internal_events/amqp.rs @@ -19,9 +19,9 @@ pub mod source { ); counter!( "component_received_bytes_total", - self.byte_size as u64, "protocol" => self.protocol, - ); + ) + .increment(self.byte_size as u64); } } @@ -36,13 +36,14 @@ pub mod source { error = ?self.error, error_type = error_type::REQUEST_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -57,13 +58,14 @@ pub mod source { error = ?self.error, error_type = error_type::ACKNOWLEDGMENT_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::ACKNOWLEDGMENT_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -78,13 +80,14 @@ pub mod source { error = ?self.error, error_type = error_type::COMMAND_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } } diff --git a/src/internal_events/apache_metrics.rs b/src/internal_events/apache_metrics.rs index b3e0b5a5fcb70..849741e44dd41 100644 --- a/src/internal_events/apache_metrics.rs +++ b/src/internal_events/apache_metrics.rs @@ -15,18 +15,20 @@ pub struct ApacheMetricsEventsReceived<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for ApacheMetricsEventsReceived<'a> { +impl InternalEvent for ApacheMetricsEventsReceived<'_> { // ## skip check-duplicate-events ## fn emit(self) { trace!(message = "Events received.", count = %self.count, byte_size = %self.byte_size, endpoint = %self.endpoint); counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "endpoint" => self.endpoint.to_owned(), - ); + ) + 
.increment(self.count as u64); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, + "component_received_event_bytes_total", "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(self.byte_size.get() as u64); } } @@ -44,13 +46,14 @@ impl InternalEvent for ApacheMetricsParseError<'_> { stage = error_stage::PROCESSING, error_type = error_type::PARSER_FAILED, endpoint = %self.endpoint, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::PROCESSING, "error_type" => error_type::PARSER_FAILED, "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(1); } } diff --git a/src/internal_events/api.rs b/src/internal_events/api.rs index 0066095d9bff6..c6163b67925c9 100644 --- a/src/internal_events/api.rs +++ b/src/internal_events/api.rs @@ -21,6 +21,6 @@ impl InternalEvent for ApiStarted { graphql = %if self.graphql { graphql } else { "off" } ); - counter!("api_started_total", 1); + counter!("api_started_total").increment(1); } } diff --git a/src/internal_events/aws.rs b/src/internal_events/aws.rs index b2b100136a903..341205f0846ab 100644 --- a/src/internal_events/aws.rs +++ b/src/internal_events/aws.rs @@ -19,9 +19,10 @@ impl InternalEvent for AwsBytesSent { region = ?self.region, ); counter!( - "component_sent_bytes_total", self.byte_size as u64, + "component_sent_bytes_total", "protocol" => "https", "region" => region, - ); + ) + .increment(self.byte_size as u64); } } diff --git a/src/internal_events/aws_cloudwatch_logs.rs b/src/internal_events/aws_cloudwatch_logs.rs index 0d99c561785e7..d8c4838dab7e1 100644 --- a/src/internal_events/aws_cloudwatch_logs.rs +++ b/src/internal_events/aws_cloudwatch_logs.rs @@ -18,14 +18,14 @@ impl InternalEvent for AwsCloudwatchLogsMessageSizeError { error_code = "message_too_long", error_type = error_type::ENCODER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "message_too_long", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } } diff --git a/src/internal_events/aws_ec2_metadata.rs b/src/internal_events/aws_ec2_metadata.rs index 0e60f5b96ba5f..3eafadf20222d 100644 --- a/src/internal_events/aws_ec2_metadata.rs +++ b/src/internal_events/aws_ec2_metadata.rs @@ -8,7 +8,7 @@ pub struct AwsEc2MetadataRefreshSuccessful; impl InternalEvent for AwsEc2MetadataRefreshSuccessful { fn emit(self) { debug!(message = "AWS EC2 metadata refreshed."); - counter!("metadata_refresh_successful_total", 1); + counter!("metadata_refresh_successful_total").increment(1); } } @@ -24,14 +24,15 @@ impl InternalEvent for AwsEc2MetadataRefreshError { error = %self.error, error_type = error_type::REQUEST_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); // deprecated - counter!("metadata_refresh_failed_total", 1); + counter!("metadata_refresh_failed_total").increment(1); } } diff --git a/src/internal_events/aws_ecs_metrics.rs b/src/internal_events/aws_ecs_metrics.rs index e98076e127546..969c5c33aa2e6 100644 --- a/src/internal_events/aws_ecs_metrics.rs +++ b/src/internal_events/aws_ecs_metrics.rs @@ -13,7 +13,7 @@ pub struct 
AwsEcsMetricsEventsReceived<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for AwsEcsMetricsEventsReceived<'a> { +impl InternalEvent for AwsEcsMetricsEventsReceived<'_> { fn emit(self) { trace!( message = "Events received.", @@ -23,13 +23,15 @@ impl<'a> InternalEvent for AwsEcsMetricsEventsReceived<'a> { endpoint = %self.endpoint, ); counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "endpoint" => self.endpoint.to_string(), - ); + ) + .increment(self.count as u64); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, + "component_received_event_bytes_total", "endpoint" => self.endpoint.to_string(), - ); + ) + .increment(self.byte_size.get() as u64); } } @@ -40,7 +42,7 @@ pub struct AwsEcsMetricsParseError<'a> { pub body: Cow<'a, str>, } -impl<'a> InternalEvent for AwsEcsMetricsParseError<'a> { +impl InternalEvent for AwsEcsMetricsParseError<'_> { fn emit(self) { error!( message = "Parsing error.", @@ -48,19 +50,20 @@ impl<'a> InternalEvent for AwsEcsMetricsParseError<'a> { error = ?self.error, stage = error_stage::PROCESSING, error_type = error_type::PARSER_FAILED, - internal_log_rate_limit = true, + ); debug!( message = %format!("Failed to parse response:\\n\\n{}\\n\\n", self.body.escape_debug()), endpoint = %self.endpoint, - internal_log_rate_limit = true, + ); - counter!("parse_errors_total", 1); + counter!("parse_errors_total").increment(1); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::PROCESSING, "error_type" => error_type::PARSER_FAILED, "endpoint" => self.endpoint.to_string(), - ); + ) + .increment(1); } } diff --git a/src/internal_events/aws_kinesis.rs b/src/internal_events/aws_kinesis.rs index 43b75e769bd6d..fd5f73bb7e11f 100644 --- a/src/internal_events/aws_kinesis.rs +++ b/src/internal_events/aws_kinesis.rs @@ -17,14 +17,15 @@ impl InternalEvent for AwsKinesisStreamNoPartitionKeyError<'_> { partition_key_field = %self.partition_key_field, error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } diff --git a/src/internal_events/aws_kinesis_firehose.rs b/src/internal_events/aws_kinesis_firehose.rs index 9b63a8dd1ef29..317435696b799 100644 --- a/src/internal_events/aws_kinesis_firehose.rs +++ b/src/internal_events/aws_kinesis_firehose.rs @@ -11,7 +11,7 @@ pub struct AwsKinesisFirehoseRequestReceived<'a> { pub source_arn: Option<&'a str>, } -impl<'a> InternalEvent for AwsKinesisFirehoseRequestReceived<'a> { +impl InternalEvent for AwsKinesisFirehoseRequestReceived<'_> { fn emit(self) { debug!( message = "Handling AWS Kinesis Firehose request.", @@ -39,7 +39,7 @@ impl<'a> AwsKinesisFirehoseRequestError<'a> { } } -impl<'a> InternalEvent for AwsKinesisFirehoseRequestError<'a> { +impl InternalEvent for AwsKinesisFirehoseRequestError<'_> { fn emit(self) { error!( message = "Error occurred while handling request.", @@ -48,14 +48,14 @@ impl<'a> InternalEvent for AwsKinesisFirehoseRequestError<'a> { error_type = error_type::REQUEST_FAILED, error_code = %self.error_code, request_id = %self.request_id.unwrap_or(""), - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::RECEIVING, "error_type" => 
error_type::REQUEST_FAILED,
             "error_code" => self.error_code,
-        );
+        )
+        .increment(1);
     }
 }
 
@@ -74,13 +74,14 @@ impl InternalEvent for AwsKinesisFirehoseAutomaticRecordDecodeError {
             error_type = error_type::PARSER_FAILED,
             error_code = %io_error_code(&self.error),
             compression = %self.compression,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "stage" => error_stage::PROCESSING,
             "error_type" => error_type::PARSER_FAILED,
             "error_code" => io_error_code(&self.error),
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/aws_sqs.rs b/src/internal_events/aws_sqs.rs
index 9f18a9f57ff50..37f4ea98d25ec 100644
--- a/src/internal_events/aws_sqs.rs
+++ b/src/internal_events/aws_sqs.rs
@@ -9,6 +9,7 @@ use vector_lib::internal_event::{error_stage, error_type};
 mod s3 {
     use aws_sdk_sqs::types::{
         BatchResultErrorEntry, DeleteMessageBatchRequestEntry, DeleteMessageBatchResultEntry,
+        SendMessageBatchRequestEntry, SendMessageBatchResultEntry,
     };
 
     use super::*;
@@ -20,7 +21,7 @@ mod s3 {
         pub error: &'a ProcessingError,
     }
 
-    impl<'a> InternalEvent for SqsMessageProcessingError<'a> {
+    impl InternalEvent for SqsMessageProcessingError<'_> {
         fn emit(self) {
             error!(
                 message = "Failed to process SQS message.",
@@ -29,14 +30,15 @@ mod s3 {
                 error_code = "failed_processing_sqs_message",
                 error_type = error_type::PARSER_FAILED,
                 stage = error_stage::PROCESSING,
-                internal_log_rate_limit = true,
+
             );
             counter!(
-                "component_errors_total", 1,
+                "component_errors_total",
                 "error_code" => "failed_processing_sqs_message",
                 "error_type" => error_type::PARSER_FAILED,
                 "stage" => error_stage::PROCESSING,
-            );
+            )
+            .increment(1);
         }
     }
 
@@ -52,10 +54,7 @@ mod s3 {
                 .map(|x| x.id.as_str())
                 .collect::<Vec<_>>()
                 .join(", "));
-            counter!(
-                "sqs_message_delete_succeeded_total",
-                self.message_ids.len() as u64
-            );
+            counter!("sqs_message_delete_succeeded_total").increment(self.message_ids.len() as u64);
         }
     }
 
@@ -75,14 +74,15 @@ mod s3 {
                 error_code = "failed_deleting_some_sqs_messages",
                 error_type = error_type::ACKNOWLEDGMENT_FAILED,
                 stage = error_stage::PROCESSING,
-                internal_log_rate_limit = true,
+
             );
             counter!(
-                "component_errors_total", 1,
+                "component_errors_total",
                 "error_code" => "failed_deleting_some_sqs_messages",
                 "error_type" => error_type::ACKNOWLEDGMENT_FAILED,
                 "stage" => error_stage::PROCESSING,
-            );
+            )
+            .increment(1);
         }
     }
 
@@ -104,14 +104,89 @@ mod s3 {
                 error_code = "failed_deleting_all_sqs_messages",
                 error_type = error_type::ACKNOWLEDGMENT_FAILED,
                 stage = error_stage::PROCESSING,
-                internal_log_rate_limit = true,
+
             );
             counter!(
-                "component_errors_total", 1,
+                "component_errors_total",
                 "error_code" => "failed_deleting_all_sqs_messages",
                 "error_type" => error_type::ACKNOWLEDGMENT_FAILED,
                 "stage" => error_stage::PROCESSING,
+            )
+            .increment(1);
+        }
+    }
+
+    #[derive(Debug)]
+    pub struct SqsMessageSentSucceeded {
+        pub message_ids: Vec<SendMessageBatchResultEntry>,
+    }
+
+    impl InternalEvent for SqsMessageSentSucceeded {
+        fn emit(self) {
+            trace!(message = "Deferred SQS message(s).",
+                message_ids = %self.message_ids.iter()
+                    .map(|x| x.id.as_str())
+                    .collect::<Vec<_>>()
+                    .join(", "));
+            counter!("sqs_message_defer_succeeded_total").increment(self.message_ids.len() as u64);
+        }
+    }
+
+    #[derive(Debug)]
+    pub struct SqsMessageSentPartialError {
+        pub entries: Vec<BatchResultErrorEntry>,
+    }
+
+    impl InternalEvent for SqsMessageSentPartialError {
+        fn emit(self) {
+            error!(
+                message = "Sending of deferred SQS message(s) failed.",
+                message_ids = %self.entries.iter()
+                    .map(|x| format!("{}/{}", x.id, x.code))
+                    .collect::<Vec<_>>()
+                    .join(", "),
+                error_code = "failed_deferring_some_sqs_messages",
+                error_type = error_type::ACKNOWLEDGMENT_FAILED,
+                stage = error_stage::PROCESSING,
+                internal_log_rate_limit = true,
+            );
+            counter!(
+                "component_errors_total",
+                "error_code" => "failed_deferring_some_sqs_messages",
+                "error_type" => error_type::ACKNOWLEDGMENT_FAILED,
+                "stage" => error_stage::PROCESSING,
+            )
+            .increment(1);
+        }
+    }
+
+    #[derive(Debug)]
+    pub struct SqsMessageSendBatchError<E> {
+        pub entries: Vec<SendMessageBatchRequestEntry>,
+        pub error: E,
+    }
+
+    impl<E: std::fmt::Display> InternalEvent for SqsMessageSendBatchError<E> {
+        fn emit(self) {
+            error!(
+                message = "Sending of deferred SQS message(s) failed.",
+                message_ids = %self.entries.iter()
+                    .map(|x| x.id.as_str())
+                    .collect::<Vec<_>>()
+                    .join(", "),
+                error = %self.error,
+                error_code = "failed_deferring_all_sqs_messages",
+                error_type = error_type::ACKNOWLEDGMENT_FAILED,
+                stage = error_stage::PROCESSING,
+                internal_log_rate_limit = true,
             );
+            counter!(
+                "component_errors_total",
+                "error_code" => "failed_deferring_all_sqs_messages",
+                "error_type" => error_type::ACKNOWLEDGMENT_FAILED,
+                "stage" => error_stage::PROCESSING,
+            )
+            .increment(1);
         }
     }
 }
@@ -121,7 +196,7 @@ pub struct SqsMessageReceiveError<'a, E> {
     pub error: &'a E,
 }
 
-impl<'a, E: std::fmt::Display> InternalEvent for SqsMessageReceiveError<'a, E> {
+impl<E: std::fmt::Display> InternalEvent for SqsMessageReceiveError<'_, E> {
     fn emit(self) {
         error!(
             message = "Failed to fetch SQS events.",
@@ -129,14 +204,15 @@ impl<E: std::fmt::Display> InternalEvent for SqsMessageReceiveError<'_, E> {
             error = %self.error,
             error_code = "failed_fetching_sqs_events",
             error_type = error_type::REQUEST_FAILED,
             stage = error_stage::RECEIVING,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "failed_fetching_sqs_events",
             "error_type" => error_type::REQUEST_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
 
@@ -148,8 +224,8 @@ pub struct SqsMessageReceiveSucceeded {
 impl InternalEvent for SqsMessageReceiveSucceeded {
     fn emit(self) {
         trace!(message = "Received SQS messages.", count = %self.count);
-        counter!("sqs_message_receive_succeeded_total", 1);
-        counter!("sqs_message_received_messages_total", self.count as u64);
+        counter!("sqs_message_receive_succeeded_total").increment(1);
+        counter!("sqs_message_received_messages_total").increment(self.count as u64);
     }
 }
 
@@ -158,10 +234,10 @@ pub struct SqsMessageProcessingSucceeded<'a> {
     pub message_id: &'a str,
 }
 
-impl<'a> InternalEvent for SqsMessageProcessingSucceeded<'a> {
+impl InternalEvent for SqsMessageProcessingSucceeded<'_> {
     fn emit(self) {
         trace!(message = "Processed SQS message successfully.", message_id = %self.message_id);
-        counter!("sqs_message_processing_succeeded_total", 1);
+        counter!("sqs_message_processing_succeeded_total").increment(1);
     }
 }
 
@@ -174,20 +250,21 @@ pub struct SqsMessageDeleteError<'a, E> {
 }
 
 #[cfg(feature = "sources-aws_sqs")]
-impl<'a, E: std::fmt::Display> InternalEvent for SqsMessageDeleteError<'a, E> {
+impl<E: std::fmt::Display> InternalEvent for SqsMessageDeleteError<'_, E> {
     fn emit(self) {
         error!(
             message = "Failed to delete SQS events.",
             error = %self.error,
             error_type = error_type::WRITER_FAILED,
             stage = error_stage::PROCESSING,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_type" => error_type::WRITER_FAILED,
             "stage" => error_stage::PROCESSING,
-        );
+        )
+        .increment(1);
     }
 }
 
@@ -201,10 +278,11 @@ pub struct SqsS3EventRecordInvalidEventIgnored<'a> {
     pub name: &'a str,
 }
-impl<'a> InternalEvent for SqsS3EventRecordInvalidEventIgnored<'a> { +impl InternalEvent for SqsS3EventRecordInvalidEventIgnored<'_> { fn emit(self) { warn!(message = "Ignored S3 record in SQS message for an event that was not ObjectCreated.", bucket = %self.bucket, key = %self.key, kind = %self.kind, name = %self.name); - counter!("sqs_s3_event_record_ignored_total", 1, "ignore_type" => "invalid_event_kind"); + counter!("sqs_s3_event_record_ignored_total", "ignore_type" => "invalid_event_kind") + .increment(1); } } diff --git a/src/internal_events/batch.rs b/src/internal_events/batch.rs index f5091d9aa5eff..5afc99d47f1bf 100644 --- a/src/internal_events/batch.rs +++ b/src/internal_events/batch.rs @@ -17,14 +17,15 @@ impl InternalEvent for LargeEventDroppedError { length = %self.length, error_type = error_type::CONDITION_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "oversized", "error_type" => error_type::CONDITION_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } } diff --git a/src/internal_events/codecs.rs b/src/internal_events/codecs.rs index b530dccdd4550..e876db1dab5ab 100644 --- a/src/internal_events/codecs.rs +++ b/src/internal_events/codecs.rs @@ -15,14 +15,15 @@ impl InternalEvent for DecoderFramingError { error_code = "decoder_frame", error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "decoder_frame", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -31,7 +32,7 @@ pub struct DecoderDeserializeError<'a> { pub error: &'a crate::Error, } -impl<'a> InternalEvent for DecoderDeserializeError<'a> { +impl InternalEvent for DecoderDeserializeError<'_> { fn emit(self) { error!( message = "Failed deserializing frame.", @@ -39,14 +40,15 @@ impl<'a> InternalEvent for DecoderDeserializeError<'a> { error_code = "decoder_deserialize", error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "decoder_deserialize", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -55,7 +57,7 @@ pub struct EncoderFramingError<'a> { pub error: &'a vector_lib::codecs::encoding::BoxedFramingError, } -impl<'a> InternalEvent for EncoderFramingError<'a> { +impl InternalEvent for EncoderFramingError<'_> { fn emit(self) { let reason = "Failed framing bytes."; error!( @@ -64,14 +66,15 @@ impl<'a> InternalEvent for EncoderFramingError<'a> { error_code = "encoder_frame", error_type = error_type::ENCODER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "encoder_frame", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } } @@ -81,7 +84,7 @@ pub struct EncoderSerializeError<'a> { pub error: &'a crate::Error, } -impl<'a> InternalEvent for EncoderSerializeError<'a> { +impl InternalEvent for EncoderSerializeError<'_> { fn emit(self) { let reason = "Failed serializing frame."; error!( @@ -90,14 +93,15 @@ impl<'a> 
InternalEvent for EncoderSerializeError<'a> { error_code = "encoder_serialize", error_type = error_type::ENCODER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "encoder_serialize", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } } @@ -116,13 +120,14 @@ impl InternalEvent for EncoderWriteError<'_, E> { error = %self.error, error_type = error_type::IO_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); if self.count > 0 { emit!(ComponentEventsDropped:: { count: self.count, diff --git a/src/internal_events/common.rs b/src/internal_events/common.rs index 63b35951fd177..f342710daedb2 100644 --- a/src/internal_events/common.rs +++ b/src/internal_events/common.rs @@ -21,10 +21,11 @@ impl InternalEvent for EndpointBytesReceived<'_> { endpoint = %self.endpoint, ); counter!( - "component_received_bytes_total", self.byte_size as u64, + "component_received_bytes_total", "protocol" => self.protocol.to_owned(), "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(self.byte_size as u64); } } @@ -35,7 +36,7 @@ pub struct EndpointBytesSent<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for EndpointBytesSent<'a> { +impl InternalEvent for EndpointBytesSent<'_> { fn emit(self) { trace!( message = "Bytes sent.", @@ -44,10 +45,11 @@ impl<'a> InternalEvent for EndpointBytesSent<'a> { endpoint = %self.endpoint ); counter!( - "component_sent_bytes_total", self.byte_size as u64, + "component_sent_bytes_total", "protocol" => self.protocol.to_string(), "endpoint" => self.endpoint.to_string() - ); + ) + .increment(self.byte_size as u64); } } @@ -64,14 +66,15 @@ impl InternalEvent for SocketOutgoingConnectionError { error_code = "failed_connecting", error_type = error_type::CONNECTION_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "failed_connecting", "error_type" => error_type::CONNECTION_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } } @@ -89,14 +92,14 @@ impl InternalEvent for StreamClosedError { error_code = STREAM_CLOSED, error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => STREAM_CLOSED, "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: self.count, reason: "Downstream is closed.", @@ -113,8 +116,8 @@ pub struct CollectionCompleted { impl InternalEvent for CollectionCompleted { fn emit(self) { debug!(message = "Collection completed."); - counter!("collect_completed_total", 1); - histogram!("collect_duration_seconds", self.end - self.start); + counter!("collect_completed_total").increment(1); + histogram!("collect_duration_seconds").record(self.end - self.start); } } @@ -133,12 +136,13 @@ impl InternalEvent for SinkRequestBuildError { error = %self.error, error_type = error_type::ENCODER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + 
"component_errors_total", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/conditions.rs b/src/internal_events/conditions.rs index 718cddc8d4e5d..0f7cb70962b08 100644 --- a/src/internal_events/conditions.rs +++ b/src/internal_events/conditions.rs @@ -7,19 +7,19 @@ pub struct VrlConditionExecutionError<'a> { pub error: &'a str, } -impl<'a> InternalEvent for VrlConditionExecutionError<'a> { +impl InternalEvent for VrlConditionExecutionError<'_> { fn emit(self) { error!( message = "VRL condition execution failed.", error = %self.error, error_type = error_type::SCRIPT_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::SCRIPT_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/datadog_agent.rs b/src/internal_events/datadog_agent.rs new file mode 100644 index 0000000000000..686e3e0e3179c --- /dev/null +++ b/src/internal_events/datadog_agent.rs @@ -0,0 +1,27 @@ +use metrics::counter; + +use vector_lib::internal_event::InternalEvent; +use vector_lib::internal_event::{error_stage, error_type}; + +#[derive(Debug)] +pub struct DatadogAgentJsonParseError<'a> { + pub error: &'a serde_json::Error, +} + +impl InternalEvent for DatadogAgentJsonParseError<'_> { + fn emit(self) { + error!( + message = "Failed to parse JSON body.", + error = ?self.error, + error_type = error_type::PARSER_FAILED, + stage = error_stage::PROCESSING, + + ); + counter!( + "component_errors_total", + "error_type" => error_type::PARSER_FAILED, + "stage" => error_stage::PROCESSING, + ) + .increment(1); + } +} diff --git a/src/internal_events/datadog_metrics.rs b/src/internal_events/datadog_metrics.rs index af4255d70b184..03812c2c81def 100644 --- a/src/internal_events/datadog_metrics.rs +++ b/src/internal_events/datadog_metrics.rs @@ -9,7 +9,7 @@ pub struct DatadogMetricsEncodingError<'a> { pub dropped_events: usize, } -impl<'a> InternalEvent for DatadogMetricsEncodingError<'a> { +impl InternalEvent for DatadogMetricsEncodingError<'_> { fn emit(self) { error!( message = self.reason, @@ -17,14 +17,14 @@ impl<'a> InternalEvent for DatadogMetricsEncodingError<'a> { error_type = error_type::ENCODER_FAILED, intentional = "false", stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => self.error_code, "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); if self.dropped_events > 0 { emit!(ComponentEventsDropped:: { diff --git a/src/internal_events/datadog_traces.rs b/src/internal_events/datadog_traces.rs index f20a660e3b806..aee946528391b 100644 --- a/src/internal_events/datadog_traces.rs +++ b/src/internal_events/datadog_traces.rs @@ -18,13 +18,14 @@ impl InternalEvent for DatadogTracesEncodingError { error_reason = %self.error_reason, error_type = error_type::ENCODER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); if self.dropped_events > 0 { emit!(ComponentEventsDropped:: { @@ -47,13 +48,14 @@ impl InternalEvent for DatadogTracesAPMStatsError { error = %self.error, error_type = error_type::WRITER_FAILED, stage = 
error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); // No dropped events because APM stats payloads are not considered events. } diff --git a/src/internal_events/dnstap.rs b/src/internal_events/dnstap.rs index 5020f6322aebd..adf9ea8688d6c 100644 --- a/src/internal_events/dnstap.rs +++ b/src/internal_events/dnstap.rs @@ -14,29 +14,13 @@ impl InternalEvent for DnstapParseError { error = %self.error, stage = error_stage::PROCESSING, error_type = error_type::PARSER_FAILED, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::PROCESSING, "error_type" => error_type::PARSER_FAILED, - ); - } -} - -#[derive(Debug)] -pub(crate) struct DnstapParseWarning { - pub error: E, -} - -impl InternalEvent for DnstapParseWarning { - fn emit(self) { - warn!( - message = "Recoverable error occurred while parsing dnstap data.", - error = %self.error, - stage = error_stage::PROCESSING, - error_type = error_type::PARSER_FAILED, - internal_log_rate_limit = true, - ); + ) + .increment(1); } } diff --git a/src/internal_events/docker_logs.rs b/src/internal_events/docker_logs.rs index a7864588a09be..f1daaa6c2d4f4 100644 --- a/src/internal_events/docker_logs.rs +++ b/src/internal_events/docker_logs.rs @@ -23,13 +23,12 @@ impl InternalEvent for DockerLogsEventsReceived<'_> { container_id = %self.container_id ); counter!( - "component_received_events_total", 1, - "container_name" => self.container_name.to_owned() - ); + "component_received_events_total", "container_name" => self.container_name.to_owned() + ) + .increment(1); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, - "container_name" => self.container_name.to_owned() - ); + "component_received_event_bytes_total", "container_name" => self.container_name.to_owned() + ).increment(self.byte_size.get() as u64); } } @@ -46,7 +45,7 @@ impl InternalEvent for DockerLogsContainerEventReceived<'_> { container_id = %self.container_id, action = %self.action, ); - counter!("container_processed_events_total", 1); + counter!("container_processed_events_total").increment(1); } } @@ -61,7 +60,7 @@ impl InternalEvent for DockerLogsContainerWatch<'_> { message = "Started watching for container logs.", container_id = %self.container_id, ); - counter!("containers_watched_total", 1); + counter!("containers_watched_total").increment(1); } } @@ -76,7 +75,7 @@ impl InternalEvent for DockerLogsContainerUnwatch<'_> { message = "Stopped watching for container logs.", container_id = %self.container_id, ); - counter!("containers_unwatched_total", 1); + counter!("containers_unwatched_total").increment(1); } } @@ -97,10 +96,11 @@ impl InternalEvent for DockerLogsCommunicationError<'_> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::CONNECTION_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -121,11 +121,12 @@ impl InternalEvent for DockerLogsContainerMetadataFetchError<'_> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, "container_id" => self.container_id.to_owned(), - ); + ) + .increment(1); } } @@ -146,11 +147,12 @@ impl InternalEvent for 
DockerLogsTimestampParseError<'_> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, "container_id" => self.container_id.to_owned(), - ); + ) + .increment(1); } } @@ -168,13 +170,14 @@ impl InternalEvent for DockerLogsLoggingDriverUnsupportedError<'_> { error_type = error_type::CONFIGURATION_FAILED, stage = error_stage::RECEIVING, container_id = ?self.container_id, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::CONFIGURATION_FAILED, "stage" => error_stage::RECEIVING, "container_id" => self.container_id.to_owned(), - ); + ) + .increment(1); } } diff --git a/src/internal_events/encoding_transcode.rs b/src/internal_events/encoding_transcode.rs index d8532a4e96aab..869423fb98241 100644 --- a/src/internal_events/encoding_transcode.rs +++ b/src/internal_events/encoding_transcode.rs @@ -13,7 +13,7 @@ impl InternalEvent for DecoderBomRemoval { from_encoding = %self.from_encoding, internal_log_rate_limit = true ); - counter!("decoder_bom_removals_total", 1); + counter!("decoder_bom_removals_total").increment(1); } } @@ -27,11 +27,10 @@ impl InternalEvent for DecoderMalformedReplacement { warn!( message = "Replaced malformed sequences with replacement character while decoding to utf8.", from_encoding = %self.from_encoding, - internal_log_rate_limit = true ); // NOT the actual number of replacements in the output: there's no easy // way to get that from the lib we use here (encoding_rs) - counter!("decoder_malformed_replacement_warnings_total", 1); + counter!("decoder_malformed_replacement_warnings_total").increment(1); } } @@ -49,6 +48,6 @@ impl InternalEvent for EncoderUnmappableReplacement { ); // NOT the actual number of replacements in the output: there's no easy // way to get that from the lib we use here (encoding_rs) - counter!("encoder_unmappable_replacement_warnings_total", 1); + counter!("encoder_unmappable_replacement_warnings_total").increment(1); } } diff --git a/src/internal_events/eventstoredb_metrics.rs b/src/internal_events/eventstoredb_metrics.rs index 7158b38e2fb63..7ee7f36cf9f50 100644 --- a/src/internal_events/eventstoredb_metrics.rs +++ b/src/internal_events/eventstoredb_metrics.rs @@ -14,13 +14,14 @@ impl InternalEvent for EventStoreDbMetricsHttpError { error = ?self.error, stage = error_stage::RECEIVING, error_type = error_type::REQUEST_FAILED, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::RECEIVING, "error_type" => error_type::REQUEST_FAILED, - ); + ) + .increment(1); } } @@ -36,12 +37,13 @@ impl InternalEvent for EventStoreDbStatsParsingError { error = ?self.error, stage = error_stage::PROCESSING, error_type = error_type::PARSER_FAILED, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::PROCESSING, "error_type" => error_type::PARSER_FAILED, - ); + ) + .increment(1); } } diff --git a/src/internal_events/exec.rs b/src/internal_events/exec.rs index a2de45a6d2a2a..1cb656cd493c6 100644 --- a/src/internal_events/exec.rs +++ b/src/internal_events/exec.rs @@ -26,13 +26,15 @@ impl InternalEvent for ExecEventsReceived<'_> { command = %self.command, ); counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "command" => self.command.to_owned(), - ); + ) + 
.increment(self.count as u64); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, + "component_received_event_bytes_total", "command" => self.command.to_owned(), - ); + ) + .increment(self.byte_size.get() as u64); } } @@ -51,15 +53,16 @@ impl InternalEvent for ExecFailedError<'_> { error_type = error_type::COMMAND_FAILED, error_code = %io_error_code(&self.error), stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "command" => self.command.to_owned(), "error_type" => error_type::COMMAND_FAILED, "error_code" => io_error_code(&self.error), "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -79,14 +82,15 @@ impl InternalEvent for ExecTimeoutError<'_> { error = %self.error, error_type = error_type::TIMED_OUT, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "command" => self.command.to_owned(), "error_type" => error_type::TIMED_OUT, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -114,19 +118,21 @@ impl InternalEvent for ExecCommandExecuted<'_> { command = %self.command, exit_status = %exit_status, elapsed_millis = %self.exec_duration.as_millis(), - internal_log_rate_limit = true, + ); counter!( - "command_executed_total", 1, + "command_executed_total", "command" => self.command.to_owned(), "exit_status" => exit_status.clone(), - ); + ) + .increment(1); histogram!( - "command_execution_duration_seconds", self.exec_duration, - "command" => self.command.to_owned(), + "command_execution_duration_seconds", "exit_status" => exit_status, - ); + "command" => self.command.to_owned(), + ) + .record(self.exec_duration); } } @@ -147,7 +153,7 @@ impl ExecFailedToSignalChild { match self { #[cfg(unix)] - SignalError(err) => format!("errno_{}", err), + SignalError(err) => format!("errno_{err}"), #[cfg(unix)] FailedToMarshalPid(_) => String::from("failed_to_marshal_pid"), #[cfg(unix)] @@ -164,9 +170,9 @@ impl std::fmt::Display for ExecFailedToSignalChild { match self { #[cfg(unix)] - SignalError(err) => write!(f, "errno: {}", err), + SignalError(err) => write!(f, "errno: {err}"), #[cfg(unix)] - FailedToMarshalPid(err) => write!(f, "failed to marshal pid to i32: {}", err), + FailedToMarshalPid(err) => write!(f, "failed to marshal pid to i32: {err}"), #[cfg(unix)] NoPid => write!(f, "child had no pid"), #[cfg(windows)] @@ -188,15 +194,16 @@ impl InternalEvent for ExecFailedToSignalChildError<'_> { error_code = %self.error.to_error_code(), error_type = error_type::COMMAND_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "command" => format!("{:?}", self.command.as_std()), "error_code" => self.error.to_error_code(), "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -209,13 +216,13 @@ impl InternalEvent for ExecChannelClosedError { message = exec_reason, error_type = error_type::COMMAND_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason: exec_reason diff --git a/src/internal_events/file.rs b/src/internal_events/file.rs index 876ffc3af0c2f..c6d80460143f2 100644 --- 
a/src/internal_events/file.rs +++ b/src/internal_events/file.rs @@ -30,7 +30,7 @@ pub struct FileOpen { impl InternalEvent for FileOpen { fn emit(self) { - gauge!("open_files", self.count as f64); + gauge!("open_files").set(self.count as f64); } } @@ -51,16 +51,17 @@ impl InternalEvent for FileBytesSent<'_> { ); if self.include_file_metric_tag { counter!( - "component_sent_bytes_total", self.byte_size as u64, + "component_sent_bytes_total", "protocol" => "file", "file" => self.file.clone().into_owned(), - ); + ) } else { counter!( - "component_sent_bytes_total", self.byte_size as u64, + "component_sent_bytes_total", "protocol" => "file", - ); + ) } + .increment(self.byte_size as u64); } } @@ -73,7 +74,7 @@ pub struct FileIoError<'a, P> { pub dropped_events: usize, } -impl<'a, P: std::fmt::Debug> InternalEvent for FileIoError<'a, P> { +impl InternalEvent for FileIoError<'_, P> { fn emit(self) { error!( message = %self.message, @@ -82,14 +83,14 @@ impl<'a, P: std::fmt::Debug> InternalEvent for FileIoError<'a, P> { error_code = %self.code, error_type = error_type::IO_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => self.code, "error_type" => error_type::IO_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); if self.dropped_events > 0 { emit!(ComponentEventsDropped:: { @@ -104,8 +105,10 @@ impl<'a, P: std::fmt::Debug> InternalEvent for FileIoError<'a, P> { mod source { use std::{io::Error, path::Path, time::Duration}; + use bytes::BytesMut; use metrics::counter; use vector_lib::file_source::FileSourceInternalEvents; + use vector_lib::internal_event::{ComponentEventsDropped, INTENTIONAL}; use super::{FileOpen, InternalEvent}; use vector_lib::emit; @@ -121,7 +124,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileBytesReceived<'a> { + impl InternalEvent for FileBytesReceived<'_> { fn emit(self) { trace!( message = "Bytes received.", @@ -131,16 +134,17 @@ mod source { ); if self.include_file_metric_tag { counter!( - "component_received_bytes_total", self.byte_size as u64, + "component_received_bytes_total", "protocol" => "file", "file" => self.file.to_owned() - ); + ) } else { counter!( - "component_received_bytes_total", self.byte_size as u64, + "component_received_bytes_total", "protocol" => "file", - ); + ) } + .increment(self.byte_size as u64); } } @@ -162,19 +166,19 @@ mod source { ); if self.include_file_metric_tag { counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "file" => self.file.to_owned(), - ); + ) + .increment(self.count as u64); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, + "component_received_event_bytes_total", "file" => self.file.to_owned(), - ); + ) + .increment(self.byte_size.get() as u64); } else { - counter!("component_received_events_total", self.count as u64); - counter!( - "component_received_event_bytes_total", - self.byte_size.get() as u64, - ); + counter!("component_received_events_total").increment(self.count as u64); + counter!("component_received_event_bytes_total") + .increment(self.byte_size.get() as u64); } } } @@ -185,7 +189,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileChecksumFailed<'a> { + impl InternalEvent for FileChecksumFailed<'_> { fn emit(self) { warn!( message = "Currently ignoring file too small to fingerprint.", @@ -193,12 +197,13 @@ mod source { ); if 
self.include_file_metric_tag { counter!( - "checksum_errors_total", 1, + "checksum_errors_total", "file" => self.file.to_string_lossy().into_owned(), - ); + ) } else { - counter!("checksum_errors_total", 1); + counter!("checksum_errors_total") } + .increment(1); } } @@ -209,7 +214,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileFingerprintReadError<'a> { + impl InternalEvent for FileFingerprintReadError<'_> { fn emit(self) { error!( message = "Failed reading file for fingerprinting.", @@ -218,24 +223,25 @@ mod source { error_code = "reading_fingerprint", error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); if self.include_file_metric_tag { counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "reading_fingerprint", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, "file" => self.file.to_string_lossy().into_owned(), - ); + ) } else { counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "reading_fingerprint", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) } + .increment(1); } } @@ -248,7 +254,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileDeleteError<'a> { + impl InternalEvent for FileDeleteError<'_> { fn emit(self) { error!( message = "Failed in deleting file.", @@ -257,24 +263,24 @@ mod source { error_code = DELETION_FAILED, error_type = error_type::COMMAND_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, ); if self.include_file_metric_tag { counter!( - "component_errors_total", 1, + "component_errors_total", "file" => self.file.to_string_lossy().into_owned(), "error_code" => DELETION_FAILED, "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, - ); + ) } else { counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => DELETION_FAILED, "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, - ); + ) } + .increment(1); } } @@ -284,7 +290,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileDeleted<'a> { + impl InternalEvent for FileDeleted<'_> { fn emit(self) { info!( message = "File deleted.", @@ -292,12 +298,13 @@ mod source { ); if self.include_file_metric_tag { counter!( - "files_deleted_total", 1, + "files_deleted_total", "file" => self.file.to_string_lossy().into_owned(), - ); + ) } else { - counter!("files_deleted_total", 1); + counter!("files_deleted_total") } + .increment(1); } } @@ -308,7 +315,7 @@ mod source { pub reached_eof: bool, } - impl<'a> InternalEvent for FileUnwatched<'a> { + impl InternalEvent for FileUnwatched<'_> { fn emit(self) { let reached_eof = if self.reached_eof { "true" } else { "false" }; info!( @@ -318,15 +325,17 @@ mod source { ); if self.include_file_metric_tag { counter!( - "files_unwatched_total", 1, + "files_unwatched_total", "file" => self.file.to_string_lossy().into_owned(), "reached_eof" => reached_eof, - ); + ) } else { - counter!("files_unwatched_total", 1, + counter!( + "files_unwatched_total", "reached_eof" => reached_eof, - ); + ) } + .increment(1); } } @@ -337,7 +346,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileWatchError<'a> { + impl InternalEvent for FileWatchError<'_> { fn emit(self) { error!( message = "Failed to watch file.", @@ -346,24 +355,25 @@ mod source { error_type = error_type::COMMAND_FAILED, 
stage = error_stage::RECEIVING, file = %self.file.display(), - internal_log_rate_limit = true, + ); if self.include_file_metric_tag { counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "watching", "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, "file" => self.file.to_string_lossy().into_owned(), - ); + ) } else { counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "watching", "error_type" => error_type::COMMAND_FAILED, "stage" => error_stage::RECEIVING, - ); + ) } + .increment(1); } } @@ -374,7 +384,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileResumed<'a> { + impl InternalEvent for FileResumed<'_> { fn emit(self) { info!( message = "Resuming to watch file.", @@ -383,12 +393,13 @@ mod source { ); if self.include_file_metric_tag { counter!( - "files_resumed_total", 1, + "files_resumed_total", "file" => self.file.to_string_lossy().into_owned(), - ); + ) } else { - counter!("files_resumed_total", 1); + counter!("files_resumed_total") } + .increment(1); } } @@ -398,7 +409,7 @@ mod source { pub include_file_metric_tag: bool, } - impl<'a> InternalEvent for FileAdded<'a> { + impl InternalEvent for FileAdded<'_> { fn emit(self) { info!( message = "Found new file to watch.", @@ -406,12 +417,13 @@ mod source { ); if self.include_file_metric_tag { counter!( - "files_added_total", 1, + "files_added_total", "file" => self.file.to_string_lossy().into_owned(), - ); + ) } else { - counter!("files_added_total", 1); + counter!("files_added_total") } + .increment(1); } } @@ -428,7 +440,7 @@ mod source { count = %self.count, duration_ms = self.duration.as_millis() as u64, ); - counter!("checkpoints_total", self.count as u64); + counter!("checkpoints_total").increment(self.count as u64); } } @@ -445,14 +457,15 @@ mod source { error_code = "writing_checkpoints", error_type = error_type::WRITER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "writing_checkpoints", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -462,7 +475,7 @@ mod source { pub error: &'a Error, } - impl<'a> InternalEvent for PathGlobbingError<'a> { + impl InternalEvent for PathGlobbingError<'_> { fn emit(self) { error!( message = "Failed to glob path.", @@ -471,14 +484,46 @@ mod source { error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, path = %self.path.display(), - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "globbing", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, + ) + .increment(1); + } + } + + #[derive(Debug)] + pub struct FileLineTooBigError<'a> { + pub truncated_bytes: &'a BytesMut, + pub configured_limit: usize, + pub encountered_size_so_far: usize, + } + + impl InternalEvent for FileLineTooBigError<'_> { + fn emit(self) { + error!( + message = "Found line that exceeds max_line_bytes; discarding.", + truncated_bytes = ?self.truncated_bytes, + configured_limit = self.configured_limit, + encountered_size_so_far = self.encountered_size_so_far, + internal_log_rate_limit = true, + error_type = error_type::CONDITION_FAILED, + stage = error_stage::RECEIVING, ); + counter!( + "component_errors_total", + "error_code" => "reading_line_from_file", + "error_type" => error_type::CONDITION_FAILED, + "stage" 
=> error_stage::RECEIVING,
+            )
+            .increment(1);
+            emit!(ComponentEventsDropped::<INTENTIONAL> {
+                count: 1,
+                reason: "Found line that exceeds max_line_bytes; discarding.",
+            });
         }
     }
@@ -564,5 +609,18 @@ mod source {
         fn emit_path_globbing_failed(&self, path: &Path, error: &Error) {
             emit!(PathGlobbingError { path, error });
         }
+
+        fn emit_file_line_too_long(
+            &self,
+            truncated_bytes: &bytes::BytesMut,
+            configured_limit: usize,
+            encountered_size_so_far: usize,
+        ) {
+            emit!(FileLineTooBigError {
+                truncated_bytes,
+                configured_limit,
+                encountered_size_so_far
+            });
+        }
     }
 }
diff --git a/src/internal_events/file_descriptor.rs b/src/internal_events/file_descriptor.rs
index a27ac7875ab71..367f223557f7c 100644
--- a/src/internal_events/file_descriptor.rs
+++ b/src/internal_events/file_descriptor.rs
@@ -20,9 +20,10 @@ where
             internal_log_rate_limit = true
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_type" => error_type::CONNECTION_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/fluent.rs b/src/internal_events/fluent.rs
index 6eb5d1d829ce1..aa0d26610b72a 100644
--- a/src/internal_events/fluent.rs
+++ b/src/internal_events/fluent.rs
@@ -12,7 +12,7 @@ pub struct FluentMessageReceived {
 impl InternalEvent for FluentMessageReceived {
     fn emit(self) {
         trace!(message = "Received fluent message.", byte_size = %self.byte_size);
-        counter!("component_received_events_total", 1);
+        counter!("component_received_events_total").increment(1);
     }
 }
@@ -22,7 +22,7 @@ pub struct FluentMessageDecodeError<'a> {
     pub base64_encoded_message: String,
 }
-impl<'a> InternalEvent for FluentMessageDecodeError<'a> {
+impl InternalEvent for FluentMessageDecodeError<'_> {
     fn emit(self) {
         error!(
             message = "Error decoding fluent message.",
@@ -30,12 +30,13 @@ impl<'a> InternalEvent for FluentMessageDecodeError<'a> {
             base64_encoded_message = %self.base64_encoded_message,
             error_type = error_type::PARSER_FAILED,
             stage = error_stage::PROCESSING,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_type" => error_type::PARSER_FAILED,
             "stage" => error_stage::PROCESSING,
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/gcp_pubsub.rs b/src/internal_events/gcp_pubsub.rs
index 686f2a6703c9c..be3f274e8d225 100644
--- a/src/internal_events/gcp_pubsub.rs
+++ b/src/internal_events/gcp_pubsub.rs
@@ -14,15 +14,16 @@ impl InternalEvent for GcpPubsubConnectError {
             error_code = "failed_connecting",
             error_type = error_type::CONNECTION_FAILED,
             stage = error_stage::RECEIVING,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "failed_connecting",
             "error_type" => error_type::CONNECTION_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
@@ -38,15 +39,16 @@ impl InternalEvent for GcpPubsubStreamingPullError {
             error_code = "failed_streaming_pull",
             error_type = error_type::REQUEST_FAILED,
             stage = error_stage::RECEIVING,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "failed_streaming_pull",
             "error_type" => error_type::REQUEST_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
@@ -62,14 +64,15 @@ impl InternalEvent for GcpPubsubReceiveError {
             error_code = "failed_fetching_events",
             error_type = error_type::REQUEST_FAILED,
             stage = error_stage::RECEIVING,
-            internal_log_rate_limit = true,
+
         );
         counter!( -
"component_errors_total", 1, + "component_errors_total", "error_code" => "failed_fetching_events", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/grpc.rs b/src/internal_events/grpc.rs index ea3fae807df40..6b8fec71365e5 100644 --- a/src/internal_events/grpc.rs +++ b/src/internal_events/grpc.rs @@ -13,7 +13,7 @@ pub struct GrpcServerRequestReceived; impl InternalEvent for GrpcServerRequestReceived { fn emit(self) { - counter!("grpc_server_messages_received_total", 1); + counter!("grpc_server_messages_received_total").increment(1); } } @@ -23,7 +23,7 @@ pub struct GrpcServerResponseSent<'a, B> { pub latency: Duration, } -impl<'a, B> InternalEvent for GrpcServerResponseSent<'a, B> { +impl InternalEvent for GrpcServerResponseSent<'_, B> { fn emit(self) { let grpc_code = self .response @@ -34,8 +34,8 @@ impl<'a, B> InternalEvent for GrpcServerResponseSent<'a, B> { let grpc_code = grpc_code_to_name(grpc_code); let labels = &[(GRPC_STATUS_LABEL, grpc_code)]; - counter!("grpc_server_messages_sent_total", 1, labels); - histogram!("grpc_server_handler_duration_seconds", self.latency, labels); + counter!("grpc_server_messages_sent_total", labels).increment(1); + histogram!("grpc_server_handler_duration_seconds", labels).record(self.latency); } } @@ -54,10 +54,11 @@ impl InternalEvent for GrpcInvalidCompressionSchemeError<'_> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -79,10 +80,11 @@ where internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/heartbeat.rs b/src/internal_events/heartbeat.rs index 8a7dc6beb9ef4..4e71d8a45fd1a 100644 --- a/src/internal_events/heartbeat.rs +++ b/src/internal_events/heartbeat.rs @@ -1,5 +1,6 @@ use std::time::Instant; +use crate::built_info; use metrics::gauge; use vector_lib::internal_event::InternalEvent; @@ -11,6 +12,15 @@ pub struct Heartbeat { impl InternalEvent for Heartbeat { fn emit(self) { trace!(target: "vector", message = "Beep."); - gauge!("uptime_seconds", self.since.elapsed().as_secs() as f64); + gauge!("uptime_seconds").set(self.since.elapsed().as_secs() as f64); + gauge!( + "build_info", + "debug" => built_info::DEBUG, + "version" => built_info::PKG_VERSION, + "rust_version" => built_info::RUST_VERSION, + "arch" => built_info::TARGET_ARCH, + "revision" => built_info::VECTOR_BUILD_DESC.unwrap_or("") + ) + .set(1.0); } } diff --git a/src/internal_events/host_metrics.rs b/src/internal_events/host_metrics.rs index 8b811aef56d35..56d5649159dfb 100644 --- a/src/internal_events/host_metrics.rs +++ b/src/internal_events/host_metrics.rs @@ -13,14 +13,14 @@ impl InternalEvent for HostMetricsScrapeError { message = self.message, error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -37,14 +37,15 @@ impl InternalEvent for HostMetricsScrapeDetailError { error = %self.error, error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); 
counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -63,13 +64,14 @@ impl InternalEvent for HostMetricsScrapeFilesystemError { error = %self.error, error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/http.rs b/src/internal_events/http.rs index 032f4e35edd66..5a1e818dcc484 100644 --- a/src/internal_events/http.rs +++ b/src/internal_events/http.rs @@ -19,7 +19,7 @@ impl InternalEvent for HttpServerRequestReceived { message = "Received HTTP request.", internal_log_rate_limit = true ); - counter!("http_server_requests_received_total", 1); + counter!("http_server_requests_received_total").increment(1); } } @@ -29,14 +29,14 @@ pub struct HttpServerResponseSent<'a, B> { pub latency: Duration, } -impl<'a, B> InternalEvent for HttpServerResponseSent<'a, B> { +impl InternalEvent for HttpServerResponseSent<'_, B> { fn emit(self) { let labels = &[( HTTP_STATUS_LABEL, self.response.status().as_u16().to_string(), )]; - counter!("http_server_responses_sent_total", 1, labels); - histogram!("http_server_handler_duration_seconds", self.latency, labels); + counter!("http_server_responses_sent_total", labels).increment(1); + histogram!("http_server_handler_duration_seconds", labels).record(self.latency); } } @@ -56,10 +56,11 @@ impl InternalEvent for HttpBytesReceived<'_> { protocol = %self.protocol ); counter!( - "component_received_bytes_total", self.byte_size as u64, + "component_received_bytes_total", "http_path" => self.http_path.to_string(), "protocol" => self.protocol, - ); + ) + .increment(self.byte_size as u64); } } @@ -81,21 +82,23 @@ impl InternalEvent for HttpEventsReceived<'_> { protocol = %self.protocol, ); - histogram!("component_received_events_count", self.count as f64); + histogram!("component_received_events_count").record(self.count as f64); counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "http_path" => self.http_path.to_string(), "protocol" => self.protocol, - ); + ) + .increment(self.count as u64); counter!( "component_received_event_bytes_total", - self.byte_size.get() as u64, "http_path" => self.http_path.to_string(), "protocol" => self.protocol, - ); + ) + .increment(self.byte_size.get() as u64); } } +#[cfg(feature = "sources-utils-http")] #[derive(Debug)] pub struct HttpBadRequest<'a> { code: u16, @@ -114,7 +117,8 @@ impl<'a> HttpBadRequest<'a> { } } -impl<'a> InternalEvent for HttpBadRequest<'a> { +#[cfg(feature = "sources-utils-http")] +impl InternalEvent for HttpBadRequest<'_> { fn emit(self) { warn!( message = "Received bad request.", @@ -123,14 +127,15 @@ impl<'a> InternalEvent for HttpBadRequest<'a> { error_type = error_type::REQUEST_FAILED, error_stage = error_stage::RECEIVING, http_code = %self.code, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => self.error_code, "error_type" => error_type::REQUEST_FAILED, "error_stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -140,7 +145,7 @@ pub struct HttpDecompressError<'a> { pub encoding: &'a str, } -impl<'a> InternalEvent for HttpDecompressError<'a> { +impl InternalEvent for HttpDecompressError<'_> 
{
     fn emit(self) {
         error!(
             message = "Failed decompressing payload.",
             error = %self.error,
             error_code = "failed_decompressing_payload",
             error_type = error_type::PARSER_FAILED,
             stage = error_stage::RECEIVING,
             internal_log_rate_limit = true
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "failed_decompressing_payload",
             "error_type" => error_type::PARSER_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
@@ -164,7 +170,7 @@ pub struct HttpInternalError<'a> {
     pub message: &'a str,
 }
-impl<'a> InternalEvent for HttpInternalError<'a> {
+impl InternalEvent for HttpInternalError<'_> {
     fn emit(self) {
         error!(
             message = %self.message,
             error_type = error_type::CONNECTION_FAILED,
             stage = error_stage::RECEIVING,
             internal_log_rate_limit = true
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_type" => error_type::CONNECTION_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/http_client.rs b/src/internal_events/http_client.rs
index 8fa1cdb918668..4aabe421d083c 100644
--- a/src/internal_events/http_client.rs
+++ b/src/internal_events/http_client.rs
@@ -29,7 +29,7 @@ fn remove_sensitive(headers: &HeaderMap) -> HeaderMap
     headers
 }
-impl<'a, T: HttpBody> InternalEvent for AboutToSendHttpRequest<'a, T> {
+impl<T: HttpBody> InternalEvent for AboutToSendHttpRequest<'_, T> {
     fn emit(self) {
         debug!(
             message = "Sending HTTP request.",
@@ -39,7 +39,8 @@ impl<'a, T: HttpBody> InternalEvent for AboutToSendHttpRequest<'a, T> {
             headers = ?remove_sensitive(self.request.headers()),
             body = %FormatBody(self.request.body()),
         );
-        counter!("http_client_requests_sent_total", 1, "method" => self.request.method().to_string());
+        counter!("http_client_requests_sent_total", "method" => self.request.method().to_string())
+            .increment(1);
     }
 }
@@ -49,7 +50,7 @@ pub struct GotHttpResponse<'a, T> {
     pub roundtrip: Duration,
 }
-impl<'a, T: HttpBody> InternalEvent for GotHttpResponse<'a, T> {
+impl<T: HttpBody> InternalEvent for GotHttpResponse<'_, T> {
     fn emit(self) {
         debug!(
             message = "HTTP response.",
@@ -58,9 +59,17 @@ impl<'a, T: HttpBody> InternalEvent for GotHttpResponse<'a, T> {
             headers = ?remove_sensitive(self.response.headers()),
             body = %FormatBody(self.response.body()),
         );
-        counter!("http_client_responses_total", 1, "status" => self.response.status().as_u16().to_string());
-        histogram!("http_client_rtt_seconds", self.roundtrip);
-        histogram!("http_client_response_rtt_seconds", self.roundtrip, "status" => self.response.status().as_u16().to_string());
+        counter!(
+            "http_client_responses_total",
+            "status" => self.response.status().as_u16().to_string(),
+        )
+        .increment(1);
+        histogram!("http_client_rtt_seconds").record(self.roundtrip);
+        histogram!(
+            "http_client_response_rtt_seconds",
+            "status" => self.response.status().as_u16().to_string(),
+        )
+        .record(self.roundtrip);
     }
 }
@@ -70,36 +79,37 @@ pub struct GotHttpWarning<'a> {
     pub error: &'a crate::Error,
     pub roundtrip: Duration,
 }
-impl<'a> InternalEvent for GotHttpWarning<'a> {
+impl InternalEvent for GotHttpWarning<'_> {
     fn emit(self) {
         warn!(
             message = "HTTP error.",
             error = %self.error,
             error_type = error_type::REQUEST_FAILED,
             stage = error_stage::PROCESSING,
-            internal_log_rate_limit = true,
+
         );
-        counter!("http_client_errors_total", 1, "error_kind" => self.error.to_string());
-        histogram!("http_client_rtt_seconds", self.roundtrip);
-        histogram!("http_client_error_rtt_seconds", self.roundtrip, "error_kind" => self.error.to_string());
+        counter!("http_client_errors_total", "error_kind" => self.error.to_string()).increment(1);
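// A side-by-side sketch of the rewrite this patch applies throughout, assuming
// the builder-style `metrics` API (0.22-era): the macros no longer take the
// value; they return a handle that records it. Metric names are taken from the
// hunks above; the standalone function is illustrative, not part of the patch.
use std::time::Duration;
use metrics::{counter, histogram};

fn handle_style_sketch(error_kind: String, roundtrip: Duration) {
    // Before: counter!("http_client_errors_total", 1, "error_kind" => error_kind);
    counter!("http_client_errors_total", "error_kind" => error_kind).increment(1);
    // Before: histogram!("http_client_rtt_seconds", roundtrip);
    histogram!("http_client_rtt_seconds").record(roundtrip);
}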
+        histogram!("http_client_rtt_seconds").record(self.roundtrip);
+        histogram!("http_client_error_rtt_seconds", "error_kind" => self.error.to_string())
+            .record(self.roundtrip);
     }
 }
 
 /// Newtype placeholder to provide a formatter for the request and response body.
 struct FormatBody<'a, B>(&'a B);
-impl<'a, B: HttpBody> std::fmt::Display for FormatBody<'a, B> {
+impl<B: HttpBody> std::fmt::Display for FormatBody<'_, B> {
     fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
         let size = self.0.size_hint();
         match (size.lower(), size.upper()) {
             (0, None) => write!(fmt, "[unknown]"),
-            (lower, None) => write!(fmt, "[>={} bytes]", lower),
+            (lower, None) => write!(fmt, "[>={lower} bytes]"),
 
             (0, Some(0)) => write!(fmt, "[empty]"),
-            (0, Some(upper)) => write!(fmt, "[<={} bytes]", upper),
+            (0, Some(upper)) => write!(fmt, "[<={upper} bytes]"),
 
-            (lower, Some(upper)) if lower == upper => write!(fmt, "[{} bytes]", lower),
-            (lower, Some(upper)) => write!(fmt, "[{}..={} bytes]", lower, upper),
+            (lower, Some(upper)) if lower == upper => write!(fmt, "[{lower} bytes]"),
+            (lower, Some(upper)) => write!(fmt, "[{lower}..={upper} bytes]"),
         }
     }
 }
diff --git a/src/internal_events/http_client_source.rs b/src/internal_events/http_client_source.rs
index 19d7f5311ea06..ad1b989d3969a 100644
--- a/src/internal_events/http_client_source.rs
+++ b/src/internal_events/http_client_source.rs
@@ -23,13 +23,15 @@ impl InternalEvent for HttpClientEventsReceived {
             url = %self.url,
         );
         counter!(
-            "component_received_events_total", self.count as u64,
+            "component_received_events_total",
             "uri" => self.url.clone(),
-        );
+        )
+        .increment(self.count as u64);
         counter!(
-            "component_received_event_bytes_total", self.byte_size.get() as u64,
+            "component_received_event_bytes_total",
             "uri" => self.url.clone(),
-        );
+        )
+        .increment(self.byte_size.get() as u64);
     }
 }
@@ -47,15 +49,16 @@ impl InternalEvent for HttpClientHttpResponseError {
             stage = error_stage::RECEIVING,
             error_type = error_type::REQUEST_FAILED,
             error_code = %http_error_code(self.code.as_u16()),
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "url" => self.url,
             "stage" => error_stage::RECEIVING,
             "error_type" => error_type::REQUEST_FAILED,
             "error_code" => http_error_code(self.code.as_u16()),
-        );
+        )
+        .increment(1);
     }
 }
@@ -73,13 +76,14 @@ impl InternalEvent for HttpClientHttpError {
             error = ?self.error,
             error_type = error_type::REQUEST_FAILED,
             stage = error_stage::RECEIVING,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "url" => self.url,
             "error_type" => error_type::REQUEST_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/influxdb.rs b/src/internal_events/influxdb.rs
index 075150b845f1e..d487353b8dc68 100644
--- a/src/internal_events/influxdb.rs
+++ b/src/internal_events/influxdb.rs
@@ -16,13 +16,13 @@ impl InternalEvent for InfluxdbEncodingError {
             error = %self.error_message,
             error_type = error_type::ENCODER_FAILED,
             stage = error_stage::PROCESSING,
-            internal_log_rate_limit = true,
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_type" => error_type::ENCODER_FAILED,
             "stage" => error_stage::PROCESSING,
-        );
+        )
+        .increment(1);
         emit!(ComponentEventsDropped::<UNINTENTIONAL> {
             count: self.count,
diff --git a/src/internal_events/internal_logs.rs b/src/internal_events/internal_logs.rs
index 9321e9e7a81da..29c37abc209ec 100644
--- a/src/internal_events/internal_logs.rs
+++ 
b/src/internal_events/internal_logs.rs @@ -9,11 +9,12 @@ pub struct InternalLogsBytesReceived { impl InternalEvent for InternalLogsBytesReceived { fn emit(self) { - // MUST not emit logs here to avoid an infinite log loop + // MUST NOT emit logs here to avoid an infinite log loop counter!( - "component_received_bytes_total", self.byte_size as u64, + "component_received_bytes_total", "protocol" => "internal", - ); + ) + .increment(self.byte_size as u64); } } @@ -25,11 +26,8 @@ pub struct InternalLogsEventsReceived { impl InternalEvent for InternalLogsEventsReceived { fn emit(self) { - // MUST not emit logs here to avoid an infinite log loop - counter!("component_received_events_total", self.count as u64); - counter!( - "component_received_event_bytes_total", - self.byte_size.get() as u64 - ); + // MUST NOT emit logs here to avoid an infinite log loop + counter!("component_received_events_total").increment(self.count as u64); + counter!("component_received_event_bytes_total").increment(self.byte_size.get() as u64); } } diff --git a/src/internal_events/internal_metrics.rs b/src/internal_events/internal_metrics.rs deleted file mode 100644 index 680b209c7e1eb..0000000000000 --- a/src/internal_events/internal_metrics.rs +++ /dev/null @@ -1,21 +0,0 @@ -use metrics::counter; -use vector_lib::internal_event::InternalEvent; - -#[derive(Debug)] -pub struct InternalMetricsBytesReceived { - pub byte_size: usize, -} - -impl InternalEvent for InternalMetricsBytesReceived { - fn emit(self) { - trace!( - message = "Bytes received.", - byte_size = %self.byte_size, - protocol = "internal", - ); - counter!( - "component_received_bytes_total", self.byte_size as u64, - "protocol" => "internal", - ); - } -} diff --git a/src/internal_events/journald.rs b/src/internal_events/journald.rs index 0056b37d46a7d..9f7e9a694f9ca 100644 --- a/src/internal_events/journald.rs +++ b/src/internal_events/journald.rs @@ -17,13 +17,14 @@ impl InternalEvent for JournaldInvalidRecordError { text = %self.text, error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::PROCESSING, "error_type" => error_type::PARSER_FAILED, - ); + ) + .increment(1); } } @@ -39,13 +40,14 @@ impl InternalEvent for JournaldStartJournalctlError { error = %self.error, error_type = error_type::COMMAND_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::RECEIVING, "error_type" => error_type::COMMAND_FAILED, - ); + ) + .increment(1); } } @@ -61,14 +63,14 @@ impl InternalEvent for JournaldReadError { error = %self.error, error_type = error_type::READER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( "component_errors_total", - 1, "stage" => error_stage::PROCESSING, "error_type" => error_type::READER_FAILED, - ); + ) + .increment(1); } } @@ -86,13 +88,13 @@ impl InternalEvent for JournaldCheckpointSetError { error = %self.error, error_type = error_type::IO_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::PROCESSING, "error_type" => error_type::IO_FAILED, - ); + ) + .increment(1); } } @@ -110,12 +112,13 @@ impl InternalEvent for JournaldCheckpointFileOpenError { error = %self.error, error_type = error_type::IO_FAILED, stage = 
error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "stage" => error_stage::RECEIVING, "error_type" => error_type::IO_FAILED, - ); + ) + .increment(1); } } diff --git a/src/internal_events/kafka.rs b/src/internal_events/kafka.rs index bd09773843444..05e06ea5161ff 100644 --- a/src/internal_events/kafka.rs +++ b/src/internal_events/kafka.rs @@ -1,5 +1,5 @@ use metrics::{counter, gauge}; -use vector_lib::{internal_event::InternalEvent, update_counter}; +use vector_lib::internal_event::InternalEvent; use vector_lib::{ internal_event::{error_stage, error_type}, json_size::JsonSize, @@ -14,7 +14,7 @@ pub struct KafkaBytesReceived<'a> { pub partition: i32, } -impl<'a> InternalEvent for KafkaBytesReceived<'a> { +impl InternalEvent for KafkaBytesReceived<'_> { fn emit(self) { trace!( message = "Bytes received.", @@ -25,11 +25,11 @@ impl<'a> InternalEvent for KafkaBytesReceived<'a> { ); counter!( "component_received_bytes_total", - self.byte_size as u64, "protocol" => self.protocol, "topic" => self.topic.to_string(), "partition" => self.partition.to_string(), - ); + ) + .increment(self.byte_size as u64); } } @@ -41,7 +41,7 @@ pub struct KafkaEventsReceived<'a> { pub partition: i32, } -impl<'a> InternalEvent for KafkaEventsReceived<'a> { +impl InternalEvent for KafkaEventsReceived<'_> { fn emit(self) { trace!( message = "Events received.", @@ -50,13 +50,18 @@ impl<'a> InternalEvent for KafkaEventsReceived<'a> { topic = self.topic, partition = %self.partition, ); - counter!("component_received_events_total", self.count as u64, "topic" => self.topic.to_string(), "partition" => self.partition.to_string()); + counter!( + "component_received_events_total", + "topic" => self.topic.to_string(), + "partition" => self.partition.to_string(), + ) + .increment(self.count as u64); counter!( "component_received_event_bytes_total", - self.byte_size.get() as u64, "topic" => self.topic.to_string(), "partition" => self.partition.to_string(), - ); + ) + .increment(self.byte_size.get() as u64); } } @@ -73,14 +78,15 @@ impl InternalEvent for KafkaOffsetUpdateError { error_code = "kafka_offset_update", error_type = error_type::READER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "kafka_offset_update", "error_type" => error_type::READER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } } @@ -97,14 +103,15 @@ impl InternalEvent for KafkaReadError { error_code = "reading_message", error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "reading_message", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -116,42 +123,28 @@ pub struct KafkaStatisticsReceived<'a> { impl InternalEvent for KafkaStatisticsReceived<'_> { fn emit(self) { - gauge!("kafka_queue_messages", self.statistics.msg_cnt as f64); - gauge!( - "kafka_queue_messages_bytes", - self.statistics.msg_size as f64 - ); - update_counter!("kafka_requests_total", self.statistics.tx as u64); - update_counter!( - "kafka_requests_bytes_total", - self.statistics.tx_bytes as u64 - ); - update_counter!("kafka_responses_total", self.statistics.rx as u64); - update_counter!( - "kafka_responses_bytes_total", - self.statistics.rx_bytes as u64 - ); - update_counter!( - 
"kafka_produced_messages_total", - self.statistics.txmsgs as u64 - ); - update_counter!( - "kafka_produced_messages_bytes_total", - self.statistics.txmsg_bytes as u64 - ); - update_counter!( - "kafka_consumed_messages_total", - self.statistics.rxmsgs as u64 - ); - update_counter!( - "kafka_consumed_messages_bytes_total", - self.statistics.rxmsg_bytes as u64 - ); + gauge!("kafka_queue_messages").set(self.statistics.msg_cnt as f64); + gauge!("kafka_queue_messages_bytes").set(self.statistics.msg_size as f64); + counter!("kafka_requests_total").absolute(self.statistics.tx as u64); + counter!("kafka_requests_bytes_total").absolute(self.statistics.tx_bytes as u64); + counter!("kafka_responses_total").absolute(self.statistics.rx as u64); + counter!("kafka_responses_bytes_total").absolute(self.statistics.rx_bytes as u64); + counter!("kafka_produced_messages_total").absolute(self.statistics.txmsgs as u64); + counter!("kafka_produced_messages_bytes_total") + .absolute(self.statistics.txmsg_bytes as u64); + counter!("kafka_consumed_messages_total").absolute(self.statistics.rxmsgs as u64); + counter!("kafka_consumed_messages_bytes_total") + .absolute(self.statistics.rxmsg_bytes as u64); if self.expose_lag_metrics { for (topic_id, topic) in &self.statistics.topics { for (partition_id, partition) in &topic.partitions { - gauge!("kafka_consumer_lag", partition.consumer_lag as f64, "topic_id" => topic_id.clone(), "partition_id" => partition_id.to_string()); + gauge!( + "kafka_consumer_lag", + "topic_id" => topic_id.clone(), + "partition_id" => partition_id.to_string(), + ) + .set(partition.consumer_lag as f64); } } } @@ -170,13 +163,13 @@ impl InternalEvent for KafkaHeaderExtractionError<'_> { error_type = error_type::PARSER_FAILED, stage = error_stage::RECEIVING, header_field = self.header_field.to_string(), - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "extracting_header", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/kubernetes_logs.rs b/src/internal_events/kubernetes_logs.rs index 2829de42beffb..ed68c14790656 100644 --- a/src/internal_events/kubernetes_logs.rs +++ b/src/internal_events/kubernetes_logs.rs @@ -1,9 +1,10 @@ use metrics::counter; -use vector_lib::internal_event::InternalEvent; +use vector_lib::internal_event::{InternalEvent, INTENTIONAL}; use vector_lib::{ internal_event::{error_stage, error_type, ComponentEventsDropped, UNINTENTIONAL}, json_size::JsonSize, }; +use vrl::core::Value; use crate::event::Event; @@ -33,15 +34,23 @@ impl InternalEvent for KubernetesLogsEventsReceived<'_> { let pod_name = pod_info.name; let pod_namespace = pod_info.namespace; - counter!("component_received_events_total", 1, "pod_name" => pod_name.clone(), "pod_namespace" => pod_namespace.clone()); - counter!("component_received_event_bytes_total", self.byte_size.get() as u64, "pod_name" => pod_name, "pod_namespace" => pod_namespace); - } - None => { - counter!("component_received_events_total", 1); + counter!( + "component_received_events_total", + "pod_name" => pod_name.clone(), + "pod_namespace" => pod_namespace.clone(), + ) + .increment(1); counter!( "component_received_event_bytes_total", - self.byte_size.get() as u64 - ); + "pod_name" => pod_name, + "pod_namespace" => pod_namespace, + ) + .increment(self.byte_size.get() as u64); + } + None => { + counter!("component_received_events_total").increment(1); + 
counter!("component_received_event_bytes_total") + .increment(self.byte_size.get() as u64); } } } @@ -62,14 +71,15 @@ impl InternalEvent for KubernetesLogsEventAnnotationError<'_> { error_code = ANNOTATION_FAILED, error_type = error_type::READER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => ANNOTATION_FAILED, "error_type" => error_type::READER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -86,15 +96,16 @@ impl InternalEvent for KubernetesLogsEventNamespaceAnnotationError<'_> { error_code = ANNOTATION_FAILED, error_type = error_type::READER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => ANNOTATION_FAILED, "error_type" => error_type::READER_FAILED, "stage" => error_stage::PROCESSING, - ); - counter!("k8s_event_namespace_annotation_failures_total", 1); + ) + .increment(1); + counter!("k8s_event_namespace_annotation_failures_total").increment(1); } } @@ -111,15 +122,16 @@ impl InternalEvent for KubernetesLogsEventNodeAnnotationError<'_> { error_code = ANNOTATION_FAILED, error_type = error_type::READER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => ANNOTATION_FAILED, "error_type" => error_type::READER_FAILED, "stage" => error_stage::PROCESSING, - ); - counter!("k8s_event_node_annotation_failures_total", 1); + ) + .increment(1); + counter!("k8s_event_node_annotation_failures_total").increment(1); } } @@ -134,7 +146,7 @@ impl InternalEvent for KubernetesLogsFormatPickerEdgeCase { message = "Encountered format picker edge case.", what = %self.what, ); - counter!("k8s_format_picker_edge_cases_total", 1); + counter!("k8s_format_picker_edge_cases_total").increment(1); } } @@ -150,14 +162,15 @@ impl InternalEvent for KubernetesLogsDockerFormatParseError<'_> { error = %self.error, error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); - counter!("k8s_docker_format_parse_failures_total", 1); + ) + .increment(1); + counter!("k8s_docker_format_parse_failures_total").increment(1); } } @@ -178,17 +191,50 @@ impl InternalEvent for KubernetesLifecycleError { error_code = KUBERNETES_LIFECYCLE, error_type = error_type::READER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => KUBERNETES_LIFECYCLE, "error_type" => error_type::READER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: self.count, reason: self.message, }); } } + +#[derive(Debug)] +pub struct KubernetesMergedLineTooBigError<'a> { + pub event: &'a Value, + pub configured_limit: usize, + pub encountered_size_so_far: usize, +} + +impl InternalEvent for KubernetesMergedLineTooBigError<'_> { + fn emit(self) { + error!( + message = "Found line that exceeds max_merged_line_bytes; discarding.", + event = ?self.event, + configured_limit = self.configured_limit, + encountered_size_so_far = self.encountered_size_so_far, + internal_log_rate_limit = true, + error_type = error_type::CONDITION_FAILED, + stage 
= error_stage::RECEIVING, + ); + counter!( + "component_errors_total", + "error_code" => "reading_line_from_kubernetes_log", + "error_type" => error_type::CONDITION_FAILED, + "stage" => error_stage::RECEIVING, + ) + .increment(1); + emit!(ComponentEventsDropped:: { + count: 1, + reason: "Found line that exceeds max_merged_line_bytes; discarding.", + }); + } +} diff --git a/src/internal_events/log_to_metric.rs b/src/internal_events/log_to_metric.rs index 68bad4df8af1c..0469cdacdd047 100644 --- a/src/internal_events/log_to_metric.rs +++ b/src/internal_events/log_to_metric.rs @@ -8,7 +8,7 @@ pub struct LogToMetricFieldNullError<'a> { pub field: &'a str, } -impl<'a> InternalEvent for LogToMetricFieldNullError<'a> { +impl InternalEvent for LogToMetricFieldNullError<'_> { fn emit(self) { let reason = "Unable to convert null field."; error!( @@ -20,12 +20,13 @@ impl<'a> InternalEvent for LogToMetricFieldNullError<'a> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "field_null", "error_type" => error_type::CONDITION_FAILED, "stage" => error_stage::PROCESSING, "null_field" => self.field.to_string(), - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } @@ -36,7 +37,7 @@ pub struct LogToMetricParseFloatError<'a> { pub error: ParseFloatError, } -impl<'a> InternalEvent for LogToMetricParseFloatError<'a> { +impl InternalEvent for LogToMetricParseFloatError<'_> { fn emit(self) { let reason = "Failed to parse field as float."; error!( @@ -49,12 +50,13 @@ impl<'a> InternalEvent for LogToMetricParseFloatError<'a> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "failed_parsing_float", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, "field" => self.field.to_string(), - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } @@ -66,7 +68,7 @@ pub struct MetricMetadataInvalidFieldValueError<'a> { pub field_value: &'a str, } -impl<'a> InternalEvent for MetricMetadataInvalidFieldValueError<'a> { +impl InternalEvent for MetricMetadataInvalidFieldValueError<'_> { fn emit(self) { let reason = "Field contained unsupported value."; error!( @@ -79,12 +81,13 @@ impl<'a> InternalEvent for MetricMetadataInvalidFieldValueError<'a> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "invalid_field_value", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, "field" => self.field.to_string(), - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } @@ -95,7 +98,7 @@ pub struct MetricMetadataParseError<'a> { pub kind: &'a str, } -impl<'a> InternalEvent for MetricMetadataParseError<'a> { +impl InternalEvent for MetricMetadataParseError<'_> { fn emit(self) { let reason = "Failed to parse field as float."; error!( @@ -107,12 +110,13 @@ impl<'a> InternalEvent for MetricMetadataParseError<'a> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => format!("failed_parsing_{}", self.kind), "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, "field" => self.field.to_string(), - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } @@ -131,11 +135,12 @@ impl InternalEvent for MetricMetadataMetricDetailsNotFoundError { internal_log_rate_limit = true ); counter!( - 
"component_errors_total", 1, + "component_errors_total", "error_code" => "missing_metric_details", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } diff --git a/src/internal_events/logplex.rs b/src/internal_events/logplex.rs index 2fb0698b3d53d..b08df7c624d4f 100644 --- a/src/internal_events/logplex.rs +++ b/src/internal_events/logplex.rs @@ -11,7 +11,7 @@ pub struct HerokuLogplexRequestReceived<'a> { pub drain_token: &'a str, } -impl<'a> InternalEvent for HerokuLogplexRequestReceived<'a> { +impl InternalEvent for HerokuLogplexRequestReceived<'_> { fn emit(self) { debug!( message = "Handling logplex request.", @@ -36,13 +36,14 @@ impl InternalEvent for HerokuLogplexRequestReadError { error_type = error_type::READER_FAILED, error_code = io_error_code(&self.error), stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::READER_FAILED, "error_code" => io_error_code(&self.error), "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/loki.rs b/src/internal_events/loki.rs index c07b14ea450de..6a5e7567b65b2 100644 --- a/src/internal_events/loki.rs +++ b/src/internal_events/loki.rs @@ -12,15 +12,15 @@ impl InternalEvent for LokiEventUnlabeledError { error_code = "unlabeled_event", error_type = error_type::CONDITION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "unlabeled_event", "error_type" => error_type::CONDITION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -38,7 +38,6 @@ impl InternalEvent for LokiOutOfOrderEventDroppedError { error_code = "out_of_order", error_type = error_type::CONDITION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); emit!(ComponentEventsDropped:: { @@ -47,11 +46,12 @@ impl InternalEvent for LokiOutOfOrderEventDroppedError { }); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "out_of_order", "error_type" => error_type::CONDITION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -66,8 +66,33 @@ impl InternalEvent for LokiOutOfOrderEventRewritten { message = "Timestamps rewritten.", count = self.count, reason = "out_of_order", - internal_log_rate_limit = true, ); - counter!("rewritten_timestamp_events_total", self.count as u64); + counter!("rewritten_timestamp_events_total").increment(self.count as u64); + } +} + +#[derive(Debug)] +pub struct LokiTimestampNonParsableEventsDropped; + +impl InternalEvent for LokiTimestampNonParsableEventsDropped { + fn emit(self) { + let reason = "Dropping timestamp non-parsable event(s)."; + + error!( + message = "Event timestamp non-parsable.", + error_code = "non-parsable_timestamp", + error_type = error_type::CONDITION_FAILED, + stage = error_stage::PROCESSING, + ); + + emit!(ComponentEventsDropped:: { count: 1, reason }); + + counter!( + "component_errors_total", + "error_code" => "non-parsable_timestamp", + "error_type" => error_type::CONDITION_FAILED, + "stage" => error_stage::PROCESSING, + ) + .increment(1); } } diff --git a/src/internal_events/lua.rs b/src/internal_events/lua.rs index 4037344c3bc99..392f1b8b26199 100644 --- a/src/internal_events/lua.rs +++ b/src/internal_events/lua.rs @@ -11,7 +11,7 @@ pub struct LuaGcTriggered { 
impl InternalEvent for LuaGcTriggered { fn emit(self) { - gauge!("lua_memory_used_bytes", self.used_memory as f64); + gauge!("lua_memory_used_bytes").set(self.used_memory as f64); } } @@ -28,14 +28,14 @@ impl InternalEvent for LuaScriptError { error_code = mlua_error_code(&self.error), error_type = error_type::COMMAND_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => mlua_error_code(&self.error), "error_type" => error_type::SCRIPT_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason: "Error in lua script.", @@ -57,20 +57,21 @@ impl InternalEvent for LuaBuildError { error_type = error_type::SCRIPT_FAILED, error_code = lua_build_error_code(&self.error), stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => lua_build_error_code(&self.error), "error_type" => error_type::SCRIPT_FAILED, "stage" => error_stage:: PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } } -const fn mlua_error_code(err: &mlua::Error) -> &'static str { +fn mlua_error_code(err: &mlua::Error) -> &'static str { use mlua::Error::*; match err { @@ -78,14 +79,15 @@ const fn mlua_error_code(err: &mlua::Error) -> &'static str { RuntimeError(_) => "runtime_error", MemoryError(_) => "memory_error", SafetyError(_) => "memory_safety_error", - MemoryLimitNotAvailable => "memory_limit_not_available", + MemoryControlNotAvailable => "memory_control_not_available", RecursiveMutCallback => "mutable_callback_called_recursively", CallbackDestructed => "callback_destructed", StackError => "out_of_stack", BindError => "too_many_arguments_to_function_bind", + BadArgument { .. } => "bad_argument", ToLuaConversionError { .. } => "error_converting_value_to_lua", FromLuaConversionError { .. } => "error_converting_value_from_lua", - CoroutineInactive => "coroutine_inactive", + CoroutineUnresumable => "coroutine_unresumable", UserDataTypeMismatch => "userdata_type_mismatch", UserDataDestructed => "userdata_destructed", UserDataBorrowError => "userdata_borrow_error", @@ -96,6 +98,7 @@ const fn mlua_error_code(err: &mlua::Error) -> &'static str { CallbackError { .. } => "callback_error", PreviouslyResumedPanic => "previously_resumed_panic", ExternalError(_) => "external_error", + WithContext { cause, .. 
} => mlua_error_code(cause), _ => "unknown", } } diff --git a/src/internal_events/metric_to_log.rs b/src/internal_events/metric_to_log.rs index 66c146dfb84a3..5bbceeb5017c6 100644 --- a/src/internal_events/metric_to_log.rs +++ b/src/internal_events/metric_to_log.rs @@ -16,13 +16,13 @@ impl InternalEvent for MetricToLogSerializeError { error = ?self.error, error_type = error_type::ENCODER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }) } diff --git a/src/internal_events/mod.rs b/src/internal_events/mod.rs index 0da2383b7eaee..a77796cc8af16 100644 --- a/src/internal_events/mod.rs +++ b/src/internal_events/mod.rs @@ -30,6 +30,8 @@ mod batch; mod codecs; mod common; mod conditions; +#[cfg(feature = "sources-datadog_agent")] +mod datadog_agent; #[cfg(feature = "sinks-datadog_metrics")] mod datadog_metrics; #[cfg(feature = "sinks-datadog_traces")] @@ -47,7 +49,7 @@ mod encoding_transcode; mod eventstoredb_metrics; #[cfg(feature = "sources-exec")] mod exec; -#[cfg(any(feature = "sources-file-descriptor", feature = "sources-stdin"))] +#[cfg(any(feature = "sources-file_descriptor", feature = "sources-stdin"))] mod file_descriptor; #[cfg(feature = "transforms-filter")] mod filter; @@ -68,8 +70,6 @@ mod http_client_source; mod influxdb; #[cfg(feature = "sources-internal_logs")] mod internal_logs; -#[cfg(feature = "sources-internal_metrics")] -mod internal_metrics; #[cfg(all(unix, feature = "sources-journald"))] mod journald; #[cfg(any(feature = "sources-kafka", feature = "sinks-kafka"))] @@ -106,7 +106,7 @@ mod prometheus; mod pulsar; #[cfg(feature = "sources-redis")] mod redis; -#[cfg(feature = "transforms-reduce")] +#[cfg(feature = "transforms-impl-reduce")] mod reduce; mod remap; mod sample; @@ -125,8 +125,12 @@ mod template; mod throttle; mod udp; mod unix; -#[cfg(feature = "sinks-websocket")] +#[cfg(any(feature = "sources-websocket", feature = "sinks-websocket"))] mod websocket; +#[cfg(feature = "sinks-websocket-server")] +mod websocket_server; +#[cfg(feature = "transforms-window")] +mod window; #[cfg(any( feature = "sources-file", @@ -165,6 +169,8 @@ pub(crate) use self::aws_kinesis_firehose::*; #[cfg(any(feature = "sources-aws_s3", feature = "sources-aws_sqs",))] pub(crate) use self::aws_sqs::*; pub(crate) use self::codecs::*; +#[cfg(feature = "sources-datadog_agent")] +pub(crate) use self::datadog_agent::*; #[cfg(feature = "sinks-datadog_metrics")] pub(crate) use self::datadog_metrics::*; #[cfg(feature = "sinks-datadog_traces")] @@ -187,7 +193,7 @@ pub(crate) use self::exec::*; feature = "sinks-file", ))] pub(crate) use self::file::*; -#[cfg(any(feature = "sources-file-descriptor", feature = "sources-stdin"))] +#[cfg(any(feature = "sources-file_descriptor", feature = "sources-stdin"))] pub(crate) use self::file_descriptor::*; #[cfg(feature = "transforms-filter")] pub(crate) use self::filter::*; @@ -205,8 +211,6 @@ pub(crate) use self::http_client_source::*; pub(crate) use self::influxdb::*; #[cfg(feature = "sources-internal_logs")] pub(crate) use self::internal_logs::*; -#[cfg(feature = "sources-internal_metrics")] -pub(crate) use self::internal_metrics::*; #[cfg(all(unix, feature = "sources-journald"))] pub(crate) use self::journald::*; #[cfg(any(feature = "sources-kafka", feature = "sinks-kafka"))] @@ -241,7 +245,7 @@ pub(crate) use 
self::prometheus::*; pub(crate) use self::pulsar::*; #[cfg(feature = "sources-redis")] pub(crate) use self::redis::*; -#[cfg(feature = "transforms-reduce")] +#[cfg(feature = "transforms-impl-reduce")] pub(crate) use self::reduce::*; #[cfg(feature = "transforms-remap")] pub(crate) use self::remap::*; @@ -259,8 +263,12 @@ pub(crate) use self::tag_cardinality_limit::*; pub(crate) use self::throttle::*; #[cfg(unix)] pub(crate) use self::unix::*; -#[cfg(feature = "sinks-websocket")] +#[cfg(any(feature = "sources-websocket", feature = "sinks-websocket"))] pub(crate) use self::websocket::*; +#[cfg(feature = "sinks-websocket-server")] +pub(crate) use self::websocket_server::*; +#[cfg(feature = "transforms-window")] +pub(crate) use self::window::*; #[cfg(windows)] pub(crate) use self::windows::*; pub use self::{ diff --git a/src/internal_events/mongodb_metrics.rs b/src/internal_events/mongodb_metrics.rs index 1112cb5ebe2fe..cf57b7af94e6a 100644 --- a/src/internal_events/mongodb_metrics.rs +++ b/src/internal_events/mongodb_metrics.rs @@ -13,7 +13,7 @@ pub struct MongoDbMetricsEventsReceived<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for MongoDbMetricsEventsReceived<'a> { +impl InternalEvent for MongoDbMetricsEventsReceived<'_> { // ## skip check-duplicate-events ## fn emit(self) { trace!( @@ -23,13 +23,15 @@ impl<'a> InternalEvent for MongoDbMetricsEventsReceived<'a> { endpoint = self.endpoint, ); counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(self.count as u64); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, + "component_received_event_bytes_total", "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(self.byte_size.get() as u64); } } @@ -38,7 +40,7 @@ pub struct MongoDbMetricsRequestError<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for MongoDbMetricsRequestError<'a> { +impl InternalEvent for MongoDbMetricsRequestError<'_> { fn emit(self) { error!( message = "MongoDb request error.", @@ -46,13 +48,14 @@ impl<'a> InternalEvent for MongoDbMetricsRequestError<'a> { error = ?self.error, error_type = error_type::REQUEST_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -61,7 +64,7 @@ pub struct MongoDbMetricsBsonParseError<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for MongoDbMetricsBsonParseError<'a> { +impl InternalEvent for MongoDbMetricsBsonParseError<'_> { fn emit(self) { error!( message = "BSON document parse error.", @@ -69,13 +72,14 @@ impl<'a> InternalEvent for MongoDbMetricsBsonParseError<'a> { error = ?self.error, error_type = error_type::PARSER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::RECEIVING, "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(1); } } diff --git a/src/internal_events/mqtt.rs b/src/internal_events/mqtt.rs index fa80f738da827..507f4aea2464c 100644 --- a/src/internal_events/mqtt.rs +++ b/src/internal_events/mqtt.rs @@ -18,14 +18,15 @@ impl InternalEvent for MqttConnectionError { error_code = "mqtt_connection_error", error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - 
internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "mqtt_connection_error", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } fn name(&self) -> Option<&'static str> { diff --git a/src/internal_events/nginx_metrics.rs b/src/internal_events/nginx_metrics.rs index 14928391b9aae..d6561ccf21096 100644 --- a/src/internal_events/nginx_metrics.rs +++ b/src/internal_events/nginx_metrics.rs @@ -14,7 +14,7 @@ pub struct NginxMetricsEventsReceived<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for NginxMetricsEventsReceived<'a> { +impl InternalEvent for NginxMetricsEventsReceived<'_> { fn emit(self) { trace!( message = "Events received.", @@ -23,13 +23,15 @@ impl<'a> InternalEvent for NginxMetricsEventsReceived<'a> { endpoint = self.endpoint, ); counter!( - "component_received_events_total", self.count as u64, + "component_received_events_total", "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(self.count as u64); counter!( - "component_received_event_bytes_total", self.byte_size.get() as u64, + "component_received_event_bytes_total", "endpoint" => self.endpoint.to_owned(), - ); + ) + .increment(self.byte_size.get() as u64); } } @@ -38,7 +40,7 @@ pub struct NginxMetricsRequestError<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for NginxMetricsRequestError<'a> { +impl InternalEvent for NginxMetricsRequestError<'_> { fn emit(self) { error!( message = "Nginx request error.", @@ -46,14 +48,15 @@ impl<'a> InternalEvent for NginxMetricsRequestError<'a> { error = %self.error, error_type = error_type::REQUEST_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "endpoint" => self.endpoint.to_owned(), "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } @@ -62,7 +65,7 @@ pub(crate) struct NginxMetricsStubStatusParseError<'a> { pub endpoint: &'a str, } -impl<'a> InternalEvent for NginxMetricsStubStatusParseError<'a> { +impl InternalEvent for NginxMetricsStubStatusParseError<'_> { fn emit(self) { error!( message = "NginxStubStatus parse error.", @@ -70,13 +73,14 @@ impl<'a> InternalEvent for NginxMetricsStubStatusParseError<'a> { error = %self.error, error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "endpoint" => self.endpoint.to_owned(), "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/open.rs b/src/internal_events/open.rs index daf395394a567..b5a58d4011567 100644 --- a/src/internal_events/open.rs +++ b/src/internal_events/open.rs @@ -16,7 +16,7 @@ pub struct ConnectionOpen { impl InternalEvent for ConnectionOpen { fn emit(self) { - gauge!("open_connections", self.count as f64); + gauge!("open_connections").set(self.count as f64); } } @@ -27,7 +27,7 @@ pub struct EndpointsActive { impl InternalEvent for EndpointsActive { fn emit(self) { - gauge!("active_endpoints", self.count as f64); + gauge!("active_endpoints").set(self.count as f64); } } diff --git a/src/internal_events/parser.rs b/src/internal_events/parser.rs index 1eae63f470861..3638ab4189e7b 100644 --- a/src/internal_events/parser.rs +++ b/src/internal_events/parser.rs @@ -30,14 +30,14 @@ impl InternalEvent for ParserMatchError<'_> { 
error_type = error_type::CONDITION_FAILED,
             stage = error_stage::PROCESSING,
             field = &truncate_string_at(&String::from_utf8_lossy(self.value), 60)[..],
-            internal_log_rate_limit = true
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "no_match_found",
             "error_type" => error_type::CONDITION_FAILED,
             "stage" => error_stage::PROCESSING,
-        );
+        )
+        .increment(1);
     }
 }
@@ -63,12 +63,13 @@ impl<const DROP_EVENT: bool> InternalEvent for ParserMissingFieldError<'_, DROP_
             internal_log_rate_limit = true
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "field_not_found",
             "error_type" => error_type::CONDITION_FAILED,
             "stage" => error_stage::PROCESSING,
             "field" => self.field.to_string(),
-        );
+        )
+        .increment(1);
         if DROP_EVENT {
             emit!(ComponentEventsDropped::<UNINTENTIONAL> { count: 1, reason });
         }
@@ -82,7 +83,7 @@ pub struct ParserConversionError<'a> {
     pub error: crate::types::Error,
 }
-impl<'a> InternalEvent for ParserConversionError<'a> {
+impl InternalEvent for ParserConversionError<'_> {
     fn emit(self) {
         error!(
             message = "Could not convert types.",
@@ -94,12 +95,13 @@ impl<'a> InternalEvent for ParserConversionError<'a> {
             internal_log_rate_limit = true
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_code" => "type_conversion",
             "error_type" => error_type::CONVERSION_FAILED,
             "stage" => error_stage::PROCESSING,
             "name" => self.name.to_string(),
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/postgresql_metrics.rs b/src/internal_events/postgresql_metrics.rs
index 94a23bca78150..d7979f8dd1185 100644
--- a/src/internal_events/postgresql_metrics.rs
+++ b/src/internal_events/postgresql_metrics.rs
@@ -8,7 +8,7 @@ pub struct PostgresqlMetricsCollectError<'a> {
     pub endpoint: &'a str,
 }
-impl<'a> InternalEvent for PostgresqlMetricsCollectError<'a> {
+impl InternalEvent for PostgresqlMetricsCollectError<'_> {
     fn emit(self) {
         error!(
             message = "PostgreSQL query error.",
@@ -16,12 +16,13 @@ impl<'a> InternalEvent for PostgresqlMetricsCollectError<'a> {
             error_type = error_type::REQUEST_FAILED,
             stage = error_stage::RECEIVING,
             endpoint = %self.endpoint,
-            internal_log_rate_limit = true,
+
         );
         counter!(
-            "component_errors_total", 1,
+            "component_errors_total",
             "error_type" => error_type::REQUEST_FAILED,
             "stage" => error_stage::RECEIVING,
-        );
+        )
+        .increment(1);
     }
 }
diff --git a/src/internal_events/prelude.rs b/src/internal_events/prelude.rs
index da809951f726c..e01bab6933b9f 100644
--- a/src/internal_events/prelude.rs
+++ b/src/internal_events/prelude.rs
@@ -2,11 +2,10 @@
     feature = "sources-apache_metrics",
     feature = "sources-aws_ecs_metrics",
     feature = "sources-aws_kinesis_firehose",
-    feature = "sources-http-client",
     feature = "sources-utils-http",
 ))]
 pub(crate) fn http_error_code(code: u16) -> String {
-    format!("http_response_{}", code)
+    format!("http_response_{code}")
 }
 
 pub(crate) fn io_error_code(error: &std::io::Error) -> &'static str {
diff --git a/src/internal_events/process.rs b/src/internal_events/process.rs
index acf2a1d6171f6..ef30c1ea9e7e2 100644
--- a/src/internal_events/process.rs
+++ b/src/internal_events/process.rs
@@ -1,5 +1,4 @@
 use metrics::counter;
-use metrics::gauge;
 use vector_lib::internal_event::InternalEvent;
 use vector_lib::internal_event::{error_stage, error_type};
 
@@ -18,16 +17,7 @@ impl InternalEvent for VectorStarted {
             arch = built_info::TARGET_ARCH,
             revision = built_info::VECTOR_BUILD_DESC.unwrap_or(""),
         );
-        gauge!(
-            "build_info",
-            1.0,
-            "debug" => built_info::DEBUG,
-            "version" => 
built_info::PKG_VERSION, - "rust_version" => built_info::RUST_VERSION, - "arch" => built_info::TARGET_ARCH, - "revision" => built_info::VECTOR_BUILD_DESC.unwrap_or("") - ); - counter!("started_total", 1); + counter!("started_total").increment(1); } } @@ -43,7 +33,7 @@ impl InternalEvent for VectorReloaded<'_> { message = "Vector has reloaded.", path = ?self.config_paths ); - counter!("reloaded_total", 1); + counter!("reloaded_total").increment(1); } } @@ -56,7 +46,7 @@ impl InternalEvent for VectorStopped { target: "vector", message = "Vector has stopped." ); - counter!("stopped_total", 1); + counter!("stopped_total").increment(1); } } @@ -69,7 +59,7 @@ impl InternalEvent for VectorQuit { target: "vector", message = "Vector has quit." ); - counter!("quit_total", 1); + counter!("quit_total").increment(1); } } @@ -83,14 +73,14 @@ impl InternalEvent for VectorReloadError { error_code = "reload", error_type = error_type::CONFIGURATION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "reload", "error_type" => error_type::CONFIGURATION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -104,14 +94,14 @@ impl InternalEvent for VectorConfigLoadError { error_code = "config_load", error_type = error_type::CONFIGURATION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "config_load", "error_type" => error_type::CONFIGURATION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -125,13 +115,13 @@ impl InternalEvent for VectorRecoveryError { error_code = "recovery", error_type = error_type::CONFIGURATION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "recovery", "error_type" => error_type::CONFIGURATION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/prometheus.rs b/src/internal_events/prometheus.rs index bca23b7971102..2285ee1323773 100644 --- a/src/internal_events/prometheus.rs +++ b/src/internal_events/prometheus.rs @@ -16,7 +16,7 @@ pub struct PrometheusParseError<'a> { } #[cfg(feature = "sources-prometheus-scrape")] -impl<'a> InternalEvent for PrometheusParseError<'a> { +impl InternalEvent for PrometheusParseError<'_> { fn emit(self) { error!( message = "Parsing error.", @@ -24,7 +24,7 @@ impl<'a> InternalEvent for PrometheusParseError<'a> { error = ?self.error, error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); debug!( message = %format!("Failed to parse response:\n\n{}\n\n", self.body), @@ -32,11 +32,12 @@ impl<'a> InternalEvent for PrometheusParseError<'a> { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, "url" => self.url.to_string(), - ); + ) + .increment(1); } } @@ -52,13 +53,14 @@ impl InternalEvent for PrometheusRemoteWriteParseError { error = ?self.error, error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -72,13 
+74,13 @@ impl InternalEvent for PrometheusNormalizationError { message = normalization_reason, error_type = error_type::CONVERSION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::CONVERSION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason: normalization_reason diff --git a/src/internal_events/pulsar.rs b/src/internal_events/pulsar.rs index 7006d46fba7ec..416768fcff6a5 100644 --- a/src/internal_events/pulsar.rs +++ b/src/internal_events/pulsar.rs @@ -1,6 +1,6 @@ use metrics::counter; #[cfg(feature = "sources-pulsar")] -use metrics::{register_counter, Counter}; +use metrics::Counter; use vector_lib::internal_event::{ error_stage, error_type, ComponentEventsDropped, InternalEvent, UNINTENTIONAL, }; @@ -19,13 +19,14 @@ impl InternalEvent for PulsarSendingError { error = %self.error, error_type = error_type::REQUEST_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: self.count, reason, @@ -45,14 +46,15 @@ impl InternalEvent for PulsarPropertyExtractionError { error_type = error_type::PARSER_FAILED, stage = error_stage::PROCESSING, property_field = %self.property_field, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "extracting_property", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -72,21 +74,21 @@ pub struct PulsarErrorEventData { #[cfg(feature = "sources-pulsar")] registered_event!( PulsarErrorEvent => { - ack_errors: Counter = register_counter!( + ack_errors: Counter = counter!( "component_errors_total", "error_code" => "acknowledge_message", "error_type" => error_type::ACKNOWLEDGMENT_FAILED, "stage" => error_stage::RECEIVING, ), - nack_errors: Counter = register_counter!( + nack_errors: Counter = counter!( "component_errors_total", "error_code" => "negative_acknowledge_message", "error_type" => error_type::ACKNOWLEDGMENT_FAILED, "stage" => error_stage::RECEIVING, ), - read_errors: Counter = register_counter!( + read_errors: Counter = counter!( "component_errors_total", "error_code" => "reading_message", "error_type" => error_type::READER_FAILED, @@ -103,7 +105,7 @@ registered_event!( error_code = "reading_message", error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); self.read_errors.increment(1_u64); @@ -115,7 +117,7 @@ registered_event!( error_code = "acknowledge_message", error_type = error_type::ACKNOWLEDGMENT_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); self.ack_errors.increment(1_u64); @@ -127,7 +129,7 @@ registered_event!( error_code = "negative_acknowledge_message", error_type = error_type::ACKNOWLEDGMENT_FAILED, stage = error_stage::RECEIVING, - internal_log_rate_limit = true, + ); self.nack_errors.increment(1_u64); diff --git a/src/internal_events/redis.rs b/src/internal_events/redis.rs index c76dde5fb2e65..32af514f4215d 100644 --- a/src/internal_events/redis.rs +++ b/src/internal_events/redis.rs @@ -23,13 +23,14 @@ impl InternalEvent for RedisReceiveEventError { error_code = %self.error_code, error_type = 
error_type::READER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => self.error_code, "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/reduce.rs b/src/internal_events/reduce.rs index ae355696e17cb..c62c1ed3639fa 100644 --- a/src/internal_events/reduce.rs +++ b/src/internal_events/reduce.rs @@ -1,11 +1,38 @@ use metrics::counter; -use vector_lib::internal_event::InternalEvent; +use vector_lib::internal_event::{error_stage, error_type, InternalEvent}; +use vrl::path::PathParseError; +use vrl::value::KeyString; #[derive(Debug)] pub struct ReduceStaleEventFlushed; impl InternalEvent for ReduceStaleEventFlushed { fn emit(self) { - counter!("stale_events_flushed_total", 1); + counter!("stale_events_flushed_total").increment(1); + } +} + +#[derive(Debug)] +pub struct ReduceAddEventError { + pub error: PathParseError, + pub path: KeyString, +} + +impl InternalEvent for ReduceAddEventError { + fn emit(self) { + error!( + message = "Event field could not be reduced.", + path = ?self.path, + error = ?self.error, + error_type = error_type::CONDITION_FAILED, + stage = error_stage::PROCESSING, + + ); + counter!( + "component_errors_total", + "error_type" => error_type::CONDITION_FAILED, + "stage" => error_stage::PROCESSING, + ) + .increment(1); } } diff --git a/src/internal_events/remap.rs b/src/internal_events/remap.rs index 211529e73c1d9..93193941b23fe 100644 --- a/src/internal_events/remap.rs +++ b/src/internal_events/remap.rs @@ -19,13 +19,14 @@ impl InternalEvent for RemapMappingError { error = ?self.error, error_type = error_type::CONVERSION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::CONVERSION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); if self.event_dropped { emit!(ComponentEventsDropped:: { count: 1, diff --git a/src/internal_events/sematext_metrics.rs b/src/internal_events/sematext_metrics.rs index a0254220cf961..67839e8c90e79 100644 --- a/src/internal_events/sematext_metrics.rs +++ b/src/internal_events/sematext_metrics.rs @@ -9,7 +9,7 @@ pub struct SematextMetricsInvalidMetricError<'a> { pub metric: &'a Metric, } -impl<'a> InternalEvent for SematextMetricsInvalidMetricError<'a> { +impl InternalEvent for SematextMetricsInvalidMetricError<'_> { fn emit(self) { let reason = "Invalid metric received."; error!( @@ -19,14 +19,15 @@ impl<'a> InternalEvent for SematextMetricsInvalidMetricError<'a> { stage = error_stage::PROCESSING, value = ?self.metric.value(), kind = ?self.metric.kind(), - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "invalid_metric", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } @@ -45,13 +46,14 @@ impl InternalEvent for SematextMetricsEncodeEventError error = %self.error, error_type = error_type::ENCODER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } diff --git 
a/src/internal_events/socket.rs b/src/internal_events/socket.rs index 157084d52f340..389adb0f478af 100644 --- a/src/internal_events/socket.rs +++ b/src/internal_events/socket.rs @@ -1,4 +1,6 @@ -use metrics::counter; +use std::net::Ipv4Addr; + +use metrics::{counter, histogram}; use vector_lib::internal_event::{ComponentEventsDropped, InternalEvent, UNINTENTIONAL}; use vector_lib::{ internal_event::{error_stage, error_type}, @@ -37,9 +39,11 @@ impl InternalEvent for SocketBytesReceived { %protocol, ); counter!( - "component_received_bytes_total", self.byte_size as u64, + "component_received_bytes_total", "protocol" => protocol, - ); + ) + .increment(self.byte_size as u64); + histogram!("component_received_bytes").record(self.byte_size as f64); } } @@ -59,8 +63,10 @@ impl InternalEvent for SocketEventsReceived { byte_size = self.byte_size.get(), %mode, ); - counter!("component_received_events_total", self.count as u64, "mode" => mode); - counter!("component_received_event_bytes_total", self.byte_size.get() as u64, "mode" => mode); + counter!("component_received_events_total", "mode" => mode).increment(self.count as u64); + counter!("component_received_event_bytes_total", "mode" => mode) + .increment(self.byte_size.get() as u64); + histogram!("component_received_bytes", "mode" => mode).record(self.byte_size.get() as f64); } } @@ -79,9 +85,10 @@ impl InternalEvent for SocketBytesSent { %protocol, ); counter!( - "component_sent_bytes_total", self.byte_size as u64, + "component_sent_bytes_total", "protocol" => protocol, - ); + ) + .increment(self.byte_size as u64); } } @@ -95,8 +102,9 @@ pub struct SocketEventsSent { impl InternalEvent for SocketEventsSent { fn emit(self) { trace!(message = "Events sent.", count = %self.count, byte_size = %self.byte_size.get()); - counter!("component_sent_events_total", self.count, "mode" => self.mode.as_str()); - counter!("component_sent_event_bytes_total", self.byte_size.get() as u64, "mode" => self.mode.as_str()); + counter!("component_sent_events_total", "mode" => self.mode.as_str()).increment(self.count); + counter!("component_sent_event_bytes_total", "mode" => self.mode.as_str()) + .increment(self.byte_size.get() as u64); } } @@ -114,17 +122,56 @@ impl InternalEvent for SocketBindError { error = %self.error, error_code = "socket_bind", error_type = error_type::IO_FAILED, - stage = error_stage::RECEIVING, + stage = error_stage::INITIALIZING, %mode, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "socket_bind", "error_type" => error_type::IO_FAILED, - "stage" => error_stage::RECEIVING, + "stage" => error_stage::INITIALIZING, "mode" => mode, + ) + .increment(1); + } +} + +#[derive(Debug)] +pub struct SocketMulticastGroupJoinError { + pub error: E, + pub group_addr: Ipv4Addr, + pub interface: Ipv4Addr, +} + +impl InternalEvent for SocketMulticastGroupJoinError { + fn emit(self) { + // Multicast groups are only used in UDP mode + let mode = SocketMode::Udp.as_str(); + let group_addr = self.group_addr.to_string(); + let interface = self.interface.to_string(); + + error!( + message = "Error joining multicast group.", + error = %self.error, + error_code = "socket_multicast_group_join", + error_type = error_type::IO_FAILED, + stage = error_stage::INITIALIZING, + %mode, + %group_addr, + %interface, + internal_log_rate_limit = true, ); + counter!( + "component_errors_total", + "error_code" => "socket_multicast_group_join", + "error_type" => error_type::IO_FAILED, + "stage" => 
error_stage::INITIALIZING, + "mode" => mode, + "group_addr" => group_addr, + "interface" => interface, + ) + .increment(1); } } @@ -144,15 +191,16 @@ impl InternalEvent for SocketReceiveError { error_type = error_type::READER_FAILED, stage = error_stage::RECEIVING, %mode, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "socket_receive", "error_type" => error_type::READER_FAILED, "stage" => error_stage::RECEIVING, "mode" => mode, - ); + ) + .increment(1); } } @@ -173,15 +221,16 @@ impl InternalEvent for SocketSendError { error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, %mode, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "socket_send", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, "mode" => mode, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } diff --git a/src/internal_events/splunk_hec.rs b/src/internal_events/splunk_hec.rs index 9922cca8641c7..e4853c1a8b40e 100644 --- a/src/internal_events/splunk_hec.rs +++ b/src/internal_events/splunk_hec.rs @@ -7,7 +7,7 @@ pub use self::source::*; #[cfg(feature = "sinks-splunk_hec")] mod sink { - use metrics::{counter, decrement_gauge, increment_gauge}; + use metrics::{counter, gauge}; use serde_json::Error; use vector_lib::internal_event::InternalEvent; use vector_lib::internal_event::{ @@ -33,14 +33,15 @@ mod sink { error_code = "serializing_json", error_type = error_type::ENCODER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "serializing_json", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } } @@ -52,7 +53,7 @@ mod sink { pub error: crate::Error, } - impl<'a> InternalEvent for SplunkInvalidMetricReceivedError<'a> { + impl InternalEvent for SplunkInvalidMetricReceivedError<'_> { fn emit(self) { error!( message = "Invalid metric received.", @@ -61,18 +62,20 @@ mod sink { stage = error_stage::PROCESSING, value = ?self.value, kind = ?self.kind, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::INVALID_METRIC, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); counter!( - "component_discarded_events_total", 1, + "component_discarded_events_total", "error_type" => error_type::INVALID_METRIC, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -89,14 +92,15 @@ mod sink { error_code = "invalid_response", error_type = error_type::PARSER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "invalid_response", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } } @@ -114,14 +118,15 @@ mod sink { error_code = "indexer_ack_failed", error_type = error_type::ACKNOWLEDGMENT_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "indexer_ack_failed", "error_type" => error_type::ACKNOWLEDGMENT_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } } @@ -138,14 +143,15 @@ mod sink { error_code = 
"indexer_ack_unavailable", error_type = error_type::ACKNOWLEDGMENT_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "indexer_ack_unavailable", "error_type" => error_type::ACKNOWLEDGMENT_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } } @@ -153,7 +159,7 @@ mod sink { impl InternalEvent for SplunkIndexerAcknowledgementAckAdded { fn emit(self) { - increment_gauge!("splunk_pending_acks", 1.0); + gauge!("splunk_pending_acks").increment(1.0); } } @@ -163,7 +169,7 @@ mod sink { impl InternalEvent for SplunkIndexerAcknowledgementAcksRemoved { fn emit(self) { - decrement_gauge!("splunk_pending_acks", self.count); + gauge!("splunk_pending_acks").decrement(self.count); } } @@ -171,7 +177,7 @@ mod sink { pub r#type: &'a str, } - impl<'a> InternalEvent for SplunkEventTimestampInvalidType<'a> { + impl InternalEvent for SplunkEventTimestampInvalidType<'_> { fn emit(self) { warn!( message = @@ -218,11 +224,12 @@ mod source { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "invalid_request_body", "error_type" => error_type::PARSER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -241,10 +248,11 @@ mod source { internal_log_rate_limit = true ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::REQUEST_FAILED, "stage" => error_stage::RECEIVING, - ); + ) + .increment(1); } } } diff --git a/src/internal_events/statsd_sink.rs b/src/internal_events/statsd_sink.rs index 23911e16bcc0c..b7ee84cfbc457 100644 --- a/src/internal_events/statsd_sink.rs +++ b/src/internal_events/statsd_sink.rs @@ -10,7 +10,7 @@ pub struct StatsdInvalidMetricError<'a> { pub kind: MetricKind, } -impl<'a> InternalEvent for StatsdInvalidMetricError<'a> { +impl InternalEvent for StatsdInvalidMetricError<'_> { fn emit(self) { let reason = "Invalid metric type received."; error!( @@ -20,14 +20,15 @@ impl<'a> InternalEvent for StatsdInvalidMetricError<'a> { stage = error_stage::PROCESSING, value = ?self.value, kind = ?self.kind, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "invalid_metric", "error_type" => error_type::ENCODER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { reason, count: 1 }); } diff --git a/src/internal_events/tag_cardinality_limit.rs b/src/internal_events/tag_cardinality_limit.rs index 8a4cc0b69b466..e0a79e583bbcc 100644 --- a/src/internal_events/tag_cardinality_limit.rs +++ b/src/internal_events/tag_cardinality_limit.rs @@ -7,16 +7,15 @@ pub struct TagCardinalityLimitRejectingEvent<'a> { pub tag_value: &'a str, } -impl<'a> InternalEvent for TagCardinalityLimitRejectingEvent<'a> { +impl InternalEvent for TagCardinalityLimitRejectingEvent<'_> { fn emit(self) { debug!( message = "Event containing tag with new value after hitting configured 'value_limit'; discarding event.", metric_name = self.metric_name, tag_key = self.tag_key, tag_value = self.tag_value, - internal_log_rate_limit = true, ); - counter!("tag_value_limit_exceeded_total", 1); + counter!("tag_value_limit_exceeded_total").increment(1); emit!(ComponentEventsDropped:: { count: 1, @@ -31,16 +30,15 @@ pub struct TagCardinalityLimitRejectingTag<'a> { pub tag_value: &'a str, } -impl<'a> InternalEvent for TagCardinalityLimitRejectingTag<'a> { +impl InternalEvent for 
TagCardinalityLimitRejectingTag<'_> { fn emit(self) { debug!( message = "Rejecting tag after hitting configured 'value_limit'.", metric_name = self.metric_name, tag_key = self.tag_key, tag_value = self.tag_value, - internal_log_rate_limit = true, ); - counter!("tag_value_limit_exceeded_total", 1); + counter!("tag_value_limit_exceeded_total").increment(1); } } @@ -48,12 +46,12 @@ pub struct TagCardinalityValueLimitReached<'a> { pub key: &'a str, } -impl<'a> InternalEvent for TagCardinalityValueLimitReached<'a> { +impl InternalEvent for TagCardinalityValueLimitReached<'_> { fn emit(self) { debug!( message = "Value_limit reached for key. New values for this key will be rejected.", key = %self.key, ); - counter!("value_limit_reached_total", 1); + counter!("value_limit_reached_total").increment(1); } } diff --git a/src/internal_events/tcp.rs b/src/internal_events/tcp.rs index 2f73a97be6181..ae050b99be309 100644 --- a/src/internal_events/tcp.rs +++ b/src/internal_events/tcp.rs @@ -17,7 +17,7 @@ impl InternalEvent for TcpSocketConnectionEstablished { } else { debug!(message = "Connected.", peer_addr = "unknown"); } - counter!("connection_established_total", 1, "mode" => "tcp"); + counter!("connection_established_total", "mode" => "tcp").increment(1); } } @@ -40,16 +40,18 @@ pub struct TcpSocketConnectionShutdown; impl InternalEvent for TcpSocketConnectionShutdown { fn emit(self) { warn!(message = "Received EOF from the server, shutdown."); - counter!("connection_shutdown_total", 1, "mode" => "tcp"); + counter!("connection_shutdown_total", "mode" => "tcp").increment(1); } } +#[cfg(all(unix, feature = "sources-dnstap"))] #[derive(Debug)] pub struct TcpSocketError<'a, E> { pub(crate) error: &'a E, pub peer_addr: SocketAddr, } +#[cfg(all(unix, feature = "sources-dnstap"))] impl InternalEvent for TcpSocketError<'_, E> { fn emit(self) { error!( @@ -58,13 +60,13 @@ impl InternalEvent for TcpSocketError<'_, E> { peer_addr = ?self.peer_addr, error_type = error_type::CONNECTION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::CONNECTION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -86,7 +88,6 @@ impl InternalEvent for TcpSocketTlsConnectionError { debug!( message = "Connection error, probably a healthcheck.", error = %self.error, - internal_log_rate_limit = true, ); } _ => { @@ -96,15 +97,15 @@ impl InternalEvent for TcpSocketTlsConnectionError { error_code = "connection_failed", error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "connection_failed", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, "mode" => "tcp", - ); + ) + .increment(1); } } } @@ -123,15 +124,15 @@ impl InternalEvent for TcpSendAckError { error_code = "ack_failed", error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "ack_failed", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, "mode" => "tcp", - ); + ) + .increment(1); } } @@ -150,8 +151,8 @@ impl InternalEvent for TcpBytesReceived { peer_addr = %self.peer_addr, ); counter!( - "component_received_bytes_total", self.byte_size as u64, - "protocol" => "tcp" - ); + "component_received_bytes_total", 
"protocol" => "tcp" + ) + .increment(self.byte_size as u64); } } diff --git a/src/internal_events/template.rs b/src/internal_events/template.rs index b1ee55f1b3c1b..610acf9e7a166 100644 --- a/src/internal_events/template.rs +++ b/src/internal_events/template.rs @@ -8,12 +8,12 @@ pub struct TemplateRenderingError<'a> { pub error: crate::template::TemplateRenderingError, } -impl<'a> InternalEvent for TemplateRenderingError<'a> { +impl InternalEvent for TemplateRenderingError<'_> { fn emit(self) { let mut msg = "Failed to render template".to_owned(); if let Some(field) = self.field { use std::fmt::Write; - _ = write!(msg, " for \"{}\"", field); + _ = write!(msg, " for \"{field}\""); } msg.push('.'); @@ -23,14 +23,15 @@ impl<'a> InternalEvent for TemplateRenderingError<'a> { error = %self.error, error_type = error_type::TEMPLATE_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::TEMPLATE_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, @@ -42,7 +43,7 @@ impl<'a> InternalEvent for TemplateRenderingError<'a> { error = %self.error, error_type = error_type::TEMPLATE_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); } } diff --git a/src/internal_events/throttle.rs b/src/internal_events/throttle.rs index 8d5484e75352a..3bbc67be88db8 100644 --- a/src/internal_events/throttle.rs +++ b/src/internal_events/throttle.rs @@ -22,7 +22,7 @@ impl InternalEvent for ThrottleEventDiscarded { // if we should change the specification wording? Sort of a similar situation to the // `error_code` tag for the component errors metric, where it's meant to be optional and // only specified when relevant. - counter!("events_discarded_total", 1, "key" => self.key); // Deprecated. + counter!("events_discarded_total", "key" => self.key).increment(1); // Deprecated. 
} emit!(ComponentEventsDropped:: { diff --git a/src/internal_events/udp.rs b/src/internal_events/udp.rs index 6be04c017b9e7..b983543e6cae1 100644 --- a/src/internal_events/udp.rs +++ b/src/internal_events/udp.rs @@ -13,7 +13,7 @@ pub struct UdpSocketConnectionEstablished; impl InternalEvent for UdpSocketConnectionEstablished { fn emit(self) { debug!(message = "Connected."); - counter!("connection_established_total", 1, "mode" => "udp"); + counter!("connection_established_total", "mode" => "udp").increment(1); } } @@ -47,15 +47,15 @@ impl InternalEvent for UdpSendIncompleteError { dropped = self.data_size - self.sent, error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); // deprecated - counter!("connection_send_errors_total", 1, "mode" => "udp"); + counter!("connection_send_errors_total", "mode" => "udp").increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } diff --git a/src/internal_events/unix.rs b/src/internal_events/unix.rs index f4730158fd5f4..f66327011da98 100644 --- a/src/internal_events/unix.rs +++ b/src/internal_events/unix.rs @@ -15,7 +15,7 @@ pub struct UnixSocketConnectionEstablished<'a> { impl InternalEvent for UnixSocketConnectionEstablished<'_> { fn emit(self) { debug!(message = "Connected.", path = ?self.path); - counter!("connection_established_total", 1, "mode" => "unix"); + counter!("connection_established_total", "mode" => "unix").increment(1); } } @@ -32,12 +32,20 @@ impl InternalEvent for UnixSocketOutgoingConnectionError { pub(crate) error: &'a E, pub path: &'a std::path::Path, } +#[cfg(all( + unix, + any(feature = "sources-utils-net-unix", feature = "sources-dnstap") +))] impl InternalEvent for UnixSocketError<'_, E> { fn emit(self) { error!( @@ -46,13 +54,14 @@ impl InternalEvent for UnixSocketError<'_, E> { path = ?self.path, error_type = error_type::CONNECTION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::CONNECTION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } @@ -71,13 +80,14 @@ impl InternalEvent for UnixSocketSendError<'_, E> { path = ?self.path, error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } @@ -99,13 +109,13 @@ impl InternalEvent for UnixSendIncompleteError { dropped = self.data_size - self.sent, error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); emit!(ComponentEventsDropped:: { count: 1, reason }); } @@ -117,7 +127,7 @@ pub struct UnixSocketFileDeleteError<'a> { pub error: Error, } -impl<'a> InternalEvent for UnixSocketFileDeleteError<'a> { +impl InternalEvent for UnixSocketFileDeleteError<'_> { fn emit(self) { error!( message = "Failed in deleting unix socket file.", @@ -126,13 +136,14 @@ impl<'a> InternalEvent for UnixSocketFileDeleteError<'a> { error_code 
= "delete_socket_file", error_type = error_type::WRITER_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "delete_socket_file", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_events/websocket.rs b/src/internal_events/websocket.rs index 604f627940866..20c8b3091b09b 100644 --- a/src/internal_events/websocket.rs +++ b/src/internal_events/websocket.rs @@ -1,91 +1,244 @@ use std::error::Error; -use std::fmt::Debug; +use std::fmt::{Debug, Display, Formatter, Result}; -use metrics::counter; +use metrics::{counter, histogram}; +use tokio_tungstenite::tungstenite::error::Error as TungsteniteError; use vector_lib::internal_event::InternalEvent; -use vector_lib::internal_event::{error_stage, error_type}; +use vector_common::{ + internal_event::{error_stage, error_type}, + json_size::JsonSize, +}; + +pub const PROTOCOL: &str = "websocket"; #[derive(Debug)] -pub struct WsConnectionEstablished; +pub struct WebSocketConnectionEstablished; -impl InternalEvent for WsConnectionEstablished { +impl InternalEvent for WebSocketConnectionEstablished { fn emit(self) { debug!(message = "Connected."); - counter!("connection_established_total", 1); + counter!("connection_established_total").increment(1); } fn name(&self) -> Option<&'static str> { - Some("WsConnectionEstablished") + Some("WebSocketConnectionEstablished") } } #[derive(Debug)] -pub struct WsConnectionFailedError { +pub struct WebSocketConnectionFailedError { pub error: Box, } -impl InternalEvent for WsConnectionFailedError { +impl InternalEvent for WebSocketConnectionFailedError { fn emit(self) { error!( message = "WebSocket connection failed.", error = %self.error, - error_code = "ws_connection_error", + error_code = "websocket_connection_error", error_type = error_type::CONNECTION_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, - "error_code" => "ws_connection_failed", + "component_errors_total", + "error_code" => "websocket_connection_failed", "error_type" => error_type::CONNECTION_FAILED, "stage" => error_stage::SENDING, - ); + ) + .increment(1); } fn name(&self) -> Option<&'static str> { - Some("WsConnectionFailed") + Some("WebSocketConnectionFailedError") } } #[derive(Debug)] -pub struct WsConnectionShutdown; +pub struct WebSocketConnectionShutdown; -impl InternalEvent for WsConnectionShutdown { +impl InternalEvent for WebSocketConnectionShutdown { fn emit(self) { warn!(message = "Closed by the server."); - counter!("connection_shutdown_total", 1); + counter!("connection_shutdown_total").increment(1); } fn name(&self) -> Option<&'static str> { - Some("WsConnectionShutdown") + Some("WebSocketConnectionShutdown") } } #[derive(Debug)] -pub struct WsConnectionError { +pub struct WebSocketConnectionError { pub error: tokio_tungstenite::tungstenite::Error, } -impl InternalEvent for WsConnectionError { +impl InternalEvent for WebSocketConnectionError { fn emit(self) { error!( message = "WebSocket connection error.", error = %self.error, - error_code = "ws_connection_error", + error_code = "websocket_connection_error", error_type = error_type::WRITER_FAILED, stage = error_stage::SENDING, - internal_log_rate_limit = true, + ); counter!( - "component_errors_total", 1, - "error_code" => "ws_connection_error", + "component_errors_total", + "protocol" => PROTOCOL, + "error_code" => 
"websocket_connection_error", "error_type" => error_type::WRITER_FAILED, "stage" => error_stage::SENDING, + ) + .increment(1); + } + + fn name(&self) -> Option<&'static str> { + Some("WebSocketConnectionError") + } +} + +#[allow(dead_code)] +#[derive(Debug, Copy, Clone)] +pub enum WebSocketKind { + Ping, + Pong, + Text, + Binary, + Close, + Frame, +} + +impl Display for WebSocketKind { + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + write!(f, "{self:?}") + } +} + +#[derive(Debug)] +pub struct WebSocketBytesReceived<'a> { + pub byte_size: usize, + pub url: &'a str, + pub protocol: &'static str, + pub kind: WebSocketKind, +} + +impl InternalEvent for WebSocketBytesReceived<'_> { + fn emit(self) { + trace!( + message = "Bytes received.", + byte_size = %self.byte_size, + url = %self.url, + protocol = %self.protocol, + kind = %self.kind + ); + let counter = counter!( + "component_received_bytes_total", + "url" => self.url.to_string(), + "protocol" => self.protocol, + "kind" => self.kind.to_string() + ); + counter.increment(self.byte_size as u64); + } +} + +#[derive(Debug)] +pub struct WebSocketMessageReceived<'a> { + pub count: usize, + pub byte_size: JsonSize, + pub url: &'a str, + pub protocol: &'static str, + pub kind: WebSocketKind, +} + +impl InternalEvent for WebSocketMessageReceived<'_> { + fn emit(self) { + trace!( + message = "Events received.", + count = %self.count, + byte_size = %self.byte_size, + url = %self.url, + protcol = %self.protocol, + kind = %self.kind ); + + let histogram = histogram!("component_received_events_count"); + histogram.record(self.count as f64); + let counter = counter!( + "component_received_events_total", + "uri" => self.url.to_string(), + "protocol" => PROTOCOL, + "kind" => self.kind.to_string() + ); + counter.increment(self.count as u64); + let counter = counter!( + "component_received_event_bytes_total", + "url" => self.url.to_string(), + "protocol" => PROTOCOL, + "kind" => self.kind.to_string() + ); + counter.increment(self.byte_size.get() as u64); + } + + fn name(&self) -> Option<&'static str> { + Some("WebSocketMessageReceived") + } +} + +#[derive(Debug)] +pub struct WebSocketReceiveError<'a> { + pub error: &'a TungsteniteError, +} + +impl InternalEvent for WebSocketReceiveError<'_> { + fn emit(self) { + error!( + message = "Error receiving message from websocket.", + error = %self.error, + error_code = "websocket_receive_error", + error_type = error_type::CONNECTION_FAILED, + stage = error_stage::PROCESSING, + ); + counter!( + "component_errors_total", + "protocol" => PROTOCOL, + "error_code" => "websocket_receive_error", + "error_type" => error_type::CONNECTION_FAILED, + "stage" => error_stage::PROCESSING, + ) + .increment(1); + } + + fn name(&self) -> Option<&'static str> { + Some("WebSocketReceiveError") + } +} + +#[derive(Debug)] +pub struct WebSocketSendError<'a> { + pub error: &'a TungsteniteError, +} + +impl InternalEvent for WebSocketSendError<'_> { + fn emit(self) { + error!( + message = "Error sending message to websocket.", + error = %self.error, + error_code = "websocket_send_error", + error_type = error_type::CONNECTION_FAILED, + stage = error_stage::PROCESSING, + ); + counter!( + "component_errors_total", + "error_code" => "websocket_send_error", + "error_type" => error_type::CONNECTION_FAILED, + "stage" => error_stage::PROCESSING, + ) + .increment(1); } fn name(&self) -> Option<&'static str> { - Some("WsConnectionError") + Some("WebSocketSendError") } } diff --git a/src/internal_events/websocket_server.rs 
b/src/internal_events/websocket_server.rs new file mode 100644 index 0000000000000..b3209c70a9994 --- /dev/null +++ b/src/internal_events/websocket_server.rs @@ -0,0 +1,135 @@ +use std::error::Error; +use std::fmt::Debug; + +use metrics::{counter, gauge}; +use vector_lib::internal_event::InternalEvent; + +use vector_lib::internal_event::{error_stage, error_type}; + +#[derive(Debug)] +pub struct WebSocketListenerConnectionEstablished { + pub client_count: usize, + pub extra_tags: Vec<(String, String)>, +} + +impl InternalEvent for WebSocketListenerConnectionEstablished { + fn emit(self) { + debug!( + message = format!( + "Websocket client connected. Client count: {}", + self.client_count + ) + ); + counter!("connection_established_total", &self.extra_tags).increment(1); + gauge!("active_clients", &self.extra_tags).set(self.client_count as f64); + } + + fn name(&self) -> Option<&'static str> { + Some("WebSocketListenerConnectionEstablished") + } +} + +#[derive(Debug)] +pub struct WebSocketListenerConnectionFailedError { + pub error: Box, + pub extra_tags: Vec<(String, String)>, +} + +impl InternalEvent for WebSocketListenerConnectionFailedError { + fn emit(self) { + error!( + message = "WebSocket connection failed.", + error = %self.error, + error_code = "ws_connection_error", + error_type = error_type::CONNECTION_FAILED, + stage = error_stage::SENDING, + + ); + let mut all_tags = self.extra_tags.clone(); + all_tags.extend([ + ("error_code".to_string(), "ws_connection_failed".to_string()), + ( + "error_type".to_string(), + error_type::CONNECTION_FAILED.to_string(), + ), + ("stage".to_string(), error_stage::SENDING.to_string()), + ]); + // Tags required by `component_errors_total` are dynamically added above. + // ## skip check-validity-events ## + counter!("component_errors_total", &all_tags).increment(1); + } + + fn name(&self) -> Option<&'static str> { + Some("WsListenerConnectionFailed") + } +} + +#[derive(Debug)] +pub struct WebSocketListenerConnectionShutdown { + pub client_count: usize, + pub extra_tags: Vec<(String, String)>, +} + +impl InternalEvent for WebSocketListenerConnectionShutdown { + fn emit(self) { + info!( + message = format!( + "Client connection closed. 
Client count: {}.", + self.client_count + ) + ); + counter!("connection_shutdown_total", &self.extra_tags).increment(1); + gauge!("active_clients", &self.extra_tags).set(self.client_count as f64); + } + + fn name(&self) -> Option<&'static str> { + Some("WebSocketListenerConnectionShutdown") + } +} + +#[derive(Debug)] +pub struct WebSocketListenerSendError { + pub error: Box, +} + +impl InternalEvent for WebSocketListenerSendError { + fn emit(self) { + error!( + message = "WebSocket message send error.", + error = %self.error, + error_code = "ws_server_connection_error", + error_type = error_type::WRITER_FAILED, + stage = error_stage::SENDING, + + ); + counter!( + "component_errors_total", + "error_code" => "ws_server_connection_error", + "error_type" => error_type::WRITER_FAILED, + "stage" => error_stage::SENDING, + ) + .increment(1); + } + + fn name(&self) -> Option<&'static str> { + Some("WsListenerConnectionError") + } +} + +#[derive(Debug)] +pub struct WebSocketListenerMessageSent { + pub message_size: usize, + pub extra_tags: Vec<(String, String)>, +} + +impl InternalEvent for WebSocketListenerMessageSent { + fn emit(self) { + counter!("websocket_messages_sent_total", &self.extra_tags).increment(1); + counter!("websocket_bytes_sent_total", &self.extra_tags) + .increment(self.message_size as u64); + } + + fn name(&self) -> Option<&'static str> { + Some("WebSocketListenerMessageSent") + } +} diff --git a/src/internal_events/window.rs b/src/internal_events/window.rs new file mode 100644 index 0000000000000..5b2f666edcb11 --- /dev/null +++ b/src/internal_events/window.rs @@ -0,0 +1,14 @@ +use vector_lib::internal_event::{ComponentEventsDropped, Count, Registered, INTENTIONAL}; + +vector_lib::registered_event!( + WindowEventsDropped => { + events_dropped: Registered<ComponentEventsDropped<'static, INTENTIONAL>> + = register!(ComponentEventsDropped::<INTENTIONAL>::from( + "The buffer was full" + )), + } + + fn emit(&self, data: Count) { + self.events_dropped.emit(data); + } +); diff --git a/src/internal_events/windows.rs b/src/internal_events/windows.rs index 2537ebca47c3e..f44c0a26bc8f3 100644 --- a/src/internal_events/windows.rs +++ b/src/internal_events/windows.rs @@ -8,16 +8,18 @@ pub struct WindowsServiceStart<'a> { pub name: &'a str, } -impl<'a> InternalEvent for WindowsServiceStart<'a> { +impl InternalEvent for WindowsServiceStart<'_> { fn emit(self) { info!( already_started = %self.already_started, name = self.name, "Started Windows Service.", ); - counter!("windows_service_start_total", 1, + counter!( + "windows_service_start_total", "already_started" => self.already_started.to_string(), - ); + ) + .increment(1); } } @@ -27,16 +29,18 @@ pub struct WindowsServiceStop<'a> { pub name: &'a str, } -impl<'a> InternalEvent for WindowsServiceStop<'a> { +impl InternalEvent for WindowsServiceStop<'_> { fn emit(self) { info!( already_stopped = %self.already_stopped, name = ?self.name, "Stopped Windows Service.", ); - counter!("windows_service_stop_total", 1, + counter!( + "windows_service_stop_total", "already_stopped" => self.already_stopped.to_string(), - ); + ) + .increment(1); } } @@ -45,13 +49,13 @@ pub struct WindowsServiceRestart<'a> { pub name: &'a str, } -impl<'a> InternalEvent for WindowsServiceRestart<'a> { +impl InternalEvent for WindowsServiceRestart<'_> { fn emit(self) { info!( name = ?self.name, "Restarted Windows Service." 
); - counter!("windows_service_restart_total", 1) + counter!("windows_service_restart_total").increment(1) } } @@ -60,13 +64,13 @@ pub struct WindowsServiceInstall<'a> { pub name: &'a str, } -impl<'a> InternalEvent for WindowsServiceInstall<'a> { +impl InternalEvent for WindowsServiceInstall<'_> { fn emit(self) { info!( name = ?self.name, "Installed Windows Service.", ); - counter!("windows_service_install_total", 1,); + counter!("windows_service_install_total").increment(1); } } @@ -75,13 +79,13 @@ pub struct WindowsServiceUninstall<'a> { pub name: &'a str, } -impl<'a> InternalEvent for WindowsServiceUninstall<'a> { +impl InternalEvent for WindowsServiceUninstall<'_> { fn emit(self) { info!( name = ?self.name, "Uninstalled Windows Service.", ); - counter!("windows_service_uninstall_total", 1,); + counter!("windows_service_uninstall_total").increment(1); } } @@ -90,7 +94,7 @@ pub struct WindowsServiceDoesNotExistError<'a> { pub name: &'a str, } -impl<'a> InternalEvent for WindowsServiceDoesNotExistError<'a> { +impl InternalEvent for WindowsServiceDoesNotExistError<'_> { fn emit(self) { error!( message = "Windows service does not exist. Maybe it needs to be installed.", @@ -98,13 +102,13 @@ impl<'a> InternalEvent for WindowsServiceDoesNotExistError<'a> { error_code = "service_missing", error_type = error_type::CONDITION_FAILED, stage = error_stage::PROCESSING, - internal_log_rate_limit = true, ); counter!( - "component_errors_total", 1, + "component_errors_total", "error_code" => "service_missing", "error_type" => error_type::CONDITION_FAILED, "stage" => error_stage::PROCESSING, - ); + ) + .increment(1); } } diff --git a/src/internal_telemetry/allocations/allocator/stack.rs b/src/internal_telemetry/allocations/allocator/stack.rs index 5479654501fe7..2ac8bab15410a 100644 --- a/src/internal_telemetry/allocations/allocator/stack.rs +++ b/src/internal_telemetry/allocations/allocator/stack.rs @@ -33,7 +33,7 @@ impl GroupStack { pub fn push(&mut self, group: AllocationGroupId) { self.current_top += 1; if self.current_top >= self.slots.len() { - panic!("tried to push new allocation group to the current stack, but hit the limit of {} entries", N); + panic!("tried to push new allocation group to the current stack, but hit the limit of {N} entries"); } self.slots[self.current_top] = group; } diff --git a/src/internal_telemetry/allocations/allocator/tracing_allocator.rs b/src/internal_telemetry/allocations/allocator/tracing_allocator.rs index 4c48928d079ce..dc189c35c239e 100644 --- a/src/internal_telemetry/allocations/allocator/tracing_allocator.rs +++ b/src/internal_telemetry/allocations/allocator/tracing_allocator.rs @@ -29,55 +29,59 @@ impl GroupedTraceableAllocator { unsafe impl GlobalAlloc for GroupedTraceableAllocator { #[inline] unsafe fn alloc(&self, object_layout: Layout) -> *mut u8 { - if !TRACK_ALLOCATIONS.load(Ordering::Relaxed) { - return self.allocator.alloc(object_layout); + unsafe { + if !TRACK_ALLOCATIONS.load(Ordering::Relaxed) { + return self.allocator.alloc(object_layout); + } + + // Allocate our wrapped layout and make sure the allocation succeeded. 
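+            // (Informal sketch of the wrapping scheme: `get_wrapped_layout` is
+            // assumed to pad `object_layout` with enough trailing space to hold
+            // the raw allocation group id, returning the padded layout plus the
+            // byte offset of that trailing slot:
+            //
+            //     [ object bytes ............ ][ group id ]
+            //     ^ actual_ptr                 ^ actual_ptr + offset_to_group_id
+            //
+            // `dealloc` below rebuilds the same wrapped layout so it can read
+            // the group id back from that offset before freeing.)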
+ let (actual_layout, offset_to_group_id) = get_wrapped_layout(object_layout); + let actual_ptr = self.allocator.alloc(actual_layout); + if actual_ptr.is_null() { + return actual_ptr; + } + + let group_id_ptr = actual_ptr.add(offset_to_group_id).cast::(); + + let object_size = object_layout.size(); + + try_with_suspended_allocation_group( + #[inline(always)] + |group_id| { + group_id_ptr.write(group_id.as_raw()); + self.tracer.trace_allocation(object_size, group_id); + }, + ); + actual_ptr } - - // Allocate our wrapped layout and make sure the allocation succeeded. - let (actual_layout, offset_to_group_id) = get_wrapped_layout(object_layout); - let actual_ptr = self.allocator.alloc(actual_layout); - if actual_ptr.is_null() { - return actual_ptr; - } - - let group_id_ptr = actual_ptr.add(offset_to_group_id).cast::(); - - let object_size = object_layout.size(); - - try_with_suspended_allocation_group( - #[inline(always)] - |group_id| { - group_id_ptr.write(group_id.as_raw()); - self.tracer.trace_allocation(object_size, group_id); - }, - ); - actual_ptr } #[inline] unsafe fn dealloc(&self, object_ptr: *mut u8, object_layout: Layout) { - if !TRACK_ALLOCATIONS.load(Ordering::Relaxed) { - self.allocator.dealloc(object_ptr, object_layout); - return; + unsafe { + if !TRACK_ALLOCATIONS.load(Ordering::Relaxed) { + self.allocator.dealloc(object_ptr, object_layout); + return; + } + // Regenerate the wrapped layout so we know where we have to look, as the pointer we've given relates to the + // requested layout, not the wrapped layout that was actually allocated. + let (wrapped_layout, offset_to_group_id) = get_wrapped_layout(object_layout); + + let raw_group_id = object_ptr.add(offset_to_group_id).cast::().read(); + + // Deallocate before tracking, just to make sure we're reclaiming memory as soon as possible. + self.allocator.dealloc(object_ptr, wrapped_layout); + + let object_size = object_layout.size(); + let source_group_id = AllocationGroupId::from_raw(raw_group_id); + + try_with_suspended_allocation_group( + #[inline(always)] + |_| { + self.tracer.trace_deallocation(object_size, source_group_id); + }, + ); } - // Regenerate the wrapped layout so we know where we have to look, as the pointer we've given relates to the - // requested layout, not the wrapped layout that was actually allocated. - let (wrapped_layout, offset_to_group_id) = get_wrapped_layout(object_layout); - - let raw_group_id = object_ptr.add(offset_to_group_id).cast::().read(); - - // Deallocate before tracking, just to make sure we're reclaiming memory as soon as possible. 
- self.allocator.dealloc(object_ptr, wrapped_layout); - - let object_size = object_layout.size(); - let source_group_id = AllocationGroupId::from_raw(raw_group_id); - - try_with_suspended_allocation_group( - #[inline(always)] - |_| { - self.tracer.trace_deallocation(object_size, source_group_id); - }, - ); } } diff --git a/src/internal_telemetry/allocations/mod.rs b/src/internal_telemetry/allocations/mod.rs index b34c40d136d0c..477afc9b65ed2 100644 --- a/src/internal_telemetry/allocations/mod.rs +++ b/src/internal_telemetry/allocations/mod.rs @@ -11,7 +11,7 @@ use std::{ }; use arr_macro::arr; -use metrics::{counter, decrement_gauge, increment_gauge}; +use metrics::{counter, gauge}; use rand_distr::num_traits::ToPrimitive; use self::allocator::Tracer; @@ -20,7 +20,7 @@ pub(crate) use self::allocator::{ without_allocation_tracing, AllocationGroupId, AllocationLayer, GroupedTraceableAllocator, }; -const NUM_GROUPS: usize = 128; +const NUM_GROUPS: usize = 256; // Allocations are not tracked during startup. // We use the Relaxed ordering for both stores and loads of this atomic as no other threads exist when @@ -55,8 +55,8 @@ impl GroupMemStats { pub fn new() -> Self { let mut mutex = THREAD_LOCAL_REFS.lock().unwrap(); let stats_ref: &'static GroupMemStatsStorage = Box::leak(Box::new(GroupMemStatsStorage { - allocations: arr![AtomicU64::new(0) ; 128], - deallocations: arr![AtomicU64::new(0) ; 128], + allocations: arr![AtomicU64::new(0) ; 256], + deallocations: arr![AtomicU64::new(0) ; 256], })); let group_mem_stats = GroupMemStats { stats: stats_ref }; mutex.push(stats_ref); @@ -84,7 +84,7 @@ impl GroupInfo { } } -static GROUP_INFO: [Mutex; NUM_GROUPS] = arr![Mutex::new(GroupInfo::new()); 128]; +static GROUP_INFO: [Mutex; NUM_GROUPS] = arr![Mutex::new(GroupInfo::new()); 256]; pub type Allocator = GroupedTraceableAllocator; @@ -145,35 +145,29 @@ pub fn init_allocation_tracing() { let group_info = group.lock().unwrap(); if allocations_diff > 0 { counter!( - "component_allocated_bytes_total", - allocations_diff, - "component_kind" => group_info.component_kind.clone(), + "component_allocated_bytes_total", "component_kind" => group_info.component_kind.clone(), "component_type" => group_info.component_type.clone(), - "component_id" => group_info.component_id.clone()); + "component_id" => group_info.component_id.clone()).increment(allocations_diff); } if deallocations_diff > 0 { counter!( - "component_deallocated_bytes_total", - deallocations_diff, - "component_kind" => group_info.component_kind.clone(), + "component_deallocated_bytes_total", "component_kind" => group_info.component_kind.clone(), "component_type" => group_info.component_type.clone(), - "component_id" => group_info.component_id.clone()); + "component_id" => group_info.component_id.clone()).increment(deallocations_diff); } if mem_used_diff > 0 { - increment_gauge!( - "component_allocated_bytes", - mem_used_diff.to_f64().expect("failed to convert mem_used from int to float"), - "component_kind" => group_info.component_kind.clone(), - "component_type" => group_info.component_type.clone(), - "component_id" => group_info.component_id.clone()); + gauge!( + "component_allocated_bytes", "component_type" => group_info.component_type.clone(), + "component_id" => group_info.component_id.clone(), + "component_kind" => group_info.component_kind.clone()) + .increment(mem_used_diff.to_f64().expect("failed to convert mem_used from int to float")); } if mem_used_diff < 0 { - decrement_gauge!( - "component_allocated_bytes", - 
-mem_used_diff.to_f64().expect("failed to convert mem_used from int to float"), - "component_kind" => group_info.component_kind.clone(), - "component_type" => group_info.component_type.clone(), - "component_id" => group_info.component_id.clone()); + gauge!( + "component_allocated_bytes", "component_type" => group_info.component_type.clone(), + "component_id" => group_info.component_id.clone(), + "component_kind" => group_info.component_kind.clone()) + .decrement(-mem_used_diff.to_f64().expect("failed to convert mem_used from int to float")); } } thread::sleep(Duration::from_millis( @@ -209,9 +203,6 @@ pub fn acquire_allocation_group_id( } } - // TODO: Technically, `NUM_GROUPS` is lower (128) than the upper bound for the - // `AllocationGroupId::register` call itself (253), so we can hardcode `NUM_GROUPS` here knowing - // it's the lower of the two values and will trigger first.. but this may not always be true. warn!("Maximum number of registrable allocation group IDs reached ({}). Allocations for component '{}' will be attributed to the root allocation group.", NUM_GROUPS, component_id); AllocationGroupId::ROOT } diff --git a/src/kafka.rs b/src/kafka.rs index 86d4beaeb3631..8567d19a0613d 100644 --- a/src/kafka.rs +++ b/src/kafka.rs @@ -115,7 +115,7 @@ impl KafkaAuthConfig { if let Some(verify_certificate) = &tls.options.verify_certificate { client.set( "enable.ssl.certificate.verification", - &verify_certificate.to_string(), + verify_certificate.to_string(), ); } diff --git a/src/kubernetes/reflector.rs b/src/kubernetes/reflector.rs index 1135fd4c42c53..14460c73981a6 100644 --- a/src/kubernetes/reflector.rs +++ b/src/kubernetes/reflector.rs @@ -1,6 +1,6 @@ //! Intercept [`watcher::Event`]'s. -use std::{hash::Hash, time::Duration}; +use std::{hash::Hash, sync::Arc, time::Duration}; use futures::StreamExt; use futures_util::Stream; @@ -26,31 +26,64 @@ pub async fn custom_reflector( { pin!(stream); let mut delay_queue = DelayQueue::default(); + let mut init_buffer_meta = Vec::new(); loop { tokio::select! 
diff --git a/src/kubernetes/reflector.rs b/src/kubernetes/reflector.rs
index 1135fd4c42c53..14460c73981a6 100644
--- a/src/kubernetes/reflector.rs
+++ b/src/kubernetes/reflector.rs
@@ -1,6 +1,6 @@
 //! Intercept [`watcher::Event`]'s.
 
-use std::{hash::Hash, time::Duration};
+use std::{hash::Hash, sync::Arc, time::Duration};
 
 use futures::StreamExt;
 use futures_util::Stream;
@@ -26,31 +26,64 @@ pub async fn custom_reflector(
 {
     pin!(stream);
     let mut delay_queue = DelayQueue::default();
+    let mut init_buffer_meta = Vec::new();
     loop {
         tokio::select! {
            result = stream.next() => {
                match result {
                    Some(Ok(event)) => {
                        match event {
-                            // Immediately reconcile `Applied` event
-                            watcher::Event::Applied(ref obj) => {
-                                trace!(message = "Processing Applied event.", ?event);
+                            // Immediately reconcile `Apply` event
+                            watcher::Event::Apply(ref obj) => {
+                                trace!(message = "Processing Apply event.", event_type = std::any::type_name::<K>(), event = ?event);
                                 store.apply_watcher_event(&event);
                                 let meta_descr = MetaDescribe::from_meta(obj.meta());
                                 meta_cache.store(meta_descr);
                             }
-                            // Delay reconciling any `Deleted` events
-                            watcher::Event::Deleted(ref obj) => {
+                            // Delay reconciling any `Delete` events
+                            watcher::Event::Delete(ref obj) => {
+                                trace!(message = "Delaying processing Delete event.", event_type = std::any::type_name::<K>(), event = ?event);
                                 delay_queue.insert(event.to_owned(), delay_deletion);
                                 let meta_descr = MetaDescribe::from_meta(obj.meta());
                                 meta_cache.delete(&meta_descr);
                             }
-                            // Clear all delayed events on `Restarted` events
-                            watcher::Event::Restarted(_) => {
-                                trace!(message = "Processing Restarted event.", ?event);
+                            // Clear all delayed events on `Init` event
+                            watcher::Event::Init => {
+                                trace!(message = "Processing Init event.", event_type = std::any::type_name::<K>(), event = ?event);
                                 delay_queue.clear();
                                 store.apply_watcher_event(&event);
                                 meta_cache.clear();
+                                init_buffer_meta.clear();
+                            }
+                            // Immediately reconcile `InitApply` event (but buffer the obj ref so we can handle implied deletions on InitDone)
+                            watcher::Event::InitApply(ref obj) => {
+                                trace!(message = "Processing InitApply event.", event_type = std::any::type_name::<K>(), event = ?event);
+                                store.apply_watcher_event(&event);
+                                let meta_descr = MetaDescribe::from_meta(obj.meta());
+                                meta_cache.store(meta_descr.clone());
+                                init_buffer_meta.push(meta_descr.clone());
+                            }
+                            // Reconcile `InitApply` events and implied deletions
+                            watcher::Event::InitDone => {
+                                trace!(message = "Processing InitDone event.", event_type = std::any::type_name::<K>(), event = ?event);
+                                store.apply_watcher_event(&event);
+
+
+                                store.as_reader().state().into_iter()
+                                    // delay deleting objs that were added before but not during InitApply
+                                    .for_each(|obj| {
+                                        if let Some(inner) = Arc::into_inner(obj) {
+                                            let meta_descr = MetaDescribe::from_meta(inner.meta());
+                                            if !init_buffer_meta.contains(&meta_descr) {
+                                                let implied_deletion_event = watcher::Event::Delete(inner);
+                                                trace!(message = "Delaying processing implied deletion.", event_type = std::any::type_name::<K>(), event = ?implied_deletion_event);
+                                                delay_queue.insert(implied_deletion_event, delay_deletion);
+                                                meta_cache.delete(&meta_descr);
+                                            }
+                                        }
+                                    });
+
+                                init_buffer_meta.clear();
                             }
                         }
                     },
@@ -69,10 +102,10 @@ pub async fn custom_reflector(
                 Some(event) => {
                     let event = event.into_inner();
                     match event {
-                        watcher::Event::Deleted(ref obj) => {
+                        watcher::Event::Delete(ref obj) => {
                             let meta_desc = MetaDescribe::from_meta(obj.meta());
                             if !meta_cache.contains(&meta_desc) {
-                                trace!(message = "Processing Deleted event.", ?event);
+                                trace!(message = "Processing Delete event.", event_type = std::any::type_name::<K>(), event = ?event);
                                 store.apply_watcher_event(&event);
                             }
                         },
@@ -118,7 +151,7 @@ mod tests {
             ..ConfigMap::default()
         };
         let (mut tx, rx) = mpsc::channel::<_>(5);
-        tx.send(Ok(watcher::Event::Applied(cm.clone())))
+        tx.send(Ok(watcher::Event::Apply(cm.clone())))
             .await
             .unwrap();
         let meta_cache = MetaCache::new();
@@ -144,10 +177,10 @@ mod tests {
             ..ConfigMap::default()
         };
         let (mut tx, rx) = mpsc::channel::<_>(5);
-        tx.send(Ok(watcher::Event::Applied(cm.clone())))
+        tx.send(Ok(watcher::Event::Apply(cm.clone())))
             .await
             .unwrap();
-        tx.send(Ok(watcher::Event::Deleted(cm.clone())))
+        tx.send(Ok(watcher::Event::Delete(cm.clone())))
             .await
             .unwrap();
         let meta_cache = MetaCache::new();
@@ -178,13 +211,13 @@ mod tests {
             ..ConfigMap::default()
         };
         let (mut tx, rx) = mpsc::channel::<_>(5);
-        tx.send(Ok(watcher::Event::Applied(cm.clone())))
+        tx.send(Ok(watcher::Event::Apply(cm.clone())))
             .await
             .unwrap();
-        tx.send(Ok(watcher::Event::Deleted(cm.clone())))
+        tx.send(Ok(watcher::Event::Delete(cm.clone())))
             .await
             .unwrap();
-        tx.send(Ok(watcher::Event::Applied(cm.clone())))
+        tx.send(Ok(watcher::Event::Apply(cm.clone())))
             .await
             .unwrap();
         let meta_cache = MetaCache::new();
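Several hunks in this file (and in the files that follow) migrate positional `format!` arguments to captured identifiers, a style available since Rust 1.58. The two forms are equivalent, as this small check shows (the version and target strings are arbitrary sample values):

```rust
fn main() {
    let pkg_version = "0.39.0";
    let build_string = "x86_64-unknown-linux-gnu debug=full";
    // Positional arguments...
    let old = format!("{} ({})", pkg_version, build_string);
    // ...versus identifiers captured directly in the format string.
    let new = format!("{pkg_version} ({build_string})");
    assert_eq!(old, new);
}
```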
diff --git a/src/lib.rs b/src/lib.rs
index 4e710e3429385..cc2bb8b59ae54 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,6 +7,7 @@
 #![deny(warnings)]
 #![deny(missing_docs)]
 #![cfg_attr(docsrs, feature(doc_cfg), deny(rustdoc::broken_intra_doc_links))]
+#![allow(async_fn_in_trait)]
 #![allow(clippy::approx_constant)]
 #![allow(clippy::float_cmp)]
 #![allow(clippy::match_wild_err_arm)]
@@ -20,13 +21,18 @@
 
 //! The main library to support building Vector.
 
+#[cfg(all(unix, feature = "sinks-socket"))]
 #[macro_use]
-extern crate tracing;
+extern crate cfg_if;
 #[macro_use]
 extern crate derivative;
 #[macro_use]
+extern crate tracing;
+#[macro_use]
 extern crate vector_lib;
 
+pub use indoc::indoc;
+
 #[cfg(all(feature = "tikv-jemallocator", not(feature = "allocation-tracing")))]
 #[global_allocator]
 static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
@@ -129,25 +135,12 @@ pub use vector_lib::{shutdown, Error, Result};
 
 static APP_NAME_SLUG: std::sync::OnceLock<String> = std::sync::OnceLock::new();
 
-/// Flag denoting whether or not enterprise features are enabled.
-#[cfg(feature = "enterprise")]
-pub static ENTERPRISE_ENABLED: std::sync::OnceLock<bool> = std::sync::OnceLock::new();
-
 /// The name used to identify this Vector application.
 ///
 /// This can be set at compile-time through the VECTOR_APP_NAME env variable.
 /// Defaults to "Vector".
 pub fn get_app_name() -> &'static str {
-    #[cfg(not(feature = "enterprise"))]
-    let app_name = "Vector";
-    #[cfg(feature = "enterprise")]
-    let app_name = if *ENTERPRISE_ENABLED.get().unwrap_or(&false) {
-        "Vector Enterprise"
-    } else {
-        "Vector"
-    };
-
-    option_env!("VECTOR_APP_NAME").unwrap_or(app_name)
+    option_env!("VECTOR_APP_NAME").unwrap_or("Vector")
 }
 
 /// Returns a slugified version of the name used to identify this Vector application.
@@ -194,12 +187,12 @@ pub fn get_version() -> String {
     // or full debug symbols. See the Cargo Book profiling section for value meaning:
     // https://doc.rust-lang.org/cargo/reference/profiles.html#debug
     let build_string = match built_info::DEBUG {
-        "1" => format!("{} debug=line", build_string),
-        "2" | "true" => format!("{} debug=full", build_string),
+        "1" => format!("{build_string} debug=line"),
+        "2" | "true" => format!("{build_string} debug=full"),
         _ => build_string,
     };
 
-    format!("{} ({})", pkg_version, build_string)
+    format!("{pkg_version} ({build_string})")
 }
 
 /// Includes information about the current build.
@@ -209,8 +202,13 @@ pub mod built_info {
 }
 
 /// Returns the host name of the current system.
+/// The hostname can be overridden by setting the VECTOR_HOSTNAME environment variable.
 pub fn get_hostname() -> std::io::Result<String> {
-    Ok(hostname::get()?.to_string_lossy().into())
+    Ok(if let Ok(hostname) = std::env::var("VECTOR_HOSTNAME") {
+        hostname.to_string()
+    } else {
+        hostname::get()?.to_string_lossy().into_owned()
+    })
 }
 
 /// Spawn a task with the given name. The name is only used if
diff --git a/src/line_agg.rs b/src/line_agg.rs
index 35d37a4fb60b1..25693fb21783a 100644
--- a/src/line_agg.rs
+++ b/src/line_agg.rs
@@ -183,10 +183,13 @@ where
 
         // If we're in draining mode, short circuit here.
         if let Some(to_drain) = &mut this.draining {
-            if let Some(val) = to_drain.pop() {
-                return Poll::Ready(Some(val));
-            } else {
-                return Poll::Ready(None);
+            match to_drain.pop() {
+                Some(val) => {
+                    return Poll::Ready(Some(val));
+                }
+                _ => {
+                    return Poll::Ready(None);
+                }
             }
         }
 
@@ -749,7 +752,7 @@ mod tests {
             "START msg 1".to_string(), // will be stashed
         ];
         for i in 0..n {
-            lines.push(format!("line {}", i));
+            lines.push(format!("line {i}"));
         }
         let config = Config {
             start_pattern: Regex::new("").unwrap(),
@@ -760,7 +763,7 @@ mod tests {
 
         let mut expected = "START msg 1".to_string();
         for i in 0..n {
-            write!(expected, "\nline {}", i).expect("write to String never fails");
+            write!(expected, "\nline {i}").expect("write to String never fails");
         }
 
         let (mut send, recv) = futures::channel::mpsc::unbounded();
diff --git a/src/list.rs b/src/list.rs
index 566b07215d486..3cedbc91a9d0f 100644
--- a/src/list.rs
+++ b/src/list.rs
@@ -40,22 +40,22 @@ pub fn cmd(opts: &Opts) -> exitcode::ExitCode {
         Format::Text => {
             println!("Sources:");
             for name in sources {
-                println!("- {}", name);
+                println!("- {name}");
             }
 
             println!("\nTransforms:");
             for name in transforms {
-                println!("- {}", name);
+                println!("- {name}");
             }
 
             println!("\nSinks:");
             for name in sinks {
-                println!("- {}", name);
+                println!("- {name}");
             }
 
             println!("\nEnrichment tables:");
             for name in enrichment_tables {
-                println!("- {}", name);
+                println!("- {name}");
             }
         }
         Format::Json => {
diff --git a/src/nats.rs b/src/nats.rs
index 93a0a765ede55..5048cc3836315 100644
--- a/src/nats.rs
+++ b/src/nats.rs
@@ -65,7 +65,7 @@ impl std::fmt::Display for NatsAuthConfig {
             CredentialsFile { .. } => "credentials_file",
             Nkey { .. } => "nkey",
         };
-        write!(f, "{}", word)
+        write!(f, "{word}")
     }
 }
 
diff --git a/src/providers/http.rs b/src/providers/http.rs
index 95bce42ea1547..d2d5d6a7eb2b5 100644
--- a/src/providers/http.rs
+++ b/src/providers/http.rs
@@ -8,7 +8,7 @@ use url::Url;
 use vector_lib::configurable::configurable_component;
 
 use crate::{
-    config::{self, provider::ProviderConfig, ProxyConfig},
+    config::{self, provider::ProviderConfig, Format, ProxyConfig},
     http::HttpClient,
     signal,
     tls::{TlsConfig, TlsSettings},
@@ -53,6 +53,10 @@ pub struct HttpConfig {
     #[configurable(derived)]
     #[serde(default, skip_serializing_if = "crate::serde::is_default")]
     proxy: ProxyConfig,
+
+    /// The config format that is expected to be loaded.
+    #[configurable(derived)]
+    config_format: Format,
 }
 
 impl Default for HttpConfig {
@@ -63,6 +67,7 @@ impl Default for HttpConfig {
             poll_interval_secs: 30,
             tls_options: None,
             proxy: Default::default(),
+            config_format: Format::default(),
         }
     }
 }
@@ -70,7 +75,7 @@ impl Default for HttpConfig {
 /// Makes an HTTP request to the provided endpoint, returning the String body.
 async fn http_request(
     url: &Url,
-    tls_options: &Option<TlsConfig>,
+    tls_options: Option<&TlsConfig>,
     headers: &IndexMap<String, String>,
     proxy: &ProxyConfig,
 ) -> Result<bytes::Bytes, String> {
@@ -123,22 +128,16 @@ async fn http_request(
 
 /// Calls `http_request`, serializing the result to a `ConfigBuilder`.
async fn http_request_to_config_builder( url: &Url, - tls_options: &Option, + tls_options: Option<&TlsConfig>, headers: &IndexMap, proxy: &ProxyConfig, + config_format: &Format, ) -> BuildResult { let config_str = http_request(url, tls_options, headers, proxy) .await .map_err(|e| vec![e.to_owned()])?; - let (config_builder, warnings) = - config::load(config_str.chunk(), crate::config::format::Format::Toml)?; - - for warning in warnings.into_iter() { - warn!("{}", warning); - } - - Ok(config_builder) + config::load(config_str.chunk(), *config_format) } /// Polls the HTTP endpoint after/every `poll_interval_secs`, returning a stream of `ConfigBuilder`. @@ -148,6 +147,7 @@ fn poll_http( tls_options: Option, headers: IndexMap, proxy: ProxyConfig, + config_format: Format, ) -> impl Stream { let duration = time::Duration::from_secs(poll_interval_secs); let mut interval = time::interval_at(time::Instant::now() + duration, duration); @@ -156,7 +156,7 @@ fn poll_http( loop { interval.tick().await; - match http_request_to_config_builder(&url, &tls_options, &headers, &proxy).await { + match http_request_to_config_builder(&url, tls_options.as_ref(), &headers, &proxy, &config_format).await { Ok(config_builder) => yield signal::SignalTo::ReloadFromConfigBuilder(config_builder), Err(_) => {}, }; @@ -169,7 +169,6 @@ fn poll_http( } } -#[async_trait::async_trait] impl ProviderConfig for HttpConfig { async fn build(&mut self, signal_handler: &mut signal::SignalHandler) -> BuildResult { let url = self @@ -180,10 +179,17 @@ impl ProviderConfig for HttpConfig { let tls_options = self.tls_options.take(); let poll_interval_secs = self.poll_interval_secs; let request = self.request.clone(); + let config_format = self.config_format; let proxy = ProxyConfig::from_env().merge(&self.proxy); - let config_builder = - http_request_to_config_builder(&url, &tls_options, &request.headers, &proxy).await?; + let config_builder = http_request_to_config_builder( + &url, + tls_options.as_ref(), + &request.headers, + &proxy, + &config_format, + ) + .await?; // Poll for changes to remote configuration. signal_handler.add(poll_http( @@ -192,6 +198,7 @@ impl ProviderConfig for HttpConfig { tls_options, request.headers.clone(), proxy.clone(), + config_format, )); Ok(config_builder) diff --git a/src/secrets/aws_secrets_manager.rs b/src/secrets/aws_secrets_manager.rs new file mode 100644 index 0000000000000..3d402536c2754 --- /dev/null +++ b/src/secrets/aws_secrets_manager.rs @@ -0,0 +1,106 @@ +use std::collections::{HashMap, HashSet}; + +use aws_sdk_secretsmanager::{config, Client}; +use vector_lib::configurable::{component::GenerateConfig, configurable_component}; + +use crate::aws::{create_client, AwsAuthentication, ClientBuilder, RegionOrEndpoint}; +use crate::config::ProxyConfig; +use crate::tls::TlsConfig; +use crate::{config::SecretBackend, signal}; + +pub(crate) struct SecretsManagerClientBuilder; + +impl ClientBuilder for SecretsManagerClientBuilder { + type Client = Client; + + fn build(&self, config: &aws_types::SdkConfig) -> Self::Client { + let config = config::Builder::from(config).build(); + Client::from_conf(config) + } +} + +/// Configuration for the `aws_secrets_manager` secrets backend. +#[configurable_component(secrets("aws_secrets_manager"))] +#[derive(Clone, Debug)] +pub struct AwsSecretsManagerBackend { + /// ID of the secret to resolve. 
+ pub secret_id: String, + + #[serde(flatten)] + #[configurable(derived)] + pub region: RegionOrEndpoint, + + #[configurable(derived)] + #[serde(default)] + pub auth: AwsAuthentication, + + #[configurable(derived)] + pub tls: Option, +} + +impl GenerateConfig for AwsSecretsManagerBackend { + fn generate_config() -> toml::Value { + toml::Value::try_from(AwsSecretsManagerBackend { + secret_id: String::from("secret-id"), + region: Default::default(), + auth: Default::default(), + tls: None, + }) + .unwrap() + } +} + +impl SecretBackend for AwsSecretsManagerBackend { + async fn retrieve( + &mut self, + secret_keys: HashSet, + _: &mut signal::SignalRx, + ) -> crate::Result> { + let client = create_client::( + &SecretsManagerClientBuilder {}, + &self.auth, + self.region.region(), + self.region.endpoint(), + &ProxyConfig::default(), + self.tls.as_ref(), + None, + ) + .await?; + + let get_secret_value_response = client + .get_secret_value() + .secret_id(&self.secret_id) + .send() + .await?; + + let secret_string = get_secret_value_response + .secret_string + .ok_or::(format!( + "secret for secret-id '{}' could not be retrieved", + &self.secret_id + ))?; + + let output = serde_json::from_str::>(secret_string.as_str())?; + + let mut secrets = HashMap::new(); + for k in secret_keys.into_iter() { + if let Some(secret) = output.get(&k) { + if secret.is_empty() { + return Err(format!( + "value for key '{}' in secret with id '{}' was empty", + k, &self.secret_id + ) + .into()); + } + secrets.insert(k.to_string(), secret.to_string()); + } else { + return Err(format!( + "key '{}' in secret with id '{}' does not exist", + k, &self.secret_id + ) + .into()); + } + } + Ok(secrets) + } +} diff --git a/src/secrets/directory.rs b/src/secrets/directory.rs new file mode 100644 index 0000000000000..8170c208fd125 --- /dev/null +++ b/src/secrets/directory.rs @@ -0,0 +1,52 @@ +use std::collections::{HashMap, HashSet}; +use std::path::PathBuf; + +use vector_lib::configurable::{component::GenerateConfig, configurable_component}; + +use crate::{config::SecretBackend, signal}; + +/// Configuration for the `directory` secrets backend. +#[configurable_component(secrets("directory"))] +#[derive(Clone, Debug)] +pub struct DirectoryBackend { + /// Directory path to read secrets from. + pub path: PathBuf, + + /// Remove trailing whitespace from file contents. 
+ #[serde(default)] + pub remove_trailing_whitespace: bool, +} + +impl GenerateConfig for DirectoryBackend { + fn generate_config() -> toml::Value { + toml::Value::try_from(DirectoryBackend { + path: PathBuf::from("/path/to/secrets"), + remove_trailing_whitespace: false, + }) + .unwrap() + } +} + +impl SecretBackend for DirectoryBackend { + async fn retrieve( + &mut self, + secret_keys: HashSet, + _: &mut signal::SignalRx, + ) -> crate::Result> { + let mut secrets = HashMap::new(); + for k in secret_keys.into_iter() { + let file_path = self.path.join(&k); + let contents = tokio::fs::read_to_string(&file_path).await?; + let secret = if self.remove_trailing_whitespace { + contents.trim_end() + } else { + &contents + }; + if secret.is_empty() { + return Err(format!("secret in file '{k}' was empty").into()); + } + secrets.insert(k, secret.to_string()); + } + Ok(secrets) + } +} diff --git a/src/secrets/exec.rs b/src/secrets/exec.rs index 0c192adffd583..29fb2c1585075 100644 --- a/src/secrets/exec.rs +++ b/src/secrets/exec.rs @@ -58,7 +58,7 @@ struct ExecResponse { } impl SecretBackend for ExecBackend { - fn retrieve( + async fn retrieve( &mut self, secret_keys: HashSet, signal_rx: &mut signal::SignalRx, @@ -76,18 +76,18 @@ impl SecretBackend for ExecBackend { for k in secret_keys.into_iter() { if let Some(secret) = output.get_mut(&k) { if let Some(e) = &secret.error { - return Err(format!("secret for key '{}' was not retrieved: {}", k, e).into()); + return Err(format!("secret for key '{k}' was not retrieved: {e}").into()); } if let Some(v) = secret.value.take() { if v.is_empty() { - return Err(format!("secret for key '{}' was empty", k).into()); + return Err(format!("secret for key '{k}' was empty").into()); } secrets.insert(k.to_string(), v); } else { - return Err(format!("secret for key '{}' was empty", k).into()); + return Err(format!("secret for key '{k}' was empty").into()); } } else { - return Err(format!("secret for key '{}' was not retrieved", k).into()); + return Err(format!("secret for key '{k}' was not retrieved").into()); } } Ok(secrets) @@ -117,12 +117,10 @@ async fn query_backend( let mut stderr_stream = child .stderr .map(|s| codec::FramedRead::new(s, codec::LinesCodec::new())) - .take() .ok_or("unable to acquire stderr")?; let mut stdout_stream = child .stdout .map(|s| codec::FramedRead::new(s, codec::BytesCodec::new())) - .take() .ok_or("unable to acquire stdout")?; let query = serde_json::to_vec(&query)?; @@ -148,7 +146,7 @@ async fn query_backend( match stdout { None => break, Some(Ok(b)) => output.extend(b), - Some(Err(e)) => return Err(format!("Error while reading from an exec backend stdout: {}.", e).into()), + Some(Err(e)) => return Err(format!("Error while reading from an exec backend stdout: {e}.").into()), } } _ = &mut timeout => { diff --git a/src/secrets/file.rs b/src/secrets/file.rs new file mode 100644 index 0000000000000..c103385493791 --- /dev/null +++ b/src/secrets/file.rs @@ -0,0 +1,46 @@ +use std::collections::{HashMap, HashSet}; +use std::path::PathBuf; + +use vector_lib::configurable::{component::GenerateConfig, configurable_component}; + +use crate::{config::SecretBackend, signal}; + +/// Configuration for the `file` secrets backend. +#[configurable_component(secrets("file"))] +#[derive(Clone, Debug)] +pub struct FileBackend { + /// File path to read secrets from. 
+ pub path: PathBuf, +} + +impl GenerateConfig for FileBackend { + fn generate_config() -> toml::Value { + toml::Value::try_from(FileBackend { + path: PathBuf::from("/path/to/secret"), + }) + .unwrap() + } +} + +impl SecretBackend for FileBackend { + async fn retrieve( + &mut self, + secret_keys: HashSet, + _: &mut signal::SignalRx, + ) -> crate::Result> { + let contents = tokio::fs::read_to_string(&self.path).await?; + let output = serde_json::from_str::>(&contents)?; + let mut secrets = HashMap::new(); + for k in secret_keys.into_iter() { + if let Some(secret) = output.get(&k) { + if secret.is_empty() { + return Err(format!("secret for key '{k}' was empty").into()); + } + secrets.insert(k, secret.to_string()); + } else { + return Err(format!("secret for key '{k}' was not retrieved").into()); + } + } + Ok(secrets) + } +} diff --git a/src/secrets/mod.rs b/src/secrets/mod.rs index 83eea23c4ea42..b8e9c9129a789 100644 --- a/src/secrets/mod.rs +++ b/src/secrets/mod.rs @@ -2,33 +2,86 @@ use std::collections::{HashMap, HashSet}; use enum_dispatch::enum_dispatch; -use vector_lib::configurable::{configurable_component, NamedComponent}; +use vector_lib::configurable::configurable_component; +use crate::config::GenerateConfig; use crate::{config::SecretBackend, signal}; +#[cfg(feature = "secrets-aws-secrets-manager")] +mod aws_secrets_manager; +mod directory; mod exec; +mod file; mod test; -/// Configurable secret backends in Vector. -#[configurable_component] +/// Configuration options to retrieve secrets from external backend in order to avoid storing secrets in plaintext +/// in Vector config. Multiple backends can be configured. Use `SECRET[.]` to tell Vector to retrieve the secret. This placeholder is replaced by the secret +/// retrieved from the relevant backend. +/// +/// When `type` is `exec`, the provided command will be run and provided a list of +/// secrets to fetch, determined from the configuration file, on stdin as JSON in the format: +/// +/// ```json +/// {"version": "1.0", "secrets": ["secret1", "secret2"]} +/// ``` +/// +/// The executable is expected to respond with the values of these secrets on stdout, also as JSON, in the format: +/// +/// ```json +/// { +/// "secret1": {"value": "secret_value", "error": null}, +/// "secret2": {"value": null, "error": "could not fetch the secret"} +/// } +/// ``` +/// If an `error` is returned for any secrets, or if the command exits with a non-zero status code, +/// Vector will log the errors and exit. +/// +/// Otherwise, the secret must be a JSON text string with key/value pairs. For example: +/// ```json +/// { +/// "username": "test", +/// "password": "example-password" +/// } +/// ``` +/// +/// If an error occurred while reading the file or retrieving the secrets, Vector logs the error and exits. +/// +/// Secrets are loaded when Vector starts or if Vector receives a `SIGHUP` signal triggering its +/// configuration reload process. +#[allow(clippy::large_enum_variant)] +#[configurable_component(global_option("secret"))] #[derive(Clone, Debug)] #[enum_dispatch(SecretBackend)] #[serde(tag = "type", rename_all = "snake_case")] +#[configurable(metadata( + docs::enum_tag_description = "secret type", + docs::common = false, + docs::required = false, +))] pub enum SecretBackends { + /// File. + File(file::FileBackend), + + /// Directory. + Directory(directory::DirectoryBackend), + /// Exec. Exec(exec::ExecBackend), + /// AWS Secrets Manager. 
+ #[cfg(feature = "secrets-aws-secrets-manager")] + AwsSecretsManager(aws_secrets_manager::AwsSecretsManagerBackend), + /// Test. #[configurable(metadata(docs::hidden))] Test(test::TestBackend), } -// TODO: Use `enum_dispatch` here. -impl NamedComponent for SecretBackends { - fn get_component_name(&self) -> &'static str { - match self { - Self::Exec(config) => config.get_component_name(), - Self::Test(config) => config.get_component_name(), - } +impl GenerateConfig for SecretBackends { + fn generate_config() -> toml::Value { + toml::Value::try_from(Self::File(file::FileBackend { + path: "path/to/file".into(), + })) + .unwrap() } } diff --git a/src/secrets/test.rs b/src/secrets/test.rs index 920fbe1ba2adc..3185d93a5feb2 100644 --- a/src/secrets/test.rs +++ b/src/secrets/test.rs @@ -15,7 +15,7 @@ pub struct TestBackend { impl_generate_config_from_default!(TestBackend); impl SecretBackend for TestBackend { - fn retrieve( + async fn retrieve( &mut self, secret_keys: HashSet, _: &mut signal::SignalRx, diff --git a/src/serde.rs b/src/serde.rs index 6a0fd579cdd15..2bee11583e0ad 100644 --- a/src/serde.rs +++ b/src/serde.rs @@ -90,7 +90,7 @@ impl Fields { FieldsOrValue::Value(v) => Box::new(std::iter::once((k, v))), FieldsOrValue::Fields(f) => Box::new( f.all_fields() - .map(move |(nested_k, v)| (format!("{}.{}", k, nested_k), v)), + .map(move |(nested_k, v)| (format!("{k}.{nested_k}"), v)), ), } }) diff --git a/src/signal.rs b/src/signal.rs index 91a1e3515f798..ef095ad55be6b 100644 --- a/src/signal.rs +++ b/src/signal.rs @@ -1,6 +1,7 @@ #![allow(missing_docs)] use snafu::Snafu; +use std::collections::HashSet; use tokio::{runtime::Runtime, sync::broadcast}; use tokio_stream::{Stream, StreamExt}; @@ -14,6 +15,8 @@ pub type SignalRx = broadcast::Receiver; /// Control messages used by Vector to drive topology and shutdown events. #[allow(clippy::large_enum_variant)] // discovered during Rust upgrade to 1.57; just allowing for now since we did previously pub enum SignalTo { + /// Signal to reload given components. + ReloadComponents(HashSet), /// Signal to reload config from a string. ReloadFromConfigBuilder(ConfigBuilder), /// Signal to reload config from the filesystem. @@ -24,7 +27,23 @@ pub enum SignalTo { Quit, } -#[derive(Clone, Debug, Snafu)] +impl PartialEq for SignalTo { + fn eq(&self, other: &Self) -> bool { + use SignalTo::*; + + match (self, other) { + (ReloadComponents(a), ReloadComponents(b)) => a == b, + // TODO: This will require a lot of plumbing but ultimately we can derive equality for config builders. + (ReloadFromConfigBuilder(_), ReloadFromConfigBuilder(_)) => true, + (ReloadFromDisk, ReloadFromDisk) => true, + (Shutdown(a), Shutdown(b)) => a == b, + (Quit, Quit) => true, + _ => false, + } + } +} + +#[derive(Clone, Debug, Snafu, PartialEq, Eq)] pub enum ShutdownError { // For future work: It would be nice if we could keep the actual errors in here, but // `crate::Error` doesn't implement `Clone`, and adding `DynClone` for errors is tricky. @@ -154,7 +173,7 @@ impl SignalHandler { /// Signals from OS/user. #[cfg(unix)] -fn os_signals(runtime: &Runtime) -> impl Stream { +fn os_signals(runtime: &Runtime) -> impl Stream + use<> { use tokio::signal::unix::{signal, SignalKind}; // The `signal` function must be run within the context of a Tokio runtime. 
diff --git a/src/sinks/amqp/channel.rs b/src/sinks/amqp/channel.rs new file mode 100644 index 0000000000000..862ff06e5d0dc --- /dev/null +++ b/src/sinks/amqp/channel.rs @@ -0,0 +1,86 @@ +use super::config::AmqpSinkConfig; +use super::service::AmqpError; +use crate::amqp::AmqpConfig; +use deadpool::managed::Pool; +use lapin::options::ConfirmSelectOptions; + +pub type AmqpSinkChannels = Pool; + +pub(super) fn new_channel_pool(config: &AmqpSinkConfig) -> crate::Result { + let max_channels = config.max_channels.try_into().map_err(|_| { + Box::new(AmqpError::PoolError { + error: "max_channels must fit into usize".into(), + }) + })?; + if max_channels == 0 { + return Err(Box::new(AmqpError::PoolError { + error: "max_channels must be positive".into(), + })); + } + let channel_manager = AmqpSinkChannelManager::new(&config.connection); + let channels = Pool::builder(channel_manager) + .max_size(max_channels) + .runtime(deadpool::Runtime::Tokio1) + .build()?; + debug!("AMQP channel pool created with max size: {}", max_channels); + Ok(channels) +} + +/// A channel pool manager for the AMQP sink. +/// This manager is responsible for creating and recycling AMQP channels. +/// It uses the `deadpool` crate to manage the channels. +pub(crate) struct AmqpSinkChannelManager { + config: AmqpConfig, +} + +impl deadpool::managed::Manager for AmqpSinkChannelManager { + type Type = lapin::Channel; + type Error = AmqpError; + + async fn create(&self) -> Result { + let channel = Self::new_channel(&self.config).await?; + info!( + message = "Created a new channel to the AMQP broker.", + id = channel.id() + ); + Ok(channel) + } + + async fn recycle( + &self, + channel: &mut Self::Type, + _: &deadpool::managed::Metrics, + ) -> deadpool::managed::RecycleResult { + let state = channel.status().state(); + if state == lapin::ChannelState::Connected { + Ok(()) + } else { + Err((AmqpError::ChannelClosed { state }).into()) + } + } +} + +impl AmqpSinkChannelManager { + /// Creates a new channel pool manager for the AMQP sink. + pub fn new(config: &AmqpConfig) -> Self { + Self { + config: config.clone(), + } + } + + /// Creates a new AMQP channel using the configuration of this sink. + async fn new_channel(config: &AmqpConfig) -> Result { + let (_, channel) = config + .connect() + .await + .map_err(|e| AmqpError::ConnectFailed { error: e })?; + + // Enable confirmations on the channel. + channel + .confirm_select(ConfirmSelectOptions::default()) + .await + .map_err(|e| AmqpError::ConnectFailed { error: Box::new(e) })?; + + Ok(channel) + } +} diff --git a/src/sinks/amqp/config.rs b/src/sinks/amqp/config.rs index c24fcdc110ff3..afca8b47ccae5 100644 --- a/src/sinks/amqp/config.rs +++ b/src/sinks/amqp/config.rs @@ -1,8 +1,11 @@ //! Configuration functionality for the `AMQP` sink. +use super::channel::AmqpSinkChannels; use crate::{amqp::AmqpConfig, sinks::prelude::*}; use lapin::{types::ShortString, BasicProperties}; -use std::sync::Arc; -use vector_lib::codecs::TextSerializerConfig; +use vector_lib::{ + codecs::TextSerializerConfig, + internal_event::{error_stage, error_type}, +}; use super::sink::AmqpSink; @@ -12,16 +15,20 @@ use super::sink::AmqpSink; #[derive(Clone, Debug, Default)] pub struct AmqpPropertiesConfig { /// Content-Type for the AMQP messages. - #[configurable(derived)] pub(crate) content_type: Option, /// Content-Encoding for the AMQP messages. - #[configurable(derived)] pub(crate) content_encoding: Option, + + /// Expiration for AMQP messages (in milliseconds). 
+ pub(crate) expiration_ms: Option, + + /// Priority for AMQP messages. It can be templated to an integer between 0 and 255 inclusive. + pub(crate) priority: Option, } impl AmqpPropertiesConfig { - pub(super) fn build(&self) -> BasicProperties { + pub(super) fn build(&self, event: &Event) -> Option { let mut prop = BasicProperties::default(); if let Some(content_type) = &self.content_type { prop = prop.with_content_type(ShortString::from(content_type.clone())); @@ -29,7 +36,26 @@ impl AmqpPropertiesConfig { if let Some(content_encoding) = &self.content_encoding { prop = prop.with_content_encoding(ShortString::from(content_encoding.clone())); } - prop + if let Some(expiration_ms) = &self.expiration_ms { + prop = prop.with_expiration(ShortString::from(expiration_ms.to_string())); + } + if let Some(priority_template) = &self.priority { + let priority = priority_template.render(event).unwrap_or_else(|error| { + warn!( + message = "Failed to render numeric template for \"properties.priority\".", + error = %error, + error_type = error_type::TEMPLATE_FAILED, + stage = error_stage::PROCESSING, + internal_log_rate_limit = true, + ); + Default::default() + }); + + // Clamp the value to the range of 0-255, as AMQP priority is a u8. + let priority = priority.clamp(0, u8::MAX.into()) as u8; + prop = prop.with_priority(priority); + } + Some(prop) } } @@ -64,6 +90,14 @@ pub struct AmqpSinkConfig { skip_serializing_if = "crate::serde::is_default" )] pub(crate) acknowledgements: AcknowledgementsConfig, + + /// Maximum number of AMQP channels to keep active (channels are created as needed). + #[serde(default = "default_max_channels")] + pub(crate) max_channels: u32, +} + +const fn default_max_channels() -> u32 { + 4 } impl Default for AmqpSinkConfig { @@ -75,6 +109,7 @@ impl Default for AmqpSinkConfig { encoding: TextSerializerConfig::default().into(), connection: AmqpConfig::default(), acknowledgements: AcknowledgementsConfig::default(), + max_channels: default_max_channels(), } } } @@ -85,7 +120,8 @@ impl GenerateConfig for AmqpSinkConfig { r#"connection_string = "amqp://localhost:5672/%2f" routing_key = "user_id" exchange = "test" - encoding.codec = "json""#, + encoding.codec = "json" + max_channels = 4"#, ) .unwrap() } @@ -96,7 +132,7 @@ impl GenerateConfig for AmqpSinkConfig { impl SinkConfig for AmqpSinkConfig { async fn build(&self, _cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { let sink = AmqpSink::new(self.clone()).await?; - let hc = healthcheck(Arc::clone(&sink.channel)).boxed(); + let hc = healthcheck(sink.channels.clone()).boxed(); Ok((VectorSink::from_event_streamsink(sink), hc)) } @@ -109,9 +145,11 @@ impl SinkConfig for AmqpSinkConfig { } } -pub(super) async fn healthcheck(channel: Arc) -> crate::Result<()> { +pub(super) async fn healthcheck(channels: AmqpSinkChannels) -> crate::Result<()> { trace!("Healthcheck started."); + let channel = channels.get().await?; + if !channel.status().connected() { return Err(Box::new(std::io::Error::new( std::io::ErrorKind::BrokenPipe, @@ -123,7 +161,126 @@ pub(super) async fn healthcheck(channel: Arc) -> crate::Result<( Ok(()) } -#[test] -pub fn generate_config() { - crate::test_util::test_generate_config::(); +#[cfg(test)] +mod tests { + use super::*; + use crate::config::format::{deserialize, Format}; + + #[test] + pub fn generate_config() { + crate::test_util::test_generate_config::(); + } + + fn assert_config_priority_eq(config: AmqpSinkConfig, event: &LogEvent, priority: u8) { + assert_eq!( + config + .properties + .unwrap() + .priority 
+ .unwrap() + .render(event) + .unwrap(), + priority as u64 + ); + } + + #[test] + pub fn parse_config_priority_static() { + for (format, config) in [ + ( + Format::Yaml, + r#" + exchange: "test" + routing_key: "user_id" + encoding: + codec: "json" + connection_string: "amqp://user:password@127.0.0.1:5672/" + properties: + priority: 1 + "#, + ), + ( + Format::Toml, + r#" + exchange = "test" + routing_key = "user_id" + encoding.codec = "json" + connection_string = "amqp://user:password@127.0.0.1:5672/" + properties = { priority = 1 } + "#, + ), + ( + Format::Json, + r#" + { + "exchange": "test", + "routing_key": "user_id", + "encoding": { + "codec": "json" + }, + "connection_string": "amqp://user:password@127.0.0.1:5672/", + "properties": { + "priority": 1 + } + } + "#, + ), + ] { + let config: AmqpSinkConfig = deserialize(config, format).unwrap(); + let event = LogEvent::from_str_legacy("message"); + assert_config_priority_eq(config, &event, 1); + } + } + + #[test] + pub fn parse_config_priority_templated() { + for (format, config) in [ + ( + Format::Yaml, + r#" + exchange: "test" + routing_key: "user_id" + encoding: + codec: "json" + connection_string: "amqp://user:password@127.0.0.1:5672/" + properties: + priority: "{{ .priority }}" + "#, + ), + ( + Format::Toml, + r#" + exchange = "test" + routing_key = "user_id" + encoding.codec = "json" + connection_string = "amqp://user:password@127.0.0.1:5672/" + properties = { priority = "{{ .priority }}" } + "#, + ), + ( + Format::Json, + r#" + { + "exchange": "test", + "routing_key": "user_id", + "encoding": { + "codec": "json" + }, + "connection_string": "amqp://user:password@127.0.0.1:5672/", + "properties": { + "priority": "{{ .priority }}" + } + } + "#, + ), + ] { + let config: AmqpSinkConfig = deserialize(config, format).unwrap(); + let event = { + let mut event = LogEvent::from_str_legacy("message"); + event.insert("priority", 2); + event + }; + assert_config_priority_eq(config, &event, 2); + } + } } diff --git a/src/sinks/amqp/encoder.rs b/src/sinks/amqp/encoder.rs index 7529fa12b6ffa..3655ab3d933f1 100644 --- a/src/sinks/amqp/encoder.rs +++ b/src/sinks/amqp/encoder.rs @@ -26,7 +26,7 @@ impl encoding::Encoder for AmqpEncoder { let mut encoder = self.encoder.clone(); encoder .encode(input, &mut body) - .map_err(|_| io::Error::new(io::ErrorKind::Other, "unable to encode"))?; + .map_err(|_| io::Error::other("unable to encode"))?; let body = body.freeze(); write_all(writer, 1, body.as_ref())?; diff --git a/src/sinks/amqp/integration_tests.rs b/src/sinks/amqp/integration_tests.rs index b5f1bdd320d6d..be30e93a6a37b 100644 --- a/src/sinks/amqp/integration_tests.rs +++ b/src/sinks/amqp/integration_tests.rs @@ -3,16 +3,18 @@ use crate::{ amqp::await_connection, config::{SinkConfig, SinkContext}, shutdown::ShutdownSignal, - template::Template, + sinks::amqp::channel::new_channel_pool, + template::{Template, UnsignedIntTemplate}, test_util::{ components::{run_and_assert_sink_compliance, SINK_TAGS}, random_lines_with_stream, random_string, }, SourceSender, }; +use config::AmqpPropertiesConfig; use futures::StreamExt; -use std::{collections::HashSet, sync::Arc, time::Duration}; -use vector_lib::config::LogNamespace; +use std::{collections::HashSet, time::Duration}; +use vector_lib::{config::LogNamespace, event::LogEvent}; pub fn make_config() -> AmqpSinkConfig { let mut config = AmqpSinkConfig { @@ -21,9 +23,9 @@ pub fn make_config() -> AmqpSinkConfig { }; let user = std::env::var("AMQP_USER").unwrap_or_else(|_| "guest".to_string()); let pass = 
std::env::var("AMQP_PASSWORD").unwrap_or_else(|_| "guest".to_string()); + let host = std::env::var("AMQP_HOST").unwrap_or_else(|_| "rabbitmq".to_string()); let vhost = std::env::var("AMQP_VHOST").unwrap_or_else(|_| "%2f".to_string()); - config.connection.connection_string = - format!("amqp://{}:{}@rabbitmq:5672/{}", user, pass, vhost); + config.connection.connection_string = format!("amqp://{user}:{pass}@{host}:5672/{vhost}"); config } @@ -35,8 +37,8 @@ async fn healthcheck() { let mut config = make_config(); config.exchange = Template::try_from(exchange.as_str()).unwrap(); await_connection(&config.connection).await; - let (_conn, channel) = config.connection.connect().await.unwrap(); - super::config::healthcheck(Arc::new(channel)).await.unwrap(); + let channels = new_channel_pool(&config).unwrap(); + super::config::healthcheck(channels).await.unwrap(); } #[tokio::test] @@ -124,6 +126,10 @@ async fn amqp_happy_path() { { let msg = try_msg.unwrap(); let s = String::from_utf8_lossy(msg.data.as_slice()).into_owned(); + + let msg_priority = *msg.properties.priority(); + assert_eq!(msg_priority, None); + out.push(s); } else { failures += 1; @@ -218,3 +224,124 @@ async fn amqp_round_trip() { assert_eq!(output.len(), nb_events_published); } + +async fn amqp_priority_with_template( + template: &str, + event_field_priority: Option, + expected_priority: Option, +) { + let mut config = make_config(); + let exchange = format!("test-{}-exchange", random_string(10)); + config.exchange = Template::try_from(exchange.as_str()).unwrap(); + config.properties = Some(AmqpPropertiesConfig { + priority: Some(UnsignedIntTemplate::try_from(template).unwrap()), + ..Default::default() + }); + + await_connection(&config.connection).await; + let (_conn, channel) = config.connection.connect().await.unwrap(); + let exchange_opts = lapin::options::ExchangeDeclareOptions { + auto_delete: true, + ..Default::default() + }; + channel + .exchange_declare( + &exchange, + lapin::ExchangeKind::Fanout, + exchange_opts, + lapin::types::FieldTable::default(), + ) + .await + .unwrap(); + + let cx = SinkContext::default(); + let (sink, healthcheck) = config.build(cx).await.unwrap(); + healthcheck.await.expect("Health check failed"); + + // prepare consumer + let queue = format!("test-{}-queue", random_string(10)); + let queue_opts = lapin::options::QueueDeclareOptions { + auto_delete: true, + ..Default::default() + }; + let queue_args = { + let mut args = lapin::types::FieldTable::default(); + args.insert( + lapin::types::ShortString::from("x-max-priority"), + lapin::types::AMQPValue::ShortInt(10), // Maximum priority value + ); + args + }; + channel + .queue_declare(&queue, queue_opts, queue_args) + .await + .unwrap(); + + channel + .queue_bind( + &queue, + &exchange, + "", + lapin::options::QueueBindOptions::default(), + lapin::types::FieldTable::default(), + ) + .await + .unwrap(); + + let consumer = format!("test-{}-consumer", random_string(10)); + let mut consumer = channel + .basic_consume( + &queue, + &consumer, + lapin::options::BasicConsumeOptions::default(), + lapin::types::FieldTable::default(), + ) + .await + .unwrap(); + + // Send a single event with a priority defined in the event + let input = random_string(100); + let event = { + let mut event = LogEvent::from_str_legacy(&input); + if let Some(priority) = event_field_priority { + event.insert("priority", priority); + } + event + }; + + let events = futures::stream::iter(vec![event]); + run_and_assert_sink_compliance(sink, events, &SINK_TAGS).await; + + if let 
Ok(Some(try_msg)) = tokio::time::timeout(Duration::from_secs(10), consumer.next()).await + { + let msg = try_msg.unwrap(); + let msg_priority = *msg.properties.priority(); + let output = String::from_utf8_lossy(msg.data.as_slice()).into_owned(); + + assert_eq!(msg_priority, expected_priority); + assert_eq!(output, input); + } else { + panic!("Did not consume message in time."); + } +} + +#[tokio::test] +async fn amqp_priority_template_variable() { + crate::test_util::trace_init(); + + amqp_priority_with_template("{{ priority }}", Some(5), Some(5)).await; +} + +#[tokio::test] +async fn amqp_priority_template_constant() { + crate::test_util::trace_init(); + + amqp_priority_with_template("5", None, Some(5)).await; +} + +#[tokio::test] +async fn amqp_priority_template_out_of_bounds() { + crate::test_util::trace_init(); + + amqp_priority_with_template("100000", None, Some(u8::MAX)).await; +} diff --git a/src/sinks/amqp/mod.rs b/src/sinks/amqp/mod.rs index 749f892f1ccd2..6b478c4adc10d 100644 --- a/src/sinks/amqp/mod.rs +++ b/src/sinks/amqp/mod.rs @@ -1,5 +1,6 @@ //! `AMQP` sink. //! Handles version AMQP 0.9.1 which is used by RabbitMQ. +mod channel; mod config; mod encoder; mod request_builder; @@ -15,7 +16,5 @@ use snafu::Snafu; #[derive(Debug, Snafu)] enum BuildError { #[snafu(display("creating amqp producer failed: {}", source))] - AmqpCreateFailed { - source: Box, - }, + AmqpCreateFailed { source: vector_common::Error }, } diff --git a/src/sinks/amqp/service.rs b/src/sinks/amqp/service.rs index babc502b9c193..cee7d20344bd7 100644 --- a/src/sinks/amqp/service.rs +++ b/src/sinks/amqp/service.rs @@ -5,10 +5,9 @@ use bytes::Bytes; use futures::future::BoxFuture; use lapin::{options::BasicPublishOptions, BasicProperties}; use snafu::Snafu; -use std::{ - sync::Arc, - task::{Context, Poll}, -}; +use std::task::{Context, Poll}; + +use super::channel::AmqpSinkChannels; /// The request contains the data to send to `AMQP` together /// with the information need to route the message. @@ -22,7 +21,7 @@ pub(super) struct AmqpRequest { } impl AmqpRequest { - pub(super) fn new( + pub(super) const fn new( body: Bytes, exchange: String, routing_key: String, @@ -79,11 +78,11 @@ impl DriverResponse for AmqpResponse { /// The tower service that handles the actual sending of data to `AMQP`. 
pub(super) struct AmqpService { - pub(super) channel: Arc, + pub(super) channels: AmqpSinkChannels, } #[derive(Debug, Snafu)] -pub(super) enum AmqpError { +pub enum AmqpError { #[snafu(display("Failed retrieving Acknowledgement: {}", error))] AcknowledgementFailed { error: lapin::Error }, @@ -92,6 +91,15 @@ pub(super) enum AmqpError { #[snafu(display("Received Negative Acknowledgement from AMQP broker."))] Nack, + + #[snafu(display("Failed to open AMQP channel: {}", error))] + ConnectFailed { error: vector_common::Error }, + + #[snafu(display("Channel is not writeable: {:?}", state))] + ChannelClosed { state: lapin::ChannelState }, + + #[snafu(display("Channel pool error: {}", error))] + PoolError { error: vector_common::Error }, } impl Service for AmqpService { @@ -106,9 +114,13 @@ impl Service for AmqpService { } fn call(&mut self, req: AmqpRequest) -> Self::Future { - let channel = Arc::clone(&self.channel); + let channel = self.channels.clone(); Box::pin(async move { + let channel = channel.get().await.map_err(|error| AmqpError::PoolError { + error: Box::new(error), + })?; + let byte_size = req.body.len(); let fut = channel .basic_publish( diff --git a/src/sinks/amqp/sink.rs b/src/sinks/amqp/sink.rs index 922065a58a7a3..ca40fe46b21e5 100644 --- a/src/sinks/amqp/sink.rs +++ b/src/sinks/amqp/sink.rs @@ -1,10 +1,10 @@ //! The sink for the `AMQP` sink that wires together the main stream that takes the //! event and sends it to `AMQP`. use crate::sinks::prelude::*; -use lapin::{options::ConfirmSelectOptions, BasicProperties}; +use lapin::BasicProperties; use serde::Serialize; -use std::sync::Arc; +use super::channel::AmqpSinkChannels; use super::{ config::{AmqpPropertiesConfig, AmqpSinkConfig}, encoder::AmqpEncoder, @@ -27,7 +27,7 @@ pub(super) struct AmqpEvent { } pub(super) struct AmqpSink { - pub(super) channel: Arc, + pub(super) channels: AmqpSinkChannels, exchange: Template, routing_key: Option