Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions contract-tests/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ async fn status() -> impl Responder {
"client-prereq-events".to_string(),
"event-gzip".to_string(),
"optional-event-gzip".to_string(),
"instance-id".to_string(),
],
})
}
Expand Down
16 changes: 9 additions & 7 deletions launchdarkly-server-sdk/src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -175,16 +175,18 @@ impl Client {
}

let tags = config.application_tag();
let instance_id = config.instance_id().to_string();

let endpoints = config.service_endpoints_builder().build()?;

let mut event_processor_builder = config.event_processor_builder().to_owned();
event_processor_builder.set_instance_id(instance_id.clone());
let event_processor =
config
.event_processor_builder()
.build(&endpoints, config.sdk_key(), tags.clone())?;
let data_source =
config
.data_source_builder()
.build(&endpoints, config.sdk_key(), tags.clone())?;
event_processor_builder.build(&endpoints, config.sdk_key(), tags.clone())?;

let mut data_source_builder = config.data_source_builder().to_owned();
data_source_builder.set_instance_id(instance_id);
let data_source = data_source_builder.build(&endpoints, config.sdk_key(), tags.clone())?;
let data_store = config.data_store_builder().build()?;

let events_default = EventsScope {
Expand Down
60 changes: 60 additions & 0 deletions launchdarkly-server-sdk/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ pub struct Config {
data_source_builder: Box<dyn DataSourceFactory>,
event_processor_builder: Box<dyn EventProcessorFactory>,
application_tag: Option<String>,
instance_id: String,
offline: bool,
daemon_mode: bool,
}
Expand Down Expand Up @@ -182,6 +183,13 @@ impl Config {
pub fn application_tag(&self) -> &Option<String> {
&self.application_tag
}

/// Returns the identifier unique to this SDK instance.
///
/// The value is a v4 UUID created exactly once while the [Config] is being built; it is
/// attached to outbound requests via the `X-LaunchDarkly-Instance-Id` HTTP header for as
/// long as this SDK instance lives.
pub fn instance_id(&self) -> &str {
    self.instance_id.as_str()
}
}

/// Error type used to represent failures when building a Config instance.
Expand Down Expand Up @@ -381,13 +389,21 @@ impl ConfigBuilder {
_ => None,
};

// Per SCMP-server-connection-minutes-polling, every polling request must carry a
// per-SDK-instance v4 UUID. We generate it once here, store it on Config, and pass it
// into the data source, feature requester, and event processor so that streaming,
// polling, and event requests all carry the same stable identifier for the lifetime
// of this client.
let instance_id = uuid::Uuid::new_v4().to_string();

Ok(Config {
sdk_key: self.sdk_key,
service_endpoints_builder,
data_store_builder,
data_source_builder,
event_processor_builder,
application_tag,
instance_id,
offline: self.offline,
daemon_mode: self.daemon_mode,
})
Expand Down Expand Up @@ -431,6 +447,50 @@ mod tests {
assert_eq!(None, config.application_tag);
}

#[test]
#[cfg(any(
    feature = "hyper-rustls-native-roots",
    feature = "hyper-rustls-webpki-roots",
    feature = "native-tls"
))]
fn instance_id_is_a_uuid_v4() {
    // A freshly built Config must carry an instance id that parses as a random (v4) UUID.
    let cfg = ConfigBuilder::new("sdk-key")
        .build()
        .expect("config should build");

    let id = uuid::Uuid::parse_str(cfg.instance_id())
        .expect("instance id should be a parseable UUID");
    let version = id.get_version().expect("uuid should have a version");
    assert_eq!(
        uuid::Version::Random,
        version,
        "instance id must be UUID v4"
    );
}

#[test]
#[cfg(any(
    feature = "hyper-rustls-native-roots",
    feature = "hyper-rustls-webpki-roots",
    feature = "native-tls"
))]
fn instance_id_is_unique_per_config() {
    // Every ConfigBuilder::build call models a distinct SDK instance; server-side
    // connection-minutes accounting relies on each one carrying its own GUID.
    let first = ConfigBuilder::new("sdk-key")
        .build()
        .expect("config should build");
    let second = ConfigBuilder::new("sdk-key")
        .build()
        .expect("config should build");

    assert!(!first.instance_id().is_empty());
    assert!(!second.instance_id().is_empty());
    assert_ne!(
        first.instance_id(),
        second.instance_id(),
        "each SDK instance should generate its own instance id"
    );
}

#[test_case("id", "version", Some("application-id/id application-version/version".to_string()))]
#[test_case("Invalid id", "version", Some("application-version/version".to_string()))]
#[test_case("id", "Invalid version", Some("application-id/id".to_string()))]
Expand Down
139 changes: 137 additions & 2 deletions launchdarkly-server-sdk/src/data_source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use crate::feature_requester::FeatureRequesterError;
use crate::feature_requester_builders::FeatureRequesterFactory;
use crate::reqwest::is_http_error_recoverable;
use crate::stores::store::{DataStore, UpdateError};
use crate::LAUNCHDARKLY_TAGS_HEADER;
use crate::{LAUNCHDARKLY_INSTANCE_ID_HEADER, LAUNCHDARKLY_TAGS_HEADER};
use es::{Client, ClientBuilder, ReconnectOptionsBuilder};
use eventsource_client as es;
use futures::StreamExt;
Expand Down Expand Up @@ -75,6 +75,7 @@ impl StreamingDataSource {
sdk_key: &str,
initial_reconnect_delay: Duration,
tags: &Option<String>,
instance_id: Option<&str>,
transport: T,
) -> std::result::Result<Self, es::Error> {
let stream_url = format!("{base_url}/all");
Expand All @@ -91,6 +92,10 @@ impl StreamingDataSource {
.header("Authorization", sdk_key)?
.header("User-Agent", &crate::USER_AGENT)?;

if let Some(instance_id) = instance_id {
client_builder = client_builder.header(LAUNCHDARKLY_INSTANCE_ID_HEADER, instance_id)?;
}

if let Some(tags) = tags {
client_builder = client_builder.header(LAUNCHDARKLY_TAGS_HEADER, tags)?;
}
Expand Down Expand Up @@ -374,7 +379,15 @@ mod tests {

use super::{DataSource, PollingDataSource, StreamingDataSource};
use crate::feature_requester_builders::HttpFeatureRequesterBuilder;
use crate::{stores::store::InMemoryDataStore, LAUNCHDARKLY_TAGS_HEADER};
use crate::{
stores::store::InMemoryDataStore, LAUNCHDARKLY_INSTANCE_ID_HEADER, LAUNCHDARKLY_TAGS_HEADER,
};

// Matches the lowercased canonical UUID v4 format, e.g.
// "550e8400-e29b-41d4-a716-446655440000". The third group must start with "4" (the UUID
// version nibble) and the fourth must start with one of 8/9/a/b (the RFC 4122 variant
// bits 10xx). Tests below generate ids via `uuid::Uuid::new_v4().to_string()` and match
// the resulting header value against this pattern, mirroring how ConfigBuilder::build
// produces the instance id.
const UUID_V4_REGEX: &str =
    r"^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$";

#[test_case(Some("application-id/abc:application-sha/xyz".into()), "application-id/abc:application-sha/xyz")]
#[test_case(None, Matcher::Missing)]
Expand All @@ -401,6 +414,7 @@ mod tests {
"sdk-key",
Duration::from_secs(0),
&tag,
None,
launchdarkly_sdk_transport::HyperTransport::new()
.expect("Failed to create streaming data source"),
)
Expand Down Expand Up @@ -490,4 +504,125 @@ mod tests {

mock.assert()
}

// Asserts that streaming requests carry the X-LaunchDarkly-Instance-Id header, that its
// value matches the value passed in, and that a UUID-v4-shaped value is accepted (this
// mirrors how the value is generated in ConfigBuilder::build).
#[tokio::test(flavor = "multi_thread")]
async fn streaming_source_passes_along_instance_id_header() {
    let mut server = mockito::Server::new_async().await;
    let mock = server
        .mock("GET", "/all")
        .with_status(200)
        .with_body("event:one\ndata:One\n\n")
        .expect_at_least(1)
        .match_header(
            LAUNCHDARKLY_INSTANCE_ID_HEADER,
            Matcher::Regex(UUID_V4_REGEX.into()),
        )
        .create_async()
        .await;

    let (shutdown_tx, _) = broadcast::channel::<()>(1);
    let initialized = Arc::new(AtomicBool::new(false));

    let instance_id = uuid::Uuid::new_v4().to_string();
    let streaming = StreamingDataSource::new(
        &server.url(),
        "sdk-key",
        Duration::from_secs(0),
        &None,
        Some(&instance_id),
        launchdarkly_sdk_transport::HyperTransport::new()
            .expect("Failed to create streaming data source"),
    )
    .unwrap();

    let data_store = Arc::new(RwLock::new(InMemoryDataStore::new()));

    let init_state = initialized.clone();
    streaming.subscribe(
        data_store,
        Arc::new(move |success| init_state.store(success, Ordering::SeqCst)),
        shutdown_tx.subscribe(),
    );

    // Poll for initialization for up to ~1s (10 x 100ms). Use the async sleep rather than
    // std::thread::sleep: blocking a tokio worker thread inside an async test can starve
    // the data source's own tasks on small runtimes.
    for _ in 0..10 {
        if initialized.load(Ordering::SeqCst) {
            break;
        }
        tokio::time::sleep(Duration::from_millis(100)).await;
    }

    let _ = shutdown_tx.send(());
    mock.assert()
}

// Asserts that polling requests carry the X-LaunchDarkly-Instance-Id header. The polling
// feature requester is what actually issues the HTTP request, so this is the level at
// which the spec's "every polling request must carry the header" requirement is verified.
#[tokio::test(flavor = "multi_thread")]
async fn polling_source_passes_along_instance_id_header() {
    let mut server = mockito::Server::new_async().await;
    let mock = server
        .mock("GET", "/sdk/latest-all")
        .with_status(200)
        .with_body("{}")
        .expect_at_least(1)
        .match_header(
            LAUNCHDARKLY_INSTANCE_ID_HEADER,
            Matcher::Regex(UUID_V4_REGEX.into()),
        )
        .create_async()
        .await;

    let (shutdown_tx, _) = broadcast::channel::<()>(1);
    let initialized = Arc::new(AtomicBool::new(false));

    let transport = launchdarkly_sdk_transport::HyperTransport::new()
        .expect("Failed to create transport for polling data source");
    let instance_id = uuid::Uuid::new_v4().to_string();
    let hyper_builder = HttpFeatureRequesterBuilder::new(&server.url(), "sdk-key", transport)
        .with_instance_id(&instance_id);

    let polling = PollingDataSource::new(
        Arc::new(Mutex::new(Box::new(hyper_builder))),
        Duration::from_secs(10),
        None,
    );

    let data_store = Arc::new(RwLock::new(InMemoryDataStore::new()));

    let init_state = initialized.clone();
    polling.subscribe(
        data_store,
        Arc::new(move |success| init_state.store(success, Ordering::SeqCst)),
        shutdown_tx.subscribe(),
    );

    // Poll for initialization for up to ~1s (10 x 100ms). Use the async sleep rather than
    // std::thread::sleep: blocking a tokio worker thread inside an async test can starve
    // the data source's own tasks on small runtimes.
    for _ in 0..10 {
        if initialized.load(Ordering::SeqCst) {
            break;
        }
        tokio::time::sleep(Duration::from_millis(100)).await;
    }

    let _ = shutdown_tx.send(());

    mock.assert()
}
}
Loading
Loading