Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: Remove lint exceptions for 2024 compatibility #2774

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,6 @@ opentelemetry-stdout = { path = "opentelemetry-stdout" }
[workspace.lints.rust]
rust_2024_compatibility = { level = "warn", priority = -1 }
# No need to enable those, because it either not needed or results in ugly syntax
edition_2024_expr_fragment_specifier = "allow"
if_let_rescope = "allow"
tail_expr_drop_order = "allow"

[workspace.lints.clippy]
Expand Down
6 changes: 3 additions & 3 deletions opentelemetry-otlp/src/exporter/tonic/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -244,15 +244,15 @@ impl TonicExporterBuilder {
&self,
env_override: &str,
) -> Result<Option<CompressionEncoding>, crate::Error> {
if let Some(compression) = self.tonic_config.compression {
match self.tonic_config.compression { Some(compression) => {
Ok(Some(compression.try_into()?))
} else if let Ok(compression) = env::var(env_override) {
} _ => if let Ok(compression) = env::var(env_override) {
Ok(Some(compression.parse::<Compression>()?.try_into()?))
} else if let Ok(compression) = env::var(OTEL_EXPORTER_OTLP_COMPRESSION) {
Ok(Some(compression.parse::<Compression>()?.try_into()?))
} else {
Ok(None)
}
}}
}

/// Build a new tonic log exporter
Expand Down
6 changes: 3 additions & 3 deletions opentelemetry-sdk/src/growable_array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,11 +73,11 @@ impl<
pub(crate) fn get(&self, index: usize) -> Option<&T> {
if index < self.count {
Some(&self.inline[index])
} else if let Some(ref overflow) = self.overflow {
} else { match self.overflow { Some(ref overflow) => {
overflow.get(index - MAX_INLINE_CAPACITY)
} else {
} _ => {
None
}
}}}
}

/// Returns the number of elements in the `GrowableArray`.
Expand Down
6 changes: 3 additions & 3 deletions opentelemetry-sdk/src/logs/simple_log_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -117,13 +117,13 @@ impl<T: LogExporter> LogProcessor for SimpleLogProcessor<T> {
fn shutdown(&self) -> OTelSdkResult {
self.is_shutdown
.store(true, std::sync::atomic::Ordering::Relaxed);
if let Ok(mut exporter) = self.exporter.lock() {
match self.exporter.lock() { Ok(mut exporter) => {
exporter.shutdown()
} else {
} _ => {
Err(OTelSdkError::InternalFailure(
"SimpleLogProcessor mutex poison at shutdown".into(),
))
}
}}
}

fn set_resource(&self, resource: &Resource) {
Expand Down
6 changes: 3 additions & 3 deletions opentelemetry-sdk/src/metrics/internal/aggregate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,13 +114,13 @@ impl AttributeSetFilter {
}

/// Invokes `run` with the attribute set, applying the configured filter
/// first when one is present.
///
/// With a filter: builds a filtered copy of `attrs` containing only the
/// key-values the predicate accepts, and passes that to `run`. Without a
/// filter: passes `attrs` through unchanged (no allocation).
pub(crate) fn apply(&self, attrs: &[KeyValue], run: impl FnOnce(&[KeyValue])) {
    // `match` instead of `if let … else` avoids the Rust 2024
    // `if_let_rescope` lint without an `allow` exception.
    match &self.filter {
        Some(filter) => {
            let filtered_attrs: Vec<KeyValue> =
                attrs.iter().filter(|kv| filter(kv)).cloned().collect();
            run(&filtered_attrs);
        }
        None => run(attrs),
    }
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1469,7 +1469,7 @@ mod tests {
test_name
);

if let Some(a) = a.as_any().downcast_ref::<Gauge<T>>() {
match a.as_any().downcast_ref::<Gauge<T>>() { Some(a) => {
let b = b.as_any().downcast_ref::<Gauge<T>>().unwrap();
assert_eq!(
a.data_points.len(),
Expand All @@ -1480,7 +1480,7 @@ mod tests {
for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
assert_gauge_data_points_eq(a, b, "mismatching gauge data points", test_name);
}
} else if let Some(a) = a.as_any().downcast_ref::<Sum<T>>() {
} _ => { match a.as_any().downcast_ref::<Sum<T>>() { Some(a) => {
let b = b.as_any().downcast_ref::<Sum<T>>().unwrap();
assert_eq!(
a.temporality, b.temporality,
Expand All @@ -1501,7 +1501,7 @@ mod tests {
for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
assert_sum_data_points_eq(a, b, "mismatching sum data points", test_name);
}
} else if let Some(a) = a.as_any().downcast_ref::<Histogram<T>>() {
} _ => { match a.as_any().downcast_ref::<Histogram<T>>() { Some(a) => {
let b = b.as_any().downcast_ref::<Histogram<T>>().unwrap();
assert_eq!(
a.temporality, b.temporality,
Expand All @@ -1517,7 +1517,7 @@ mod tests {
for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
assert_hist_data_points_eq(a, b, "mismatching hist data points", test_name);
}
} else if let Some(a) = a.as_any().downcast_ref::<ExponentialHistogram<T>>() {
} _ => { match a.as_any().downcast_ref::<ExponentialHistogram<T>>() { Some(a) => {
let b = b
.as_any()
.downcast_ref::<ExponentialHistogram<T>>()
Expand All @@ -1541,9 +1541,9 @@ mod tests {
test_name,
);
}
} else {
} _ => {
panic!("Aggregation of unknown types")
}
}}}}}}}}
}

fn assert_sum_data_points_eq<T: Number>(
Expand Down
24 changes: 12 additions & 12 deletions opentelemetry-sdk/src/metrics/internal/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -125,11 +125,11 @@ where

// Recheck both the provided and sorted orders after acquiring the write lock
// in case another thread has pushed an update in the meantime.
if let Some(tracker) = trackers.get(attributes) {
match trackers.get(attributes) { Some(tracker) => {
tracker.update(value);
} else if let Some(tracker) = trackers.get(sorted_attrs.as_slice()) {
} _ => { match trackers.get(sorted_attrs.as_slice()) { Some(tracker) => {
tracker.update(value);
} else if is_under_cardinality_limit(self.count.load(Ordering::SeqCst)) {
} _ => if is_under_cardinality_limit(self.count.load(Ordering::SeqCst)) {
let new_tracker = Arc::new(A::create(&self.config));
new_tracker.update(value);

Expand All @@ -138,16 +138,16 @@ where
trackers.insert(sorted_attrs, new_tracker);

self.count.fetch_add(1, Ordering::SeqCst);
} else if let Some(overflow_value) = trackers.get(stream_overflow_attributes().as_slice()) {
} else { match trackers.get(stream_overflow_attributes().as_slice()) { Some(overflow_value) => {
overflow_value.update(value);
} else {
} _ => {
let new_tracker = A::create(&self.config);
new_tracker.update(value);
trackers.insert(stream_overflow_attributes().clone(), Arc::new(new_tracker));
otel_warn!( name: "ValueMap.measure",
message = "Maximum data points for metric stream exceeded. Entry added to overflow. Subsequent overflows to same metric until next collect will not be logged."
);
}
}}}}}}
}

/// Iterate through all attribute sets and populate `DataPoints` in readonly mode.
Expand Down Expand Up @@ -187,24 +187,24 @@ where
));
}

if let Ok(mut trackers_collect) = self.trackers_for_collect().write() {
if let Ok(mut trackers_current) = self.trackers.write() {
match self.trackers_for_collect().write() { Ok(mut trackers_collect) => {
match self.trackers.write() { Ok(mut trackers_current) => {
swap(trackers_collect.deref_mut(), trackers_current.deref_mut());
self.count.store(0, Ordering::SeqCst);
} else {
} _ => {
otel_warn!(name: "MeterProvider.InternalError", message = "Metric collection failed. Report this issue in OpenTelemetry repo.", details ="ValueMap trackers lock poisoned");
return;
}
}}

let mut seen = HashSet::new();
for (attrs, tracker) in trackers_collect.drain() {
if seen.insert(Arc::as_ptr(&tracker)) {
dest.push(map_fn(attrs, tracker.clone_and_reset(&self.config)));
}
}
} else {
} _ => {
otel_warn!(name: "MeterProvider.InternalError", message = "Metric collection failed. Report this issue in OpenTelemetry repo.", details ="ValueMap trackers for collect lock poisoned");
}
}}
}
}

Expand Down
12 changes: 6 additions & 6 deletions opentelemetry-sdk/src/metrics/meter_provider.rs
Original file line number Diff line number Diff line change
Expand Up @@ -191,29 +191,29 @@ impl MeterProvider for SdkMeterProvider {
otel_info!(name: "MeterNameEmpty", message = "Meter name is empty; consider providing a meaningful name. Meter will function normally and the provided name will be used as-is.");
};

if let Ok(mut meters) = self.inner.meters.lock() {
if let Some(existing_meter) = meters.get(&scope) {
match self.inner.meters.lock() { Ok(mut meters) => {
match meters.get(&scope) { Some(existing_meter) => {
otel_debug!(
name: "MeterProvider.ExistingMeterReturned",
meter_name = scope.name(),
);
Meter::new(existing_meter.clone())
} else {
} _ => {
let new_meter = Arc::new(SdkMeter::new(scope.clone(), self.inner.pipes.clone()));
meters.insert(scope.clone(), new_meter.clone());
otel_debug!(
name: "MeterProvider.NewMeterCreated",
meter_name = scope.name(),
);
Meter::new(new_meter)
}
} else {
}}
} _ => {
otel_debug!(
name: "MeterProvider.NoOpMeterReturned",
meter_name = scope.name(),
);
Meter::new(Arc::new(NoopMeter::new()))
}
}}
}
}

Expand Down
12 changes: 6 additions & 6 deletions opentelemetry-sdk/src/metrics/periodic_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -358,20 +358,20 @@ impl<E: PushMetricExporter> PeriodicReaderInner<E> {

fn collect(&self, rm: &mut ResourceMetrics) -> MetricResult<()> {
let producer = self.producer.lock().expect("lock poisoned");
if let Some(p) = producer.as_ref() {
match producer.as_ref() { Some(p) => {
p.upgrade()
.ok_or_else(|| MetricError::Other("pipeline is dropped".into()))?
.produce(rm)?;
Ok(())
} else {
} _ => {
otel_warn!(
name: "PeriodReader.MeterProviderNotRegistered",
message = "PeriodicReader is not registered with MeterProvider. Metrics will not be collected. \
This occurs when a periodic reader is created but not associated with a MeterProvider \
by calling `.with_reader(reader)` on MeterProviderBuilder."
);
Err(MetricError::Other("MeterProvider is not registered".into()))
}
}}
}

fn collect_and_export(&self) -> OTelSdkResult {
Expand Down Expand Up @@ -429,16 +429,16 @@ impl<E: PushMetricExporter> PeriodicReaderInner<E> {
.send(Message::Flush(response_tx))
.map_err(|e| OTelSdkError::InternalFailure(e.to_string()))?;

if let Ok(response) = response_rx.recv() {
match response_rx.recv() { Ok(response) => {
// TODO: call exporter's force_flush method.
if response {
Ok(())
} else {
Err(OTelSdkError::InternalFailure("Failed to flush".into()))
}
} else {
} _ => {
Err(OTelSdkError::InternalFailure("Failed to flush".into()))
}
}}
}

fn shutdown(&self) -> OTelSdkResult {
Expand Down
6 changes: 3 additions & 3 deletions opentelemetry-sdk/src/trace/span_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -155,13 +155,13 @@ impl<T: SpanExporter> SpanProcessor for SimpleSpanProcessor<T> {
}

fn shutdown(&self) -> OTelSdkResult {
if let Ok(mut exporter) = self.exporter.lock() {
match self.exporter.lock() { Ok(mut exporter) => {
exporter.shutdown()
} else {
} _ => {
Err(OTelSdkError::InternalFailure(
"SimpleSpanProcessor mutex poison at shutdown".into(),
))
}
}}
}

fn set_resource(&mut self, resource: &Resource) {
Expand Down
6 changes: 3 additions & 3 deletions opentelemetry-zipkin/src/exporter/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ impl ZipkinExporterBuilder {
pub fn build(self) -> Result<ZipkinExporter, TraceError> {
let endpoint = Endpoint::new(self.service_addr);

if let Some(client) = self.client {
match self.client { Some(client) => {
let exporter = ZipkinExporter::new(
endpoint,
client,
Expand All @@ -87,9 +87,9 @@ impl ZipkinExporterBuilder {
.map_err::<Error, _>(Into::into)?,
);
Ok(exporter)
} else {
} _ => {
Err(Error::NoHttpClient.into())
}
}}
}

/// Assign client implementation
Expand Down
4 changes: 2 additions & 2 deletions opentelemetry/src/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ fn display_array_str<T: fmt::Display>(slice: &[T], fmt: &mut fmt::Formatter<'_>)
}

macro_rules! into_array {
($(($t:ty, $val:expr),)+) => {
($(($t:ty, $val:expr_2021),)+) => {
$(
impl From<$t> for Array {
fn from(t: $t) -> Self {
Expand Down Expand Up @@ -332,7 +332,7 @@ impl Value {
macro_rules! from_values {
(
$(
($t:ty, $val:expr);
($t:ty, $val:expr_2021);
)+
) => {
$(
Expand Down
12 changes: 6 additions & 6 deletions opentelemetry/src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -214,15 +214,15 @@ impl Context {
/// assert_eq!(cx_with_a_and_b.get::<ValueB>(), Some(&ValueB(42)));
/// ```
pub fn with_value<T: 'static + Send + Sync>(&self, value: T) -> Self {
let entries = if let Some(current_entries) = &self.entries {
let entries = match &self.entries { Some(current_entries) => {
let mut inner_entries = (**current_entries).clone();
inner_entries.insert(TypeId::of::<T>(), Arc::new(value));
Some(Arc::new(inner_entries))
} else {
} _ => {
let mut entries = EntryMap::default();
entries.insert(TypeId::of::<T>(), Arc::new(value));
Some(Arc::new(entries))
};
}};
Context {
entries,
#[cfg(feature = "trace")]
Expand Down Expand Up @@ -336,12 +336,12 @@ impl fmt::Debug for Context {
let mut entries = self.entries.as_ref().map_or(0, |e| e.len());
#[cfg(feature = "trace")]
{
if let Some(span) = &self.span {
match &self.span { Some(span) => {
dbg.field("span", &span.span_context());
entries += 1;
} else {
} _ => {
dbg.field("span", &"None");
}
}}
}

dbg.field("entries count", &entries).finish()
Expand Down
Loading
Loading