From f619658e2c3db2f96154fcf61c33c063bbc726a8 Mon Sep 17 00:00:00 2001
From: Pascal Seitz
Date: Thu, 17 Mar 2022 16:37:02 +0800
Subject: [PATCH] rename

---
 src/aggregation/agg_req.rs                    | 15 ++++++++-------
 src/aggregation/agg_result.rs                 | 10 +++++-----
 src/aggregation/bucket/histogram/histogram.rs |  6 +++---
 src/aggregation/intermediate_agg_result.rs    |  4 ++--
 4 files changed, 18 insertions(+), 17 deletions(-)

diff --git a/src/aggregation/agg_req.rs b/src/aggregation/agg_req.rs
index 10b6db9892..a8f8f059cf 100644
--- a/src/aggregation/agg_req.rs
+++ b/src/aggregation/agg_req.rs
@@ -61,12 +61,12 @@ pub type Aggregations = HashMap<String, Aggregation>;
 
 /// Like Aggregations, but optimized to work with the aggregation result
 #[derive(Clone, Debug)]
-pub(crate) struct CollectorAggregations {
+pub(crate) struct AggregationsInternal {
     pub(crate) metrics: VecWithNames<MetricAggregation>,
-    pub(crate) buckets: VecWithNames<CollectorBucketAggregation>,
+    pub(crate) buckets: VecWithNames<BucketAggregationInternal>,
 }
 
-impl From<Aggregations> for CollectorAggregations {
+impl From<Aggregations> for AggregationsInternal {
     fn from(aggs: Aggregations) -> Self {
         let mut metrics = vec![];
         let mut buckets = vec![];
@@ -74,7 +74,7 @@ impl From<Aggregations> for CollectorAggregations {
             match agg {
                 Aggregation::Bucket(bucket) => buckets.push((
                     key,
-                    CollectorBucketAggregation {
+                    BucketAggregationInternal {
                         bucket_agg: bucket.bucket_agg,
                         sub_aggregation: bucket.sub_aggregation.into(),
                     },
@@ -90,15 +90,16 @@ impl From<Aggregations> for CollectorAggregations {
 }
 
 #[derive(Clone, Debug)]
-pub(crate) struct CollectorBucketAggregation {
+// Like BucketAggregation, but optimized to work with the result
+pub(crate) struct BucketAggregationInternal {
     /// Bucket aggregation strategy to group documents.
     pub bucket_agg: BucketAggregationType,
     /// The sub_aggregations in the buckets. Each bucket will aggregate on the document set in the
     /// bucket.
-    pub sub_aggregation: CollectorAggregations,
+    pub sub_aggregation: AggregationsInternal,
 }
 
-impl CollectorBucketAggregation {
+impl BucketAggregationInternal {
     pub(crate) fn as_histogram(&self) -> &HistogramAggregation {
         match &self.bucket_agg {
             BucketAggregationType::Range(_) => panic!("unexpected aggregation"),
diff --git a/src/aggregation/agg_result.rs b/src/aggregation/agg_result.rs
index 05c08aad1d..6132ba7cb1 100644
--- a/src/aggregation/agg_result.rs
+++ b/src/aggregation/agg_result.rs
@@ -10,7 +10,7 @@ use std::collections::HashMap;
 use itertools::Itertools;
 use serde::{Deserialize, Serialize};
 
-use super::agg_req::{Aggregations, CollectorAggregations, CollectorBucketAggregation};
+use super::agg_req::{Aggregations, AggregationsInternal, BucketAggregationInternal};
 use super::bucket::intermediate_buckets_to_final_buckets;
 use super::intermediate_agg_result::{
     IntermediateAggregationResults, IntermediateBucketResult, IntermediateHistogramBucketEntry,
@@ -37,7 +37,7 @@ impl AggregationResults {
     /// for internal processing
     fn from_intermediate_and_req_internal(
         results: IntermediateAggregationResults,
-        req: &CollectorAggregations,
+        req: &AggregationsInternal,
     ) -> Self {
         let mut result = HashMap::default();
 
@@ -145,7 +145,7 @@ pub enum BucketResult {
 impl BucketResult {
     fn from_intermediate_and_req(
         bucket_result: IntermediateBucketResult,
-        req: &CollectorBucketAggregation,
+        req: &BucketAggregationInternal,
     ) -> Self {
         match bucket_result {
             IntermediateBucketResult::Range(range_map) => {
@@ -217,7 +217,7 @@ pub struct BucketEntry {
 impl BucketEntry {
     pub(crate) fn from_intermediate_and_req(
         entry: IntermediateHistogramBucketEntry,
-        req: &CollectorAggregations,
+        req: &AggregationsInternal,
     ) -> Self {
         BucketEntry {
             key: Key::F64(entry.key),
@@ -280,7 +280,7 @@ pub struct RangeBucketEntry {
 impl RangeBucketEntry {
     fn from_intermediate_and_req(
         entry: IntermediateRangeBucketEntry,
-        req: &CollectorAggregations,
+        req: &AggregationsInternal,
     ) -> Self {
         RangeBucketEntry {
             key: entry.key,
diff --git a/src/aggregation/bucket/histogram/histogram.rs b/src/aggregation/bucket/histogram/histogram.rs
index 4da8a434a7..47ce6b69ba 100644
--- a/src/aggregation/bucket/histogram/histogram.rs
+++ b/src/aggregation/bucket/histogram/histogram.rs
@@ -4,7 +4,7 @@ use std::fmt::Display;
 use itertools::Itertools;
 use serde::{Deserialize, Serialize};
 
-use crate::aggregation::agg_req::CollectorAggregations;
+use crate::aggregation::agg_req::AggregationsInternal;
 use crate::aggregation::agg_req_with_accessor::{
     AggregationsWithAccessor, BucketAggregationWithAccessor,
 };
@@ -389,7 +389,7 @@ fn get_bucket_val(val: f64, interval: f64, offset: f64) -> f64 {
 fn intermediate_buckets_to_final_buckets_fill_gaps(
     buckets: Vec<IntermediateHistogramBucketEntry>,
     histogram_req: &HistogramAggregation,
-    sub_aggregation: &CollectorAggregations,
+    sub_aggregation: &AggregationsInternal,
 ) -> Vec<BucketEntry> {
     // Generate the the full list of buckets without gaps.
     //
@@ -440,7 +440,7 @@ fn intermediate_buckets_to_final_buckets_fill_gaps(
 pub(crate) fn intermediate_buckets_to_final_buckets(
     buckets: Vec<IntermediateHistogramBucketEntry>,
     histogram_req: &HistogramAggregation,
-    sub_aggregation: &CollectorAggregations,
+    sub_aggregation: &AggregationsInternal,
 ) -> Vec<BucketEntry> {
     if histogram_req.min_doc_count() == 0 {
         // With min_doc_count != 0, we may need to add buckets, so that there are no
diff --git a/src/aggregation/intermediate_agg_result.rs b/src/aggregation/intermediate_agg_result.rs
index 023ff43c70..6c577944b9 100644
--- a/src/aggregation/intermediate_agg_result.rs
+++ b/src/aggregation/intermediate_agg_result.rs
@@ -8,7 +8,7 @@ use fnv::FnvHashMap;
 use itertools::Itertools;
 use serde::{Deserialize, Serialize};
 
-use super::agg_req::{BucketAggregationType, CollectorAggregations, MetricAggregation};
+use super::agg_req::{AggregationsInternal, BucketAggregationType, MetricAggregation};
 use super::metric::{IntermediateAverage, IntermediateStats};
 use super::segment_agg_result::{
     SegmentAggregationResultsCollector, SegmentBucketResultCollector, SegmentHistogramBucketEntry,
@@ -34,7 +34,7 @@ impl From<SegmentAggregationResultsCollector> for IntermediateAggregationResults
 }
 
 impl IntermediateAggregationResults {
-    pub(crate) fn empty_from_req(req: &CollectorAggregations) -> Self {
+    pub(crate) fn empty_from_req(req: &AggregationsInternal) -> Self {
         let metrics = if req.metrics.is_empty() {
             None
         } else {