Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/cubejs-schema-compiler/src/adapter/BaseQuery.js
Original file line number Diff line number Diff line change
Expand Up @@ -951,6 +951,7 @@ export class BaseQuery {
joinHints: this.options.joinHints,
cubestoreSupportMultistage: this.options.cubestoreSupportMultistage ?? getEnv('cubeStoreRollingWindowJoin'),
disableExternalPreAggregations: !!this.options.disableExternalPreAggregations,
convertTzForRawTimeDimension: !!this.options.convertTzForRawTimeDimension,
maskedMembers: this.options.maskedMembers,
memberToAlias: this.options.memberToAlias,
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5067,6 +5067,47 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL
}]
));

// Verifies the `convertTzForRawTimeDimension` query option: with timezone
// America/Los_Angeles, the raw `visitors.created_at` dimension is expected
// to come back shifted to 16:00 UTC (i.e. local midnight PST), while the
// day-granularity time dimension keeps its truncated local-day values.
// NOTE(review): expected rows assume a fixed PST offset (-08:00) over the
// 2017-01 date range — confirm against the fixture data.
it('raw time dimension with timezone', async () => runQueryTest(
{
measures: [
'visitors.visitor_revenue',
],
dimensions: ['visitors.created_at'],
timeDimensions: [{
dimension: 'visitors.created_at',
granularity: 'day',
dateRange: ['2017-01-01', '2017-01-30']
}],
timezone: 'America/Los_Angeles',
convertTzForRawTimeDimension: true,
order: [{
id: 'visitors.created_at'
}]
},
[
{
visitors__created_at: '2017-01-02T16:00:00.000Z',
visitors__created_at_day: '2017-01-02T00:00:00.000Z',
visitors__visitor_revenue: '100'
},
{
visitors__created_at: '2017-01-04T16:00:00.000Z',
visitors__created_at_day: '2017-01-04T00:00:00.000Z',
visitors__visitor_revenue: '200'
},
{
visitors__created_at: '2017-01-05T16:00:00.000Z',
visitors__created_at_day: '2017-01-05T00:00:00.000Z',
visitors__visitor_revenue: null
},
{
visitors__created_at: '2017-01-06T16:00:00.000Z',
visitors__created_at_day: '2017-01-06T00:00:00.000Z',
visitors__visitor_revenue: null
}
]
));

it('simple join with segment', async () => runQueryTest(
{
measures: [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ pub struct BaseQueryOptionsStatic {
pub disable_external_pre_aggregations: bool,
#[serde(rename = "preAggregationId")]
pub pre_aggregation_id: Option<String>,
#[serde(rename = "convertTzForRawTimeDimension")]
pub convert_tz_for_raw_time_dimension: Option<bool>,
#[serde(rename = "maskedMembers")]
pub masked_members: Option<Vec<String>>,
#[serde(rename = "memberToAlias", default)]
Expand Down
4 changes: 4 additions & 0 deletions rust/cubesqlplanner/cubesqlplanner/src/planner/base_query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@ impl<IT: InnerTypes> BaseQuery<IT> {
options.join_graph()?,
options.static_data().timezone.clone(),
options.static_data().export_annotated_sql,
options
.static_data()
.convert_tz_for_raw_time_dimension
.unwrap_or(false),
options.static_data().masked_members.clone(),
options.static_data().member_to_alias.clone(),
)?;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ use crate::cube_bridge::member_sql::FilterParamsColumn;
use crate::planner::query_tools::QueryTools;
use crate::planner::sql_evaluator::MemberSymbol;
use crate::planner::sql_templates::PlanSqlTemplates;
use crate::planner::{evaluate_with_context, FiltersContext, VisitorContext};
use crate::planner::visitor_context::evaluate_filter_with_context;
use crate::planner::{FiltersContext, VisitorContext};
use cubenativeutils::CubeError;
use std::rc::Rc;

Expand Down Expand Up @@ -109,7 +110,7 @@ impl TypedFilter {
}

let resolved = resolve_base_symbol(&self.member_evaluator);
let member_sql = evaluate_with_context(&resolved, context.clone(), plan_templates)?;
let member_sql = evaluate_filter_with_context(&resolved, context.clone(), plan_templates)?;

let filters_context = context.filters_context();
let ctx = FilterSqlContext {
Expand Down
7 changes: 7 additions & 0 deletions rust/cubesqlplanner/cubesqlplanner/src/planner/query_tools.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ pub struct QueryTools {
params_allocator: Rc<RefCell<ParamsAllocator>>,
evaluator_compiler: Rc<RefCell<Compiler>>,
timezone: Tz,
convert_tz_for_raw_time_dimension: bool,
masked_members: HashSet<String>,
}

Expand All @@ -41,6 +42,7 @@ impl QueryTools {
join_graph: Rc<dyn JoinGraph>,
timezone_name: Option<String>,
export_annotated_sql: bool,
convert_tz_for_raw_time_dimension: bool,
masked_members: Option<Vec<String>>,
member_to_alias: Option<HashMap<String, String>>,
) -> Result<Rc<Self>, CubeError> {
Expand All @@ -67,6 +69,7 @@ impl QueryTools {
params_allocator: Rc::new(RefCell::new(ParamsAllocator::new(export_annotated_sql))),
evaluator_compiler,
timezone,
convert_tz_for_raw_time_dimension,
masked_members: masked_members.unwrap_or_default().into_iter().collect(),
}))
}
Expand Down Expand Up @@ -96,6 +99,10 @@ impl QueryTools {
self.timezone
}

/// Whether timezone conversion should also be applied to raw (non-granular)
/// time dimensions; mirrors the `convertTzForRawTimeDimension` query option.
pub fn convert_tz_for_raw_time_dimension(&self) -> bool {
    self.convert_tz_for_raw_time_dimension
}

pub fn join_for_hints(
&self,
hints: &JoinHints,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ impl SqlNode for EvaluateSqlNode {
Ok(res)
}
MemberSymbol::TimeDimension(ev) => {
let visitor = visitor.with_ignore_tz_convert();
let res = visitor.apply(&ev.base_symbol(), node_processor.clone(), templates)?;
Ok(res)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -257,12 +257,13 @@ impl SqlNodesFactory {
let input: Rc<dyn SqlNode> = CaseSqlNode::new(input);
input
};
let input: Rc<dyn SqlNode> =
TimeDimensionNode::new(self.dimensions_with_ignored_timezone.clone(), input);

let input: Rc<dyn SqlNode> =
AutoPrefixSqlNode::new(input, self.cube_name_references.clone());

let input: Rc<dyn SqlNode> =
TimeDimensionNode::new(self.dimensions_with_ignored_timezone.clone(), input);

let input = if !self.calendar_time_shifts.is_empty() {
CalendarTimeShiftSqlNode::new(self.calendar_time_shifts.clone(), input)
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,10 +53,11 @@ impl SqlNode for TimeDimensionNode {
);
}

let converted_tz = if self
let skip_convert_tz = self
.dimensions_with_ignored_timezone
.contains(&ev.full_name())
{
.contains(&ev.full_name());

let converted_tz = if skip_convert_tz {
input_sql
} else {
templates.convert_tz(input_sql)?
Expand All @@ -68,6 +69,16 @@ impl SqlNode for TimeDimensionNode {
};
Ok(res)
}
MemberSymbol::Dimension(ev) => {
if !visitor.ignore_tz_convert()
&& query_tools.convert_tz_for_raw_time_dimension()
&& ev.dimension_type() == "time"
{
Ok(templates.convert_tz(input_sql)?)
} else {
Ok(input_sql)
}
}
_ => Ok(input_sql),
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ pub struct SqlEvaluatorVisitor {
query_tools: Rc<QueryTools>,
cube_ref_evaluator: Rc<CubeRefEvaluator>,
all_filters: Option<Filter>, //To pass to FILTER_PARAMS and FILTER_GROUP
ignore_tz_convert: bool,
}

impl SqlEvaluatorVisitor {
Expand All @@ -25,9 +26,16 @@ impl SqlEvaluatorVisitor {
query_tools,
cube_ref_evaluator,
all_filters,
ignore_tz_convert: false,
}
}

pub fn with_ignore_tz_convert(&self) -> Self {
let mut self_copy = self.clone();
self_copy.ignore_tz_convert = true;
self_copy
}

pub fn all_filters(&self) -> Option<Filter> {
self.all_filters.clone()
}
Expand All @@ -48,6 +56,10 @@ impl SqlEvaluatorVisitor {
Ok(result)
}

/// Whether timezone conversion is currently suppressed for this visitor
/// (see `with_ignore_tz_convert`).
pub fn ignore_tz_convert(&self) -> bool {
    self.ignore_tz_convert
}

pub fn evaluate_cube_ref(
&self,
cube_ref: &CubeRef,
Expand Down
13 changes: 13 additions & 0 deletions rust/cubesqlplanner/cubesqlplanner/src/planner/visitor_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,19 @@ pub fn evaluate_with_context(
visitor.apply(node, node_processor, templates)
}

/// Evaluates `node` to SQL like `evaluate_with_context`, but with timezone
/// conversion disabled on the visitor — presumably so filter SQL is built
/// from the member expression without the raw-time-dimension tz conversion
/// (see its use in `TypedFilter`).
pub fn evaluate_filter_with_context(
    node: &Rc<MemberSymbol>,
    context: Rc<VisitorContext>,
    templates: &PlanSqlTemplates,
) -> Result<String, CubeError> {
    context
        .make_visitor(context.query_tools())
        .with_ignore_tz_convert()
        .apply(node, context.node_processor(), templates)
}

pub fn evaluate_sql_call_with_context(
sql_call: &Rc<SqlCall>,
context: Rc<VisitorContext>,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,8 @@ pub struct MockBaseQueryOptions {
#[builder(default)]
pre_aggregation_id: Option<String>,
#[builder(default)]
convert_tz_for_raw_time_dimension: Option<bool>,
#[builder(default)]
masked_members: Option<Vec<String>>,
#[builder(default)]
member_to_alias: Option<HashMap<String, String>>,
Expand All @@ -91,6 +93,7 @@ impl_static_data!(
cubestore_support_multistage,
disable_external_pre_aggregations,
pre_aggregation_id,
convert_tz_for_raw_time_dimension,
masked_members,
member_to_alias
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ pub struct YamlBaseQueryOptions {
pub disable_external_pre_aggregations: Option<bool>,
#[serde(default)]
pub pre_aggregation_id: Option<String>,
#[serde(default)]
pub convert_tz_for_raw_time_dimension: Option<bool>,
#[serde(default, rename = "joinHints")]
pub join_hints: Option<Vec<Vec<String>>>,
#[serde(default, rename = "memberToAlias")]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ pub struct TestContext {

impl TestContext {
pub fn new(schema: MockSchema) -> Result<Self, CubeError> {
Self::new_with_options(schema, Tz::UTC, None, None, false)
Self::new_with_options(schema, Tz::UTC, None, None, false, false)
}

#[allow(dead_code)]
Expand All @@ -48,6 +48,7 @@ impl TestContext {
join_graph,
Some(Tz::UTC.to_string()),
false,
false,
None,
None,
)?;
Expand All @@ -70,14 +71,14 @@ impl TestContext {

#[allow(dead_code)]
pub fn new_with_timezone(schema: MockSchema, timezone: Tz) -> Result<Self, CubeError> {
Self::new_with_options(schema, timezone, None, None, false)
Self::new_with_options(schema, timezone, None, None, false, false)
}

pub fn new_with_masked_members(
schema: MockSchema,
masked_members: Vec<String>,
) -> Result<Self, CubeError> {
Self::new_with_options(schema, Tz::UTC, Some(masked_members), None, false)
Self::new_with_options(schema, Tz::UTC, Some(masked_members), None, false, false)
}

fn for_options(&self, options: &dyn BaseQueryOptions) -> Result<Self, CubeError> {
Expand All @@ -94,6 +95,9 @@ impl TestContext {
static_data.masked_members.clone(),
static_data.member_to_alias.clone(),
static_data.export_annotated_sql,
static_data
.convert_tz_for_raw_time_dimension
.unwrap_or(false),
)
}

Expand All @@ -103,6 +107,7 @@ impl TestContext {
masked_members: Option<Vec<String>>,
member_to_alias: Option<std::collections::HashMap<String, String>>,
export_annotated_sql: bool,
convert_tz_for_raw_time_dimension: bool,
) -> Result<Self, CubeError> {
let base_tools = schema.create_base_tools_with_timezone(timezone.to_string())?;
let join_graph = Rc::new(schema.create_join_graph()?);
Expand All @@ -117,6 +122,7 @@ impl TestContext {
join_graph,
Some(timezone.to_string()),
export_annotated_sql,
convert_tz_for_raw_time_dimension,
masked_members,
member_to_alias,
)?;
Expand Down Expand Up @@ -343,6 +349,7 @@ impl TestContext {
.unwrap_or(false),
)
.pre_aggregation_id(yaml_options.pre_aggregation_id)
.convert_tz_for_raw_time_dimension(yaml_options.convert_tz_for_raw_time_dimension)
.member_to_alias(yaml_options.member_to_alias)
.masked_members(yaml_options.masked_members)
.timezone(yaml_options.timezone)
Expand Down Expand Up @@ -447,8 +454,9 @@ impl TestContext {
let tables = Self::collect_pre_agg_source_tables(pre_agg.source());
let yaml = Self::build_pre_agg_query_yaml(pre_agg);

let pa_ctx = Self::new_with_options(self.schema.clone(), Tz::UTC, None, None, false)
.expect("Failed to create pre-agg context");
let pa_ctx =
Self::new_with_options(self.schema.clone(), Tz::UTC, None, None, false, false)
.expect("Failed to create pre-agg context");

let (raw_sql, _) = pa_ctx
.build_sql_with_used_pre_aggregations(&yaml)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
---
source: cubesqlplanner/src/tests/integration/time_dimensions.rs
expression: result
---
orders__created_at | orders__created_at_month | orders__count
--------------------+--------------------------+--------------
2024-01-15 02:00:00 | 2024-01-01 00:00:00 | 1
2024-01-15 07:00:00 | 2024-01-01 00:00:00 | 1
2024-01-20 06:00:00 | 2024-01-01 00:00:00 | 1
2024-02-10 01:00:00 | 2024-02-01 00:00:00 | 1
2024-02-15 03:00:00 | 2024-02-01 00:00:00 | 1
2024-03-01 08:00:00 | 2024-03-01 00:00:00 | 1
2024-03-10 00:00:00 | 2024-03-01 00:00:00 | 1
2024-03-15 05:00:00 | 2024-03-01 00:00:00 | 1
2024-04-01 03:00:00 | 2024-04-01 00:00:00 | 1
Original file line number Diff line number Diff line change
Expand Up @@ -360,3 +360,31 @@ async fn test_multiple_time_dimensions() {
insta::assert_snapshot!(result);
}
}

/// Integration test for `convert_tz_for_raw_time_dimension: true`: the raw
/// `orders.created_at` dimension should be converted into the query timezone
/// (America/Los_Angeles) while the month-granularity time dimension keeps its
/// truncated values; expected output is pinned by the insta snapshot.
#[tokio::test(flavor = "multi_thread")]
async fn test_convert_tz_for_raw_time_dimensions() {
    let ctx = create_context();

    let query = indoc! {"
        measures:
        - orders.count
        dimensions:
        - orders.created_at
        time_dimensions:
        - dimension: orders.created_at
          granularity: month
        order:
        - id: orders.created_at
        timezone: \"America/Los_Angeles\"
        convert_tz_for_raw_time_dimension: true
    "};

    // SQL generation must succeed unconditionally.
    ctx.build_sql(query).unwrap();

    // Snapshot is asserted only when execution returns a result —
    // presumably when a Postgres test backend is available; confirm.
    if let Some(result) = ctx
        .try_execute_pg(query, "integration_basic_tables.sql")
        .await
    {
        insta::assert_snapshot!(result);
    }
}
Loading