Skip to content

Commit af156fa

Browse files
committed
Rebasing the changes from Qbeast-io#455
1 parent ea4bcd8 commit af156fa

File tree

3 files changed

+41
-9
lines changed

3 files changed

+41
-9
lines changed

core/src/main/scala/io/qbeast/spark/writer/Rollup.scala

+8-5
Original file line numberDiff line numberDiff line change
@@ -60,12 +60,15 @@ private[writer] class Rollup(limit: Double) {
6060
val cubeId = queue.dequeue()
6161
val group = groups(cubeId)
6262
if (group.size < limit && !cubeId.isRoot) {
63-
val Some(parentCubeId) = cubeId.parent
64-
if (groups.contains(parentCubeId)) {
65-
groups(parentCubeId).add(group)
63+
val nextInLine = cubeId.nextSibling match {
64+
case Some(a) => a
65+
case None => cubeId.parent.get
66+
}
67+
if (groups.contains(nextInLine)) {
68+
groups(nextInLine).add(group)
6669
} else {
67-
groups.put(parentCubeId, group)
68-
queue.enqueue(parentCubeId)
70+
groups.put(nextInLine, group)
71+
queue.enqueue(nextInLine)
6972
}
7073
groups.remove(cubeId)
7174
}

src/test/scala/io/qbeast/spark/delta/DeltaRollupDataWriterTest.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ class DeltaRollupDataWriterTest extends QbeastIntegrationTestSpec {
7171
}
7272

7373
it should "compute rollup correctly when optimizing" in
74-
withSparkAndTmpDir { (spark, tmpDir) =>
74+
withSparkAndTmpDir { (_, tmpDir) =>
7575
val revision =
7676
Revision(1L, 0, QTableID(tmpDir), 20, Vector(EmptyTransformer("col_1")), Vector.empty)
7777

@@ -86,7 +86,7 @@ class DeltaRollupDataWriterTest extends QbeastIntegrationTestSpec {
8686
Map(root -> 20L, c1 -> 1L, c2 -> 20L))
8787

8888
val rollup = DeltaRollupDataWriter.computeRollup(tc)
89-
rollup shouldBe Map(root -> root, c1 -> root, c2 -> c2)
89+
rollup shouldBe Map(root -> root, c1 -> c2, c2 -> c2)
9090
}
9191

9292
}

src/test/scala/io/qbeast/spark/writer/RollupTest.scala

+31-2
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import org.scalatest.matchers.should.Matchers
2424
*/
2525
class RollupTest extends AnyFlatSpec with Matchers {
2626

27-
"Rollup" should "work correctly" in {
27+
"Rollup" should "work correctly with basic cube structure" in {
2828
val root = CubeId.root(1)
2929
val c0 = root.firstChild
3030
val c1 = c0.nextSibling.get
@@ -44,8 +44,37 @@ class RollupTest extends AnyFlatSpec with Matchers {
4444
result(root) shouldBe root
4545
result(c00) shouldBe c0
4646
result(c01) shouldBe c0
47-
result(c10) shouldBe root
47+
result(c10) shouldBe c11 // rolling up into the next sibling.
4848
result(c11) shouldBe c11
4949
}
5050

51+
it should "handle empty rollup" in {
52+
val result = new Rollup(3).compute()
53+
result shouldBe empty
54+
}
55+
56+
it should "handle single cube" in {
57+
val root = CubeId.root(1)
58+
val result = new Rollup(3)
59+
.populate(root, 2)
60+
.compute()
61+
62+
result(root) shouldBe root
63+
}
64+
65+
it should "roll up to parent when size exceeds limit" in {
66+
val root = CubeId.root(1)
67+
val kids = root.children.toSeq
68+
val child = kids(0)
69+
val grandChild = kids(1)
70+
71+
val result = new Rollup(2)
72+
.populate(root, 1)
73+
.populate(child, 2)
74+
.populate(grandChild, 3) // Exceeds limit
75+
.compute()
76+
77+
result(grandChild) shouldBe grandChild
78+
}
79+
5180
}

0 commit comments

Comments
 (0)