Commit 2c38972

committed
added test debug logs
1 parent d0e5d6e commit 2c38972

File tree

1 file changed (+12 −1 lines)

integration-tests/src/test/scala/org/apache/spark/sql/arangodb/datasource/write/OverwriteModeTest.scala

Lines changed: 12 additions & 1 deletion
@@ -3,6 +3,7 @@ package org.apache.spark.sql.arangodb.datasource.write
 import com.arangodb.ArangoCollection
 import com.arangodb.entity.BaseDocument
 import com.arangodb.model.OverwriteMode
+import com.arangodb.util.RawJson
 import org.apache.spark.SparkException
 import org.apache.spark.sql.SaveMode
 import org.apache.spark.sql.arangodb.commons.ArangoDBConf
@@ -14,6 +15,8 @@ import org.junit.jupiter.api.BeforeEach
 import org.junit.jupiter.params.ParameterizedTest
 import org.junit.jupiter.params.provider.MethodSource
 
+import scala.jdk.CollectionConverters.{iterableAsScalaIterableConverter, mapAsJavaMapConverter}
+
 
 class OverwriteModeTest extends BaseSparkTest {
 
@@ -165,7 +168,15 @@ class OverwriteModeTest extends BaseSparkTest {
       ))
       .save()
 
-    assertThat(collection.count().getCount).isEqualTo(1L)
+    val count = collection.count().getCount
+    if (count > 1L) {
+      // debug test failure
+      val allDocs = db.query("FOR d in @@col RETURN d", classOf[RawJson],
+        Map("@col" -> collectionName).asInstanceOf[Map[String, AnyRef]].asJava).asScala
+      println("docs in collection: ")
+      for (d <- allDocs) println(d.get())
+    }
+    assertThat(count).isEqualTo(1L)
     val c = collection.getDocument("Carlsen", classOf[BaseDocument])
     assertThat(c.getProperties.containsKey("name")).isTrue
    assertThat(c.getProperties.get("name")).isNull()
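
Note (not part of the commit): the debug block added above relies on an AQL collection bind parameter, where "@@col" in the query string is bound through the "@col" key of the bind-variable map. Below is a minimal standalone sketch of the same pattern, assuming the ArangoDB Java driver API shape used in this diff (query(String, Class, Map)) and Scala 2.13; the DebugDump object and printAllDocs helper are hypothetical names for illustration.

import com.arangodb.ArangoDatabase
import com.arangodb.util.RawJson

import scala.jdk.CollectionConverters._

object DebugDump {
  // Prints every document of the given collection as raw JSON, mirroring the
  // debug logging added in this commit. "@@col" is an AQL collection bind
  // parameter; its value is supplied under the "@col" key of the bind-variable map.
  def printAllDocs(db: ArangoDatabase, collectionName: String): Unit = {
    val bindVars = Map[String, AnyRef]("@col" -> collectionName).asJava
    val docs = db.query("FOR d IN @@col RETURN d", classOf[RawJson], bindVars).asScala
    println(s"docs in $collectionName:")
    docs.foreach(d => println(d.get()))
  }
}

Guarding the dump behind the count check, as the commit does, keeps the test output quiet on the happy path and only prints the collection contents when the assertion is about to fail.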
