@@ -3,6 +3,7 @@ package org.apache.spark.sql.arangodb.datasource.write
 import com.arangodb.ArangoCollection
 import com.arangodb.entity.BaseDocument
 import com.arangodb.model.OverwriteMode
+import com.arangodb.util.RawJson
 import org.apache.spark.SparkException
 import org.apache.spark.sql.SaveMode
 import org.apache.spark.sql.arangodb.commons.ArangoDBConf
@@ -14,6 +15,8 @@ import org.junit.jupiter.api.BeforeEach
 import org.junit.jupiter.params.ParameterizedTest
 import org.junit.jupiter.params.provider.MethodSource
 
+import scala.jdk.CollectionConverters.{iterableAsScalaIterableConverter, mapAsJavaMapConverter}
+
 
 class OverwriteModeTest extends BaseSparkTest {
 
@@ -165,7 +168,15 @@ class OverwriteModeTest extends BaseSparkTest {
       ))
       .save()
 
-    assertThat(collection.count().getCount).isEqualTo(1L)
+    val count = collection.count().getCount
+    if (count > 1L) {
+      // debug test failure
+      val allDocs = db.query("FOR d in @@col RETURN d", classOf[RawJson],
+        Map("@col" -> collectionName).asInstanceOf[Map[String, AnyRef]].asJava).asScala
+      println("docs in collection:")
+      for (d <- allDocs) println(d.get())
+    }
+    assertThat(count).isEqualTo(1L)
     val c = collection.getDocument("Carlsen", classOf[BaseDocument])
     assertThat(c.getProperties.containsKey("name")).isTrue
     assertThat(c.getProperties.get("name")).isNull()
0 commit comments