@@ -43,6 +43,7 @@ import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_RESOURCE
 import org.elasticsearch.hadoop.mr.RestUtils
 import org.elasticsearch.hadoop.util.TestSettings
 import org.elasticsearch.hadoop.util.TestUtils
+import org.elasticsearch.spark._
 import org.elasticsearch.spark.rdd.EsSpark
 import org.elasticsearch.spark.rdd.Metadata.ID
 import org.elasticsearch.spark.rdd.Metadata.TTL
@@ -384,6 +385,21 @@ class AbstractScalaEsScalaSpark(prefix: String, readMetadata: jl.Boolean) extend
     println(RestUtils.getMapping(target))
   }

+
+  @Test
+  def testEsSparkVsScCount() {
+    val target = wrapIndex("spark-test/check-counting")
+    val rawCore = List(Map("colint" -> 1, "colstr" -> "s"),
+                       Map("colint" -> null, "colstr" -> null))
+    sc.parallelize(rawCore, 1).saveToEs(target)
+    val qjson =
+      """{"query":{"range":{"colint":{"from":null,"to":"9","include_lower":true,"include_upper":true}}}}"""
+
+    val esRDD = EsSpark.esRDD(sc, target, qjson)
+    val scRDD = sc.esRDD(target, qjson)
+    assertEquals(esRDD.collect().size, scRDD.collect().size)
+  }
+
   // @Test
   def testLoadJsonFile() {
     val target = "lost/id"