Skip to content

Mark test cases that require spark2 #951

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Aug 28, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import co.cask.cdap.test.ServiceManager;
import co.cask.cdap.test.WorkflowManager;
import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark;
import co.cask.common.http.HttpMethod;
import co.cask.common.http.HttpResponse;
import co.cask.common.http.ObjectResponse;
Expand Down Expand Up @@ -85,8 +86,7 @@ public void testJoinerMR() throws Exception {
}

@Category({
// Currently, coopr doesn't provision MapR cluster with Spark. Enable this test once COOK-108 is fixed
MapR5Incompatible.class // MapR5x category is used for all MapR version
RequiresSpark.class
})
@Test
public void testJoinerSpark() throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import co.cask.cdap.api.dataset.lib.FileSetProperties;
import co.cask.cdap.app.etl.ETLTestBase;
import co.cask.cdap.client.QueryClient;
import co.cask.cdap.client.config.ConnectionConfig;
import co.cask.cdap.common.UnauthenticatedException;
import co.cask.cdap.datapipeline.SmartWorkflow;
import co.cask.cdap.etl.api.action.Action;
Expand All @@ -29,15 +28,13 @@
import co.cask.cdap.etl.proto.v2.ETLStage;
import co.cask.cdap.examples.fileset.FileSetExample;
import co.cask.cdap.explore.client.ExploreExecutionResult;
import co.cask.cdap.proto.ConfigEntry;
import co.cask.cdap.proto.ProgramRunStatus;
import co.cask.cdap.proto.artifact.AppRequest;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.test.ApplicationManager;
import co.cask.cdap.test.ServiceManager;
import co.cask.cdap.test.WorkflowManager;
import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.SDKIncompatible;
import co.cask.common.http.HttpMethod;
import co.cask.common.http.HttpRequest;
Expand All @@ -47,18 +44,13 @@
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
import co.cask.cdap.test.ApplicationManager;
import co.cask.cdap.test.ServiceManager;
import co.cask.cdap.test.WorkflowManager;
import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark;
import co.cask.common.http.HttpMethod;
import co.cask.common.http.HttpResponse;
import co.cask.common.http.ObjectResponse;
Expand All @@ -64,15 +64,14 @@ public void testMR() throws Exception {
}

@Category({
// Currently, coopr doesn't provision MapR cluster with Spark. Enable this test once COOK-108 is fixed
MapR5Incompatible.class // MapR5x category is used for all MapR version
RequiresSpark.class
})
@Test
public void testSpark() throws Exception {
testStreamTPFSWithProjection(Engine.SPARK);
}

public void testStreamTPFSWithProjection(Engine engine) throws Exception {
private void testStreamTPFSWithProjection(Engine engine) throws Exception {
//1. create a source stream and send an event
StreamId sourceStreamId = TEST_NAMESPACE.stream(SOURCE_STREAM);
streamClient.create(sourceStreamId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
import co.cask.cdap.test.SparkManager;
import co.cask.cdap.test.suite.category.CDH54Incompatible;
import co.cask.cdap.test.suite.category.HDP22Incompatible;
import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark;
import co.cask.common.http.HttpMethod;
import co.cask.common.http.HttpRequest;
import co.cask.common.http.HttpResponse;
Expand All @@ -59,13 +59,11 @@
*/
public class DataStreamsTest extends ETLTestBase {

// DataStreams are based on Spark runtime, so marking incompatible for all Hadoop versions that don't support Spark
@Category({
RequiresSpark.class,
// (CDAP-10143) Mark HDP 2.2 and CDH 5.4 incompatible at least until we resolve this JIRA.
HDP22Incompatible.class,
CDH54Incompatible.class,
// Currently, coopr doesn't provision MapR cluster with Spark. Enable this test once COOK-108 is fixed
MapR5Incompatible.class // MapR5x category is used for all MapR version
})

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
import co.cask.cdap.test.ApplicationManager;
import co.cask.cdap.test.ServiceManager;
import co.cask.cdap.test.WorkflowManager;
import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark;
import co.cask.common.http.HttpMethod;
import co.cask.common.http.HttpResponse;
import co.cask.common.http.ObjectResponse;
Expand Down Expand Up @@ -82,8 +82,7 @@ public void testWranglerMR() throws Exception {
}

@Category({
// Currently, coopr doesn't provision MapR cluster with Spark. Enable this test once COOK-108 is fixed
MapR5Incompatible.class // MapR5x category is used for all MapR version
RequiresSpark.class,
})
@Test
public void testWranglerSpark() throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import co.cask.cdap.test.AudiTestBase;
import co.cask.cdap.test.ServiceManager;
import co.cask.cdap.test.SparkManager;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import co.cask.common.http.HttpRequest;
import co.cask.common.http.HttpResponse;
import com.google.common.collect.ImmutableList;
Expand All @@ -58,8 +59,7 @@
import com.google.gson.reflect.TypeToken;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.experimental.categories.Category;

import java.io.IOException;
import java.lang.reflect.Type;
Expand All @@ -71,8 +71,10 @@
import java.util.Map;
import java.util.concurrent.TimeUnit;

@Category({
RequiresSpark2.class,
})
public class ReportGenerationAppTest extends AudiTestBase {
private static final Logger LOG = LoggerFactory.getLogger(ReportGenerationAppTest.class);
private static final Gson GSON = new GsonBuilder()
.registerTypeAdapter(ReportContent.class, new ReportContentDeserializer())
.registerTypeAdapter(Filter.class, new FilterCodec())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,8 @@
import co.cask.cdap.test.ServiceManager;
import co.cask.cdap.test.SparkManager;
import co.cask.cdap.test.StreamManager;
import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import co.cask.common.http.HttpRequest;
import co.cask.common.http.HttpResponse;
import com.google.common.base.Joiner;
Expand All @@ -67,8 +68,7 @@
* Tests the functionality of {@link SparkPageRankApp}
*/
@Category({
// Currently, coopr doesn't provision MapR cluster with Spark. Enable this test once COOK-108 is fixed
MapR5Incompatible.class
RequiresSpark2.class
})
public class SparkPageRankAppTest extends AudiTestBase {
private static final Gson GSON = new GsonBuilder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@
import co.cask.cdap.apps.metadata.ProgramMetadataTest;
import co.cask.cdap.apps.metadata.PurchaseMetadataTest;
import co.cask.cdap.apps.purchase.PurchaseAudiTest;
import co.cask.cdap.apps.report.ReportGenerationAppTest;
import co.cask.cdap.apps.spark.sparkpagerank.SparkPageRankAppTest;
import co.cask.cdap.apps.transaction.TransactionTimeoutTest;
import co.cask.cdap.apps.wordcount.WordCountTest;
Expand Down Expand Up @@ -96,6 +97,7 @@
RemoteCubeTest.class,
RemoteKeyValueTest.class,
RemoteTableTest.class,
ReportGenerationAppTest.class,
RowDenormalizerTest.class,
ServiceWorkerTest.class,
SparkPageRankAppTest.class,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,20 +16,18 @@

package co.cask.cdap.test.suite;

import co.cask.cdap.apps.report.ReportGenerationAppTest;
import co.cask.cdap.test.suite.category.CMIncompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/**
* Junit suite for tests that should run on cluster with spark 2.
* Junit suite for tests that should run on Ambari-managed clusters.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(CMIncompatible.class)
@Categories.ExcludeCategory(RequiresSpark2.class)
@Suite.SuiteClasses({
ReportGenerationAppTest.class
AllTests.class
})
public class ReportingSpark2 {
public class AmbariSuite {
}

Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.CDH54Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,7 @@
* Junit suite for tests that should run on CDH5.3.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(CDH54Incompatible.class)
@Categories.ExcludeCategory({CDH54Incompatible.class, RequiresSpark2.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.CDH55Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,7 @@
* Junit suite for tests that should run on CDH5.5.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(CDH55Incompatible.class)
@Categories.ExcludeCategory({CDH55Incompatible.class, RequiresSpark2.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.CDH56Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,7 @@
* Junit suite for tests that should run on CDH5.6.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(CDH56Incompatible.class)
@Categories.ExcludeCategory({CDH56Incompatible.class, RequiresSpark2.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.CMIncompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +25,7 @@
* Junit suite for tests that should run on Cloudera Manager.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(CMIncompatible.class)
@Categories.ExcludeCategory(RequiresSpark2.class)
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.EMRIncompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,7 @@
* Junit suite for tests that should run on EMR.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(EMRIncompatible.class)
@Categories.ExcludeCategory({EMRIncompatible.class, RequiresSpark2.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.HDP22Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,7 @@
* Junit suite for tests that should run on HDP2.2.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(HDP22Incompatible.class)
@Categories.ExcludeCategory({HDP22Incompatible.class, RequiresSpark2.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.HDP23Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark2;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,7 @@
* Junit suite for tests that should run on HDP2.3.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(HDP23Incompatible.class)
@Categories.ExcludeCategory({HDP23Incompatible.class, RequiresSpark2.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package co.cask.cdap.test.suite;

import co.cask.cdap.test.suite.category.MapR5Incompatible;
import co.cask.cdap.test.suite.category.RequiresSpark;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -25,7 +26,8 @@
* Junit suite for tests that should run on MapR 5.x.
*/
@RunWith(Categories.class)
@Categories.ExcludeCategory(MapR5Incompatible.class)
// coopr doesn't provision MapR cluster with Spark. Remove exclusion of "RequiresSpark" once COOK-108 is fixed
@Categories.ExcludeCategory({MapR5Incompatible.class, RequiresSpark.class})
@Suite.SuiteClasses({
AllTests.class
})
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
/*
* Copyright © 2018 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package co.cask.cdap.test.suite.category;

import org.junit.experimental.categories.Category;

/**
 * JUnit {@link Category} marker for tests that require Spark to be available on the cluster.
 * Test suites targeting clusters without Spark should exclude this category via
 * {@code @Categories.ExcludeCategory(RequiresSpark.class)}.
 */
public interface RequiresSpark {
}
Loading