Skip to content

Accept property 'ai.project.group' for KabootarJob and updated Travis CI to use openjdk8 to avoid a build failure. #253

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Aug 30, 2019
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
language: java
jdk:
- oraclejdk8
- openjdk8
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
cache:
Expand Down
5 changes: 4 additions & 1 deletion CONTRIBUTORS.md
Original file line number Diff line number Diff line change
Expand Up @@ -171,4 +171,7 @@ The following were contributed by Abhishek Nimesh. Thanks, Abhishek!
* `Add ability for Hadoop DSL to understand the KabootarJob job type`

The following were contributed by Zhixiong Chen.
* `Sort config`
* `Sort config`

The following were contributed by Sayali Kadam.
* `Accept property 'ai.project.group' for KabootarJob`
3 changes: 3 additions & 0 deletions VERSIONS.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ the License.
Note that the LinkedIn build system occasionally requires that we skip a
version bump, so you will see a few skipped version numbers in the list below.

0.15.10
* Accept property "ai.project.group" for KabootarJob.

0.15.9
* Sort config.

Expand Down
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
org.gradle.daemon=true
version=0.15.9
version=0.15.10
1 change: 1 addition & 0 deletions hadoop-plugin-test/expectedJobs/jobs1/jobs1_job27.job
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# This file generated from the Hadoop DSL. Do not edit by hand.
type=KabootarJob
dependencies=jobs1_job26
ai.project.group=AIFoundationOther
framework=PHOTON_CONNECT
initial.import=initial/import/File
model.supplementary.data.path=/user/testmodelregsvc/data
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,6 @@ RequiredFieldsChecker ERROR: TableauJob job19 must set workbookName
RequiredFieldsChecker ERROR: HdfsWaitJob job20 must set dirPath, freshness, timeout, and forceJobToFail
RequiredFieldsChecker ERROR: VenicePushJob job23 must set avroKeyField, avroValueField, clusterName, inputPath, veniceStoreName
RequiredFieldsChecker ERROR: WormholePushJob job26 must set inputPath, namespace and dataset
RequiredFieldsChecker ERROR: KabootarJob job27 must set trainingModelLocation, trainingName, wormholeNamespace, initialImport
RequiredFieldsChecker ERROR: KabootarJob job27 must set trainingModelLocation, trainingName, aiProjectGroup, wormholeNamespace, initialImport. Please see the job documentation for more details.
RequiredFieldsChecker WARNING: Properties properties1 does not set any confProperties, jobProperties, jvmProperties or basePropertySetName. Nothing will be built for this properties object.
:hadoop-plugin-test:test_missingFields FAILED
3 changes: 2 additions & 1 deletion hadoop-plugin-test/src/main/gradle/positive/jobs1.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -446,8 +446,9 @@ hadoop {
}

kabootarJob('job27') {
usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
usesTrainingName 'AyeAyeCaptain' // Required
usesAiProjectGroup 'AIFoundationOther' // Required
usesWormholeNamespace 'testmodelregsvc' // Required
usesInitialImport 'initial/import/File' // Required
usesTrainingID '53fe1ff5-4439-4288-be43-11cb11629552' // Optional
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -248,13 +248,15 @@ class RequiredFieldsChecker extends BaseStaticChecker {
/**
 * Validates that the given KabootarJob declares every required field.
 * <p>
 * Logs a RequiredFieldsChecker ERROR and sets foundError when any of
 * trainedModelLocation, trainingName, aiProjectGroup, wormholeNamespace or
 * initialImport is null or empty.
 *
 * @param job The KabootarJob to check
 */
void visitJob(KabootarJob job) {
  boolean emptyTrainedModelLocation = job.trainedModelLocation == null || job.trainedModelLocation.isEmpty();
  boolean emptyTrainingName = job.trainingName == null || job.trainingName.isEmpty();
  boolean emptyAiProjectGroup = job.aiProjectGroup == null || job.aiProjectGroup.isEmpty();
  boolean emptyWormholeNamespace = job.wormholeNamespace == null || job.wormholeNamespace.isEmpty();
  boolean emptyInitialImport = job.initialImport == null || job.initialImport.isEmpty();

  if (emptyTrainedModelLocation || emptyTrainingName || emptyAiProjectGroup || emptyWormholeNamespace || emptyInitialImport) {
    // NOTE(review): the message says "trainingModelLocation" but the field is named
    // "trainedModelLocation" (set via usesTrainedModelLocation). Left unchanged because
    // the expectedOutput test fixture pins this exact string — fix both together.
    project.logger.lifecycle(
        "RequiredFieldsChecker ERROR: KabootarJob ${job.name} must set trainingModelLocation, " +
        "trainingName, aiProjectGroup, wormholeNamespace, initialImport. " +
        "Please see the job documentation for more details.");
    foundError = true;
  }
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,22 +26,21 @@ import com.linkedin.gradle.hadoopdsl.HadoopDslMethod;
* illustrate the DSL. Please check that these values are appropriate for your application. In the
* DSL, a KabootarJob can be specified with:
* <pre>
 * KabootarJob('jobName') {
 *   usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
* usesTrainingName 'AyeAyeCaptain' // Required
* usesAiProjectGroup 'AIFoundationOther' // Required
* usesWormholeNamespace 'testmodelregsvc' // Required
* usesInitialImport 'initial/import/File' // Required
* usesTrainingID '53fe1ff5-4439-4288-be43-11cb11629552' // Optional
* usesOrigin 'FELLOWSHIP' // Optional
* usesFramework 'PHOTON_CONNECT' // Optional
* usesModelSupplementaryDataLocation '/user/testmodelregsvc/trained-models-supplementary-data' // Optional
 * }
 * </pre>
 */
class KabootarJob extends HadoopJavaJob {
// Required
String trainedModelLocation;
String trainingName;
String aiProjectGroup;
String wormholeNamespace;
String initialImport;

Expand Down Expand Up @@ -80,13 +79,14 @@ class KabootarJob extends HadoopJavaJob {
/**
 * Helper method to set the properties on a cloned KabootarJob.
 * Copies every KabootarJob-specific field onto the given clone, then delegates
 * the remaining (inherited) state to the superclass clone.
 *
 * @param cloneJob The job being cloned
 * @return The cloned job
 */
KabootarJob clone(KabootarJob cloneJob) {
  cloneJob.trainedModelLocation = trainedModelLocation;
  cloneJob.trainingName = trainingName;
  // BUG FIX: was "cloneJob.ai.project.group = aiProjectGroup", which dereferences a
  // nonexistent "ai" property (NPE / wrong assignment in Groovy) instead of copying
  // the aiProjectGroup field onto the clone.
  cloneJob.aiProjectGroup = aiProjectGroup;
  cloneJob.wormholeNamespace = wormholeNamespace;
  cloneJob.initialImport = initialImport;
  cloneJob.trainingID = trainingID;
  cloneJob.origin = origin;
  cloneJob.framework = framework;
  cloneJob.modelSupplementaryDataLocation = modelSupplementaryDataLocation;
  return ((KabootarJob) super.clone(cloneJob));
}

/**
Expand All @@ -113,6 +113,19 @@ class KabootarJob extends HadoopJavaJob {
setJobProperty("training.name", trainingName);
}

/**
 * DSL usesAiProjectGroup method causes ai.project.group=value to be set in the job file.
 *
 * @param aiProjectGroup The project group at LinkedIn; groups are used to connect
 *        related AI projects together and currently correspond to AI verticals
 */
@HadoopDslMethod
void usesAiProjectGroup(String aiProjectGroup) {
  this.aiProjectGroup = aiProjectGroup
  setJobProperty("ai.project.group", this.aiProjectGroup)
}

/**
* DSL usesWormholeNamespace method causes wormhole.namespace=value to be set in the job file.
*
Expand All @@ -137,7 +150,6 @@ class KabootarJob extends HadoopJavaJob {
setJobProperty("initial.import", initialImport);
}


/**
* DSL usesTrainingID method causes training.id=value to be set in the job file.
*
Expand Down