Skip to content

Commit a74a940

Browse files
saikadamabti
authored and committed
Accept property 'ai.project.group' for KabootarJob and Updated travis CI to use openjdk8 for avoiding build failure. (#253)
* Added support for KabootarJob job type to package trained models and make them available for Pro-ML model deployment pipeline. * Added ";" back which was removed by IDE. * Merged with upstream. * Accept property 'ai.project.group' for KabootarJob * Upgrading version in gradle.properties. * Updating JDK to openjdk8. * Updating the error message.
1 parent 3355ede commit a74a940

File tree

9 files changed

+36
-14
lines changed

9 files changed

+36
-14
lines changed

.travis.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
language: java
22
jdk:
3-
- oraclejdk8
3+
- openjdk8
44
before_cache:
55
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
66
cache:

CONTRIBUTORS.md

+4-1
Original file line numberDiff line numberDiff line change
@@ -171,4 +171,7 @@ The following were contributed by Abhishek Nimesh. Thanks, Abhishek!
171171
* `Add ability for Hadoop DSL to understand the KabootarJob job type`
172172

173173
The following were contributed by Zhixiong Chen.
174-
* `Sort config`
174+
* `Sort config`
175+
176+
The following were contributed by Sayali Kadam.
177+
* `Accept property 'ai.project.group' for KabootarJob`

VERSIONS.md

+3
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,9 @@ the License.
1717
Note that the LinkedIn build system occasionally requires that we skip a
1818
version bump, so you will see a few skipped version numbers in the list below.
1919

20+
0.15.10
21+
* Accept property "ai.project.group" for KabootarJob.
22+
2023
0.15.9
2124
* Sort config.
2225

gradle.properties

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
org.gradle.daemon=true
2-
version=0.15.9
2+
version=0.15.10

hadoop-plugin-test/expectedJobs/jobs1/jobs1_job27.job

+1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# This file generated from the Hadoop DSL. Do not edit by hand.
22
type=KabootarJob
33
dependencies=jobs1_job26
4+
ai.project.group=AIFoundationOther
45
framework=PHOTON_CONNECT
56
initial.import=initial/import/File
67
model.supplementary.data.path=/user/testmodelregsvc/data

hadoop-plugin-test/expectedOutput/negative/missingFields.out

+1-1
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,6 @@ RequiredFieldsChecker ERROR: TableauJob job19 must set workbookName
4545
RequiredFieldsChecker ERROR: HdfsWaitJob job20 must set dirPath, freshness, timeout, and forceJobToFail
4646
RequiredFieldsChecker ERROR: VenicePushJob job23 must set avroKeyField, avroValueField, clusterName, inputPath, veniceStoreName
4747
RequiredFieldsChecker ERROR: WormholePushJob job26 must set inputPath, namespace and dataset
48-
RequiredFieldsChecker ERROR: KabootarJob job27 must set trainingModelLocation, trainingName, wormholeNamespace, initialImport
48+
RequiredFieldsChecker ERROR: KabootarJob job27 must set trainingModelLocation, trainingName, aiProjectGroup, wormholeNamespace, initialImport. Please see the job documentation for more details.
4949
RequiredFieldsChecker WARNING: Properties properties1 does not set any confProperties, jobProperties, jvmProperties or basePropertySetName. Nothing will be built for this properties object.
5050
:hadoop-plugin-test:test_missingFields FAILED

hadoop-plugin-test/src/main/gradle/positive/jobs1.gradle

+2-1
Original file line numberDiff line numberDiff line change
@@ -446,8 +446,9 @@ hadoop {
446446
}
447447

448448
kabootarJob('job27') {
449-
usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
449+
usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
450450
usesTrainingName 'AyeAyeCaptain' // Required
451+
usesAiProjectGroup 'AIFoundationOther' // Required
451452
usesWormholeNamespace 'testmodelregsvc' // Required
452453
usesInitialImport 'initial/import/File' // Required
453454
usesTrainingID '53fe1ff5-4439-4288-be43-11cb11629552' // Optional

hadoop-plugin/src/main/groovy/com/linkedin/gradle/hadoopdsl/checker/RequiredFieldsChecker.groovy

+4-2
Original file line numberDiff line numberDiff line change
@@ -248,13 +248,15 @@ class RequiredFieldsChecker extends BaseStaticChecker {
248248
void visitJob(KabootarJob job) {
249249
boolean emptyTrainedModelLocation = job.trainedModelLocation == null || job.trainedModelLocation.isEmpty();
250250
boolean emptyTrainingName = job.trainingName == null || job.trainingName.isEmpty();
251+
boolean emptyAiProjectGroup = job.aiProjectGroup == null || job.aiProjectGroup.isEmpty();
251252
boolean emptyWormholeNamespace = job.wormholeNamespace == null || job.wormholeNamespace.isEmpty();
252253
boolean emptyInitialImport = job.initialImport == null || job.initialImport.isEmpty();
253254

254-
if (emptyTrainedModelLocation || emptyTrainingName || emptyWormholeNamespace || emptyInitialImport) {
255+
if (emptyTrainedModelLocation || emptyTrainingName || emptyAiProjectGroup || emptyWormholeNamespace || emptyInitialImport) {
255256
project.logger.lifecycle(
256257
"RequiredFieldsChecker ERROR: KabootarJob ${job.name} must set trainingModelLocation, " +
257-
"trainingName, wormholeNamespace, initialImport");
258+
"trainingName, aiProjectGroup, wormholeNamespace, initialImport. " +
259+
"Please see the job documentation for more details.");
258260
foundError = true;
259261
}
260262
}

hadoop-plugin/src/main/groovy/com/linkedin/gradle/hadoopdsl/job/KabootarJob.groovy

+19-7
Original file line numberDiff line numberDiff line change
@@ -26,22 +26,21 @@ import com.linkedin.gradle.hadoopdsl.HadoopDslMethod;
2626
* illustrate the DSL. Please check that these values are appropriate for your application. In the
2727
* DSL, a KabootarJob can be specified with:
2828
* <pre>
29-
* KabootarJob('jobName') {
30-
* usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
29+
* KabootarJob('jobName') {
*   usesTrainedModelLocation '/user/testmodelregsvc/trained-models' // Required
3130
* usesTrainingName 'AyeAyeCaptain' // Required
31+
* usesAiProjectGroup 'AIFoundationOther' // Required
3232
* usesWormholeNamespace 'testmodelregsvc' // Required
3333
* usesInitialImport 'initial/import/File' // Required
3434
* usesTrainingID '53fe1ff5-4439-4288-be43-11cb11629552' // Optional
3535
* usesOrigin 'FELLOWSHIP' // Optional
3636
* usesFramework 'PHOTON_CONNECT' // Optional
3737
* usesModelSupplementaryDataLocation '/user/testmodelregsvc/trained-models-supplementary-data' // Optional
38-
* }
39-
* </pre>
40-
*/
38+
* }
* </pre>
*/
4139
class KabootarJob extends HadoopJavaJob {
4240
// Required
4341
String trainedModelLocation;
4442
String trainingName;
43+
String aiProjectGroup;
4544
String wormholeNamespace;
4645
String initialImport;
4746

@@ -80,13 +79,14 @@ class KabootarJob extends HadoopJavaJob {
8079
KabootarJob clone(KabootarJob cloneJob) {
8180
cloneJob.trainedModelLocation = trainedModelLocation;
8281
cloneJob.trainingName = trainingName;
82+
cloneJob.aiProjectGroup = aiProjectGroup;
8383
cloneJob.wormholeNamespace = wormholeNamespace;
8484
cloneJob.initialImport = initialImport;
8585
cloneJob.trainingID = trainingID;
8686
cloneJob.origin = origin;
8787
cloneJob.framework = framework;
8888
cloneJob.modelSupplementaryDataLocation = modelSupplementaryDataLocation;
89-
return ((KabootarJob)super.clone(cloneJob));
89+
return ((KabootarJob) super.clone(cloneJob));
9090
}
9191

9292
/**
@@ -113,6 +113,19 @@ class KabootarJob extends HadoopJavaJob {
113113
setJobProperty("training.name", trainingName);
114114
}
115115

116+
/**
117+
* DSL usesAiProjectGroup method causes ai.project.group=value to be set in the job file.
118+
*
119+
* @param aiProjectGroup - This contains the project group at LinkedIn.
120+
* The groups are used to connect related AI projects together.
121+
* Currently, these project groups correspond to AI verticals.
122+
*/
123+
@HadoopDslMethod
124+
void usesAiProjectGroup(String aiProjectGroup) {
125+
this.aiProjectGroup = aiProjectGroup;
126+
setJobProperty("ai.project.group", aiProjectGroup);
127+
}
128+
116129
/**
117130
* DSL usesWormholeNamespace method causes wormhole.namespace=value to be set in the job file.
118131
*
@@ -137,7 +150,6 @@ class KabootarJob extends HadoopJavaJob {
137150
setJobProperty("initial.import", initialImport);
138151
}
139152

140-
141153
/**
142154
* DSL usesTrainingID method causes training.id=value to be set in the job file.
143155
*

0 commit comments

Comments
 (0)