Skip to content

Commit 9ae9a96

Browse files
authored
samples: update person detection to GA (#344)
* feat: update person detection to GA
* fix: missing dependency and changed sample package name
* fix: changed package name for tests
1 parent 888166d commit 9ae9a96

16 files changed

+217
-194
lines changed

video/src/main/java/com/example/video/Detect.java renamed to video/src/main/java/video/Detect.java

+84-75
Large diffs are not rendered by default.

video/src/main/java/beta/video/DetectPerson.java renamed to video/src/main/java/video/DetectPerson.java

+17-17
Original file line numberDiff line numberDiff line change
@@ -14,25 +14,25 @@
1414
* limitations under the License.
1515
*/
1616

17-
package beta.video;
17+
package video;
1818

19-
// [START video_detect_person_beta]
19+
// [START video_detect_person]
2020

2121
import com.google.api.gax.longrunning.OperationFuture;
22-
import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoProgress;
23-
import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoRequest;
24-
import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse;
25-
import com.google.cloud.videointelligence.v1p3beta1.DetectedAttribute;
26-
import com.google.cloud.videointelligence.v1p3beta1.DetectedLandmark;
27-
import com.google.cloud.videointelligence.v1p3beta1.Feature;
28-
import com.google.cloud.videointelligence.v1p3beta1.PersonDetectionAnnotation;
29-
import com.google.cloud.videointelligence.v1p3beta1.PersonDetectionConfig;
30-
import com.google.cloud.videointelligence.v1p3beta1.TimestampedObject;
31-
import com.google.cloud.videointelligence.v1p3beta1.Track;
32-
import com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults;
33-
import com.google.cloud.videointelligence.v1p3beta1.VideoContext;
34-
import com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceClient;
35-
import com.google.cloud.videointelligence.v1p3beta1.VideoSegment;
22+
import com.google.cloud.videointelligence.v1.AnnotateVideoProgress;
23+
import com.google.cloud.videointelligence.v1.AnnotateVideoRequest;
24+
import com.google.cloud.videointelligence.v1.AnnotateVideoResponse;
25+
import com.google.cloud.videointelligence.v1.DetectedAttribute;
26+
import com.google.cloud.videointelligence.v1.DetectedLandmark;
27+
import com.google.cloud.videointelligence.v1.Feature;
28+
import com.google.cloud.videointelligence.v1.PersonDetectionAnnotation;
29+
import com.google.cloud.videointelligence.v1.PersonDetectionConfig;
30+
import com.google.cloud.videointelligence.v1.TimestampedObject;
31+
import com.google.cloud.videointelligence.v1.Track;
32+
import com.google.cloud.videointelligence.v1.VideoAnnotationResults;
33+
import com.google.cloud.videointelligence.v1.VideoContext;
34+
import com.google.cloud.videointelligence.v1.VideoIntelligenceServiceClient;
35+
import com.google.cloud.videointelligence.v1.VideoSegment;
3636
import com.google.protobuf.ByteString;
3737
import java.nio.file.Files;
3838
import java.nio.file.Path;
@@ -118,4 +118,4 @@ public static void detectPerson(String localFilePath) throws Exception {
118118
}
119119
}
120120
}
121-
// [END video_detect_person_beta]
121+
// [END video_detect_person]

video/src/main/java/beta/video/DetectPersonGcs.java renamed to video/src/main/java/video/DetectPersonGcs.java

+17-17
Original file line numberDiff line numberDiff line change
@@ -14,25 +14,25 @@
1414
* limitations under the License.
1515
*/
1616

17-
package beta.video;
17+
package video;
1818

19-
// [START video_detect_person_gcs_beta]
19+
// [START video_detect_person_gcs]
2020

2121
import com.google.api.gax.longrunning.OperationFuture;
22-
import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoProgress;
23-
import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoRequest;
24-
import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse;
25-
import com.google.cloud.videointelligence.v1p3beta1.DetectedAttribute;
26-
import com.google.cloud.videointelligence.v1p3beta1.DetectedLandmark;
27-
import com.google.cloud.videointelligence.v1p3beta1.Feature;
28-
import com.google.cloud.videointelligence.v1p3beta1.PersonDetectionAnnotation;
29-
import com.google.cloud.videointelligence.v1p3beta1.PersonDetectionConfig;
30-
import com.google.cloud.videointelligence.v1p3beta1.TimestampedObject;
31-
import com.google.cloud.videointelligence.v1p3beta1.Track;
32-
import com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults;
33-
import com.google.cloud.videointelligence.v1p3beta1.VideoContext;
34-
import com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceClient;
35-
import com.google.cloud.videointelligence.v1p3beta1.VideoSegment;
22+
import com.google.cloud.videointelligence.v1.AnnotateVideoProgress;
23+
import com.google.cloud.videointelligence.v1.AnnotateVideoRequest;
24+
import com.google.cloud.videointelligence.v1.AnnotateVideoResponse;
25+
import com.google.cloud.videointelligence.v1.DetectedAttribute;
26+
import com.google.cloud.videointelligence.v1.DetectedLandmark;
27+
import com.google.cloud.videointelligence.v1.Feature;
28+
import com.google.cloud.videointelligence.v1.PersonDetectionAnnotation;
29+
import com.google.cloud.videointelligence.v1.PersonDetectionConfig;
30+
import com.google.cloud.videointelligence.v1.TimestampedObject;
31+
import com.google.cloud.videointelligence.v1.Track;
32+
import com.google.cloud.videointelligence.v1.VideoAnnotationResults;
33+
import com.google.cloud.videointelligence.v1.VideoContext;
34+
import com.google.cloud.videointelligence.v1.VideoIntelligenceServiceClient;
35+
import com.google.cloud.videointelligence.v1.VideoSegment;
3636

3737
public class DetectPersonGcs {
3838

@@ -110,4 +110,4 @@ public static void detectPersonGcs(String gcsUri) throws Exception {
110110
}
111111
}
112112
}
113-
// [END video_detect_person_gcs_beta]
113+
// [END video_detect_person_gcs]

video/src/main/java/com/example/video/LogoDetection.java renamed to video/src/main/java/video/LogoDetection.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
* limitations under the License.
1515
*/
1616

17-
package com.example.video;
17+
package video;
1818

1919
// [START video_detect_logo]
2020

video/src/main/java/com/example/video/LogoDetectionGcs.java renamed to video/src/main/java/video/LogoDetectionGcs.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
* limitations under the License.
1515
*/
1616

17-
package com.example.video;
17+
package video;
1818

1919
// [START video_detect_logo_gcs]
2020

video/src/main/java/com/example/video/QuickstartSample.java renamed to video/src/main/java/video/QuickstartSample.java

+15-14
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
* limitations under the License.
1515
*/
1616

17-
package com.example.video;
17+
package video;
1818

1919
// [START video_quickstart]
2020

@@ -32,20 +32,19 @@
3232

3333
public class QuickstartSample {
3434

35-
/**
36-
* Demonstrates using the video intelligence client to detect labels in a video file.
37-
*/
35+
/** Demonstrates using the video intelligence client to detect labels in a video file. */
3836
public static void main(String[] args) throws Exception {
3937
// Instantiate a video intelligence client
4038
try (VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.create()) {
4139
// The Google Cloud Storage path to the video to annotate.
4240
String gcsUri = "gs://cloud-samples-data/video/cat.mp4";
4341

4442
// Create an operation that will contain the response when the operation completes.
45-
AnnotateVideoRequest request = AnnotateVideoRequest.newBuilder()
46-
.setInputUri(gcsUri)
47-
.addFeatures(Feature.LABEL_DETECTION)
48-
.build();
43+
AnnotateVideoRequest request =
44+
AnnotateVideoRequest.newBuilder()
45+
.setInputUri(gcsUri)
46+
.addFeatures(Feature.LABEL_DETECTION)
47+
.build();
4948

5049
OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> response =
5150
client.annotateVideoAsync(request);
@@ -61,18 +60,20 @@ public static void main(String[] args) throws Exception {
6160
System.out.println("Labels:");
6261
// get video segment label annotations
6362
for (LabelAnnotation annotation : result.getSegmentLabelAnnotationsList()) {
64-
System.out
65-
.println("Video label description : " + annotation.getEntity().getDescription());
63+
System.out.println(
64+
"Video label description : " + annotation.getEntity().getDescription());
6665
// categories
6766
for (Entity categoryEntity : annotation.getCategoryEntitiesList()) {
6867
System.out.println("Label Category description : " + categoryEntity.getDescription());
6968
}
7069
// segments
7170
for (LabelSegment segment : annotation.getSegmentsList()) {
72-
double startTime = segment.getSegment().getStartTimeOffset().getSeconds()
73-
+ segment.getSegment().getStartTimeOffset().getNanos() / 1e9;
74-
double endTime = segment.getSegment().getEndTimeOffset().getSeconds()
75-
+ segment.getSegment().getEndTimeOffset().getNanos() / 1e9;
71+
double startTime =
72+
segment.getSegment().getStartTimeOffset().getSeconds()
73+
+ segment.getSegment().getStartTimeOffset().getNanos() / 1e9;
74+
double endTime =
75+
segment.getSegment().getEndTimeOffset().getSeconds()
76+
+ segment.getSegment().getEndTimeOffset().getNanos() / 1e9;
7677
System.out.printf("Segment location : %.3f:%.3f\n", startTime, endTime);
7778
System.out.println("Confidence : " + segment.getConfidence());
7879
}

video/src/main/java/com/example/video/TextDetection.java renamed to video/src/main/java/video/TextDetection.java

+39-29
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
* limitations under the License.
1515
*/
1616

17-
package com.example.video;
17+
package video;
1818

1919
import com.google.api.gax.longrunning.OperationFuture;
2020
import com.google.cloud.videointelligence.v1.AnnotateVideoProgress;
@@ -51,10 +51,11 @@ public static VideoAnnotationResults detectText(String filePath) throws Exceptio
5151
byte[] data = Files.readAllBytes(path);
5252

5353
// Create the request
54-
AnnotateVideoRequest request = AnnotateVideoRequest.newBuilder()
55-
.setInputContent(ByteString.copyFrom(data))
56-
.addFeatures(Feature.TEXT_DETECTION)
57-
.build();
54+
AnnotateVideoRequest request =
55+
AnnotateVideoRequest.newBuilder()
56+
.setInputContent(ByteString.copyFrom(data))
57+
.addFeatures(Feature.TEXT_DETECTION)
58+
.build();
5859

5960
// asynchronously perform object tracking on videos
6061
OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> future =
@@ -77,25 +78,29 @@ public static VideoAnnotationResults detectText(String filePath) throws Exceptio
7778
Duration startTimeOffset = videoSegment.getStartTimeOffset();
7879
Duration endTimeOffset = videoSegment.getEndTimeOffset();
7980
// Display the offset times in seconds, 1e9 is part of the formula to convert nanos to seconds
80-
System.out.println(String.format("Start time: %.2f",
81-
startTimeOffset.getSeconds() + startTimeOffset.getNanos() / 1e9));
82-
System.out.println(String.format("End time: %.2f",
83-
endTimeOffset.getSeconds() + endTimeOffset.getNanos() / 1e9));
81+
System.out.println(
82+
String.format(
83+
"Start time: %.2f", startTimeOffset.getSeconds() + startTimeOffset.getNanos() / 1e9));
84+
System.out.println(
85+
String.format(
86+
"End time: %.2f", endTimeOffset.getSeconds() + endTimeOffset.getNanos() / 1e9));
8487

8588
// Show the first result for the first frame in the segment.
8689
TextFrame textFrame = textSegment.getFrames(0);
8790
Duration timeOffset = textFrame.getTimeOffset();
88-
System.out.println(String.format("Time offset for the first frame: %.2f",
89-
timeOffset.getSeconds() + timeOffset.getNanos() / 1e9));
91+
System.out.println(
92+
String.format(
93+
"Time offset for the first frame: %.2f",
94+
timeOffset.getSeconds() + timeOffset.getNanos() / 1e9));
9095

9196
// Display the rotated bounding box for where the text is on the frame.
9297
System.out.println("Rotated Bounding Box Vertices:");
9398
List<NormalizedVertex> vertices = textFrame.getRotatedBoundingBox().getVerticesList();
9499
for (NormalizedVertex normalizedVertex : vertices) {
95-
System.out.println(String.format(
96-
"\tVertex.x: %.2f, Vertex.y: %.2f",
97-
normalizedVertex.getX(),
98-
normalizedVertex.getY()));
100+
System.out.println(
101+
String.format(
102+
"\tVertex.x: %.2f, Vertex.y: %.2f",
103+
normalizedVertex.getX(), normalizedVertex.getY()));
99104
}
100105
return results;
101106
}
@@ -111,10 +116,11 @@ public static VideoAnnotationResults detectText(String filePath) throws Exceptio
111116
public static VideoAnnotationResults detectTextGcs(String gcsUri) throws Exception {
112117
try (VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.create()) {
113118
// Create the request
114-
AnnotateVideoRequest request = AnnotateVideoRequest.newBuilder()
115-
.setInputUri(gcsUri)
116-
.addFeatures(Feature.TEXT_DETECTION)
117-
.build();
119+
AnnotateVideoRequest request =
120+
AnnotateVideoRequest.newBuilder()
121+
.setInputUri(gcsUri)
122+
.addFeatures(Feature.TEXT_DETECTION)
123+
.build();
118124

119125
// asynchronously perform object tracking on videos
120126
OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> future =
@@ -137,25 +143,29 @@ public static VideoAnnotationResults detectTextGcs(String gcsUri) throws Excepti
137143
Duration startTimeOffset = videoSegment.getStartTimeOffset();
138144
Duration endTimeOffset = videoSegment.getEndTimeOffset();
139145
// Display the offset times in seconds, 1e9 is part of the formula to convert nanos to seconds
140-
System.out.println(String.format("Start time: %.2f",
141-
startTimeOffset.getSeconds() + startTimeOffset.getNanos() / 1e9));
142-
System.out.println(String.format("End time: %.2f",
143-
endTimeOffset.getSeconds() + endTimeOffset.getNanos() / 1e9));
146+
System.out.println(
147+
String.format(
148+
"Start time: %.2f", startTimeOffset.getSeconds() + startTimeOffset.getNanos() / 1e9));
149+
System.out.println(
150+
String.format(
151+
"End time: %.2f", endTimeOffset.getSeconds() + endTimeOffset.getNanos() / 1e9));
144152

145153
// Show the first result for the first frame in the segment.
146154
TextFrame textFrame = textSegment.getFrames(0);
147155
Duration timeOffset = textFrame.getTimeOffset();
148-
System.out.println(String.format("Time offset for the first frame: %.2f",
149-
timeOffset.getSeconds() + timeOffset.getNanos() / 1e9));
156+
System.out.println(
157+
String.format(
158+
"Time offset for the first frame: %.2f",
159+
timeOffset.getSeconds() + timeOffset.getNanos() / 1e9));
150160

151161
// Display the rotated bounding box for where the text is on the frame.
152162
System.out.println("Rotated Bounding Box Vertices:");
153163
List<NormalizedVertex> vertices = textFrame.getRotatedBoundingBox().getVerticesList();
154164
for (NormalizedVertex normalizedVertex : vertices) {
155-
System.out.println(String.format(
156-
"\tVertex.x: %.2f, Vertex.y: %.2f",
157-
normalizedVertex.getX(),
158-
normalizedVertex.getY()));
165+
System.out.println(
166+
String.format(
167+
"\tVertex.x: %.2f, Vertex.y: %.2f",
168+
normalizedVertex.getX(), normalizedVertex.getY()));
159169
}
160170
return results;
161171
}

0 commit comments

Comments (0)