pull type def #7

Merged: 10 commits, Dec 30, 2020
24 changes: 1 addition & 23 deletions ndarray/src/main/java/org/tensorflow/ndarray/NdArray.java
@@ -55,29 +55,7 @@
*
* @param <T> the type of values to be mapped
*/
public interface NdArray<T> {

/**
* @return the shape of this N-dimensional array
*/
Shape shape();

/**
* @return the rank of this N-dimensional array
*/
default int rank() {
return shape().numDimensions();
}

/**
* Computes and returns the total size of this N-dimensional array, in number of values.
*
* <p>For example, given a 3x3x2 matrix, the return value will be 18.
* @return total size of this nd array
*/
default long size() {
return shape().size();
}
public interface NdArray<T> extends Shaped {

/**
* Returns a sequence of all elements at a given dimension.
8 changes: 4 additions & 4 deletions ndarray/src/main/java/org/tensorflow/ndarray/NdArrays.java
@@ -65,7 +65,7 @@ public static ByteNdArray vectorOf(byte... values) {
if (values == null) {
throw new IllegalArgumentException("Values cannot be null");
}
return wrap(DataBuffers.of(values, false, false), Shape.of(values.length));
return wrap(Shape.of(values.length), DataBuffers.of(values, false, false));
}

/**
@@ -81,19 +81,19 @@ public static ByteNdArray ofBytes(Shape shape) {
if (shape == null) {
throw new IllegalArgumentException("Shape cannot be null");
}
return wrap(DataBuffers.ofBytes(shape.size()), shape);
return wrap(shape, DataBuffers.ofBytes(shape.size()));
}

/**
* Wraps a buffer in a byte N-dimensional array of a given shape.
*
* @param buffer buffer to wrap
* @param shape shape of the array
* @param buffer buffer to wrap
* @return new byte N-dimensional array
* @throws IllegalArgumentException if shape is null, has unknown dimensions or has size bigger
* than the buffer size
*/
public static ByteNdArray wrap(ByteDataBuffer buffer, Shape shape) {
public static ByteNdArray wrap(Shape shape, ByteDataBuffer buffer) {
return ByteDenseNdArray.create(buffer, shape);
}
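A quick usage sketch of the reordered factory, with the shape now passed before the buffer it describes (the 2x3 dimensions below are arbitrary):

import org.tensorflow.ndarray.ByteNdArray;
import org.tensorflow.ndarray.NdArrays;
import org.tensorflow.ndarray.Shape;
import org.tensorflow.ndarray.buffer.ByteDataBuffer;
import org.tensorflow.ndarray.buffer.DataBuffers;

public class WrapExample {
  public static void main(String[] args) {
    Shape shape = Shape.of(2, 3);
    ByteDataBuffer buffer = DataBuffers.ofBytes(shape.size());

    // New argument order: shape first, then the buffer to wrap
    ByteNdArray array = NdArrays.wrap(shape, buffer);
    System.out.println(array.size()); // 6
  }
}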

51 changes: 51 additions & 0 deletions ndarray/src/main/java/org/tensorflow/ndarray/Shaped.java
@@ -0,0 +1,51 @@
/*
Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================
*/
package org.tensorflow.ndarray;

import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.tensorflow.ndarray.buffer.DataBuffer;
import org.tensorflow.ndarray.index.Index;

/**
* Any data container with a given {@link Shape}.
*/
public interface Shaped {

/**
* @return the shape of this container
*/
Shape shape();

/**
* @return the rank of this container
*/
default int rank() {
return shape().numDimensions();
}

/**
* Computes and returns the total size of this container, in number of values.
*
* <p>For example, given a 3x3x2 matrix, the return value will be 18.
*
* @return total number of values in this container
*/
default long size() {
return shape().size();
}
}
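A short sketch of how the extracted interface is consumed: every NdArray now inherits shape(), rank() and size() from Shaped, so code that only needs those queries can accept the narrower type (the values below are illustrative):

import org.tensorflow.ndarray.IntNdArray;
import org.tensorflow.ndarray.NdArrays;
import org.tensorflow.ndarray.Shape;
import org.tensorflow.ndarray.Shaped;

public class ShapedExample {

  // Accepts any shaped container, not only an NdArray
  static long totalValues(Shaped container) {
    return container.size();
  }

  public static void main(String[] args) {
    IntNdArray array = NdArrays.ofInts(Shape.of(3, 3, 2));
    System.out.println(array.rank());        // 3
    System.out.println(totalValues(array));  // 18, the example from the Javadoc above
  }
}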
@@ -1,3 +1,7 @@
op {
graph_op_name: "BarrierIncompleteSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "BarrierReadySize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -3,4 +3,8 @@ op {
endpoint {
name: "LookupTableSize"
}
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "MapIncompleteSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "MapSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "OrderedMapIncompleteSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "OrderedMapSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -3,4 +3,8 @@ op {
endpoint {
name: "io.QueueSize"
}
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "SetSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -1,3 +1,7 @@
op {
graph_op_name: "StageSize"
out_arg {
name: "size"
rename_to: "output"
}
}
@@ -3,4 +3,8 @@ op {
endpoint {
name: "TensorArraySize"
}
out_arg {
name: "size"
rename_to: "output"
}
}
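The rename_to entries above only change the generated Java endpoints: the op result named size in each op definition is now surfaced as output() on the wrapper class instead of size(). A rough, hand-written sketch of the resulting accessor (the real classes are generated and also carry create() factories, annotations and javadoc):

import org.tensorflow.Operation;
import org.tensorflow.Output;
import org.tensorflow.op.RawOp;
import org.tensorflow.types.TInt32;

// Illustrative shape of a generated wrapper such as MapSize after the rename
public final class MapSizeSketch extends RawOp {

  private final Output<TInt32> output;

  MapSizeSketch(Operation operation) {
    super(operation);
    // the op result previously exposed as size()
    this.output = operation.output(0);
  }

  /** Renamed accessor for the op's result. */
  public Output<TInt32> output() {
    return output;
  }
}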
@@ -97,9 +97,6 @@ class Type {
static Type IterableOf(const Type& type) {
return Interface("Iterable").add_parameter(type);
}
static Type DataTypeOf(const Type& type) {
return Class("DataType", "org.tensorflow").add_parameter(type);
}
static Type ForDataType(DataType data_type) {
switch (data_type) {
case DataType::DT_BOOL:
@@ -103,39 +103,55 @@ void CollectOpDependencies(const OpSpec& op, RenderMode mode,
}
for (const AttributeSpec& attribute : op.attributes()) {
out->push_back(attribute.var().type());
out->push_back(attribute.jni_type());
if (attribute.jni_type().name() == "DataType") {
out->push_back(Type::Class("Operands", "org.tensorflow.op"));
} else {
out->push_back(attribute.jni_type());
}
if (attribute.has_default_value() &&
attribute.type().kind() == Type::GENERIC) {
out->push_back(Type::ForDataType(attribute.default_value()->type()));
}
}
for (const AttributeSpec& optional_attribute : op.optional_attributes()) {
if (optional_attribute.jni_type().name() == "DataType") {
out->push_back(Type::Class("Operands", "org.tensorflow.op"));
} else {
out->push_back(optional_attribute.jni_type());
}
out->push_back(optional_attribute.var().type());
}
}

void WriteSetAttrDirective(const AttributeSpec& attr, bool optional,
SourceWriter* writer) {
string var_name = optional ? "opts." + attr.var().name() : attr.var().name();
if (attr.iterable()) {
string array_name = attr.var().name() + "Array";
writer->AppendType(attr.jni_type())
.Append("[] " + array_name + " = new ")
.AppendType(attr.jni_type())
.Append("[" + var_name + ".size()];")
.EndLine()
.BeginBlock("for (int i = 0; i < " + array_name + ".length; ++i)")
.Append(array_name + "[i] = ");
writer->Append(var_name + ".get(i);");
writer->EndLine()
.EndBlock()
.Append("opBuilder.setAttr(\"" + attr.op_def_name() + "\", ")
.Append(array_name + ");")
.EndLine();
} else {
if (attr.jni_type().name() == "DataType") {
writer->Append("opBuilder.setAttr(\"" + attr.op_def_name() + "\", ")
.Append(var_name + ");")
.EndLine();
.Append(attr.iterable() ? "Operands.toDataTypes(" : "Operands.toDataType(")
.Append(attr.var().name() + "));")
.EndLine();
} else {
if (attr.iterable()) {
string array_name = attr.var().name() + "Array";
writer->AppendType(attr.jni_type())
.Append("[] " + array_name + " = new ")
.AppendType(attr.jni_type())
.Append("[" + var_name + ".size()];")
.EndLine()
.BeginBlock("for (int i = 0; i < " + array_name + ".length; ++i)")
.Append(array_name + "[i] = ");
writer->Append(var_name + ".get(i);");
writer->EndLine()
.EndBlock()
.Append("opBuilder.setAttr(\"" + attr.op_def_name() + "\", ")
.Append(array_name + ");")
.EndLine();
} else {
writer->Append("opBuilder.setAttr(\"" + attr.op_def_name() + "\", ")
.Append(var_name + ");")
.EndLine();
}
}
}
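With this change, a "DataType" attribute is routed through Operands instead of being passed as a DataType value, so the generated factories end up containing lines of roughly this form (an illustrative fragment; dtype is assumed to be a Class<T> parameter and dtypes a list of class literals):

// Emitted for a single type attribute: the class literal is converted to its proto DataType
opBuilder.setAttr("dtype", Operands.toDataType(dtype));
// Emitted for an iterable type attribute
opBuilder.setAttr("dtypes", Operands.toDataTypes(dtypes));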

@@ -177,7 +193,7 @@ void RenderSecondaryFactoryMethod(const OpSpec& op, const Type& op_class,
if (attr.type().kind() == Type::GENERIC &&
default_types.find(attr.type().name()) != default_types.end()) {
factory_statement << default_types.at(attr.type().name()).name()
<< ".DTYPE";
<< ".class";
} else {
AddArgument(attr.var(), attr.description(), &factory, &factory_doc);
factory_statement << attr.var().name();
@@ -246,8 +262,9 @@ void RenderFactoryMethods(const OpSpec& op, const Type& op_class,
writer->EndLine();
}
}

// Add control dependencies, if any.
writer->Append("opBuilder = scope.applyControlDependencies(opBuilder);");
writer->Append("opBuilder = scope.apply(opBuilder);");
writer->EndLine();

for (const AttributeSpec& attribute : op.attributes()) {
@@ -81,13 +81,19 @@ class TypeResolver {
std::pair<Type, Type> MakeTypePair(const Type& type) {
return std::make_pair(type, type);
}
Type NextGeneric() {
Type NextGeneric(const OpDef_AttrDef& attr_def) {
char generic_letter = next_generic_letter_++;
if (next_generic_letter_ > 'Z') {
next_generic_letter_ = 'A';
}
return Type::Generic(string(1, generic_letter))
.add_supertype(Type::Class("TType", "org.tensorflow.types.family"));
return Type::Generic(string(1, generic_letter));
}
Type TypeFamilyOf(const OpDef_AttrDef& attr_def) {
// TODO(karllessard) support more type families
if (IsRealNumbers(attr_def.allowed_values())) {
return Type::Interface("TNumber", "org.tensorflow.types.family");
}
return Type::Interface("TType", "org.tensorflow.types.family");
}
};

@@ -152,15 +158,12 @@ std::pair<Type, Type> TypeResolver::TypesOf(const OpDef_AttrDef& attr_def,
types = MakeTypePair(Type::Class("Shape", "org.tensorflow.ndarray"));

} else if (attr_type == "tensor") {
types = MakeTypePair(Type::Class("Tensor", "org.tensorflow")
.add_parameter(Type::Wildcard()));
types = MakeTypePair(Type::Class("Tensor", "org.tensorflow"));

} else if (attr_type == "type") {
Type type = *iterable_out ? Type::Wildcard() : NextGeneric();
if (IsRealNumbers(attr_def.allowed_values())) {
type.add_supertype(Type::Class("TNumber", "org.tensorflow.types.family"));
}
types = MakeTypePair(type, Type::Enum("DataType", "org.tensorflow"));
Type type = *iterable_out ? Type::Wildcard() : NextGeneric(attr_def);
type.add_supertype(TypeFamilyOf(attr_def));
types = MakeTypePair(type, Type::Enum("DataType", "org.tensorflow.proto.framework"));

} else {
LOG(FATAL) << "Cannot resolve data type for attribute \"" << attr_type
@@ -306,7 +309,7 @@ AttributeSpec CreateAttribute(const OpDef_AttrDef& attr_def,
bool iterable = false;
std::pair<Type, Type> types = type_resolver->TypesOf(attr_def, &iterable);
Type var_type = types.first.kind() == Type::GENERIC
? Type::DataTypeOf(types.first)
? Type::ClassOf(types.first)
: types.first;
if (iterable) {
var_type = Type::ListOf(var_type);
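Taken together, the resolver changes mean a "type" attribute now produces a generic bounded by its resolved family (TNumber for real-number attributes, TType otherwise), carried in the endpoint as a class literal rather than a DataType, with any default type supplied as a class literal as well. A hedged sketch of the kind of factory this yields, using a hypothetical op called ExampleOp (excerpt only; imports and the surrounding op class are omitted):

// Primary factory: generic T bounded by the resolved type family, attribute passed as Class<T>
public static <T extends TNumber> ExampleOp<T> create(Scope scope, Operand<? extends TNumber> input, Class<T> dtype) {
  OperationBuilder opBuilder = scope.env().opBuilder("ExampleOp", scope.makeOpName("ExampleOp"));
  opBuilder.addInput(input.asOutput());
  opBuilder = scope.apply(opBuilder);
  opBuilder.setAttr("dtype", Operands.toDataType(dtype));
  return new ExampleOp<>(opBuilder.build());
}

// Secondary factory: when the attribute declares a default type, it is now supplied
// as a class literal (e.g. TInt64.class) rather than a DataType constant such as TInt64.DTYPE
public static ExampleOp<TInt64> create(Scope scope, Operand<? extends TNumber> input) {
  return create(scope, input, TInt64.class);
}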