From 2235585d51ad860202c5a2ee9ad1de46a62717cf Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 22 Jan 2019 15:07:13 -0500 Subject: [PATCH 01/30] Working on TensorBoard op support --- binding/tfjs_backend.cc | 13 ++++++++++++ binding/tfjs_binding.cc | 1 + src/nodejs_kernel_backend.ts | 39 +++++++++++++++++++++++++++++++++++- src/tfjs_binding.d.ts | 1 + src/try_tensorboard.ts | 19 ++++++++++++++++++ 5 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 src/try_tensorboard.ts diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index 777727c9..c097045a 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -23,6 +23,7 @@ #include #include +#include // DEBUG #include #include #include @@ -229,6 +230,8 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, TF_DataType tensor_data_type, napi_typedarray_type array_type, napi_value *result) { + std::cout << "CopyTFE_TensorHandleDataToTypedArray() 0:" + << std::endl; // DEBUG TF_AutoStatus tf_status; TF_AutoTensor tensor( @@ -237,6 +240,8 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, // Determine the length of the array based on the shape of the tensor. size_t num_elements = GetTensorNumElements(tensor.tensor); + std::cout << "CopyTFE_TensorHandleDataToTypedArray() 10: num_elements = " + << num_elements << std::endl; // DEBUG if (tensor_data_type == TF_COMPLEX64) { // Dimension length will be double for Complex 64. @@ -244,6 +249,8 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, } size_t byte_length = TF_TensorByteSize(tensor.tensor); + std::cout << "CopyTFE_TensorHandleDataToTypedArray() 20: byte_length = " + << byte_length << std::endl; // DEBUG napi_value array_buffer_value; void *array_buffer_data; @@ -328,6 +335,7 @@ void CopyTFE_TensorHandleDataToStringArray(napi_env env, void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, TFE_TensorHandle *tfe_tensor_handle, napi_value *result) { + std::cout << "In CopyTFE_TensorHandleDataToJSData()" << std::endl; // DEBUG if (tfe_context == nullptr) { NAPI_THROW_ERROR(env, "Invalid TFE_Context"); return; @@ -355,6 +363,11 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, case TF_STRING: is_string = true; break; + case TF_RESOURCE: + std::cout << "CopyTFE_TensorHandleDataToJSData(): TF_RESORUCE type" + << std::endl; // DEBUG + typed_array_type = napi_int32_array; + break; default: REPORT_UNKNOWN_TF_DATA_TYPE(env, TFE_TensorHandleDataType(tfe_tensor_handle)); diff --git a/binding/tfjs_binding.cc b/binding/tfjs_binding.cc index cf42a1d4..e0280ff9 100644 --- a/binding/tfjs_binding.cc +++ b/binding/tfjs_binding.cc @@ -166,6 +166,7 @@ static napi_value InitTFNodeJSBinding(napi_env env, napi_value exports) { EXPORT_INT_PROPERTY(TF_BOOL); EXPORT_INT_PROPERTY(TF_COMPLEX64); EXPORT_INT_PROPERTY(TF_STRING); + EXPORT_INT_PROPERTY(TF_RESOURCE); // Op AttrType EXPORT_INT_PROPERTY(TF_ATTR_STRING); diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 5d061cb9..f114d66f 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -16,7 +16,7 @@ */ // tslint:disable-next-line:max-line-length -import {BackendTimingInfo, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D} from '@tensorflow/tfjs-core'; +import {BackendTimingInfo, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, 
tensor2d, Tensor3D, tensor3d, Tensor4D} from '@tensorflow/tfjs-core'; import {Conv2DInfo, Conv3DInfo} from '@tensorflow/tfjs-core/dist/ops/conv_util'; import {Tensor5D} from '@tensorflow/tfjs-core/dist/tensor'; import {upcastType} from '@tensorflow/tfjs-core/dist/types'; @@ -62,6 +62,10 @@ export class NodeJSKernelBackend extends KernelBackend { }); let dtype: DataType; + console.log(`metadata.dtype = ${metadata.dtype}`); // DEBUG + console.log(`this.binding.TF_STRING = ${this.binding.TF_STRING}`); // DEBUG + console.log( + `this.binding.TF_RESOURCE = ${this.binding.TF_RESOURCE}`); // DEBUG switch (metadata.dtype) { case this.binding.TF_FLOAT: dtype = 'float32'; @@ -78,6 +82,11 @@ export class NodeJSKernelBackend extends KernelBackend { case this.binding.TF_STRING: dtype = 'string'; break; + case this.binding.TF_RESOURCE: + // TODO(cais): This should probably be made into a resource-specific + // type. + dtype = 'int32'; + break; default: throw new Error(`Unknown dtype enum ${metadata.dtype}`); } @@ -156,10 +165,12 @@ export class NodeJSKernelBackend extends KernelBackend { } readSync(dataId: object): Float32Array|Int32Array|Uint8Array { + console.log('In readSync()'); // DEBUG if (!this.tensorMap.has(dataId)) { throw new Error(`Tensor ${dataId} was not registered!`); } const info = this.tensorMap.get(dataId); + console.log(`readSync(): info = ${JSON.stringify(info)}`); // DEBUG if (info.values != null) { return info.values; } else { @@ -1461,6 +1472,32 @@ export class NodeJSKernelBackend extends KernelBackend { return tensor3d(values, outShape, 'int32'); } + summaryWriter(): Scalar { + console.log('In node-backend summaryWriter()'); + const opAttrs = [ + { + name: 'shared_name', + type: this.binding.TF_ATTR_STRING, + value: `logdir:foo` // TODO(cais): Use more specific name. + }, + { + name: 'container', + type: this.binding.TF_ATTR_STRING, + value: `logdir:foo-container` // TODO(cais): Use more specific name. + } + ]; + const writerResource = + this.executeSingleOutput('SummaryWriter', opAttrs, []); + console.log(`writerResource = ${writerResource}`); // DEBUG + return scalar(1337); // TODO(cais): Implement this. + } + + createSummaryFileWriter( + logdir: string, maxQueue?: number, flushMillis?: number, + filenameSuffix?: string): void { + // TODO(cais): Implement this. + } + memory() { // Due to automatic garbage collection, the numbers are unreliable. 
// TODO(kreeger): Since there is finalization in C, count the true diff --git a/src/tfjs_binding.d.ts b/src/tfjs_binding.d.ts index 656ef4d1..7af94b45 100644 --- a/src/tfjs_binding.d.ts +++ b/src/tfjs_binding.d.ts @@ -53,6 +53,7 @@ export interface TFJSBinding { TF_BOOL: number; TF_COMPLEX64: number; TF_STRING: number; + TF_RESOURCE: number; // TF OpAttrTypes TF_ATTR_STRING: number; diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts new file mode 100644 index 00000000..c68798cd --- /dev/null +++ b/src/try_tensorboard.ts @@ -0,0 +1,19 @@ +import * as tf from './index'; + +(async function main() { + console.log(tf.version); + tf.createTensorBoardSummaryWriter('/tmp/tfjs_tb_logdir'); + + // const x = tf.scalar(21.0); + // const y = tf.neg(x); + // const z = tf.reciprocal(y); + + // const x = tf.tensor1d([1.1, 2.2]); + // const y = tf.tensor1d([3.3]); + // let z = tf.concat([x, y]); + // z.print(); + // z = tf.concat([z, x]); + // z.print(); + // z.print(); + // z.print(); +})(); From af891d7b28ee429d5971f8a434fabb2a18e7c3e3 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 22 Jan 2019 16:48:46 -0500 Subject: [PATCH 02/30] Calling SummaryWriter op and getting DT_RESOURCE output as a string is working --- binding/tfjs_backend.cc | 63 +++++++++++++++++++++++++++++++++++- src/nodejs_kernel_backend.ts | 11 ++++--- src/tensorboard.ts | 0 3 files changed, 69 insertions(+), 5 deletions(-) create mode 100644 src/tensorboard.ts diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index c097045a..39f2c306 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -331,6 +331,62 @@ void CopyTFE_TensorHandleDataToStringArray(napi_env env, } } +void CopyTFE_TensorHandleDataToResourceArray( + napi_env env, TFE_Context *tfe_context, TFE_TensorHandle *tfe_tensor_handle, + napi_value *result) { + TF_AutoStatus tf_status; + + TF_AutoTensor tensor( + TFE_TensorHandleResolve(tfe_tensor_handle, tf_status.status)); + ENSURE_TF_OK(env, tf_status); + + std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 0: " + << "dtype = " << TF_TensorType(tensor.tensor) + << std::endl; // DEBUG + if (TF_TensorType(tensor.tensor) != TF_RESOURCE) { + NAPI_THROW_ERROR(env, "Tensor is not of type TF_RESOURCE"); + return; + } + + void *tensor_data = TF_TensorData(tensor.tensor); + ENSURE_VALUE_IS_NOT_NULL(env, tensor_data); + + size_t byte_length = TF_TensorByteSize(tensor.tensor); + std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 10: " + << "byte_length = " << byte_length << std::endl; // DEBUG + const char *limit = static_cast(tensor_data) + byte_length; + + size_t num_elements = GetTensorNumElements(tensor.tensor); + std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 20: " + << "num_elements = " << num_elements << std::endl; // DEBUG + + // String values are stored in offsets. 
+ const char *data = static_cast(tensor_data); + // const size_t offsets_size = sizeof(char) * num_elements; + + // Skip passed the offsets and find the first string: + // const char *data = static_cast(tensor_data) + offsets_size; + + TF_AutoStatus status; + + // Create a JS string to stash strings into + napi_status nstatus; + nstatus = napi_create_array_with_length(env, byte_length, result); + std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 30: " + << "created array with length = " << byte_length + << std::endl; // DEBUG + + char *str_ptr = (char *)malloc(sizeof(char *) * byte_length); + memcpy(str_ptr, data, byte_length); + + napi_value str_value; + nstatus = napi_create_string_utf8(env, str_ptr, byte_length, &str_value); + ENSURE_NAPI_OK(env, nstatus); + + nstatus = napi_set_element(env, *result, 0, str_value); + ENSURE_NAPI_OK(env, nstatus); +} + // Handles converting the stored TF_Tensor data into the correct JS value. void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, TFE_TensorHandle *tfe_tensor_handle, @@ -348,6 +404,7 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, // Determine the type of the array napi_typedarray_type typed_array_type; bool is_string = false; + bool is_resource = false; TF_DataType tensor_data_type = TFE_TensorHandleDataType(tfe_tensor_handle); switch (tensor_data_type) { case TF_COMPLEX64: @@ -366,7 +423,8 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, case TF_RESOURCE: std::cout << "CopyTFE_TensorHandleDataToJSData(): TF_RESORUCE type" << std::endl; // DEBUG - typed_array_type = napi_int32_array; + is_resource = true; + // typed_array_type = napi_int32_array; break; default: REPORT_UNKNOWN_TF_DATA_TYPE(env, @@ -377,6 +435,9 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, if (is_string) { CopyTFE_TensorHandleDataToStringArray(env, tfe_context, tfe_tensor_handle, result); + } else if (is_resource) { + CopyTFE_TensorHandleDataToResourceArray(env, tfe_context, tfe_tensor_handle, + result); } else { CopyTFE_TensorHandleDataToTypedArray(env, tfe_context, tfe_tensor_handle, tensor_data_type, typed_array_type, diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index f114d66f..7a96c34f 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -62,8 +62,7 @@ export class NodeJSKernelBackend extends KernelBackend { }); let dtype: DataType; - console.log(`metadata.dtype = ${metadata.dtype}`); // DEBUG - console.log(`this.binding.TF_STRING = ${this.binding.TF_STRING}`); // DEBUG + console.log(`metadata.dtype = ${metadata.dtype}`); // DEBUG console.log( `this.binding.TF_RESOURCE = ${this.binding.TF_RESOURCE}`); // DEBUG switch (metadata.dtype) { @@ -85,7 +84,7 @@ export class NodeJSKernelBackend extends KernelBackend { case this.binding.TF_RESOURCE: // TODO(cais): This should probably be made into a resource-specific // type. 
- dtype = 'int32'; + dtype = 'string'; break; default: throw new Error(`Unknown dtype enum ${metadata.dtype}`); @@ -1488,7 +1487,11 @@ export class NodeJSKernelBackend extends KernelBackend { ]; const writerResource = this.executeSingleOutput('SummaryWriter', opAttrs, []); - console.log(`writerResource = ${writerResource}`); // DEBUG + writerResource.print(); // DEBUG + console.log(`writerResource.dtype = ${writerResource.dtype}`); // DEBUG + console.log( + `writerResource.shape = ` + + `${JSON.stringify(writerResource.shape)}`); // DEBUG return scalar(1337); // TODO(cais): Implement this. } diff --git a/src/tensorboard.ts b/src/tensorboard.ts new file mode 100644 index 00000000..e69de29b From 0b56dd0e8c755152ed2f7c3b89c97dfbacdc0f93 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 22 Jan 2019 20:43:16 -0500 Subject: [PATCH 03/30] WIP --- src/index.ts | 3 +++ src/nodejs_kernel_backend.ts | 28 +++++++++++++++++++++++----- src/tensorboard.ts | 31 +++++++++++++++++++++++++++++++ src/tfjs_binding.d.ts | 1 + src/try_tensorboard.ts | 2 +- 5 files changed, 59 insertions(+), 6 deletions(-) diff --git a/src/index.ts b/src/index.ts index 3c021a97..f1e17de4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -36,6 +36,9 @@ export const io = { ...tf.io, ...nodeIo }; + +export * from './tensorboard'; + // Export all union package symbols export * from '@tensorflow/tfjs'; diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 7a96c34f..05d14596 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -1492,13 +1492,31 @@ export class NodeJSKernelBackend extends KernelBackend { console.log( `writerResource.shape = ` + `${JSON.stringify(writerResource.shape)}`); // DEBUG - return scalar(1337); // TODO(cais): Implement this. + return writerResource as Scalar; // TODO(cais): Implement this. } - createSummaryFileWriter( - logdir: string, maxQueue?: number, flushMillis?: number, - filenameSuffix?: string): void { - // TODO(cais): Implement this. + createSummaryFileWriter2( // TODO(cais): Rename. DO NOT SUBMIT. + resourceHandle: string, logdir: string, maxQueue?: number, + flushMillis?: number, filenameSuffix?: string): void { + const opAttrs = [ + { + name: 'writer', + type: this.binding.TF_ATTR_RESOURCE, + value: resourceHandle + }, + {name: 'logdir', type: this.binding.TF_ATTR_STRING, value: logdir}, + {name: 'max_queue', type: this.binding.TF_ATTR_INT, value: maxQueue}, { + name: 'flush_millis', + type: this.binding.TF_ATTR_INT, + value: flushMillis + }, + { + name: 'filename_suffix', + type: this.binding.TF_ATTR_STRING, + value: filenameSuffix + } + ]; + this.executeMultipleOutputs('CreateSummaryFileWriter', opAttrs, [], 0); } memory() { diff --git a/src/tensorboard.ts b/src/tensorboard.ts index e69de29b..ef725e57 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -0,0 +1,31 @@ + +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +import {nodeBackend} from './ops/op_utils'; + +export function createSummaryWriter( + logdir: string, maxQueue?: number, flushMillis?: number, + filenameSuffix?: string) { + console.log('In createSummaryWriter()'); // DEBUG + const backend = nodeBackend(); + const writerResource = backend.summaryWriter(); + console.log(writerResource); // DEBUG + // backend.createSummaryFileWriter2(writeRe) + console.log(writerResource.dataSync()[0]); // DEBUG + // backend.createSummaryFileWriter() +} diff --git a/src/tfjs_binding.d.ts b/src/tfjs_binding.d.ts index 7af94b45..1d0df01b 100644 --- a/src/tfjs_binding.d.ts +++ b/src/tfjs_binding.d.ts @@ -62,6 +62,7 @@ export interface TFJSBinding { TF_ATTR_BOOL: number; TF_ATTR_TYPE: number; TF_ATTR_SHAPE: number; + TF_ATTR_RESOURCE: number; TF_Version: string; } diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index c68798cd..d38ba868 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -2,7 +2,7 @@ import * as tf from './index'; (async function main() { console.log(tf.version); - tf.createTensorBoardSummaryWriter('/tmp/tfjs_tb_logdir'); + tf.createSummaryWriter('/tmp/tfjs_tb_logdir'); // const x = tf.scalar(21.0); // const y = tf.neg(x); From 7623739d3aa5562e8087b07a4ea358bf0a2cbdee Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 22 Jan 2019 23:33:14 -0500 Subject: [PATCH 04/30] WIP: createSummaryFileWriter() seems to be creating dirs now --- binding/tfjs_backend.cc | 40 +++++++++++++++++++++--- binding/tfjs_binding.cc | 7 +++++ src/nodejs_kernel_backend.ts | 60 +++++++++++++++++++++--------------- src/tensorboard.ts | 8 +++-- 4 files changed, 83 insertions(+), 32 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index 39f2c306..c3739d4f 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -376,14 +376,28 @@ void CopyTFE_TensorHandleDataToResourceArray( << "created array with length = " << byte_length << std::endl; // DEBUG - char *str_ptr = (char *)malloc(sizeof(char *) * byte_length); - memcpy(str_ptr, data, byte_length); + // char *str_ptr = (char *)malloc(sizeof(char *) * byte_length); + // memcpy(str_ptr, data, byte_length); - napi_value str_value; - nstatus = napi_create_string_utf8(env, str_ptr, byte_length, &str_value); + // napi_value str_value; + // nstatus = napi_create_string_utf8(env, str_ptr, byte_length, &str_value); + // ENSURE_NAPI_OK(env, nstatus); + + // nstatus = napi_set_element(env, *result, 0, str_value); + // ENSURE_NAPI_OK(env, nstatus); + + napi_value array_buffer_value; + void *array_buffer_data; + nstatus = napi_create_arraybuffer(env, byte_length, &array_buffer_data, + &array_buffer_value); ENSURE_NAPI_OK(env, nstatus); - nstatus = napi_set_element(env, *result, 0, str_value); + // TFE_TensorHandleResolve can use a shared data pointer, memcpy() the current + // value to the newly allocated NAPI buffer. + memcpy(array_buffer_data, tensor_data, byte_length); + + nstatus = napi_create_typedarray(env, napi_uint8_array, byte_length, + array_buffer_value, 0, result); ENSURE_NAPI_OK(env, nstatus); } @@ -423,6 +437,8 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, case TF_RESOURCE: std::cout << "CopyTFE_TensorHandleDataToJSData(): TF_RESORUCE type" << std::endl; // DEBUG + // Represent a resource handle with an Uint8Array. 
+ typed_array_type = napi_uint8_array; is_resource = true; // typed_array_type = napi_int32_array; break; @@ -500,6 +516,9 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { nstatus = GetStringParam(env, attr_name_value, attr_name_string); ENSURE_NAPI_OK(env, nstatus); + std::cout << " attr_name_string = " << attr_name_string + << std::endl; // DEBUG + // OpAttr will be used beyond the scope of this function call. Stash ops in a // set for re-use instead of dynamically reallocating strings for operations. const char *attr_name = @@ -532,6 +551,7 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_INT: { + std::cout << " In TF_ATTR_FLOAT" << std::endl; // DEBUG if (IsArray(env, nstatus, &js_value)) { uint32_t length; nstatus = napi_get_array_length(env, js_value, &length); @@ -559,6 +579,7 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_FLOAT: { + std::cout << " In TF_ATTR_FLOAT" << std::endl; // DEBUG if (IsArray(env, nstatus, &js_value)) { uint32_t length; nstatus = napi_get_array_length(env, js_value, &length); @@ -585,6 +606,7 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_BOOL: { + std::cout << " In TF_ATTR_BOOL" << std::endl; // DEBUG if (IsArray(env, nstatus, &js_value)) { uint32_t length; nstatus = napi_get_array_length(env, js_value, &length); @@ -611,6 +633,7 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_TYPE: { + std::cout << " In TF_ATTR_TYPE" << std::endl; // DEBUG TF_DataType tf_data_type; nstatus = napi_get_value_int32( env, js_value, reinterpret_cast(&tf_data_type)); @@ -621,6 +644,7 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_SHAPE: { + std::cout << " In TF_ATTR_SHAPE" << std::endl; // DEBUG std::vector shape_vector; ExtractArrayShape(env, js_value, &shape_vector); @@ -632,6 +656,7 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } default: + std::cout << " In default attr type" << std::endl; // DEBUG REPORT_UNKNOWN_TF_ATTR_TYPE(env, tf_attr_type); break; } @@ -763,6 +788,7 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, napi_value op_attr_inputs, napi_value input_tensor_ids, napi_value num_output_values) { + std::cout << "TFJSBackend::ExecuteOp(): 0" << std::endl; // DEBUG napi_status nstatus; std::string op_name; @@ -778,6 +804,8 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); for (uint32_t i = 0; i < num_input_ids; i++) { + std::cout << "TFJSBackend::ExecuteOp(): input i = " << i + << std::endl; // DEBUG napi_value cur_input_id; nstatus = napi_get_element(env, input_tensor_ids, i, &cur_input_id); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); @@ -801,11 +829,13 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, nstatus = napi_get_array_length(env, op_attr_inputs, &op_attrs_length); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); + std::cout << "TFJSBackend::ExecuteOp(): 50" << std::endl; // DEBUG for (uint32_t i = 0; i < op_attrs_length; i++) { napi_value cur_op_attr; nstatus = napi_get_element(env, op_attr_inputs, i, &cur_op_attr); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); + std::cout << " i = " << i << std::endl; // DEBUG AssignOpAttr(env, tfe_op.op, cur_op_attr); // Check to see if an exception exists, if so return a failure. 
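[Editorial note, not part of the patch: the attribute objects that AssignOpAttr() walks over originate on the TypeScript side as plain {name, type, value} records matching the TFEOpAttr interface in src/tfjs_binding.d.ts, and ExecuteOp() receives the op name, that attribute list, the input tensor IDs, and the expected output count. Below is a minimal TypeScript sketch of that calling convention, reusing only names that appear elsewhere in this series; the helper name buildSummaryWriterAttrs is hypothetical.]

import {TFEOpAttr, TFJSBinding} from './tfjs_binding';

// Hypothetical helper: builds the attribute list that the SummaryWriter op
// invocation in nodejs_kernel_backend.ts hands down to AssignOpAttr().
export function buildSummaryWriterAttrs(binding: TFJSBinding): TFEOpAttr[] {
  return [
    // String-valued attrs are handled by the TF_ATTR_STRING branch of
    // AssignOpAttr() in binding/tfjs_backend.cc.
    {name: 'shared_name', type: binding.TF_ATTR_STRING, value: 'logdir:foo'},
    {name: 'container', type: binding.TF_ATTR_STRING, value: 'logdir:foo-container'}
  ];
}
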
diff --git a/binding/tfjs_binding.cc b/binding/tfjs_binding.cc index e0280ff9..432ce890 100644 --- a/binding/tfjs_binding.cc +++ b/binding/tfjs_binding.cc @@ -16,6 +16,7 @@ */ #include +#include // dEBUG #include "tfjs_backend.h" #include "utils.h" @@ -107,6 +108,7 @@ static napi_value TensorDataSync(napi_env env, napi_callback_info info) { } static napi_value ExecuteOp(napi_env env, napi_callback_info info) { + std::cout << "In ExecuteOp: 0" << std::endl; // DEBUG napi_status nstatus; // Create tensor takes 3 params: op-name, op-attrs, input-tensor-ids, @@ -122,10 +124,15 @@ static napi_value ExecuteOp(napi_env env, napi_callback_info info) { return nullptr; } + std::cout << "In ExecuteOp: 10" << std::endl; // DEBUG ENSURE_VALUE_IS_STRING_RETVAL(env, args[0], nullptr); + std::cout << "In ExecuteOp: 20" << std::endl; // DEBUG ENSURE_VALUE_IS_ARRAY_RETVAL(env, args[1], nullptr); + std::cout << "In ExecuteOp: 30" << std::endl; // DEBUG ENSURE_VALUE_IS_ARRAY_RETVAL(env, args[2], nullptr); + std::cout << "In ExecuteOp: 40" << std::endl; // DEBUG ENSURE_VALUE_IS_NUMBER_RETVAL(env, args[3], nullptr); + std::cout << "In ExecuteOp: 50" << std::endl; // DEBUG return gBackend->ExecuteOp(env, args[0], args[1], args[2], args[3]); } diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 05d14596..2e25cfb0 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -1471,7 +1471,7 @@ export class NodeJSKernelBackend extends KernelBackend { return tensor3d(values, outShape, 'int32'); } - summaryWriter(): Scalar { + summaryWriter(): Tensor1D { console.log('In node-backend summaryWriter()'); const opAttrs = [ { @@ -1492,31 +1492,41 @@ export class NodeJSKernelBackend extends KernelBackend { console.log( `writerResource.shape = ` + `${JSON.stringify(writerResource.shape)}`); // DEBUG - return writerResource as Scalar; // TODO(cais): Implement this. - } - - createSummaryFileWriter2( // TODO(cais): Rename. DO NOT SUBMIT. - resourceHandle: string, logdir: string, maxQueue?: number, - flushMillis?: number, filenameSuffix?: string): void { - const opAttrs = [ - { - name: 'writer', - type: this.binding.TF_ATTR_RESOURCE, - value: resourceHandle - }, - {name: 'logdir', type: this.binding.TF_ATTR_STRING, value: logdir}, - {name: 'max_queue', type: this.binding.TF_ATTR_INT, value: maxQueue}, { - name: 'flush_millis', - type: this.binding.TF_ATTR_INT, - value: flushMillis - }, - { - name: 'filename_suffix', - type: this.binding.TF_ATTR_STRING, - value: filenameSuffix - } + return writerResource as Tensor1D; // TODO(cais): Implement this. + } + + createSummaryFileWriter2( // TODO(cais): Rename. DO NOT SUBMIT. + resourceHandle: Tensor, // TOOD(cais): Use more principled typing. + logdir: string, maxQueue?: number, flushMillis?: number, + filenameSuffix?: string): void { + // const inputArgs = [ + // { + // name: 'writer', + // type: this.binding.TF_ATTR_RESOURCE, + // value: resourceHandle + // }, + // {name: 'logdir', type: this.binding.TF_ATTR_STRING, value: logdir}, + // {name: 'max_queue', type: this.binding.TF_ATTR_INT, value: maxQueue}, { + // name: 'flush_millis', + // type: this.binding.TF_ATTR_INT, + // value: flushMillis + // }, + // { + // name: 'filename_suffix', + // type: this.binding.TF_ATTR_STRING, + // value: filenameSuffix + // } + // ]; + console.log('createSummaryFileWriter2(): 0'); // DEBUG + const inputArgs = [ + resourceHandle, scalar(logdir), + scalar(maxQueue == null ? 10 : maxQueue, 'int32'), + scalar(flushMillis == null ? 
2 * 60 * 1000 : flushMillis, 'int32'), + scalar(filenameSuffix == null ? '.v2' : filenameSuffix) ]; - this.executeMultipleOutputs('CreateSummaryFileWriter', opAttrs, [], 0); + console.log( // DEBUG + `createSummaryFileWriter2(): inputArgs.length = ${inputArgs.length}`); + this.executeMultipleOutputs('CreateSummaryFileWriter', [], inputArgs, 0); } memory() { diff --git a/src/tensorboard.ts b/src/tensorboard.ts index ef725e57..8bb5ae80 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -26,6 +26,10 @@ export function createSummaryWriter( const writerResource = backend.summaryWriter(); console.log(writerResource); // DEBUG // backend.createSummaryFileWriter2(writeRe) - console.log(writerResource.dataSync()[0]); // DEBUG - // backend.createSummaryFileWriter() + const resourceHandle = writerResource.dataSync() as Uint8Array; + console.log(typeof resourceHandle); // DEBUG + console.log(resourceHandle.length); // DEBUG + + backend.createSummaryFileWriter2( + writerResource, logdir, maxQueue, flushMillis, filenameSuffix); } From d3ae88ea9f20f21acd248b95c61f2e9852685170 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Thu, 24 Jan 2019 12:22:39 -0500 Subject: [PATCH 05/30] At a stage where WriteScalarSummary op says int32 != int64 for step --- src/nodejs_kernel_backend.ts | 83 +++++++++++++++++++++++++++--------- src/tensorboard.ts | 27 ++++++++++-- src/try_tensorboard.ts | 17 ++------ 3 files changed, 90 insertions(+), 37 deletions(-) diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 2e25cfb0..5afae4b4 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -16,7 +16,7 @@ */ // tslint:disable-next-line:max-line-length -import {BackendTimingInfo, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D} from '@tensorflow/tfjs-core'; +import {BackendTimingInfo, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D, tidy, util} from '@tensorflow/tfjs-core'; import {Conv2DInfo, Conv3DInfo} from '@tensorflow/tfjs-core/dist/ops/conv_util'; import {Tensor5D} from '@tensorflow/tfjs-core/dist/tensor'; import {upcastType} from '@tensorflow/tfjs-core/dist/types'; @@ -50,6 +50,24 @@ export class NodeJSKernelBackend extends KernelBackend { // TODO(kreeger, smilkov): Implement this. } + private typeAttributeFromTensor(value: Tensor): number { + switch (value.dtype) { + case 'float32': + return this.binding.TF_FLOAT; + case 'int32': + return this.binding.TF_INT32; + case 'bool': + return this.binding.TF_BOOL; + case 'complex64': + return this.binding.TF_COMPLEX64; + case 'string': + return this.binding.TF_STRING; + default: + throw new Error(`Unsupported dtype ${value.dtype}`) + } + } + + // Creates a new Tensor and maps the dataId to the passed in ID. private createOutputTensor(metadata: TensorMetadata): Tensor { const newId = {}; @@ -1471,7 +1489,10 @@ export class NodeJSKernelBackend extends KernelBackend { return tensor3d(values, outShape, 'int32'); } - summaryWriter(): Tensor1D { + // ------------------------------------------------------------ + // TensorBoard-related (tfjs-node-specific) backend kernels. + + summaryWriter(): Tensor1D { // TODO(cais): Fix typing. 
console.log('In node-backend summaryWriter()'); const opAttrs = [ { @@ -1499,24 +1520,6 @@ export class NodeJSKernelBackend extends KernelBackend { resourceHandle: Tensor, // TOOD(cais): Use more principled typing. logdir: string, maxQueue?: number, flushMillis?: number, filenameSuffix?: string): void { - // const inputArgs = [ - // { - // name: 'writer', - // type: this.binding.TF_ATTR_RESOURCE, - // value: resourceHandle - // }, - // {name: 'logdir', type: this.binding.TF_ATTR_STRING, value: logdir}, - // {name: 'max_queue', type: this.binding.TF_ATTR_INT, value: maxQueue}, { - // name: 'flush_millis', - // type: this.binding.TF_ATTR_INT, - // value: flushMillis - // }, - // { - // name: 'filename_suffix', - // type: this.binding.TF_ATTR_STRING, - // value: filenameSuffix - // } - // ]; console.log('createSummaryFileWriter2(): 0'); // DEBUG const inputArgs = [ resourceHandle, scalar(logdir), @@ -1529,6 +1532,46 @@ export class NodeJSKernelBackend extends KernelBackend { this.executeMultipleOutputs('CreateSummaryFileWriter', [], inputArgs, 0); } + writeScalarSummary( + resourceHandle: Tensor, step: number, name: string, + value: Scalar|number): void { + console.log('In writeScalarSummary(): 0'); // DEBUG + tidy(() => { + util.assert( + Number.isInteger(step), + `step is expected to be an integer, but is instead ${step}`); + // TODO(cais): step ought to be a int64-type tensor. But int64 doesn't + // exist as a type in TensorFlow.js yet. This may cause problems for + // large step values. + const inputArgs: Tensor[] = + [resourceHandle, scalar(step, 'int32'), scalar(name, 'string')]; + + let typeAttr: number; + if (typeof value === 'number') { + inputArgs.push(scalar(value)); + typeAttr = this.binding.TF_FLOAT; + } else { + // `value` is a Scalar. + util.assert( + value.rank === 0, + `A non-scalar tensor (rank ${value.rank}) is passed to ` + + `writeScalarSummary()`); + inputArgs.push(value); + typeAttr = this.typeAttributeFromTensor(value); + } + const opAttrs: TFEOpAttr[] = + [{name: 'T', type: this.binding.TF_ATTR_TYPE, value: typeAttr}]; + console.log('opAttrs:', opAttrs); // DEBUG + + // DEBUG + console.log( + 'In writeScalarSummary(): 10. Executing WriteScalarSummary op'); + this.executeMultipleOutputs('WriteScalarSummary', opAttrs, inputArgs, 0); + }); + } + + // ~ TensorBoard-related (tfjs-node-specific) backend kernels. + // ------------------------------------------------------------ memory() { // Due to automatic garbage collection, the numbers are unreliable. // TODO(kreeger): Since there is finalization in C, count the true diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 8bb5ae80..14b8cb99 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -16,20 +16,41 @@ * ============================================================================= */ +import {Scalar, Tensor} from '@tensorflow/tfjs'; import {nodeBackend} from './ops/op_utils'; -export function createSummaryWriter( +export class SummaryWriter { + constructor(private readonly resourceHandle: Tensor) {} + + scalar(step: number, name: string, value: Scalar, family?: string) { + // N.B.: Unlike the Python TensorFlow API, step is a required parameter, + // because the construct of global step does not exist in TensorFlow.js. + if (family != null) { + throw new Error('family support for scalar() is not implemented yet'); + } + // TODO(cais): Deduplicate backend with createSummaryWriter. 
+ const backend = nodeBackend(); + backend.writeScalarSummary(this.resourceHandle, step, name, value); + } + + // TODO(cais): Add close(), calling into the CloseSummaryWriter() op. +} + +export async function createSummaryWriter( logdir: string, maxQueue?: number, flushMillis?: number, - filenameSuffix?: string) { + filenameSuffix?: string): Promise { + // TODO(cais): Use more specific typing for ResourceHandle. console.log('In createSummaryWriter()'); // DEBUG const backend = nodeBackend(); const writerResource = backend.summaryWriter(); console.log(writerResource); // DEBUG // backend.createSummaryFileWriter2(writeRe) - const resourceHandle = writerResource.dataSync() as Uint8Array; + const resourceHandle = (await writerResource.data()) as Uint8Array; console.log(typeof resourceHandle); // DEBUG console.log(resourceHandle.length); // DEBUG backend.createSummaryFileWriter2( writerResource, logdir, maxQueue, flushMillis, filenameSuffix); + + return new SummaryWriter(writerResource); } diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index d38ba868..733249b1 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -2,18 +2,7 @@ import * as tf from './index'; (async function main() { console.log(tf.version); - tf.createSummaryWriter('/tmp/tfjs_tb_logdir'); - - // const x = tf.scalar(21.0); - // const y = tf.neg(x); - // const z = tf.reciprocal(y); - - // const x = tf.tensor1d([1.1, 2.2]); - // const y = tf.tensor1d([3.3]); - // let z = tf.concat([x, y]); - // z.print(); - // z = tf.concat([z, x]); - // z.print(); - // z.print(); - // z.print(); + const summaryWriter = await tf.createSummaryWriter('/tmp/tfjs_tb_logdir_2'); + console.log(summaryWriter); + summaryWriter.scalar(1, 'loss1', tf.scalar(42)); })(); From c787a3653fd8efdca34cd91618ad4c8b3a5c6702 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Thu, 24 Jan 2019 13:30:38 -0500 Subject: [PATCH 06/30] Seems to be writing scalar summaries now, though step value seems wrong. --- binding/tfjs_backend.cc | 15 ++++++++++++++- binding/tfjs_binding.cc | 1 + src/nodejs_kernel_backend.ts | 21 ++++++++++++++++----- src/ops/op_utils.ts | 2 ++ src/tfjs_binding.d.ts | 1 + src/try_tensorboard.ts | 6 +++++- 6 files changed, 39 insertions(+), 7 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index c3739d4f..a3d901e5 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -73,6 +73,11 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, } width = sizeof(uint8_t); break; + case napi_float64_array: + std::cout << "CreateTFE_TensorHandleFromTypedArray(): napi_float64_array" + << std::endl; // DEBUG + width = sizeof(uint64_t); // Hack for TensorBoard. NOTE(cais): + break; default: REPORT_UNKNOWN_TYPED_ARRAY_TYPE(env, array_type); return nullptr; @@ -428,6 +433,9 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, case TF_INT32: typed_array_type = napi_int32_array; break; + case TF_INT64: // Hack for TensorBoard. 
+ typed_array_type = napi_float64_array; + break; case TF_BOOL: typed_array_type = napi_uint8_array; break; @@ -720,6 +728,8 @@ napi_value TFJSBackend::CreateTensor(napi_env env, napi_value shape_value, int32_t dtype_int32; nstatus = napi_get_value_int32(env, dtype_value, &dtype_int32); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); + std::cout << "TFJSBackend::CreateTensor(): " + << "dtype = " << dtype_int32 << std::endl; // DEBUG TFE_TensorHandle *tfe_handle = CreateTFE_TensorHandleFromJSValues( env, shape_vector.data(), shape_vector.size(), @@ -788,7 +798,6 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, napi_value op_attr_inputs, napi_value input_tensor_ids, napi_value num_output_values) { - std::cout << "TFJSBackend::ExecuteOp(): 0" << std::endl; // DEBUG napi_status nstatus; std::string op_name; @@ -803,6 +812,8 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, nstatus = napi_get_array_length(env, input_tensor_ids, &num_input_ids); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); + std::cout << "TFJSBackend::ExecuteOp(): 0: num_input_ids = " << num_input_ids + << std::endl; // DEBUG for (uint32_t i = 0; i < num_input_ids; i++) { std::cout << "TFJSBackend::ExecuteOp(): input i = " << i << std::endl; // DEBUG @@ -815,6 +826,8 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); auto input_tensor_entry = tfe_handle_map_.find(cur_input_tensor_id); + std::cout << "TFJSBackend::ExecuteOp(): cur_input_tensor_id = " + << cur_input_tensor_id << std::endl; // DEBUG if (input_tensor_entry == tfe_handle_map_.end()) { NAPI_THROW_ERROR(env, "Input Tensor ID not referenced (tensor_id: %d)", cur_input_tensor_id); diff --git a/binding/tfjs_binding.cc b/binding/tfjs_binding.cc index 432ce890..78073c79 100644 --- a/binding/tfjs_binding.cc +++ b/binding/tfjs_binding.cc @@ -170,6 +170,7 @@ static napi_value InitTFNodeJSBinding(napi_env env, napi_value exports) { // Types EXPORT_INT_PROPERTY(TF_FLOAT); EXPORT_INT_PROPERTY(TF_INT32); + EXPORT_INT_PROPERTY(TF_INT64); EXPORT_INT_PROPERTY(TF_BOOL); EXPORT_INT_PROPERTY(TF_COMPLEX64); EXPORT_INT_PROPERTY(TF_STRING); diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 5afae4b4..5c540dc8 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -16,7 +16,7 @@ */ // tslint:disable-next-line:max-line-length -import {BackendTimingInfo, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D, tidy, util} from '@tensorflow/tfjs-core'; +import {BackendTimingInfo, complex, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D, tidy, util} from '@tensorflow/tfjs-core'; import {Conv2DInfo, Conv3DInfo} from '@tensorflow/tfjs-core/dist/ops/conv_util'; import {Tensor5D} from '@tensorflow/tfjs-core/dist/tensor'; import {upcastType} from '@tensorflow/tfjs-core/dist/types'; @@ -112,6 +112,7 @@ export class NodeJSKernelBackend extends KernelBackend { // Prepares Tensor instances for Op execution. 
private getInputTensorIds(tensors: Tensor[]): number[] { + console.log('In getInputTensorIds()'); // DEBUG const ids: number[] = []; for (let i = 0; i < tensors.length; i++) { const info = this.tensorMap.get(tensors[i].dataId); @@ -119,6 +120,8 @@ export class NodeJSKernelBackend extends KernelBackend { if (info.values != null) { // Values were delayed to write into the TensorHandle. Do that before Op // execution and clear stored values. + console.group( + `getInputTensorIds(): info.dtype = ${info.dtype}`); // DEBUG info.id = this.binding.createTensor(info.shape, info.dtype, info.values); info.values = null; @@ -214,6 +217,8 @@ export class NodeJSKernelBackend extends KernelBackend { } register(dataId: object, shape: number[], dtype: DataType): void { + console.log(`In register(): dataId = ${JSON.stringify(dataId)}, dtype = ${ + dtype}`); // DEBUG if (!this.tensorMap.has(dataId)) { this.tensorMap.set( dataId, {shape, dtype: getTFDType(dtype), values: null, id: -1}); @@ -1535,7 +1540,7 @@ export class NodeJSKernelBackend extends KernelBackend { writeScalarSummary( resourceHandle: Tensor, step: number, name: string, value: Scalar|number): void { - console.log('In writeScalarSummary(): 0'); // DEBUG + console.log('==== writeScalarSummary(): 0'); // DEBUG tidy(() => { util.assert( Number.isInteger(step), @@ -1543,8 +1548,14 @@ export class NodeJSKernelBackend extends KernelBackend { // TODO(cais): step ought to be a int64-type tensor. But int64 doesn't // exist as a type in TensorFlow.js yet. This may cause problems for // large step values. - const inputArgs: Tensor[] = - [resourceHandle, scalar(step, 'int32'), scalar(name, 'string')]; + console.log('==== writeScalarSummary(): 10'); // DEBUG + const inputArgs: Tensor[] = [ + resourceHandle, + scalar(step, 'int64' as any), // 'int64' as any is a hack. + // scalar(step, 'complex64'), // 'int64' as any is a hack. + // complex(step, 0), // Hack + scalar(name, 'string') + ]; let typeAttr: number; if (typeof value === 'number') { @@ -1565,7 +1576,7 @@ export class NodeJSKernelBackend extends KernelBackend { // DEBUG console.log( - 'In writeScalarSummary(): 10. Executing WriteScalarSummary op'); + '==== writeScalarSummary(): 20. 
Executing WriteScalarSummary op'); this.executeMultipleOutputs('WriteScalarSummary', opAttrs, inputArgs, 0); }); } diff --git a/src/ops/op_utils.ts b/src/ops/op_utils.ts index 9968367e..081212f4 100644 --- a/src/ops/op_utils.ts +++ b/src/ops/op_utils.ts @@ -45,6 +45,8 @@ export function getTFDType(dataType: tfc.DataType): number { return binding.TF_COMPLEX64; case 'string': return binding.TF_STRING; + case 'int64': + return binding.TF_INT64; default: const errorMessage = `Unknown dtype: ${dataType}`; throw new Error(errorMessage); diff --git a/src/tfjs_binding.d.ts b/src/tfjs_binding.d.ts index 1d0df01b..bd92811c 100644 --- a/src/tfjs_binding.d.ts +++ b/src/tfjs_binding.d.ts @@ -50,6 +50,7 @@ export interface TFJSBinding { // TF Types TF_FLOAT: number; TF_INT32: number; + TF_INT64: number; TF_BOOL: number; TF_COMPLEX64: number; TF_STRING: number; diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index 733249b1..f4d78596 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -2,7 +2,11 @@ import * as tf from './index'; (async function main() { console.log(tf.version); - const summaryWriter = await tf.createSummaryWriter('/tmp/tfjs_tb_logdir_2'); + const summaryWriter = + await tf.createSummaryWriter('/tmp/tfjs_tb_logdir_6', 0, 0); console.log(summaryWriter); + console.log('About to call scalar();'); // DEBUG summaryWriter.scalar(1, 'loss1', tf.scalar(42)); + summaryWriter.scalar(2, 'loss1', tf.scalar(41)); + summaryWriter.scalar(3, 'loss1', tf.scalar(100)); })(); From f2592900bc80c6ca1991176322045c564591bdb0 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 16:27:45 -0500 Subject: [PATCH 07/30] Checkpoint: TB summary scalar writing basically works --- binding/tfjs_backend.cc | 36 ++++++++++++++++++++++++++++-------- src/nodejs_kernel_backend.ts | 5 +++++ src/tensorboard.ts | 19 ++++++++++++++----- src/try_tensorboard.ts | 14 +++++++++----- 4 files changed, 56 insertions(+), 18 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index a3d901e5..19c4b47b 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -60,7 +60,7 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, width = sizeof(float); break; case napi_int32_array: - if (dtype != TF_INT32) { + if (dtype != TF_INT32 && dtype != TF_INT64) { // HACK(cais): NAPI_THROW_ERROR(env, "Tensor type does not match Int32Array"); return nullptr; } @@ -74,6 +74,10 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, width = sizeof(uint8_t); break; case napi_float64_array: + if (dtype != TF_INT64) { + NAPI_THROW_ERROR(env, "Tensor type does not match int64"); + return nullptr; + } std::cout << "CreateTFE_TensorHandleFromTypedArray(): napi_float64_array" << std::endl; // DEBUG width = sizeof(uint64_t); // Hack for TensorBoard. NOTE(cais): @@ -84,12 +88,23 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, } // Double check that width matches TF data type size: - if (width != TF_DataTypeSize(dtype)) { - NAPI_THROW_ERROR(env, - "Byte size of elements differs between JavaScript VM " - "(%zu) and TensorFlow (%zu)", - width, TF_DataTypeSize(dtype)); - return nullptr; + if (dtype == TF_INT64) { + if (width * 2 != TF_DataTypeSize(dtype)) { + NAPI_THROW_ERROR(env, + "Byte size of elements differs between JavaScript VM " + "(%zu) and TensorFlow (%zu)", + width, TF_DataTypeSize(dtype)); + // TODO(cais): Better error message. 
+ return nullptr; + } + } else { + if (width != TF_DataTypeSize(dtype)) { + NAPI_THROW_ERROR(env, + "Byte size of elements differs between JavaScript VM " + "(%zu) and TensorFlow (%zu)", + width, TF_DataTypeSize(dtype)); + return nullptr; + } } // Determine the size of the buffer based on the dimensions. @@ -97,6 +112,7 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, for (size_t i = 0; i < shape_length; i++) { num_elements *= shape[i]; } + std::cout << "num_elments = " << num_elements << std::endl; // DEBUG // Ensure the shape matches the length of the passed in typed-array. if (num_elements != array_length) { @@ -108,10 +124,14 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, } // Allocate and memcpy JS data to Tensor. - const size_t byte_size = num_elements * width; + const size_t byte_size = + dtype == TF_INT64 ? num_elements * width * 2 : num_elements * width; TF_AutoTensor tensor( TF_AllocateTensor(dtype, shape, shape_length, byte_size)); + std::cout << "Calling memcpy(): byte_size = " << byte_size + << std::endl; // DEBUG memcpy(TF_TensorData(tensor.tensor), array_data, byte_size); + std::cout << "Done calling memcpy()" << std::endl; // DEBUG TF_AutoStatus tf_status; TFE_TensorHandle *tfe_tensor_handle = diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 5c540dc8..d5b03441 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -1581,6 +1581,11 @@ export class NodeJSKernelBackend extends KernelBackend { }); } + flushSummaryWriter(resourceHandle: Tensor): void { + const inputArgs: Tensor[] = [resourceHandle]; + this.executeMultipleOutputs('FlushSummaryWriter', [], inputArgs, 0); + } + // ~ TensorBoard-related (tfjs-node-specific) backend kernels. // ------------------------------------------------------------ memory() { diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 14b8cb99..5eaa85c3 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -17,20 +17,29 @@ */ import {Scalar, Tensor} from '@tensorflow/tfjs'; +import {NodeJSKernelBackend} from './nodejs_kernel_backend'; import {nodeBackend} from './ops/op_utils'; export class SummaryWriter { - constructor(private readonly resourceHandle: Tensor) {} + backend: NodeJSKernelBackend; - scalar(step: number, name: string, value: Scalar, family?: string) { + constructor(private readonly resourceHandle: Tensor) { + // TODO(cais): Deduplicate backend with createSummaryWriter. + this.backend = nodeBackend(); + } + + scalar(step: number, name: string, value: Scalar|number, family?: string) { // N.B.: Unlike the Python TensorFlow API, step is a required parameter, // because the construct of global step does not exist in TensorFlow.js. if (family != null) { throw new Error('family support for scalar() is not implemented yet'); } - // TODO(cais): Deduplicate backend with createSummaryWriter. - const backend = nodeBackend(); - backend.writeScalarSummary(this.resourceHandle, step, name, value); + + this.backend.writeScalarSummary(this.resourceHandle, step, name, value); + } + + flush() { + this.backend.flushSummaryWriter(this.resourceHandle); } // TODO(cais): Add close(), calling into the CloseSummaryWriter() op. 
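[Editorial sketch, not part of the diff: by this patch the writer is a DT_RESOURCE handle that round-trips through JavaScript as a Uint8Array-backed tensor. The try_tensorboard.ts change below exercises the public API; the sketch here shows the backend-level plumbing underneath it, using only backend methods that exist at this point in the series. The wrapper function writeOneScalar and its argument values are illustrative assumptions.]

import {Tensor} from '@tensorflow/tfjs';
import {nodeBackend} from './ops/op_utils';

// Illustrative wrapper showing the resource-handle round trip.
function writeOneScalar(logdir: string): void {
  const backend = nodeBackend();
  // SummaryWriter op: returns the DT_RESOURCE handle, surfaced to JS as a
  // Uint8Array-backed tensor (see CopyTFE_TensorHandleDataToResourceArray).
  const writer: Tensor = backend.summaryWriter();
  // CreateSummaryFileWriter op: binds the handle to an event-file writer
  // rooted at `logdir`.
  backend.createSummaryFileWriter2(writer, logdir);
  // WriteScalarSummary / FlushSummaryWriter take the same handle as their
  // first tensor input.
  backend.writeScalarSummary(writer, /* step */ 0, 'loss', 1.23);
  backend.flushSummaryWriter(writer);
}
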
diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index f4d78596..674ae1bf 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -2,11 +2,15 @@ import * as tf from './index'; (async function main() { console.log(tf.version); - const summaryWriter = - await tf.createSummaryWriter('/tmp/tfjs_tb_logdir_6', 0, 0); + const summaryWriter = await tf.createSummaryWriter('/tmp/tfjs_tb_logdir'); console.log(summaryWriter); console.log('About to call scalar();'); // DEBUG - summaryWriter.scalar(1, 'loss1', tf.scalar(42)); - summaryWriter.scalar(2, 'loss1', tf.scalar(41)); - summaryWriter.scalar(3, 'loss1', tf.scalar(100)); + + for (let i = 0; i < 2; ++i) { + summaryWriter.scalar(i, 'loss1', i % 2 === 0 ? 20 : 40); + summaryWriter.scalar(i, 'acc', i % 2 === 0 ? 30 : 10); + } + summaryWriter.flush(); + // summaryWriter.scalar(2, 'loss1', tf.scalar(41)); + // summaryWriter.scalar(3, 'loss1', tf.scalar(100)); })(); From aa0c3458fcc1fe6234defe1a448b7bb3d4794a6f Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 16:37:44 -0500 Subject: [PATCH 08/30] Simplify CopyTFE_TensorHandleDataToResourceArray --- binding/tfjs_backend.cc | 40 +++++++++------------------------------- 1 file changed, 9 insertions(+), 31 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index 19c4b47b..a6871b3f 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -365,11 +365,8 @@ void CopyTFE_TensorHandleDataToResourceArray( TFE_TensorHandleResolve(tfe_tensor_handle, tf_status.status)); ENSURE_TF_OK(env, tf_status); - std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 0: " - << "dtype = " << TF_TensorType(tensor.tensor) - << std::endl; // DEBUG if (TF_TensorType(tensor.tensor) != TF_RESOURCE) { - NAPI_THROW_ERROR(env, "Tensor is not of type TF_RESOURCE"); + NAPI_THROW_ERROR(env, "Tensor is not of type TF_RESOURCE"); return; } @@ -377,39 +374,23 @@ void CopyTFE_TensorHandleDataToResourceArray( ENSURE_VALUE_IS_NOT_NULL(env, tensor_data); size_t byte_length = TF_TensorByteSize(tensor.tensor); - std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 10: " - << "byte_length = " << byte_length << std::endl; // DEBUG const char *limit = static_cast(tensor_data) + byte_length; size_t num_elements = GetTensorNumElements(tensor.tensor); - std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 20: " - << "num_elements = " << num_elements << std::endl; // DEBUG + if (num_elements != 1) { + NAPI_THROW_ERROR(env, + "For DT_RESOURCE tensors, Node.js binding currently " + "supports only exactly 1 element."); + } - // String values are stored in offsets. + // The resource handle is represented as a string of `char`s const char *data = static_cast(tensor_data); - // const size_t offsets_size = sizeof(char) * num_elements; - - // Skip passed the offsets and find the first string: - // const char *data = static_cast(tensor_data) + offsets_size; TF_AutoStatus status; - // Create a JS string to stash strings into + // Create a JS string to stash the resouce handle into. 
napi_status nstatus; nstatus = napi_create_array_with_length(env, byte_length, result); - std::cout << "CopyTFE_TensorHandleDataToResourceArray(): 30: " - << "created array with length = " << byte_length - << std::endl; // DEBUG - - // char *str_ptr = (char *)malloc(sizeof(char *) * byte_length); - // memcpy(str_ptr, data, byte_length); - - // napi_value str_value; - // nstatus = napi_create_string_utf8(env, str_ptr, byte_length, &str_value); - // ENSURE_NAPI_OK(env, nstatus); - - // nstatus = napi_set_element(env, *result, 0, str_value); - // ENSURE_NAPI_OK(env, nstatus); napi_value array_buffer_value; void *array_buffer_data; @@ -463,12 +444,9 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, is_string = true; break; case TF_RESOURCE: - std::cout << "CopyTFE_TensorHandleDataToJSData(): TF_RESORUCE type" - << std::endl; // DEBUG - // Represent a resource handle with an Uint8Array. + // We currently represent a resource handle as an `Uint8Array`. typed_array_type = napi_uint8_array; is_resource = true; - // typed_array_type = napi_int32_array; break; default: REPORT_UNKNOWN_TF_DATA_TYPE(env, From ff82823e8c8ae47eba782dec922f9e4b4769485e Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 17:27:06 -0500 Subject: [PATCH 09/30] Gradually removing the int64 hack --- src/int64_tensors.ts | 54 +++++++++++++++++++++++++++++++++++ src/nodejs_kernel_backend.ts | 55 +++++++++++++++++++++--------------- 2 files changed, 87 insertions(+), 22 deletions(-) create mode 100644 src/int64_tensors.ts diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts new file mode 100644 index 00000000..1ac79f26 --- /dev/null +++ b/src/int64_tensors.ts @@ -0,0 +1,54 @@ +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +import {Shape} from '@tensorflow/tfjs'; + +/** + * Node.js-specific tensor type: int64-type scalar. + * + * This class is created for a specifici purpose: to support + * writing `step`s to TensorBoard via op-kernel bindings. + * `step` is required to have an int64 dtype, but TensorFlow.js + * (tfjs-core) doesn't have a built-in int64 dtype. This is + * related to a lack of `Int64Array` or `Uint64Array` typed + * array in basic JavaScript. + * + * This class is introduced as a work around. 
+ */ +export class Int64Scalar { + readonly dtype: string = 'int64'; + readonly rank: number = 1; + private valueArray_: Int32Array; + + constructor(readonly value: number) { + console.log(`In Int64Scalar ctor: value = ${value}`); // DEBUG + if (value < -2147483648 || value > 2147483647) { + throw new Error( + `Value ${value} is out of bound of Int32Array, which is how int64 ` + + `values are represented in Node.js-TensorFlow binding currently.`); + } + this.valueArray_ = new Int32Array([value]); + } + + get shape(): Shape { + return []; + } + + get valueArray(): Int32Array { + return this.valueArray_; + } +} diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index d5b03441..e89249d1 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -22,6 +22,7 @@ import {Tensor5D} from '@tensorflow/tfjs-core/dist/tensor'; import {upcastType} from '@tensorflow/tfjs-core/dist/types'; import {isNullOrUndefined} from 'util'; +import {Int64Scalar} from './int64_tensors'; // tslint:disable-next-line:max-line-length import {createTensorsTypeOpAttr, createTypeOpAttr, getTFDType} from './ops/op_utils'; import {TensorMetadata, TFEOpAttr, TFJSBinding} from './tfjs_binding'; @@ -67,7 +68,6 @@ export class NodeJSKernelBackend extends KernelBackend { } } - // Creates a new Tensor and maps the dataId to the passed in ID. private createOutputTensor(metadata: TensorMetadata): Tensor { const newId = {}; @@ -111,23 +111,36 @@ export class NodeJSKernelBackend extends KernelBackend { } // Prepares Tensor instances for Op execution. - private getInputTensorIds(tensors: Tensor[]): number[] { + private getInputTensorIds(tensors: Array): number[] { console.log('In getInputTensorIds()'); // DEBUG const ids: number[] = []; for (let i = 0; i < tensors.length; i++) { - const info = this.tensorMap.get(tensors[i].dataId); - // TODO - what about ID in this case? Handle in write()?? - if (info.values != null) { - // Values were delayed to write into the TensorHandle. Do that before Op - // execution and clear stored values. - console.group( - `getInputTensorIds(): info.dtype = ${info.dtype}`); // DEBUG - info.id = - this.binding.createTensor(info.shape, info.dtype, info.values); - info.values = null; - this.tensorMap.set(tensors[i].dataId, info); + console.log(` getInputTensorIds(): i = ${i}`); + if (tensors[i] instanceof Tensor) { + const info = this.tensorMap.get((tensors[i] as Tensor).dataId); + // TODO - what about ID in this case? Handle in write()?? + if (info.values != null) { + // Values were delayed to write into the TensorHandle. Do that before + // Op execution and clear stored values. + console.group( + `getInputTensorIds(): info.dtype = ${info.dtype}`); // DEBUG + info.id = + this.binding.createTensor(info.shape, info.dtype, info.values); + info.values = null; + this.tensorMap.set((tensors[i] as Tensor).dataId, info); + } + ids.push(info.id); + } else { + // Then `tensors[i]` is a Int64Scalar, which we currently represent + // using an `Int32Array`. 
+ console.log(` getInputTensorIds(): Creating int64 (${ + this.binding.TF_INT64}) tensor`); // DEBUG + const value = (tensors[i] as Int64Scalar).valueArray; + console.log(` getInputTensorIds(): int64 value = ${value}`); // DEBUG + const id = this.binding.createTensor([], this.binding.TF_INT64, value); + console.log(` getInputTensorIds(): int64 tensor id = ${id}`); // DEBUG + ids.push(id); } - ids.push(info.id); } return ids; } @@ -1549,13 +1562,8 @@ export class NodeJSKernelBackend extends KernelBackend { // exist as a type in TensorFlow.js yet. This may cause problems for // large step values. console.log('==== writeScalarSummary(): 10'); // DEBUG - const inputArgs: Tensor[] = [ - resourceHandle, - scalar(step, 'int64' as any), // 'int64' as any is a hack. - // scalar(step, 'complex64'), // 'int64' as any is a hack. - // complex(step, 0), // Hack - scalar(name, 'string') - ]; + const inputArgs: Array = + [resourceHandle, new Int64Scalar(step), scalar(name, 'string')]; let typeAttr: number; if (typeof value === 'number') { @@ -1577,7 +1585,10 @@ export class NodeJSKernelBackend extends KernelBackend { // DEBUG console.log( '==== writeScalarSummary(): 20. Executing WriteScalarSummary op'); - this.executeMultipleOutputs('WriteScalarSummary', opAttrs, inputArgs, 0); + // this.executeMultipleOutputs( + // 'WriteScalarSummary', opAttrs, inputArgs, 0); + this.binding.executeOp( + 'WriteScalarSummary', opAttrs, this.getInputTensorIds(inputArgs), 0); }); } From 9c986ff8e98028576fdb40cde3d715b7d00c9da2 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 17:29:29 -0500 Subject: [PATCH 10/30] Remove some debugging prints --- binding/tfjs_backend.cc | 6 ------ src/int64_tensors.ts | 1 - 2 files changed, 7 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index a6871b3f..0afe5f9b 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -557,7 +557,6 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_INT: { - std::cout << " In TF_ATTR_FLOAT" << std::endl; // DEBUG if (IsArray(env, nstatus, &js_value)) { uint32_t length; nstatus = napi_get_array_length(env, js_value, &length); @@ -585,7 +584,6 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_FLOAT: { - std::cout << " In TF_ATTR_FLOAT" << std::endl; // DEBUG if (IsArray(env, nstatus, &js_value)) { uint32_t length; nstatus = napi_get_array_length(env, js_value, &length); @@ -612,7 +610,6 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_BOOL: { - std::cout << " In TF_ATTR_BOOL" << std::endl; // DEBUG if (IsArray(env, nstatus, &js_value)) { uint32_t length; nstatus = napi_get_array_length(env, js_value, &length); @@ -639,7 +636,6 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_TYPE: { - std::cout << " In TF_ATTR_TYPE" << std::endl; // DEBUG TF_DataType tf_data_type; nstatus = napi_get_value_int32( env, js_value, reinterpret_cast(&tf_data_type)); @@ -650,7 +646,6 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } case TF_ATTR_SHAPE: { - std::cout << " In TF_ATTR_SHAPE" << std::endl; // DEBUG std::vector shape_vector; ExtractArrayShape(env, js_value, &shape_vector); @@ -662,7 +657,6 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { } default: - std::cout << " In default attr type" << std::endl; // DEBUG REPORT_UNKNOWN_TF_ATTR_TYPE(env, tf_attr_type); break; } diff --git 
a/src/int64_tensors.ts b/src/int64_tensors.ts index 1ac79f26..ee2e2508 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -35,7 +35,6 @@ export class Int64Scalar { private valueArray_: Int32Array; constructor(readonly value: number) { - console.log(`In Int64Scalar ctor: value = ${value}`); // DEBUG if (value < -2147483648 || value > 2147483647) { throw new Error( `Value ${value} is out of bound of Int32Array, which is how int64 ` + From f725d0479e33a6c3d393eaac4bb4bf8412a767fe Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 23:30:36 -0500 Subject: [PATCH 11/30] Make int64 full range work; Clean up C++ code; Delete debug code --- binding/tfjs_backend.cc | 93 ++++++++++++++++-------------------- binding/tfjs_binding.cc | 7 --- src/int64_tensors.ts | 25 +++++++--- src/nodejs_kernel_backend.ts | 36 +------------- src/tensorboard.ts | 8 +--- src/try_tensorboard.ts | 12 ++--- 6 files changed, 66 insertions(+), 115 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index 0afe5f9b..7c05d247 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -23,7 +23,6 @@ #include #include -#include // DEBUG #include #include #include @@ -60,7 +59,10 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, width = sizeof(float); break; case napi_int32_array: - if (dtype != TF_INT32 && dtype != TF_INT64) { // HACK(cais): + if (dtype != TF_INT32 && dtype != TF_INT64) { + // Currently, both int32- and int64- type Tensors are represented + // as Int32Arrays in JavaScript. See int64_tensors.ts for details + // about the latter. NAPI_THROW_ERROR(env, "Tensor type does not match Int32Array"); return nullptr; } @@ -73,15 +75,6 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, } width = sizeof(uint8_t); break; - case napi_float64_array: - if (dtype != TF_INT64) { - NAPI_THROW_ERROR(env, "Tensor type does not match int64"); - return nullptr; - } - std::cout << "CreateTFE_TensorHandleFromTypedArray(): napi_float64_array" - << std::endl; // DEBUG - width = sizeof(uint64_t); // Hack for TensorBoard. NOTE(cais): - break; default: REPORT_UNKNOWN_TYPED_ARRAY_TYPE(env, array_type); return nullptr; @@ -89,6 +82,9 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, // Double check that width matches TF data type size: if (dtype == TF_INT64) { + // Currently, int64-type Tensors are represented as Int32Arrays. So the + // logic for comparing the byte size of the typed-array representation and + // the byte size of the tensor dtype needs to be special-cased for int64. if (width * 2 != TF_DataTypeSize(dtype)) { NAPI_THROW_ERROR(env, "Byte size of elements differs between JavaScript VM " @@ -112,26 +108,40 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, for (size_t i = 0; i < shape_length; i++) { num_elements *= shape[i]; } - std::cout << "num_elments = " << num_elements << std::endl; // DEBUG // Ensure the shape matches the length of the passed in typed-array. - if (num_elements != array_length) { - NAPI_THROW_ERROR(env, - "Shape does not match typed-array in bindData() " - "(num_elements=%zu, array_length=%zu)", - num_elements, array_length); - return nullptr; + if (dtype == TF_INT64) { + // Currently, int64-type Tensors are represented as Int32Arrays. + // To represent a int64-type Tensor of `n` elements, an Int32Array of + // length `2 * n` is requried. This is why the length-match checking + // logic is special-cased for int64. 
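(Aside, illustration only, not part of this diff: an int64 tensor of `n` elements is backed on the JavaScript side by an Int32Array of length `2 * n`, so the byte counts agree either way, `n * 8 == (2 * n) * 4`, which is what the length check that follows relies on. A minimal TypeScript sketch of that bookkeeping, with a hypothetical helper name:)

    // Two int32 slots back each int64 element in the JS-side representation.
    function int64ArrayLength(shape: number[]): number {
      const numElements = shape.reduce((acc, dim) => acc * dim, 1);
      return 2 * numElements;
    }
    // int64ArrayLength([2, 3]) === 12; byte size: 12 * 4 === 6 * 8 === 48.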
+ if (array_length != num_elements * 2) { + NAPI_THROW_ERROR( + env, + "Shape does not match two times typed-array in bindData() " + "(num_elements=%zu, array_length=%zu) for int64 data type", + num_elements, array_length); + return nullptr; + } + } else { + if (num_elements != array_length) { + NAPI_THROW_ERROR(env, + "Shape does not match typed-array in bindData() " + "(num_elements=%zu, array_length=%zu)", + num_elements, array_length); + return nullptr; + } } // Allocate and memcpy JS data to Tensor. + // Currently, int64-type Tensors are represented as Int32Arrays. + // So the logic for comparing the byte size of the typed-array representation + // and the byte size of the tensor dtype needs to be special-cased for int64. const size_t byte_size = dtype == TF_INT64 ? num_elements * width * 2 : num_elements * width; TF_AutoTensor tensor( TF_AllocateTensor(dtype, shape, shape_length, byte_size)); - std::cout << "Calling memcpy(): byte_size = " << byte_size - << std::endl; // DEBUG memcpy(TF_TensorData(tensor.tensor), array_data, byte_size); - std::cout << "Done calling memcpy()" << std::endl; // DEBUG TF_AutoStatus tf_status; TFE_TensorHandle *tfe_tensor_handle = @@ -139,7 +149,7 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, ENSURE_TF_OK_RETVAL(env, tf_status, nullptr); return tfe_tensor_handle; -} +} // namespace tfnodejs // Creates a TFE_TensorHandle from a JS array of string values. TFE_TensorHandle *CreateTFE_TensorHandleFromStringArray( @@ -255,8 +265,6 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, TF_DataType tensor_data_type, napi_typedarray_type array_type, napi_value *result) { - std::cout << "CopyTFE_TensorHandleDataToTypedArray() 0:" - << std::endl; // DEBUG TF_AutoStatus tf_status; TF_AutoTensor tensor( @@ -265,8 +273,6 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, // Determine the length of the array based on the shape of the tensor. size_t num_elements = GetTensorNumElements(tensor.tensor); - std::cout << "CopyTFE_TensorHandleDataToTypedArray() 10: num_elements = " - << num_elements << std::endl; // DEBUG if (tensor_data_type == TF_COMPLEX64) { // Dimension length will be double for Complex 64. @@ -274,8 +280,6 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, } size_t byte_length = TF_TensorByteSize(tensor.tensor); - std::cout << "CopyTFE_TensorHandleDataToTypedArray() 20: byte_length = " - << byte_length << std::endl; // DEBUG napi_value array_buffer_value; void *array_buffer_data; @@ -284,8 +288,8 @@ void CopyTFE_TensorHandleDataToTypedArray(napi_env env, &array_buffer_value); ENSURE_NAPI_OK(env, nstatus); - // TFE_TensorHandleResolve can use a shared data pointer, memcpy() the current - // value to the newly allocated NAPI buffer. + // TFE_TensorHandleResolve can use a shared data pointer, memcpy() the + // current value to the newly allocated NAPI buffer. memcpy(array_buffer_data, TF_TensorData(tensor.tensor), byte_length); nstatus = napi_create_typedarray(env, array_type, num_elements, @@ -398,8 +402,8 @@ void CopyTFE_TensorHandleDataToResourceArray( &array_buffer_value); ENSURE_NAPI_OK(env, nstatus); - // TFE_TensorHandleResolve can use a shared data pointer, memcpy() the current - // value to the newly allocated NAPI buffer. + // TFE_TensorHandleResolve can use a shared data pointer, memcpy() the + // current value to the newly allocated NAPI buffer. 
memcpy(array_buffer_data, tensor_data, byte_length); nstatus = napi_create_typedarray(env, napi_uint8_array, byte_length, @@ -411,7 +415,6 @@ void CopyTFE_TensorHandleDataToResourceArray( void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, TFE_TensorHandle *tfe_tensor_handle, napi_value *result) { - std::cout << "In CopyTFE_TensorHandleDataToJSData()" << std::endl; // DEBUG if (tfe_context == nullptr) { NAPI_THROW_ERROR(env, "Invalid TFE_Context"); return; @@ -434,9 +437,6 @@ void CopyTFE_TensorHandleDataToJSData(napi_env env, TFE_Context *tfe_context, case TF_INT32: typed_array_type = napi_int32_array; break; - case TF_INT64: // Hack for TensorBoard. - typed_array_type = napi_float64_array; - break; case TF_BOOL: typed_array_type = napi_uint8_array; break; @@ -522,11 +522,9 @@ void AssignOpAttr(napi_env env, TFE_Op *tfe_op, napi_value attr_value) { nstatus = GetStringParam(env, attr_name_value, attr_name_string); ENSURE_NAPI_OK(env, nstatus); - std::cout << " attr_name_string = " << attr_name_string - << std::endl; // DEBUG - - // OpAttr will be used beyond the scope of this function call. Stash ops in a - // set for re-use instead of dynamically reallocating strings for operations. + // OpAttr will be used beyond the scope of this function call. Stash ops in + // a set for re-use instead of dynamically reallocating strings for + // operations. const char *attr_name = ATTR_NAME_SET.insert(attr_name_string.c_str()).first->c_str(); @@ -720,8 +718,6 @@ napi_value TFJSBackend::CreateTensor(napi_env env, napi_value shape_value, int32_t dtype_int32; nstatus = napi_get_value_int32(env, dtype_value, &dtype_int32); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); - std::cout << "TFJSBackend::CreateTensor(): " - << "dtype = " << dtype_int32 << std::endl; // DEBUG TFE_TensorHandle *tfe_handle = CreateTFE_TensorHandleFromJSValues( env, shape_vector.data(), shape_vector.size(), @@ -804,11 +800,7 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, nstatus = napi_get_array_length(env, input_tensor_ids, &num_input_ids); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); - std::cout << "TFJSBackend::ExecuteOp(): 0: num_input_ids = " << num_input_ids - << std::endl; // DEBUG for (uint32_t i = 0; i < num_input_ids; i++) { - std::cout << "TFJSBackend::ExecuteOp(): input i = " << i - << std::endl; // DEBUG napi_value cur_input_id; nstatus = napi_get_element(env, input_tensor_ids, i, &cur_input_id); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); @@ -818,8 +810,6 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); auto input_tensor_entry = tfe_handle_map_.find(cur_input_tensor_id); - std::cout << "TFJSBackend::ExecuteOp(): cur_input_tensor_id = " - << cur_input_tensor_id << std::endl; // DEBUG if (input_tensor_entry == tfe_handle_map_.end()) { NAPI_THROW_ERROR(env, "Input Tensor ID not referenced (tensor_id: %d)", cur_input_tensor_id); @@ -834,13 +824,11 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, nstatus = napi_get_array_length(env, op_attr_inputs, &op_attrs_length); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); - std::cout << "TFJSBackend::ExecuteOp(): 50" << std::endl; // DEBUG for (uint32_t i = 0; i < op_attrs_length; i++) { napi_value cur_op_attr; nstatus = napi_get_element(env, op_attr_inputs, i, &cur_op_attr); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); - std::cout << " i = " << i << std::endl; // DEBUG AssignOpAttr(env, tfe_op.op, cur_op_attr); // Check to 
see if an exception exists, if so return a failure. @@ -853,7 +841,8 @@ napi_value TFJSBackend::ExecuteOp(napi_env env, napi_value op_name_value, nstatus = napi_get_value_int32(env, num_output_values, &num_outputs); ENSURE_NAPI_OK_RETVAL(env, nstatus, nullptr); - // Push `nullptr` to get a valid pointer in the call to `TFE_Execute()` below. + // Push `nullptr` to get a valid pointer in the call to `TFE_Execute()` + // below. std::vector result_handles(num_outputs, nullptr); int size = result_handles.size(); diff --git a/binding/tfjs_binding.cc b/binding/tfjs_binding.cc index 78073c79..bcf66850 100644 --- a/binding/tfjs_binding.cc +++ b/binding/tfjs_binding.cc @@ -16,7 +16,6 @@ */ #include -#include // dEBUG #include "tfjs_backend.h" #include "utils.h" @@ -108,7 +107,6 @@ static napi_value TensorDataSync(napi_env env, napi_callback_info info) { } static napi_value ExecuteOp(napi_env env, napi_callback_info info) { - std::cout << "In ExecuteOp: 0" << std::endl; // DEBUG napi_status nstatus; // Create tensor takes 3 params: op-name, op-attrs, input-tensor-ids, @@ -124,15 +122,10 @@ static napi_value ExecuteOp(napi_env env, napi_callback_info info) { return nullptr; } - std::cout << "In ExecuteOp: 10" << std::endl; // DEBUG ENSURE_VALUE_IS_STRING_RETVAL(env, args[0], nullptr); - std::cout << "In ExecuteOp: 20" << std::endl; // DEBUG ENSURE_VALUE_IS_ARRAY_RETVAL(env, args[1], nullptr); - std::cout << "In ExecuteOp: 30" << std::endl; // DEBUG ENSURE_VALUE_IS_ARRAY_RETVAL(env, args[2], nullptr); - std::cout << "In ExecuteOp: 40" << std::endl; // DEBUG ENSURE_VALUE_IS_NUMBER_RETVAL(env, args[3], nullptr); - std::cout << "In ExecuteOp: 50" << std::endl; // DEBUG return gBackend->ExecuteOp(env, args[0], args[1], args[2], args[3]); } diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts index ee2e2508..95017439 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -16,6 +16,9 @@ */ import {Shape} from '@tensorflow/tfjs'; +import {endianness} from 'os'; + +const INT32_MAX = 2147483648; /** * Node.js-specific tensor type: int64-type scalar. @@ -27,20 +30,30 @@ import {Shape} from '@tensorflow/tfjs'; * related to a lack of `Int64Array` or `Uint64Array` typed * array in basic JavaScript. * - * This class is introduced as a work around. + * This class is introduced as a workaround. */ export class Int64Scalar { readonly dtype: string = 'int64'; readonly rank: number = 1; private valueArray_: Int32Array; + private static endiannessOkay_: boolean; + constructor(readonly value: number) { - if (value < -2147483648 || value > 2147483647) { - throw new Error( - `Value ${value} is out of bound of Int32Array, which is how int64 ` + - `values are represented in Node.js-TensorFlow binding currently.`); + if (Int64Scalar.endiannessOkay_ == null) { + if (endianness() !== 'LE') { + throw new Error( + `Int64Scalar does not support endianness of this machine: ` + + `${endianness()}`); + } + Int64Scalar.endiannessOkay_ = true; } - this.valueArray_ = new Int32Array([value]); + + // We use two int32 elements to represent a int64 value. This assumes + // little endian, which is checked above. 
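(Aside, illustration only, not part of this diff: for values within the int32 range, the split described above leaves the low word equal to the value itself, and the high word equal to 0 for non-negative values or -1 for negative values, i.e. two's-complement sign extension. A TypeScript sketch with a hypothetical helper name, mirroring the `INT32_MAX` constant in int64_tensors.ts:)

    const INT32_MAX = 2147483648;  // 2 ** 31
    function toInt64Pair(value: number): Int32Array {
      const lowPart = value % INT32_MAX;               // the value itself within this range
      const highPart = Math.floor(value / INT32_MAX);  // 0 for value >= 0, -1 for value < 0
      return new Int32Array([lowPart, highPart]);      // little-endian: low word first
    }
    // toInt64Pair(42) -> Int32Array [42, 0]
    // toInt64Pair(-3) -> Int32Array [-3, -1]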
+ const highPart = Math.floor(value / INT32_MAX); + const lowPart = value % INT32_MAX; + this.valueArray_ = new Int32Array([lowPart, highPart]); } get shape(): Shape { diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index e89249d1..0201a041 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -80,9 +80,6 @@ export class NodeJSKernelBackend extends KernelBackend { }); let dtype: DataType; - console.log(`metadata.dtype = ${metadata.dtype}`); // DEBUG - console.log( - `this.binding.TF_RESOURCE = ${this.binding.TF_RESOURCE}`); // DEBUG switch (metadata.dtype) { case this.binding.TF_FLOAT: dtype = 'float32'; @@ -112,18 +109,14 @@ export class NodeJSKernelBackend extends KernelBackend { // Prepares Tensor instances for Op execution. private getInputTensorIds(tensors: Array): number[] { - console.log('In getInputTensorIds()'); // DEBUG const ids: number[] = []; for (let i = 0; i < tensors.length; i++) { - console.log(` getInputTensorIds(): i = ${i}`); if (tensors[i] instanceof Tensor) { const info = this.tensorMap.get((tensors[i] as Tensor).dataId); // TODO - what about ID in this case? Handle in write()?? if (info.values != null) { // Values were delayed to write into the TensorHandle. Do that before // Op execution and clear stored values. - console.group( - `getInputTensorIds(): info.dtype = ${info.dtype}`); // DEBUG info.id = this.binding.createTensor(info.shape, info.dtype, info.values); info.values = null; @@ -133,12 +126,8 @@ export class NodeJSKernelBackend extends KernelBackend { } else { // Then `tensors[i]` is a Int64Scalar, which we currently represent // using an `Int32Array`. - console.log(` getInputTensorIds(): Creating int64 (${ - this.binding.TF_INT64}) tensor`); // DEBUG const value = (tensors[i] as Int64Scalar).valueArray; - console.log(` getInputTensorIds(): int64 value = ${value}`); // DEBUG const id = this.binding.createTensor([], this.binding.TF_INT64, value); - console.log(` getInputTensorIds(): int64 tensor id = ${id}`); // DEBUG ids.push(id); } } @@ -198,12 +187,10 @@ export class NodeJSKernelBackend extends KernelBackend { } readSync(dataId: object): Float32Array|Int32Array|Uint8Array { - console.log('In readSync()'); // DEBUG if (!this.tensorMap.has(dataId)) { throw new Error(`Tensor ${dataId} was not registered!`); } const info = this.tensorMap.get(dataId); - console.log(`readSync(): info = ${JSON.stringify(info)}`); // DEBUG if (info.values != null) { return info.values; } else { @@ -230,8 +217,6 @@ export class NodeJSKernelBackend extends KernelBackend { } register(dataId: object, shape: number[], dtype: DataType): void { - console.log(`In register(): dataId = ${JSON.stringify(dataId)}, dtype = ${ - dtype}`); // DEBUG if (!this.tensorMap.has(dataId)) { this.tensorMap.set( dataId, {shape, dtype: getTFDType(dtype), values: null, id: -1}); @@ -1511,7 +1496,7 @@ export class NodeJSKernelBackend extends KernelBackend { // TensorBoard-related (tfjs-node-specific) backend kernels. summaryWriter(): Tensor1D { // TODO(cais): Fix typing. 
- console.log('In node-backend summaryWriter()'); + // console.log('In node-backend summaryWriter()'); const opAttrs = [ { name: 'shared_name', @@ -1526,11 +1511,6 @@ export class NodeJSKernelBackend extends KernelBackend { ]; const writerResource = this.executeSingleOutput('SummaryWriter', opAttrs, []); - writerResource.print(); // DEBUG - console.log(`writerResource.dtype = ${writerResource.dtype}`); // DEBUG - console.log( - `writerResource.shape = ` + - `${JSON.stringify(writerResource.shape)}`); // DEBUG return writerResource as Tensor1D; // TODO(cais): Implement this. } @@ -1538,30 +1518,22 @@ export class NodeJSKernelBackend extends KernelBackend { resourceHandle: Tensor, // TOOD(cais): Use more principled typing. logdir: string, maxQueue?: number, flushMillis?: number, filenameSuffix?: string): void { - console.log('createSummaryFileWriter2(): 0'); // DEBUG const inputArgs = [ resourceHandle, scalar(logdir), scalar(maxQueue == null ? 10 : maxQueue, 'int32'), scalar(flushMillis == null ? 2 * 60 * 1000 : flushMillis, 'int32'), scalar(filenameSuffix == null ? '.v2' : filenameSuffix) ]; - console.log( // DEBUG - `createSummaryFileWriter2(): inputArgs.length = ${inputArgs.length}`); this.executeMultipleOutputs('CreateSummaryFileWriter', [], inputArgs, 0); } writeScalarSummary( resourceHandle: Tensor, step: number, name: string, value: Scalar|number): void { - console.log('==== writeScalarSummary(): 0'); // DEBUG tidy(() => { util.assert( Number.isInteger(step), `step is expected to be an integer, but is instead ${step}`); - // TODO(cais): step ought to be a int64-type tensor. But int64 doesn't - // exist as a type in TensorFlow.js yet. This may cause problems for - // large step values. - console.log('==== writeScalarSummary(): 10'); // DEBUG const inputArgs: Array = [resourceHandle, new Int64Scalar(step), scalar(name, 'string')]; @@ -1580,13 +1552,7 @@ export class NodeJSKernelBackend extends KernelBackend { } const opAttrs: TFEOpAttr[] = [{name: 'T', type: this.binding.TF_ATTR_TYPE, value: typeAttr}]; - console.log('opAttrs:', opAttrs); // DEBUG - // DEBUG - console.log( - '==== writeScalarSummary(): 20. Executing WriteScalarSummary op'); - // this.executeMultipleOutputs( - // 'WriteScalarSummary', opAttrs, inputArgs, 0); this.binding.executeOp( 'WriteScalarSummary', opAttrs, this.getInputTensorIds(inputArgs), 0); }); diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 5eaa85c3..07dc6836 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -48,15 +48,9 @@ export class SummaryWriter { export async function createSummaryWriter( logdir: string, maxQueue?: number, flushMillis?: number, filenameSuffix?: string): Promise { - // TODO(cais): Use more specific typing for ResourceHandle. 
- console.log('In createSummaryWriter()'); // DEBUG const backend = nodeBackend(); const writerResource = backend.summaryWriter(); - console.log(writerResource); // DEBUG - // backend.createSummaryFileWriter2(writeRe) - const resourceHandle = (await writerResource.data()) as Uint8Array; - console.log(typeof resourceHandle); // DEBUG - console.log(resourceHandle.length); // DEBUG + // const resourceHandle = (await writerResource.data()) as Uint8Array; backend.createSummaryFileWriter2( writerResource, logdir, maxQueue, flushMillis, filenameSuffix); diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index 674ae1bf..afcaa566 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -1,16 +1,12 @@ import * as tf from './index'; (async function main() { - console.log(tf.version); const summaryWriter = await tf.createSummaryWriter('/tmp/tfjs_tb_logdir'); - console.log(summaryWriter); - console.log('About to call scalar();'); // DEBUG - for (let i = 0; i < 2; ++i) { - summaryWriter.scalar(i, 'loss1', i % 2 === 0 ? 20 : 40); - summaryWriter.scalar(i, 'acc', i % 2 === 0 ? 30 : 10); + // for (let i = -1e10; i < 1e10; i += 1e8) { + for (let i = -1e3; i < 1e3; i += 10) { + summaryWriter.scalar(i, 'loss', i * i); + summaryWriter.scalar(i, 'acc', -i * i); } summaryWriter.flush(); - // summaryWriter.scalar(2, 'loss1', tf.scalar(41)); - // summaryWriter.scalar(3, 'loss1', tf.scalar(100)); })(); From 4a708df1648fc17d4f2b6873c1b163fdcde9f7ce Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 23:35:54 -0500 Subject: [PATCH 12/30] Fix some comments --- binding/tfjs_backend.cc | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index 7c05d247..a1ba6636 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -60,7 +60,7 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, break; case napi_int32_array: if (dtype != TF_INT32 && dtype != TF_INT64) { - // Currently, both int32- and int64- type Tensors are represented + // Currently, both int32- and int64-type Tensors are represented // as Int32Arrays in JavaScript. See int64_tensors.ts for details // about the latter. NAPI_THROW_ERROR(env, "Tensor type does not match Int32Array"); @@ -86,11 +86,11 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, // logic for comparing the byte size of the typed-array representation and // the byte size of the tensor dtype needs to be special-cased for int64. if (width * 2 != TF_DataTypeSize(dtype)) { - NAPI_THROW_ERROR(env, - "Byte size of elements differs between JavaScript VM " - "(%zu) and TensorFlow (%zu)", - width, TF_DataTypeSize(dtype)); - // TODO(cais): Better error message. 
+ NAPI_THROW_ERROR( + env, + "Byte size of elements differs between JavaScript VM " + "(%zu * 2 = %zu) and TensorFlow (%zu) for int64-type tensor", + width, width * 2, TF_DataTypeSize(dtype)); return nullptr; } } else { @@ -119,8 +119,8 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, NAPI_THROW_ERROR( env, "Shape does not match two times typed-array in bindData() " - "(num_elements=%zu, array_length=%zu) for int64 data type", - num_elements, array_length); + "(num_elements * 2 = %zu, array_length=%zu) for int64 data type", + num_elements * 2, array_length); return nullptr; } } else { @@ -149,7 +149,7 @@ TFE_TensorHandle *CreateTFE_TensorHandleFromTypedArray(napi_env env, ENSURE_TF_OK_RETVAL(env, tf_status, nullptr); return tfe_tensor_handle; -} // namespace tfnodejs +} // Creates a TFE_TensorHandle from a JS array of string values. TFE_TensorHandle *CreateTFE_TensorHandleFromStringArray( @@ -384,7 +384,9 @@ void CopyTFE_TensorHandleDataToResourceArray( if (num_elements != 1) { NAPI_THROW_ERROR(env, "For DT_RESOURCE tensors, Node.js binding currently " - "supports only exactly 1 element."); + "supports only exactly 1 element, but encountered " + "DT_RESOURCE tensor with %zu elements.", + num_elements); } // The resource handle is represented as a string of `char`s From d8d10b0377bb4648575d343ac0700471f95260aa Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 23:43:35 -0500 Subject: [PATCH 13/30] Some clean up --- src/nodejs_kernel_backend.ts | 15 +++++++-------- src/tensorboard.ts | 5 ++--- src/try_tensorboard.ts | 4 ++-- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 0201a041..2043c0f3 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -97,8 +97,8 @@ export class NodeJSKernelBackend extends KernelBackend { dtype = 'string'; break; case this.binding.TF_RESOURCE: - // TODO(cais): This should probably be made into a resource-specific - // type. + // NOTE(cais): We currently represent resource-type Tensors + // as string of ubytes. dtype = 'string'; break; default: @@ -1495,7 +1495,7 @@ export class NodeJSKernelBackend extends KernelBackend { // ------------------------------------------------------------ // TensorBoard-related (tfjs-node-specific) backend kernels. - summaryWriter(): Tensor1D { // TODO(cais): Fix typing. + summaryWriter(): Tensor1D { // console.log('In node-backend summaryWriter()'); const opAttrs = [ { @@ -1511,13 +1511,12 @@ export class NodeJSKernelBackend extends KernelBackend { ]; const writerResource = this.executeSingleOutput('SummaryWriter', opAttrs, []); - return writerResource as Tensor1D; // TODO(cais): Implement this. + return writerResource as Tensor1D; } - createSummaryFileWriter2( // TODO(cais): Rename. DO NOT SUBMIT. - resourceHandle: Tensor, // TOOD(cais): Use more principled typing. - logdir: string, maxQueue?: number, flushMillis?: number, - filenameSuffix?: string): void { + createSummaryFileWriter( + resourceHandle: Tensor, logdir: string, maxQueue?: number, + flushMillis?: number, filenameSuffix?: string): void { const inputArgs = [ resourceHandle, scalar(logdir), scalar(maxQueue == null ? 
10 : maxQueue, 'int32'), diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 07dc6836..0250a192 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -25,6 +25,7 @@ export class SummaryWriter { constructor(private readonly resourceHandle: Tensor) { // TODO(cais): Deduplicate backend with createSummaryWriter. + // TODO(cais): Use writer cache. this.backend = nodeBackend(); } @@ -41,8 +42,6 @@ export class SummaryWriter { flush() { this.backend.flushSummaryWriter(this.resourceHandle); } - - // TODO(cais): Add close(), calling into the CloseSummaryWriter() op. } export async function createSummaryWriter( @@ -52,7 +51,7 @@ export async function createSummaryWriter( const writerResource = backend.summaryWriter(); // const resourceHandle = (await writerResource.data()) as Uint8Array; - backend.createSummaryFileWriter2( + backend.createSummaryFileWriter( writerResource, logdir, maxQueue, flushMillis, filenameSuffix); return new SummaryWriter(writerResource); diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index afcaa566..7c009d78 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -5,8 +5,8 @@ import * as tf from './index'; // for (let i = -1e10; i < 1e10; i += 1e8) { for (let i = -1e3; i < 1e3; i += 10) { - summaryWriter.scalar(i, 'loss', i * i); - summaryWriter.scalar(i, 'acc', -i * i); + summaryWriter.scalar(i, 'loss', i * i * i * i); + summaryWriter.scalar(i, 'acc', -i * i * i * i); } summaryWriter.flush(); })(); From baed1e291662608ce798a3558ecc25064d101d95 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 29 Jan 2019 23:47:26 -0500 Subject: [PATCH 14/30] Revise API --- src/tensorboard.ts | 11 ++++++----- src/try_tensorboard.ts | 6 +++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 0250a192..eeb29d5e 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -29,11 +29,12 @@ export class SummaryWriter { this.backend = nodeBackend(); } - scalar(step: number, name: string, value: Scalar|number, family?: string) { + scalar( + name: string, value: Scalar|number, step: number, description?: string) { // N.B.: Unlike the Python TensorFlow API, step is a required parameter, // because the construct of global step does not exist in TensorFlow.js. 
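(Aside, illustration only, not part of this diff: with the revised argument order, every call supplies its own step explicitly; `writer` below stands for an instance of the class above and the values are arbitrary.)

    writer.scalar('loss', 0.31, 1);  // name, value, step
    writer.scalar('loss', 0.27, 2);  // the step is passed explicitly on every call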
- if (family != null) { - throw new Error('family support for scalar() is not implemented yet'); + if (description != null) { + throw new Error('scalar() does not support description yet'); } this.backend.writeScalarSummary(this.resourceHandle, step, name, value); @@ -44,9 +45,9 @@ export class SummaryWriter { } } -export async function createSummaryWriter( +export async function summaryFileWriter( logdir: string, maxQueue?: number, flushMillis?: number, - filenameSuffix?: string): Promise { + filenameSuffix = '.v2'): Promise { const backend = nodeBackend(); const writerResource = backend.summaryWriter(); // const resourceHandle = (await writerResource.data()) as Uint8Array; diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts index 7c009d78..bfa23314 100644 --- a/src/try_tensorboard.ts +++ b/src/try_tensorboard.ts @@ -1,12 +1,12 @@ import * as tf from './index'; (async function main() { - const summaryWriter = await tf.createSummaryWriter('/tmp/tfjs_tb_logdir'); + const summaryWriter = await tf.summaryFileWriter('/tmp/tfjs_tb_logdir'); // for (let i = -1e10; i < 1e10; i += 1e8) { for (let i = -1e3; i < 1e3; i += 10) { - summaryWriter.scalar(i, 'loss', i * i * i * i); - summaryWriter.scalar(i, 'acc', -i * i * i * i); + summaryWriter.scalar('loss', i * i * i * i, i); + summaryWriter.scalar('acc', -i * i * i * i, i); } summaryWriter.flush(); })(); From 093681adc0ff9f0618791d4744bf246f4fd48e94 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:04:02 -0500 Subject: [PATCH 15/30] Fix linter errors --- src/nodejs_kernel_backend.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 2043c0f3..a4af6e88 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -16,7 +16,7 @@ */ // tslint:disable-next-line:max-line-length -import {BackendTimingInfo, complex, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D, tidy, util} from '@tensorflow/tfjs-core'; +import {BackendTimingInfo, DataMover, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, tensor3d, Tensor4D, tidy, util} from '@tensorflow/tfjs-core'; import {Conv2DInfo, Conv3DInfo} from '@tensorflow/tfjs-core/dist/ops/conv_util'; import {Tensor5D} from '@tensorflow/tfjs-core/dist/tensor'; import {upcastType} from '@tensorflow/tfjs-core/dist/types'; @@ -64,7 +64,7 @@ export class NodeJSKernelBackend extends KernelBackend { case 'string': return this.binding.TF_STRING; default: - throw new Error(`Unsupported dtype ${value.dtype}`) + throw new Error(`Unsupported dtype ${value.dtype}`); } } From 47354ba2d51f016207fe67eb84d1add5330dda72 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:14:30 -0500 Subject: [PATCH 16/30] Fix linter errors; add comments --- src/ops/op_utils.ts | 7 ++++++- src/try_tensorboard.ts | 12 ------------ 2 files changed, 6 insertions(+), 13 deletions(-) delete mode 100644 src/try_tensorboard.ts diff --git a/src/ops/op_utils.ts b/src/ops/op_utils.ts index 081212f4..a0a65a96 100644 --- a/src/ops/op_utils.ts +++ b/src/ops/op_utils.ts @@ -45,7 +45,12 @@ export function getTFDType(dataType: tfc.DataType): number { return binding.TF_COMPLEX64; case 'string': return binding.TF_STRING; - case 'int64': + // tslint:disable-next-line:no-any + case 'int64' as any: + // int64 is not a generally 
supported dtype in TensorFlow.js + // (tfjs-core). However, it needs to be included here for the purpose of + // writing the `step` value to TensorBoard via WriteScalarSummary and + // other op kernels. return binding.TF_INT64; default: const errorMessage = `Unknown dtype: ${dataType}`; diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts deleted file mode 100644 index bfa23314..00000000 --- a/src/try_tensorboard.ts +++ /dev/null @@ -1,12 +0,0 @@ -import * as tf from './index'; - -(async function main() { - const summaryWriter = await tf.summaryFileWriter('/tmp/tfjs_tb_logdir'); - - // for (let i = -1e10; i < 1e10; i += 1e8) { - for (let i = -1e3; i < 1e3; i += 10) { - summaryWriter.scalar('loss', i * i * i * i, i); - summaryWriter.scalar('acc', -i * i * i * i, i); - } - summaryWriter.flush(); -})(); From da37fd742af8c048092d16ad99bbbba6c8ecc771 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:30:03 -0500 Subject: [PATCH 17/30] Add doc strings --- src/nodejs_kernel_backend.ts | 1 + src/tensorboard.ts | 59 ++++++++++++++++++++++++++++++------ 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index a4af6e88..5b8da9df 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -1564,6 +1564,7 @@ export class NodeJSKernelBackend extends KernelBackend { // ~ TensorBoard-related (tfjs-node-specific) backend kernels. // ------------------------------------------------------------ + memory() { // Due to automatic garbage collection, the numbers are unreliable. // TODO(kreeger): Since there is finalization in C, count the true diff --git a/src/tensorboard.ts b/src/tensorboard.ts index eeb29d5e..5e359da0 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -16,11 +16,11 @@ * ============================================================================= */ -import {Scalar, Tensor} from '@tensorflow/tfjs'; +import {Scalar, Tensor, util} from '@tensorflow/tfjs'; import {NodeJSKernelBackend} from './nodejs_kernel_backend'; import {nodeBackend} from './ops/op_utils'; -export class SummaryWriter { +export class SummaryFileWriter { backend: NodeJSKernelBackend; constructor(private readonly resourceHandle: Tensor) { @@ -29,6 +29,17 @@ export class SummaryWriter { this.backend = nodeBackend(); } + /** + * Write a scalar summary. + * + * @param name A name of the summary. The summary tag for TensorBoard will be + * this name. + * @param value A real numeric scalar value, as `tf.Scalar` or a JavaScript + * `number`. + * @param step Required `int64`-castable, monotically-increasing step value. + * @param description Optinal long-form description for this summary, as a + * `string`. *Not implemented yet*. + */ scalar( name: string, value: Scalar|number, step: number, description?: string) { // N.B.: Unlike the Python TensorFlow API, step is a required parameter, @@ -40,20 +51,48 @@ export class SummaryWriter { this.backend.writeScalarSummary(this.resourceHandle, step, name, value); } + /** + * Force summary writer to send all buffered data to storage. + */ flush() { this.backend.flushSummaryWriter(this.resourceHandle); } } +/** + * Use a cache for `SummaryFileWriter` instance. + * + * Using multiple instances of `SummaryFileWriter` pointing to the same + * logdir has potential problems. Using this cache avoids those problems. + */ +const summaryFileWriterCache: {[logdir: string]: SummaryFileWriter} = {}; + +/** + * Create a summary file writer for TensorBoard. 
+ * + * @param logdir Log directory in which the summary data will be written. + * @param maxQueue Maximum queue length (default: `10`). + * @param flushMillis Flush every __ milliseconds (default: `120e3`, i.e, + * `120` seconds). + * @param filenameSuffix Suffix of the protocol buffer file names to be + * written in the `logdir` (default: `.v2`). + * @returns An instance of `SummaryFileWriter`. + */ export async function summaryFileWriter( - logdir: string, maxQueue?: number, flushMillis?: number, - filenameSuffix = '.v2'): Promise { - const backend = nodeBackend(); - const writerResource = backend.summaryWriter(); - // const resourceHandle = (await writerResource.data()) as Uint8Array; + logdir: string, maxQueue = 10, flushMillis = 120000, + filenameSuffix = '.v2'): Promise { + util.assert( + logdir != null && typeof logdir === 'string' && logdir.length > 0, + `logdir is null, undefined, not a string, or an empty string`); + if (!(logdir in summaryFileWriterCache)) { + const backend = nodeBackend(); + const writerResource = backend.summaryWriter(); + // const resourceHandle = (await writerResource.data()) as Uint8Array; - backend.createSummaryFileWriter( - writerResource, logdir, maxQueue, flushMillis, filenameSuffix); + backend.createSummaryFileWriter( + writerResource, logdir, maxQueue, flushMillis, filenameSuffix); - return new SummaryWriter(writerResource); + summaryFileWriterCache[logdir] = new SummaryFileWriter(writerResource); + } + return summaryFileWriterCache[logdir]; } From 5bd42cfec813fe7a6f97c507bc51c1584f37851d Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:32:34 -0500 Subject: [PATCH 18/30] Cleanup in summaryWriter() --- src/nodejs_kernel_backend.ts | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index 5b8da9df..ec1fa6d5 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -1496,18 +1496,9 @@ export class NodeJSKernelBackend extends KernelBackend { // TensorBoard-related (tfjs-node-specific) backend kernels. summaryWriter(): Tensor1D { - // console.log('In node-backend summaryWriter()'); const opAttrs = [ - { - name: 'shared_name', - type: this.binding.TF_ATTR_STRING, - value: `logdir:foo` // TODO(cais): Use more specific name. - }, - { - name: 'container', - type: this.binding.TF_ATTR_STRING, - value: `logdir:foo-container` // TODO(cais): Use more specific name. - } + {name: 'shared_name', type: this.binding.TF_ATTR_STRING, value: ''}, + {name: 'container', type: this.binding.TF_ATTR_STRING, value: ''} ]; const writerResource = this.executeSingleOutput('SummaryWriter', opAttrs, []); From d7858138628804c8f7b71bb11aa570eb6812612d Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:41:37 -0500 Subject: [PATCH 19/30] Adjust API exports --- src/index.ts | 4 ++-- src/node.ts | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 src/node.ts diff --git a/src/index.ts b/src/index.ts index f1e17de4..9ca078f7 100644 --- a/src/index.ts +++ b/src/index.ts @@ -37,8 +37,6 @@ export const io = { ...nodeIo }; -export * from './tensorboard'; - // Export all union package symbols export * from '@tensorflow/tfjs'; @@ -64,3 +62,5 @@ tf.io.registerLoadRouter(nodeHTTPRequestRouter); import {ProgbarLogger} from './callbacks'; // Register the ProgbarLogger for Model.fit() at verbosity level 1. 
tf.registerCallbackConstructor(1, ProgbarLogger); + +export * from './node'; diff --git a/src/node.ts b/src/node.ts new file mode 100644 index 00000000..a35f993f --- /dev/null +++ b/src/node.ts @@ -0,0 +1,24 @@ +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Public API symbols under the tf.node.* namespace. + */ + +import {summaryFileWriter} from './tensorboard'; + +export const node = {summaryFileWriter}; From 3558b260207781fa95068497fe94defedd49e09b Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:47:10 -0500 Subject: [PATCH 20/30] Change async summaryFileWriter() to sync --- src/tensorboard.ts | 5 ++--- src/try_tensorboard.ts | 8 ++++++++ 2 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 src/try_tensorboard.ts diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 5e359da0..3b48c23f 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -78,16 +78,15 @@ const summaryFileWriterCache: {[logdir: string]: SummaryFileWriter} = {}; * written in the `logdir` (default: `.v2`). * @returns An instance of `SummaryFileWriter`. */ -export async function summaryFileWriter( +export function summaryFileWriter( logdir: string, maxQueue = 10, flushMillis = 120000, - filenameSuffix = '.v2'): Promise { + filenameSuffix = '.v2'): SummaryFileWriter { util.assert( logdir != null && typeof logdir === 'string' && logdir.length > 0, `logdir is null, undefined, not a string, or an empty string`); if (!(logdir in summaryFileWriterCache)) { const backend = nodeBackend(); const writerResource = backend.summaryWriter(); - // const resourceHandle = (await writerResource.data()) as Uint8Array; backend.createSummaryFileWriter( writerResource, logdir, maxQueue, flushMillis, filenameSuffix); diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts new file mode 100644 index 00000000..9f385329 --- /dev/null +++ b/src/try_tensorboard.ts @@ -0,0 +1,8 @@ +import * as tf from './index'; + +const summaryWriter = tf.node.summaryFileWriter('/tmp/tfjs_tb_logdir'); + +for (let i = -1e3; i < 1e3; i += 10) { + summaryWriter.scalar('loss', i * i * i * i, i); + summaryWriter.scalar('acc', -i * i * i * i, i); +} From 73ad84b3e4642cbd4ce51d6d935fd50aa00a5341 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:50:58 -0500 Subject: [PATCH 21/30] Doc string fix --- src/int64_tensors.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts index 95017439..66653faf 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -23,7 +23,7 @@ const INT32_MAX = 2147483648; /** * Node.js-specific tensor type: int64-type scalar. * - * This class is created for a specifici purpose: to support + * This class is created for a specific purpose: to support * writing `step`s to TensorBoard via op-kernel bindings. 
* `step` is required to have an int64 dtype, but TensorFlow.js * (tfjs-core) doesn't have a built-in int64 dtype. This is @@ -60,6 +60,7 @@ export class Int64Scalar { return []; } + /** Get the Int32Array that represents the int64 value. */ get valueArray(): Int32Array { return this.valueArray_; } From 2b2ca41ceb3c0356a952c94408a99f16b75891ab Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:56:13 -0500 Subject: [PATCH 22/30] Add code snippet in doc string of `summaryFileWriter()` --- src/tensorboard.ts | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 3b48c23f..6a2be5eb 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -70,6 +70,17 @@ const summaryFileWriterCache: {[logdir: string]: SummaryFileWriter} = {}; /** * Create a summary file writer for TensorBoard. * + * Example: + * ```javascript + * const tf = require('@tensorflow/tfjs-node'); + * + * const summaryWriter = tf.node.summaryFileWriter('/tmp/tfjs_tb_logdir'); + * + * for (let step = 0; step < 100; ++step) { + * summaryWriter.scalar('dummyValu', Math.sin(2 * Math.PI * i / 10), step); + * } + * ``` + * * @param logdir Log directory in which the summary data will be written. * @param maxQueue Maximum queue length (default: `10`). * @param flushMillis Flush every __ milliseconds (default: `120e3`, i.e, From 65985a486fd8d20d826e33987554741d2ea0e54a Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 11:58:06 -0500 Subject: [PATCH 23/30] Remove obsolete TODO items --- src/tensorboard.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 6a2be5eb..3bcf95c0 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -24,8 +24,6 @@ export class SummaryFileWriter { backend: NodeJSKernelBackend; constructor(private readonly resourceHandle: Tensor) { - // TODO(cais): Deduplicate backend with createSummaryWriter. - // TODO(cais): Use writer cache. 
this.backend = nodeBackend(); } From c9a00ad2ab07f5226ee47693cb82be4267d3be3f Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 12:24:05 -0500 Subject: [PATCH 24/30] Fix typo; remove unwanted file --- src/tensorboard.ts | 2 +- src/try_tensorboard.ts | 8 -------- 2 files changed, 1 insertion(+), 9 deletions(-) delete mode 100644 src/try_tensorboard.ts diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 3bcf95c0..0fcefd3f 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -75,7 +75,7 @@ const summaryFileWriterCache: {[logdir: string]: SummaryFileWriter} = {}; * const summaryWriter = tf.node.summaryFileWriter('/tmp/tfjs_tb_logdir'); * * for (let step = 0; step < 100; ++step) { - * summaryWriter.scalar('dummyValu', Math.sin(2 * Math.PI * i / 10), step); + * summaryWriter.scalar('dummyValue', Math.sin(2 * Math.PI * i / 10), step); * } * ``` * diff --git a/src/try_tensorboard.ts b/src/try_tensorboard.ts deleted file mode 100644 index 9f385329..00000000 --- a/src/try_tensorboard.ts +++ /dev/null @@ -1,8 +0,0 @@ -import * as tf from './index'; - -const summaryWriter = tf.node.summaryFileWriter('/tmp/tfjs_tb_logdir'); - -for (let i = -1e3; i < 1e3; i += 10) { - summaryWriter.scalar('loss', i * i * i * i, i); - summaryWriter.scalar('acc', -i * i * i * i, i); -} From e1ae2d9ae10dad08b0d4d83435ab52f62e4bbced Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Wed, 30 Jan 2019 12:28:39 -0500 Subject: [PATCH 25/30] Fix typo in code snippet --- src/tensorboard.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 0fcefd3f..affe7224 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -75,7 +75,7 @@ const summaryFileWriterCache: {[logdir: string]: SummaryFileWriter} = {}; * const summaryWriter = tf.node.summaryFileWriter('/tmp/tfjs_tb_logdir'); * * for (let step = 0; step < 100; ++step) { - * summaryWriter.scalar('dummyValue', Math.sin(2 * Math.PI * i / 10), step); + * summaryWriter.scalar('dummyValue', Math.sin(2 * Math.PI * step / 8), step); * } * ``` * From 92ea73cee22be1ac328b863c548b96b7c4cec979 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Fri, 1 Feb 2019 15:15:47 -0500 Subject: [PATCH 26/30] Address review comments; add guard for int32 value bounds --- binding/tfjs_backend.cc | 11 ++++------- src/int64_tensors.ts | 11 ++++++++++- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/binding/tfjs_backend.cc b/binding/tfjs_backend.cc index ea213f5c..50ba3317 100644 --- a/binding/tfjs_backend.cc +++ b/binding/tfjs_backend.cc @@ -377,9 +377,6 @@ void CopyTFE_TensorHandleDataToResourceArray( void *tensor_data = TF_TensorData(tensor.tensor); ENSURE_VALUE_IS_NOT_NULL(env, tensor_data); - size_t byte_length = TF_TensorByteSize(tensor.tensor); - const char *limit = static_cast(tensor_data) + byte_length; - size_t num_elements = GetTensorNumElements(tensor.tensor); if (num_elements != 1) { NAPI_THROW_ERROR(env, @@ -389,17 +386,16 @@ void CopyTFE_TensorHandleDataToResourceArray( num_elements); } - // The resource handle is represented as a string of `char`s - const char *data = static_cast(tensor_data); - TF_AutoStatus status; // Create a JS string to stash the resouce handle into. 
napi_status nstatus; + size_t byte_length = TF_TensorByteSize(tensor.tensor); nstatus = napi_create_array_with_length(env, byte_length, result); + ENSURE_NAPI_OK(env, nstatus); napi_value array_buffer_value; - void *array_buffer_data; + void *array_buffer_data = nullptr; nstatus = napi_create_arraybuffer(env, byte_length, &array_buffer_data, &array_buffer_value); ENSURE_NAPI_OK(env, nstatus); @@ -408,6 +404,7 @@ void CopyTFE_TensorHandleDataToResourceArray( // current value to the newly allocated NAPI buffer. memcpy(array_buffer_data, tensor_data, byte_length); + // This method will only return uint8 arrays. nstatus = napi_create_typedarray(env, napi_uint8_array, byte_length, array_buffer_value, 0, result); ENSURE_NAPI_OK(env, nstatus); diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts index 66653faf..96488375 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {Shape} from '@tensorflow/tfjs'; +import {Shape, util} from '@tensorflow/tfjs'; import {endianness} from 'os'; const INT32_MAX = 2147483648; @@ -49,9 +49,18 @@ export class Int64Scalar { Int64Scalar.endiannessOkay_ = true; } + util.assert( + value > -INT32_MAX && value < INT32_MAX - 1, + `Got a value outside of the bound of values supported for int64 ` + + `dtype ([-${INT32_MAX}, ${INT32_MAX - 1}]): ${value}`); + util.assert( + Number.isInteger(value), + `Expected value to be an integer, but got ${value}`); + // We use two int32 elements to represent a int64 value. This assumes // little endian, which is checked above. const highPart = Math.floor(value / INT32_MAX); + console.log(`highPart = ${highPart}`); // DEBUG const lowPart = value % INT32_MAX; this.valueArray_ = new Int32Array([lowPart, highPart]); } From bf6800c7f7db0f299704f1f964a0168d5c9277e7 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Fri, 1 Feb 2019 15:24:11 -0500 Subject: [PATCH 27/30] Add unit tests for int64_tensors.ts --- src/int64_tensors.ts | 1 - src/int64_tensors_test.ts | 61 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 src/int64_tensors_test.ts diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts index 96488375..88dbfbea 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -60,7 +60,6 @@ export class Int64Scalar { // We use two int32 elements to represent a int64 value. This assumes // little endian, which is checked above. const highPart = Math.floor(value / INT32_MAX); - console.log(`highPart = ${highPart}`); // DEBUG const lowPart = value % INT32_MAX; this.valueArray_ = new Int32Array([lowPart, highPart]); } diff --git a/src/int64_tensors_test.ts b/src/int64_tensors_test.ts new file mode 100644 index 00000000..e4d171f6 --- /dev/null +++ b/src/int64_tensors_test.ts @@ -0,0 +1,61 @@ +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +import {Int64Scalar} from './int64_tensors'; + +describe('int64 tensors', () => { + it('positive value', () => { + const x = new Int64Scalar(42); + expect(x.dtype).toEqual('int64'); + const valueArray = x.valueArray; + expect(valueArray.constructor.name).toEqual('Int32Array'); + expect(valueArray.length).toEqual(2); + expect(valueArray[0]).toEqual(42); + expect(valueArray[1]).toEqual(0); + }); + + it('zero value', () => { + const x = new Int64Scalar(0); + expect(x.dtype).toEqual('int64'); + const valueArray = x.valueArray; + expect(valueArray.constructor.name).toEqual('Int32Array'); + expect(valueArray.length).toEqual(2); + expect(valueArray[0]).toEqual(0); + expect(valueArray[1]).toEqual(0); + }); + + it('negative value', () => { + const x = new Int64Scalar(-3); + expect(x.dtype).toEqual('int64'); + const valueArray = x.valueArray; + expect(valueArray.constructor.name).toEqual('Int32Array'); + expect(valueArray.length).toEqual(2); + expect(valueArray[0]).toEqual(-3); + expect(valueArray[1]).toEqual(-1); + }); + + it('Non-integer value leads to error', () => { + expect(() => new Int64Scalar(0.4)).toThrowError(/integer/); + expect(() => new Int64Scalar(-3.2)).toThrowError(/integer/); + }); + + it('Out-of-bound value leads to error', () => { + expect(() => new Int64Scalar(2147483648)).toThrowError(/bound/); + expect(() => new Int64Scalar(2147483648 * 2)).toThrowError(/bound/); + expect(() => new Int64Scalar(-2147483648 - 1)).toThrowError(/bound/); + }); +}); From 1045d82e8b58f46fa8f44c01145c2907e9f9744c Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Fri, 1 Feb 2019 16:11:32 -0500 Subject: [PATCH 28/30] Add unit tests for tensorboard summary methods --- package.json | 2 + src/nodejs_kernel_backend.ts | 8 +- src/tensorboard.ts | 2 +- src/tensorboard_test.ts | 146 +++++++++++++++++++++++++++++++++++ yarn.lock | 33 ++++++++ 5 files changed, 188 insertions(+), 3 deletions(-) create mode 100644 src/tensorboard_test.ts diff --git a/package.json b/package.json index 13be6788..5e67367f 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,8 @@ "clang-format": "~1.2.2", "jasmine": "~3.1.0", "nyc": "^12.0.2", + "shelljs": "^0.8.3", + "tmp": "^0.0.33", "ts-node": "^5.0.1", "tslint": "~5.9.1", "typescript": "~2.9.2", diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index ec1fa6d5..af40b442 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -1495,9 +1495,13 @@ export class NodeJSKernelBackend extends KernelBackend { // ------------------------------------------------------------ // TensorBoard-related (tfjs-node-specific) backend kernels. 
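(Aside, illustration only, not part of this diff: the change below keys the writer resource's `shared_name` by the log directory; combined with the logdir-keyed `summaryFileWriterCache` in tensorboard.ts, repeated calls for the same logdir reuse the same writer instead of creating independent ones. A sketch with an arbitrary path:)

    import * as tf from '@tensorflow/tfjs-node';

    const writer1 = tf.node.summaryFileWriter('/tmp/example_logdir');
    const writer2 = tf.node.summaryFileWriter('/tmp/example_logdir');
    // writer1 === writer2: the cache hands back the same SummaryFileWriter instance.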
- summaryWriter(): Tensor1D { + summaryWriter(logdir: string): Tensor1D { const opAttrs = [ - {name: 'shared_name', type: this.binding.TF_ATTR_STRING, value: ''}, + { + name: 'shared_name', + type: this.binding.TF_ATTR_STRING, + value: `logdir:${logdir}` + }, {name: 'container', type: this.binding.TF_ATTR_STRING, value: ''} ]; const writerResource = diff --git a/src/tensorboard.ts b/src/tensorboard.ts index affe7224..1456e13f 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -95,7 +95,7 @@ export function summaryFileWriter( `logdir is null, undefined, not a string, or an empty string`); if (!(logdir in summaryFileWriterCache)) { const backend = nodeBackend(); - const writerResource = backend.summaryWriter(); + const writerResource = backend.summaryWriter(logdir); backend.createSummaryFileWriter( writerResource, logdir, maxQueue, flushMillis, filenameSuffix); diff --git a/src/tensorboard_test.ts b/src/tensorboard_test.ts new file mode 100644 index 00000000..03755bbf --- /dev/null +++ b/src/tensorboard_test.ts @@ -0,0 +1,146 @@ +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +import {scalar} from '@tensorflow/tfjs'; +import * as fs from 'fs'; +import * as path from 'path'; + +// tslint:disable-next-line:no-require-imports +const shelljs = require('shelljs'); +// tslint:disable-next-line:no-require-imports +const tmp = require('tmp'); + +import {summaryFileWriter} from './tensorboard'; + +describe('tensorboard', () => { + let tmpLogDir: string; + + beforeEach(() => { + tmpLogDir = tmp.dirSync().name; + }); + + afterEach(() => { + if (tmpLogDir != null) { + shelljs.rm('-rf', tmpLogDir); + } + }); + + it('Create summaryFileWriter and write scalar', () => { + const writer = summaryFileWriter(tmpLogDir); + writer.scalar('foo', 42, 0); + writer.flush(); + + // Currently, we only verify that the file exists and the size + // increases in a sensible way as we write more scalars to it. + // The difficulty is in reading the protobuf contents of the event + // file in JavaScript/TypeScript. 
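(Aside, illustration only, not part of this diff: a name-based sanity check is also possible without parsing protocol buffers, assuming TensorFlow's standard `events.out.tfevents` file-name prefix. A TypeScript sketch with a hypothetical helper name:)

    import * as fs from 'fs';

    function hasSingleEventFile(logdir: string): boolean {
      const eventFiles = fs.readdirSync(logdir).filter(
          f => f.indexOf('events.out.tfevents') === 0);
      return eventFiles.length === 1;
    }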
+ const fileNames = fs.readdirSync(tmpLogDir); + expect(fileNames.length).toEqual(1); + const eventFilePath = path.join(tmpLogDir, fileNames[0]); + const fileSize0 = fs.statSync(eventFilePath).size; + + writer.scalar('foo', 43, 1); + writer.flush(); + const fileSize1 = fs.statSync(eventFilePath).size; + const incrementPerScalar = fileSize1 - fileSize0; + expect(incrementPerScalar).toBeGreaterThan(0); + + writer.scalar('foo', 44, 2); + writer.scalar('foo', 45, 3); + writer.flush(); + const fileSize2 = fs.statSync(eventFilePath).size; + expect(fileSize2 - fileSize1).toEqual(2 * incrementPerScalar); + }); + + it('Writing tf.Scalar works', () => { + const writer = summaryFileWriter(tmpLogDir); + writer.scalar('foo', scalar(42), 0); + writer.flush(); + + // Currently, we only verify that the file exists and the size + // increases in a sensible way as we write more scalars to it. + // The difficulty is in reading the protobuf contents of the event + // file in JavaScript/TypeScript. + const fileNames = fs.readdirSync(tmpLogDir); + expect(fileNames.length).toEqual(1); + }); + + it('No crosstalk between two summary writers', () => { + const logDir1 = path.join(tmpLogDir, '1'); + const writer1 = summaryFileWriter(logDir1); + writer1.scalar('foo', 42, 0); + writer1.flush(); + + const logDir2 = path.join(tmpLogDir, '2'); + const writer2 = summaryFileWriter(logDir2); + writer2.scalar('foo', 1.337, 0); + writer2.flush(); + + // Currently, we only verify that the file exists and the size + // increases in a sensible way as we write more scalars to it. + // The difficulty is in reading the protobuf contents of the event + // file in JavaScript/TypeScript. + let fileNames = fs.readdirSync(logDir1); + expect(fileNames.length).toEqual(1); + const eventFilePath1 = path.join(logDir1, fileNames[0]); + const fileSize1Num0 = fs.statSync(eventFilePath1).size; + + fileNames = fs.readdirSync(logDir2); + expect(fileNames.length).toEqual(1); + const eventFilePath2 = path.join(logDir2, fileNames[0]); + const fileSize2Num0 = fs.statSync(eventFilePath2).size; + expect(fileSize2Num0).toBeGreaterThan(0); + + writer1.scalar('foo', 43, 1); + writer1.flush(); + const fileSize1Num1 = fs.statSync(eventFilePath1).size; + const incrementPerScalar = fileSize1Num1 - fileSize1Num0; + expect(incrementPerScalar).toBeGreaterThan(0); + + writer1.scalar('foo', 44, 2); + writer1.scalar('foo', 45, 3); + writer1.flush(); + const fileSize1Num2 = fs.statSync(eventFilePath1).size; + expect(fileSize1Num2 - fileSize1Num1).toEqual(2 * incrementPerScalar); + + const fileSize2Num1 = fs.statSync(eventFilePath2).size; + expect(fileSize2Num1).toEqual(fileSize2Num0); + + writer2.scalar('foo', 1.336, 1); + writer2.scalar('foo', 1.335, 2); + writer2.flush(); + + const fileSize1Num3 = fs.statSync(eventFilePath1).size; + expect(fileSize1Num3).toEqual(fileSize1Num2); + const fileSize2Num2 = fs.statSync(eventFilePath2).size; + expect(fileSize2Num2 - fileSize2Num1).toEqual(2 * incrementPerScalar); + }); + + it('Writing into existing directory works', () => { + shelljs.mkdir('-p', tmpLogDir); + const writer = summaryFileWriter(path.join(tmpLogDir, '22')); + writer.scalar('foo', 42, 0); + writer.flush(); + + const fileNames = fs.readdirSync(tmpLogDir); + expect(fileNames.length).toEqual(1); + }); + + it('empty logdir leads to error', () => { + expect(() => summaryFileWriter('')).toThrowError(/empty string/); + }); +}); diff --git a/yarn.lock b/yarn.lock index 72b2f10e..de79fdc8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1095,6 +1095,11 @@ inherits@2, 
inherits@~2.0.0: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= +interpret@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" + integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== + invariant@^2.2.0: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" @@ -1756,6 +1761,11 @@ os-locale@^2.0.0: lcid "^1.0.0" mem "^1.1.0" +os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" @@ -1970,6 +1980,13 @@ read-pkg@^3.0.0: normalize-package-data "^2.3.2" path-type "^3.0.0" +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q= + dependencies: + resolve "^1.1.6" + regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" @@ -2098,6 +2115,15 @@ shebang-regex@^1.0.0: resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= +shelljs@^0.8.3: + version "0.8.3" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.3.tgz#a7f3319520ebf09ee81275b2368adb286659b097" + integrity sha512-fc0BKlAWiLpwZljmOvAOTE/gXawtCoNrP5oaY7KIaQbbyHeQVg01pSEuEGvGh3HEdBU4baCD7wQBwADmM/7f7A== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + signal-exit@^3.0.0, signal-exit@^3.0.1, signal-exit@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" @@ -2327,6 +2353,13 @@ test-exclude@^4.2.0: read-pkg-up "^3.0.0" require-main-filename "^1.0.1" +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" From 310121702b55d3e1171602201f39f28bb0bca4b2 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Mon, 4 Feb 2019 22:23:59 -0500 Subject: [PATCH 29/30] Respond to review comments --- src/int64_tensors.ts | 2 +- src/tensorboard.ts | 2 +- src/tensorboard_test.ts | 15 +++++++-------- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts index 88dbfbea..511a258e 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -59,7 +59,7 @@ export class Int64Scalar { // We use two int32 elements to represent a int64 value. This assumes // little endian, which is checked above. - const highPart = Math.floor(value / INT32_MAX); + const highPart = value >= 0 ? 
0 : -1; const lowPart = value % INT32_MAX; this.valueArray_ = new Int32Array([lowPart, highPart]); } diff --git a/src/tensorboard.ts b/src/tensorboard.ts index 1456e13f..8b929681 100644 --- a/src/tensorboard.ts +++ b/src/tensorboard.ts @@ -92,7 +92,7 @@ export function summaryFileWriter( filenameSuffix = '.v2'): SummaryFileWriter { util.assert( logdir != null && typeof logdir === 'string' && logdir.length > 0, - `logdir is null, undefined, not a string, or an empty string`); + `Invalid logdir: ${logdir}. Expected a non-empty string for logdir.`); if (!(logdir in summaryFileWriterCache)) { const backend = nodeBackend(); const writerResource = backend.summaryWriter(logdir); diff --git a/src/tensorboard_test.ts b/src/tensorboard_test.ts index 03755bbf..32f1b477 100644 --- a/src/tensorboard_test.ts +++ b/src/tensorboard_test.ts @@ -18,14 +18,13 @@ import {scalar} from '@tensorflow/tfjs'; import * as fs from 'fs'; import * as path from 'path'; +import * as tfn from './index'; // tslint:disable-next-line:no-require-imports const shelljs = require('shelljs'); // tslint:disable-next-line:no-require-imports const tmp = require('tmp'); -import {summaryFileWriter} from './tensorboard'; - describe('tensorboard', () => { let tmpLogDir: string; @@ -40,7 +39,7 @@ describe('tensorboard', () => { }); it('Create summaryFileWriter and write scalar', () => { - const writer = summaryFileWriter(tmpLogDir); + const writer = tfn.node.summaryFileWriter(tmpLogDir); writer.scalar('foo', 42, 0); writer.flush(); @@ -67,7 +66,7 @@ describe('tensorboard', () => { }); it('Writing tf.Scalar works', () => { - const writer = summaryFileWriter(tmpLogDir); + const writer = tfn.node.summaryFileWriter(tmpLogDir); writer.scalar('foo', scalar(42), 0); writer.flush(); @@ -81,12 +80,12 @@ describe('tensorboard', () => { it('No crosstalk between two summary writers', () => { const logDir1 = path.join(tmpLogDir, '1'); - const writer1 = summaryFileWriter(logDir1); + const writer1 = tfn.node.summaryFileWriter(logDir1); writer1.scalar('foo', 42, 0); writer1.flush(); const logDir2 = path.join(tmpLogDir, '2'); - const writer2 = summaryFileWriter(logDir2); + const writer2 = tfn.node.summaryFileWriter(logDir2); writer2.scalar('foo', 1.337, 0); writer2.flush(); @@ -132,7 +131,7 @@ describe('tensorboard', () => { it('Writing into existing directory works', () => { shelljs.mkdir('-p', tmpLogDir); - const writer = summaryFileWriter(path.join(tmpLogDir, '22')); + const writer = tfn.node.summaryFileWriter(path.join(tmpLogDir, '22')); writer.scalar('foo', 42, 0); writer.flush(); @@ -141,6 +140,6 @@ describe('tensorboard', () => { }); it('empty logdir leads to error', () => { - expect(() => summaryFileWriter('')).toThrowError(/empty string/); + expect(() => tfn.node.summaryFileWriter('')).toThrowError(/empty string/); }); }); From 91e3621b13ade18d7f3a1c97b54681b4b5493364 Mon Sep 17 00:00:00 2001 From: Shanqing Cai Date: Tue, 5 Feb 2019 14:34:12 -0500 Subject: [PATCH 30/30] Address review comments --- src/int64_tensors.ts | 8 ++++++++ src/nodejs_kernel_backend.ts | 4 +++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/int64_tensors.ts b/src/int64_tensors.ts index 511a258e..e8bf45e5 100644 --- a/src/int64_tensors.ts +++ b/src/int64_tensors.ts @@ -40,6 +40,14 @@ export class Int64Scalar { private static endiannessOkay_: boolean; constructor(readonly value: number) { + // The reason why we need to check endianness of the machine here is + // negative int64 values and the way in which we represent them + // using Int32Arrays in 
JavaScript. We represent each int64 value with + // two consecutive elements of an Int32Array. For positive values, + // the high part is simply zero; for negative values, the high part + // should be -1. The ordering of the low and high parts assumes + // little endian (i.e., least significant digits appear first). + // This assumption is checked by the lines below. if (Int64Scalar.endiannessOkay_ == null) { if (endianness() !== 'LE') { throw new Error( diff --git a/src/nodejs_kernel_backend.ts b/src/nodejs_kernel_backend.ts index af40b442..c979cfe8 100644 --- a/src/nodejs_kernel_backend.ts +++ b/src/nodejs_kernel_backend.ts @@ -123,12 +123,14 @@ export class NodeJSKernelBackend extends KernelBackend { this.tensorMap.set((tensors[i] as Tensor).dataId, info); } ids.push(info.id); - } else { + } else if (tensors[i] instanceof Int64Scalar) { // Then `tensors[i]` is a Int64Scalar, which we currently represent // using an `Int32Array`. const value = (tensors[i] as Int64Scalar).valueArray; const id = this.binding.createTensor([], this.binding.TF_INT64, value); ids.push(id); + } else { + throw new Error(`Invalid Tensor type: ${typeof tensors[i]}`); } } return ids;
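
The end state of the series, as exercised by the updated tests: the summary
writer is reached through the `tfn.node` namespace and logs scalars to an
event file that TensorBoard can read. A minimal usage sketch based only on
the calls that appear in tensorboard_test.ts; the package import path and the
log directory below are assumptions, not part of the patches:

    import * as tfn from '@tensorflow/tfjs-node';

    // Create (or fetch from the per-logdir cache) a summary file writer.
    const writer = tfn.node.summaryFileWriter('/tmp/tfjs_tb_logdir');

    // Log scalar values under the tag 'loss' at consecutive steps.
    writer.scalar('loss', 0.42, 0);
    writer.scalar('loss', 0.37, 1);

    // Flush buffered events to the event file on disk.
    writer.flush();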
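
summaryWriter() now sets the op's shared_name attribute to `logdir:${logdir}`,
and tensorboard.ts keeps at most one writer per log directory in
summaryFileWriterCache. A simplified sketch of that caching pattern, in which
SummaryFileWriter and createWriter are stand-ins for the real backend calls
(backend.summaryWriter and backend.createSummaryFileWriter), not actual APIs:

    // Stand-in for the user-facing writer object.
    interface SummaryFileWriter {
      scalar(name: string, value: number, step: number): void;
      flush(): void;
    }

    // Hypothetical factory; the real code creates the writer resource via
    // the native binding and then configures it with the logdir.
    declare function createWriter(logdir: string): SummaryFileWriter;

    const summaryFileWriterCache: {[logdir: string]: SummaryFileWriter} = {};

    function summaryFileWriter(logdir: string): SummaryFileWriter {
      if (logdir == null || typeof logdir !== 'string' || logdir.length === 0) {
        throw new Error(`Invalid logdir: ${logdir}.`);
      }
      // One writer per logdir: repeated calls reuse the cached object, and
      // the underlying op's shared_name of `logdir:${logdir}` keys the
      // native resource the same way.
      if (!(logdir in summaryFileWriterCache)) {
        summaryFileWriterCache[logdir] = createWriter(logdir);
      }
      return summaryFileWriterCache[logdir];
    }

Keying both the cache and the shared_name on the log directory is what the
'No crosstalk between two summary writers' test relies on: two distinct
directories get two independent writer resources.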
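
The Int64Scalar changes in the last two patches also pin down how int64 step
values are encoded: two consecutive Int32Array elements, low part first on a
little-endian host, with the high part 0 for non-negative values and -1 for
negative ones. A rough sketch of that layout for values that fit in 32 bits;
toInt64Pair is a hypothetical helper written for illustration, not the actual
Int64Scalar API:

    import {endianness} from 'os';

    // Encode a JavaScript number that fits in 32 bits as the two-int32
    // layout described in int64_tensors.ts: low 32 bits first, then the
    // sign-extended high 32 bits. Little-endian hosts only.
    function toInt64Pair(value: number): Int32Array {
      if (endianness() !== 'LE') {
        throw new Error('Only little-endian hosts are supported.');
      }
      const lowPart = value | 0;             // low 32 bits of the value
      const highPart = value >= 0 ? 0 : -1;  // sign extension into the high 32 bits
      return new Int32Array([lowPart, highPart]);
    }

    console.log(toInt64Pair(7));   // Int32Array [ 7, 0 ]
    console.log(toInt64Pair(-1));  // Int32Array [ -1, -1 ], i.e. all 64 bits set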