diff --git a/example/read-query-infer.f90 b/example/read-query-infer.f90 new file mode 100644 index 000000000..1746ccedd --- /dev/null +++ b/example/read-query-infer.f90 @@ -0,0 +1,56 @@ +! Copyright (c), The Regents of the University of California +! Terms of use are as specified in LICENSE.txt +program read_query_infer + !! This program demonstrates how to read a neural network from a JSON file, + !! query the network object for some of its properties, print those properties, + !! and use the network to perform inference. + use inference_engine_m, only : inference_engine_t, relu_t, tensor_t + use julienne_m, only : string_t, command_line_t, file_t + use kind_parameters_m, only : rkind + implicit none + + type(command_line_t) command_line + type(inference_engine_t) inference_engine + + associate(file_name => string_t(command_line%flag_value("--input-file"))) + + if (len(file_name%string())==0) then + error stop new_line('a') // new_line('a') // & + 'Usage: fpm run --example read-query-infer -- --input-file "<file-name>"' + end if + + print *, "Reading an inference_engine_t object from the same JSON file '"//file_name%string()//"'." + associate(inference_engine => inference_engine_t(file_t(file_name))) + + print *, "Querying the new inference_engine_t object for several properties:" + associate(activation_name => inference_engine%activation_function_name()) + print *, "Activation function: ", activation_name%string() + end associate + print *, "Number of outputs:", inference_engine%num_outputs() + print *, "Number of inputs:", inference_engine%num_inputs() + print *, "Nodes per layer:", inference_engine%nodes_per_layer() + print *, "Performing inference:" + + block + integer, parameter :: tensor_size = 2, num_tests = 3 + real, parameter :: tensor_range = 11. + real harvest(tensor_size) + integer i + + call random_init(repeatable=.false., image_distinct=.true.) 
+ + print *, "Inputs | Outputs " + + do i = 1, num_tests + call random_number(harvest) + associate(inputs => tensor_t(tensor_range*harvest)) + associate(outputs => inference_engine%infer(inputs)) + print '(2(2g12.5,a,2x))', inputs%values(), "|", outputs%values() + end associate + end associate + end do + + end block + end associate ! associate(inference_engine => ...) + end associate ! associate(file_name => ...) +end program diff --git a/src/inference_engine/inference_engine_m_.f90 b/src/inference_engine/inference_engine_m_.f90 index be0780228..029068c78 100644 --- a/src/inference_engine/inference_engine_m_.f90 +++ b/src/inference_engine/inference_engine_m_.f90 @@ -6,6 +6,7 @@ module inference_engine_m_ use julienne_file_m, only : file_t use julienne_string_m, only : string_t use kind_parameters_m, only : rkind + use metadata_m, only : metadata_t use tensor_m, only : tensor_t use tensor_range_m, only : tensor_range_t use differentiable_activation_strategy_m, only :differentiable_activation_strategy_t @@ -24,7 +25,7 @@ module inference_engine_m_ !! Encapsulate the minimal information needed to perform inference private type(tensor_range_t) input_range_, output_range_ - type(string_t) metadata_(size(key)) + type(metadata_t) metadata_ real(rkind), allocatable :: weights_(:,:,:), biases_(:,:) integer, allocatable :: nodes_(:) class(activation_strategy_t), allocatable :: activation_strategy_ ! Strategy Pattern facilitates elemental activation @@ -46,7 +47,7 @@ module inference_engine_m_ type exchange_t type(tensor_range_t) input_range_, output_range_ - type(string_t) metadata_(size(key)) + type(metadata_t) metadata_ real(rkind), allocatable :: weights_(:,:,:), biases_(:,:) integer, allocatable :: nodes_(:) class(activation_strategy_t), allocatable :: activation_strategy_ ! 
Strategy Pattern facilitates elemental activation @@ -72,7 +73,7 @@ impure module function construct_from_padded_arrays(metadata, weights, biases, n type(inference_engine_t) inference_engine end function - impure elemental module function construct_from_json(file_) result(inference_engine) + impure elemental module function from_json(file_) result(inference_engine) implicit none type(file_t), intent(in) :: file_ type(inference_engine_t) inference_engine diff --git a/src/inference_engine/inference_engine_s.F90 b/src/inference_engine/inference_engine_s.F90 index 4c2ee8ee0..cdb673972 100644 --- a/src/inference_engine/inference_engine_s.F90 +++ b/src/inference_engine/inference_engine_s.F90 @@ -30,7 +30,9 @@ module procedure to_exchange exchange%input_range_ = self%input_range_ exchange%output_range_ = self%output_range_ - exchange%metadata_ = self%metadata_ + associate(strings => self%metadata_%strings()) + exchange%metadata_ = metadata_t(strings(1),strings(2),strings(3),strings(4),strings(5)) + end associate exchange%weights_ = self%weights_ exchange%biases_ = self%biases_ exchange%nodes_ = self%nodes_ @@ -129,29 +131,29 @@ pure subroutine difference_consistency(self) end subroutine - impure subroutine set_activation_strategy(inference_engine) - type(inference_engine_t), intent(inout) :: inference_engine - character(len=:), allocatable :: function_name - function_name = inference_engine%metadata_(findloc(key, "activationFunction", dim=1))%string() - select case(function_name) + impure function activation_factory(activation_name) result(activation) + character(len=*), intent(in) :: activation_name + class(activation_strategy_t), allocatable :: activation + + select case(activation_name) case("swish") - inference_engine%activation_strategy_ = swish_t() + activation = swish_t() case("sigmoid") - inference_engine%activation_strategy_ = sigmoid_t() + activation = sigmoid_t() case("step") - inference_engine%activation_strategy_ = step_t() + activation = step_t() 
case("gelu") - inference_engine%activation_strategy_ = gelu_t() + activation = gelu_t() case("relu") - inference_engine%activation_strategy_ = relu_t() + activation = relu_t() case default - error stop "inference_engine_s(set_activation_strategy): unrecognized activation strategy '"//function_name//"'" + error stop "inference_engine_s(activation_factory): unrecognized activation strategy '"//activation_name//"'" end select - end subroutine + end function module procedure construct_from_padded_arrays - inference_engine%metadata_ = metadata + inference_engine%metadata_ = metadata_t(metadata(1),metadata(2),metadata(3),metadata(4),metadata(5)) inference_engine%weights_ = weights inference_engine%biases_ = biases inference_engine%nodes_ = nodes @@ -180,86 +182,117 @@ impure subroutine set_activation_strategy(inference_engine) end if end block - call set_activation_strategy(inference_engine) + associate(strings => inference_engine%metadata_%strings()) + inference_engine%activation_strategy_ = activation_factory(strings(4)%string()) + end associate + call assert_consistency(inference_engine) end procedure construct_from_padded_arrays - module procedure construct_from_json + module procedure from_json type(string_t), allocatable :: lines(:), metadata(:) type(tensor_range_t) input_range, output_range type(layer_t) hidden_layers, output_layer - type(neuron_t) output_neuron real(rkind), allocatable :: hidden_weights(:,:,:) + character(len=:), allocatable :: justified_line integer l +#ifdef _CRAYFTN + type(tensor_range_t) proto_range + type(metadata_t) proto_meta + type(neuron_t) proto_neuron + proto_range = tensor_range_t("",[0.],[1.]) + proto_meta = metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t("")) + proto_neuron = neuron_t(weights=[0.], bias=0.) 
+#endif lines = file_%lines() + call assert(adjustl(lines(1)%string())=="{", "inference_engine_s(from_json): expected outermost object '{'") - l = 1 -#ifndef NAGFOR - call assert(adjustl(lines(l)%string())=="{", "construct_from_json: expecting '{' to start outermost object", lines(l)%string()) + associate(num_lines => size(lines)) + +#ifndef _CRAYFTN + associate(proto_range => tensor_range_t("",[0.],[1.])) #endif + associate(range_lines => size(proto_range%to_json())) + + find_inputs_range: & + do l = 1, num_lines + justified_line = adjustl(lines(l)%string()) + if (justified_line == '"inputs_range": {') exit + end do find_inputs_range + call assert(justified_line =='"inputs_range": {', 'from_json: expecting "inputs_range": {', justified_line) + input_range = tensor_range_t(lines(l:l+range_lines-1)) + + find_outputs_range: & + do l = 1, num_lines + justified_line = adjustl(lines(l)%string()) + if (justified_line == '"outputs_range": {') exit + end do find_outputs_range + call assert(justified_line =='"outputs_range": {', 'from_json: expecting "outputs_range": {', justified_line) + output_range = tensor_range_t(lines(l:l+range_lines-1)) - l = 2 - metadata = [string_t(""),string_t(""),string_t(""),string_t(""),string_t("false")] - if (adjustl(lines(l)%string()) == '"metadata": {') then - block - character(len=:), allocatable :: justified_line - do - l = l + 1 - justified_line = adjustl(lines(l)%string()) - if (justified_line == "},") exit - metadata(findloc(key, trim(get_key_string(justified_line)), dim=1)) = get_key_value(justified_line) - end do - l = l + 1 - end block - end if - - call assert(adjustl(lines(l)%string())=='"tensor_range": {', 'from_json: expecting "tensor_range": {', lines(l)%string()) - + end associate #ifndef _CRAYFTN - associate(prototype => tensor_range_t("",[0.],[1.])) -#else - block - type(tensor_range_t) prototype - prototype = tensor_range_t("",[0.],[1.]) -#endif - associate(num_lines => size(prototype%to_json())) - input_range = 
tensor_range_t(lines(l:l+num_lines-1)) - l = l + num_lines - output_range = tensor_range_t(lines(l:l+num_lines-1)) - l = l + num_lines end associate -#ifndef _CRAYFTN - end associate -#else - end block #endif - call assert(adjustl(lines(l)%string())=='"hidden_layers": [', 'from_json: expecting "hidden_layers": [', lines(l)%string()) - l = l + 1 + find_hidden_layers: & + do l = 1, num_lines + justified_line = adjustl(lines(l)%string()) + if (justified_line == '"hidden_layers": [') exit + end do find_hidden_layers + call assert(justified_line=='"hidden_layers": [', 'from_json: expecting "hidden_layers": [', justified_line) + + read_hidden_layers: & + block + integer, parameter :: bracket_lines_per_layer=2 + character(len=:), allocatable :: output_layer_line + + hidden_layers = layer_t(lines, start=l+1) - block - integer, parameter :: lines_per_neuron=4, bracket_lines_per_layer=2 - character(len=:), allocatable :: output_layer_line - - hidden_layers = layer_t(lines, start=l) +#ifndef _CRAYFTN + associate(proto_neuron => neuron_t(weights=[0.], bias=0.)) +#endif + associate(num_neuron_lines => size(proto_neuron%to_json())) + associate( output_layer_line_number => l + 1 + num_neuron_lines*sum(hidden_layers%count_neurons()) & + + bracket_lines_per_layer*hidden_layers%count_layers() + 1) - associate( output_layer_line_number => l + lines_per_neuron*sum(hidden_layers%count_neurons()) & - + bracket_lines_per_layer*hidden_layers%count_layers() + 1) + output_layer_line = lines(output_layer_line_number)%string() + call assert(adjustl(output_layer_line)=='"output_layer": [', 'from_json: expecting "output_layer": [', & + lines(output_layer_line_number)%string()) - output_layer_line = lines(output_layer_line_number)%string() - call assert(adjustl(output_layer_line)=='"output_layer": [', 'from_json: expecting "output_layer": [', & - lines(output_layer_line_number)%string()) + output_layer = layer_t(lines, start=output_layer_line_number) + end associate + end associate +#ifndef 
_CRAYFTN + end associate +#endif + end block read_hidden_layers + + find_metadata: & + do l = 1, num_lines + justified_line = adjustl(lines(l)%string()) + if (justified_line == '"metadata": {') exit + end do find_metadata + call assert(justified_line=='"metadata": {', 'from_json: expecting "metadata": {', justified_line) - output_layer = layer_t(lines, start=output_layer_line_number) +#ifndef _CRAYFTN + associate(proto_meta => metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t(""))) +#endif + associate(metadata_object => metadata_t(lines(l:l+size(proto_meta%to_json())-1))) + inference_engine = hidden_layers%inference_engine(metadata_object%strings(), output_layer, input_range, output_range) end associate - end block +#ifndef _CRAYFTN + end associate +#endif + end associate ! associate(num_lines ... ) - inference_engine = hidden_layers%inference_engine(metadata, output_layer, input_range, output_range) + associate(strings => inference_engine%metadata_%strings()) + inference_engine%activation_strategy_ = activation_factory(strings(4)%string()) + end associate - call set_activation_strategy(inference_engine) call assert_consistency(inference_engine) contains @@ -303,7 +336,7 @@ function get_key_value(line) result(value_) #endif end function - end procedure construct_from_json + end procedure from_json module procedure assert_conformable_with @@ -397,171 +430,160 @@ function get_key_value(line) result(value_) character(len=:), allocatable :: comma_separated_values, csv_format character(len=17) :: single_value integer, parameter :: & - outer_object_braces = 2, hidden_layer_outer_brackets = 2, lines_per_neuron = 4, inner_brackets_per_layer = 2, & - output_layer_brackets = 2, metadata_outer_braces = 2, input_range_object = 5, output_range_object = 5 + outer_object_braces = 2, hidden_layer_outer_brackets = 2, inner_brackets_per_layer = 2, & + output_layer_brackets = 2 +#ifdef _CRAYFTN + type(tensor_range_t) proto_range + type(metadata_t) proto_meta + 
type(neuron_t) proto_neuron + proto_range = tensor_range_t("",[0._rkind],[1._rkind]) + proto_meta = metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t("")) + proto_neuron = neuron_t([0._rkind],0._rkind) +#endif call assert_consistency(self) csv_format = separated_values(separator=",", mold=[real(rkind)::]) - associate(num_hidden_layers => size(self%nodes_)-2, & + associate( & + num_hidden_layers => size(self%nodes_)-2, & neurons_per_layer => self%nodes_(lbound(self%nodes_,1)+1), & num_outputs => self%num_outputs(), & num_inputs => self%num_inputs() & ) - - call assert(all(neurons_per_layer==self%nodes_(lbound(self%nodes_,1)+1 : ubound(self%nodes_,1)-1)), & - "to_json: uniform hidden layers") - - associate(num_lines => & - outer_object_braces & - + metadata_outer_braces + size(key) & - + input_range_object + output_range_object & - + hidden_layer_outer_brackets + (num_hidden_layers)*(inner_brackets_per_layer + neurons_per_layer*lines_per_neuron) & - + output_layer_brackets + num_outputs*lines_per_neuron & +#ifndef _CRAYFTN + associate( & + proto_range => tensor_range_t("",[0._rkind],[1._rkind]), & + proto_meta => metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t("")), & + proto_neuron => neuron_t([0._rkind],0._rkind) & ) - allocate(lines(num_lines)) - - line = 1 - lines(line) = string_t('{') - - line = line + 1 - lines(line) = string_t(' "metadata": {') - - line = line + 1 - lines(line) = string_t(' "modelName": "' // & - self%metadata_(findloc(key, "modelName", dim=1))%string() // '",') - line = line + 1 - lines(line) = string_t(' "modelAuthor": "' // & - self%metadata_(findloc(key, "modelAuthor", dim=1))%string() // '",') - line = line + 1 - lines(line) = string_t(' "compilationDate": "' // & - self%metadata_(findloc(key, "compilationDate", dim=1))%string() // '",') - line = line + 1 - lines(line) = string_t(' "activationFunction": "' // & - self%metadata_(findloc(key, "activationFunction", dim=1))%string() // '",') - 
line = line + 1 - lines(line) = string_t(' "usingSkipConnections": ' // & - self%metadata_(findloc(key, "usingSkipConnections", dim=1))%string()) - - line = line + 1 - lines(line) = string_t(' },') +#endif + associate( & + num_tensor_range_lines => size(proto_range%to_json()), & + num_metadata_lines => size(proto_meta%to_json()), & + num_neuron_lines => size(proto_neuron%to_json()) & + ) + call assert(all(neurons_per_layer==self%nodes_(lbound(self%nodes_,1)+1 : ubound(self%nodes_,1)-1)), & + "to_json: uniform hidden layers") + + associate( & + num_lines => outer_object_braces + num_metadata_lines + 2 * num_tensor_range_lines & + + hidden_layer_outer_brackets + num_hidden_layers*(inner_brackets_per_layer + neurons_per_layer*num_neuron_lines) & + + output_layer_brackets + num_outputs*num_neuron_lines& + ) + allocate(lines(num_lines)) + + line = 1 + lines(line) = string_t('{') + line = line + 1 - block - type(string_t), allocatable :: input_range_json(:), output_range_json(:) - - line = line + 1 - input_range_json = self%input_range_%to_json() - associate(last_line => ubound(input_range_json,1)) - call assert(last_line==input_range_object, "inference_engine_s(to_json): input_range object line count") - input_range_json(last_line) = input_range_json(last_line) // "," - lines(line:line+input_range_object-1) = input_range_json - line = line + input_range_object-1 - end associate + lines(line:line+num_metadata_lines-1) = self%metadata_%to_json() + line = line + num_metadata_lines - line = line + 1 - output_range_json = self%output_range_%to_json() - associate(last_line => ubound(output_range_json,1)) - call assert(last_line==output_range_object, "inference_engine_s(to_json): output_range object line count") - output_range_json(last_line) = output_range_json(last_line) // "," - lines(line:line+output_range_object-1) = output_range_json - line = line + input_range_object-1 - end associate - end block + lines(line:line+num_tensor_range_lines-1) = self%input_range_%to_json() 
+ lines(line+num_tensor_range_lines-1) = lines(line+num_tensor_range_lines-1) // "," + line = line + num_tensor_range_lines - line = line + 1 - lines(line) = string_t(' "hidden_layers": [') - - layer = 1 - line = line + 1 - lines(line) = string_t(' [') - do neuron = 1, neurons_per_layer - line = line + 1 - lines(line) = string_t(' {') - line = line + 1 - if (allocated(comma_separated_values)) deallocate(comma_separated_values) - allocate(character(len=num_inputs*(characters_per_value+1)-1)::comma_separated_values) - block - integer l - associate(n => self%nodes_) - l = 1 - write(comma_separated_values, fmt = csv_format) self%weights_(neuron,1:n(l-1),l) - end associate - end block - lines(line) = string_t(' "weights": [' // trim(comma_separated_values) // '],') - line = line + 1 - write(single_value, fmt = csv_format) self%biases_(neuron,layer) - lines(line) = string_t(' "bias": ' // trim(single_value)) - line = line + 1 - lines(line) = string_t(" }" // trim(merge(' ',',',neuron==neurons_per_layer))) - end do - line = line + 1 - lines(line) = string_t(trim(merge(" ] ", " ],", any(num_hidden_layers==[1,line])))) - - do layer = 1, num_hidden_layers-1 - line = line + 1 - lines(line) = string_t(' [') - block - real(rkind), allocatable :: hidden_layer_weights(:,:) - integer j, l - - associate(n => self%nodes_, l => layer + 1) - allocate(hidden_layer_weights(n(l),n(l-1))) - do concurrent(j = 1:n(l)) - hidden_layer_weights(j,1:n(l-1)) = self%weights_(j,1:n(l-1),l) - end do - hidden_layer_weights = transpose(hidden_layer_weights) - end associate - do neuron = 1, neurons_per_layer + lines(line:line+num_tensor_range_lines-1) = self%output_range_%to_json() + lines(line+num_tensor_range_lines-1) = lines(line+num_tensor_range_lines-1) // "," + line = line + num_tensor_range_lines + + lines(line) = string_t(' "hidden_layers": [') line = line + 1 - lines(line) = string_t(' {') + + layer = 1 + lines(line) = string_t(' [') + do neuron = 1, neurons_per_layer + line = line + 1 + 
lines(line) = string_t(' {') + line = line + 1 + if (allocated(comma_separated_values)) deallocate(comma_separated_values) + allocate(character(len=num_inputs*(characters_per_value+1)-1)::comma_separated_values) + block + integer l + associate(n => self%nodes_) + l = 1 + write(comma_separated_values, fmt = csv_format) self%weights_(neuron,1:n(l-1),l) + end associate + end block + lines(line) = string_t(' "weights": [' // trim(comma_separated_values) // '],') + line = line + 1 + write(single_value, fmt = csv_format) self%biases_(neuron,layer) + lines(line) = string_t(' "bias": ' // trim(single_value)) + line = line + 1 + lines(line) = string_t(" }" // trim(merge(' ',',',neuron==neurons_per_layer))) + end do line = line + 1 - if (allocated(comma_separated_values)) deallocate(comma_separated_values) - allocate(character(len=neurons_per_layer*(characters_per_value+1)-1)::comma_separated_values) - write(comma_separated_values, fmt = csv_format) hidden_layer_weights(:, neuron) - lines(line) = string_t(' "weights": [' // trim(comma_separated_values) // '],') + lines(line) = string_t(trim(merge(" ] ", " ],", any(num_hidden_layers==[1,line])))) + + do layer = 1, num_hidden_layers-1 + line = line + 1 + lines(line) = string_t(' [') + block + real(rkind), allocatable :: hidden_layer_weights(:,:) + integer j, l + + associate(n => self%nodes_, l => layer + 1) + allocate(hidden_layer_weights(n(l),n(l-1))) + do concurrent(j = 1:n(l)) + hidden_layer_weights(j,1:n(l-1)) = self%weights_(j,1:n(l-1),l) + end do + hidden_layer_weights = transpose(hidden_layer_weights) + end associate + do neuron = 1, neurons_per_layer + line = line + 1 + lines(line) = string_t(' {') + line = line + 1 + if (allocated(comma_separated_values)) deallocate(comma_separated_values) + allocate(character(len=neurons_per_layer*(characters_per_value+1)-1)::comma_separated_values) + write(comma_separated_values, fmt = csv_format) hidden_layer_weights(:, neuron) + lines(line) = string_t(' "weights": [' // 
trim(comma_separated_values) // '],') + line = line + 1 + write(single_value, fmt = csv_format) self%biases_(neuron,layer+1) + lines(line) = string_t(' "bias": ' // trim(single_value)) + line = line + 1 + lines(line) = string_t(" }" // trim(merge(' ',',',neuron==neurons_per_layer))) + end do + end block + line = line + 1 + lines(line) = string_t(" ]" // trim(merge(' ',',',layer==num_hidden_layers-1))) + end do + line = line + 1 - write(single_value, fmt = csv_format) self%biases_(neuron,layer+1) - lines(line) = string_t(' "bias": ' // trim(single_value)) + lines(line) = string_t(" ],") + line = line + 1 - lines(line) = string_t(" }" // trim(merge(' ',',',neuron==neurons_per_layer))) - end do - end block - line = line + 1 - lines(line) = string_t(" ]" // trim(merge(' ',',',layer==num_hidden_layers-1))) - end do + lines(line) = string_t(' "output_layer": [') + + do neuron = 1, num_outputs + line = line + 1 + lines(line) = string_t(' {') + line = line + 1 + if (allocated(comma_separated_values)) deallocate(comma_separated_values) + allocate(character(len=neurons_per_layer*(characters_per_value+1)-1)::comma_separated_values) + associate(n => self%nodes_, l => ubound(self%nodes_,1)) + write(comma_separated_values, fmt = csv_format) self%weights_(neuron,1:n(l-1),l) + end associate + lines(line) = string_t(' "weights": [' // trim(comma_separated_values) // '],') + line = line + 1 + write(single_value, fmt = csv_format) self%biases_(neuron,ubound(self%biases_,2)) + lines(line) = string_t(' "bias": ' // trim(single_value)) + line = line + 1 + lines(line) = string_t(" }" // trim(merge(' ',',',neuron==num_outputs))) + end do - line = line + 1 - lines(line) = string_t(" ],") + line = line + 1 + lines(line) = string_t(' ]') - line = line + 1 - lines(line) = string_t(' "output_layer": [') + line = line + 1 + lines(line) = string_t('}') - do neuron = 1, num_outputs - line = line + 1 - lines(line) = string_t(' {') - line = line + 1 - if (allocated(comma_separated_values)) 
deallocate(comma_separated_values) - allocate(character(len=neurons_per_layer*(characters_per_value+1)-1)::comma_separated_values) - associate(n => self%nodes_, l => ubound(self%nodes_,1)) - write(comma_separated_values, fmt = csv_format) self%weights_(neuron,1:n(l-1),l) + call assert(line == num_lines, "inference_engine_t%to_json: all lines defined", intrinsic_array_t([num_lines, line])) end associate - lines(line) = string_t(' "weights": [' // trim(comma_separated_values) // '],') - line = line + 1 - write(single_value, fmt = csv_format) self%biases_(neuron,ubound(self%biases_,2)) - lines(line) = string_t(' "bias": ' // trim(single_value)) - line = line + 1 - lines(line) = string_t(" }" // trim(merge(' ',',',neuron==num_outputs))) - end do - - line = line + 1 - lines(line) = string_t(' ]') - - line = line + 1 - lines(line) = string_t('}') - - call assert(line == num_lines, "inference_engine_t%to_json: all lines defined", intrinsic_array_t([num_lines, line])) + end associate +#ifndef _CRAYFTN end associate +#endif end associate json_file = file_t(lines) @@ -569,11 +591,15 @@ function get_key_value(line) result(value_) end procedure to_json module procedure skip - use_skip_connections = self%metadata_(findloc(key, "usingSkipConnections", dim=1))%string() == "true" + associate(strings => self%metadata_%strings()) + use_skip_connections = merge(.true., .false., strings(5) == "true") + end associate end procedure module procedure activation_function_name - activation_name = self%metadata_(findloc(key, "activationFunction", dim=1)) + associate(strings => self%metadata_%strings()) + activation_name = strings(4) + end associate end procedure end submodule inference_engine_s diff --git a/src/inference_engine/metadata_m.f90 b/src/inference_engine/metadata_m.f90 new file mode 100644 index 000000000..09713c83a --- /dev/null +++ b/src/inference_engine/metadata_m.f90 @@ -0,0 +1,57 @@ +module metadata_m + use julienne_string_m, only : string_t + implicit none + + private + 
public :: metadata_t + + type metadata_t + private + type(string_t) modelName_, modelAuthor_, compilationDate_, activationFunction_, usingSkipConnections_ + contains + procedure :: strings + procedure :: to_json + procedure :: equals + generic :: operator(==) => equals + end type + + interface metadata_t + + pure module function from_json(lines) result(metadata) + implicit none + type(string_t), intent(in) :: lines(:) + type(metadata_t) metadata + end function + + pure module function from_components(modelName, modelAuthor, compilationDate, activationFunction, usingSkipConnections) & + result(metadata) + implicit none + type(string_t), intent(in) :: modelName, modelAuthor, compilationDate, activationFunction, usingSkipConnections + type(metadata_t) metadata + end function + + end interface + + interface + + pure module function strings(self) result(components) + implicit none + class(metadata_t), intent(in) :: self + type(string_t), allocatable :: components(:) + end function + + pure module function to_json(self) result(lines) + implicit none + class(metadata_t), intent(in) :: self + type(string_t), allocatable :: lines(:) + end function + + elemental module function equals(lhs, rhs) result(lhs_equals_rhs) + implicit none + class(metadata_t), intent(in) :: lhs, rhs + logical lhs_equals_rhs + end function + + end interface + +end module diff --git a/src/inference_engine/metadata_s.f90 b/src/inference_engine/metadata_s.f90 new file mode 100644 index 000000000..fbafa1508 --- /dev/null +++ b/src/inference_engine/metadata_s.f90 @@ -0,0 +1,70 @@ +submodule(metadata_m) metadata_s + use assert_m, only : assert + implicit none + +contains + + module procedure strings + components = [self%modelName_, self%modelAuthor_, self%compilationDate_, self%activationFunction_, self%usingSkipConnections_] + end procedure + + module procedure from_components + metadata%modelName_ = modelName + metadata%modelAuthor_ = modelAuthor + metadata%compilationDate_ = compilationDate + 
metadata%activationFunction_ = activationFunction + metadata%usingSkipConnections_ = usingSkipConnections + end procedure + + module procedure from_json + integer l + + call assert(lines(1)%get_json_key() == "metadata", "metadata_s(from_json): metadata found") + + do l = 2, size(lines)-1 + associate(key => lines(l)%get_json_key()) + select case (key%string()) + case("modelName") + metadata%modelName_ = lines(l)%get_json_value(key, mold=string_t("")) + case("modelAuthor") + metadata%modelAuthor_ = lines(l)%get_json_value(key, mold=string_t("")) + case("compilationDate") + metadata%compilationDate_ = lines(l)%get_json_value(key, mold=string_t("")) + case("activationFunction") + metadata%activationFunction_ = lines(l)%get_json_value(key, mold=string_t("")) + case("usingSkipConnections") + metadata%usingSkipConnections_ = lines(l)%get_json_value(key, mold=string_t("")) + case default + error stop "metadata_s(from_json): missing key " // key%string() + end select + end associate + end do + + call assert(trim(adjustl(lines(size(lines))%string())) == "}," , "metadata_s(from_json): metadata object end found") + end procedure + + module procedure to_json + + character(len=*), parameter :: indent = repeat(" ",ncopies=4) + + lines = [ & + string_t(indent // '"metadata": {'), & + string_t(indent // indent // '"modelName" : "' // trim(adjustl(self%modelName_%string())) // '",' ), & + string_t(indent // indent // '"modelAuthor" : "' // trim(adjustl(self%modelAuthor_%string())) // '",' ), & + string_t(indent // indent // '"compilationDate" : "' // trim(adjustl(self%compilationDate_%string())) // '",'), & + string_t(indent // indent // '"activationFunction" : "' // trim(adjustl(self%activationFunction_%string())) // '",'), & + string_t(indent // indent // '"usingSkipConnections" : "' // trim(adjustl(self%usingSkipConnections_%string())) // '"'), & + string_t(indent // '},') & + ] + end procedure + + module procedure equals + lhs_equals_rhs = & + lhs%modelName_ == rhs%modelName_ 
.and. & + lhs%modelAuthor_ == rhs%modelAuthor_ .and. & + lhs%compilationDate_ == rhs%compilationDate_ .and. & + lhs%activationFunction_ == rhs%activationFunction_ .and. & + lhs%usingSkipConnections_ == rhs%usingSkipConnections_ + end procedure + +end submodule metadata_s diff --git a/src/inference_engine/neuron_m.f90 b/src/inference_engine/neuron_m.f90 index a0f416bf3..bdfb5e42f 100644 --- a/src/inference_engine/neuron_m.f90 +++ b/src/inference_engine/neuron_m.f90 @@ -15,6 +15,7 @@ module neuron_m real(rkind) bias_ type(neuron_t), allocatable :: next contains + procedure :: to_json procedure :: weights procedure :: bias procedure :: next_allocated @@ -24,7 +25,7 @@ module neuron_m interface neuron_t - pure recursive module function construct(neuron_lines, start) result(neuron) + pure recursive module function from_json(neuron_lines, start) result(neuron) !! construct linked list of neuron_t objects from an array of JSON-formatted text lines implicit none type(string_t), intent(in) :: neuron_lines(:) @@ -32,10 +33,23 @@ pure recursive module function construct(neuron_lines, start) result(neuron) type(neuron_t) neuron end function + pure module function from_components(weights, bias) result(neuron) + !! construct single neuron_t object from an array of weights and a bias + real(rkind), intent(in) :: weights(:) + real(rkind), intent(in) :: bias + type(neuron_t) neuron + end function + end interface interface + pure module function to_json(self) result(lines) + implicit none + class(neuron_t), intent(in) :: self + type(string_t), allocatable :: lines(:) + end function + module function weights(self) result(my_weights) implicit none class(neuron_t), intent(in) :: self diff --git a/src/inference_engine/neuron_s.f90 b/src/inference_engine/neuron_s.f90 index 115675af3..ec6f4c159 100644 --- a/src/inference_engine/neuron_s.f90 +++ b/src/inference_engine/neuron_s.f90 @@ -2,11 +2,32 @@ ! 
Terms of use are as specified in LICENSE.txt submodule(neuron_m) neuron_s use assert_m, only : assert + use julienne_formats_m, only : separated_values implicit none contains - module procedure construct + module procedure to_json + integer, parameter :: characters_per_value=17 + character(len=*), parameter :: indent = repeat(" ",ncopies=4) + character(len=:), allocatable :: csv_format, weights_string, bias_string + + call assert(allocated(self%weights_), "neuron_s(to_json): allocated weights_") + + csv_format = separated_values(separator=",", mold=[real(rkind)::]) + allocate(character(len=size(self%weights_)*(characters_per_value+1)-1)::weights_string) + allocate(character(len=characters_per_value)::bias_string) + write(weights_string, fmt = csv_format) self%weights_ + write(bias_string,*) self%bias_ + lines = [ & + string_t(indent // '{'), & + string_t(indent // ' "weights": [' // trim(adjustl(weights_string)) // '],'), & + string_t(indent // ' "bias": ' // trim(adjustl(bias_string))), & + string_t(indent // '}') & + ] + end procedure + + module procedure from_json character(len=:), allocatable :: line integer i @@ -37,8 +58,13 @@ line = adjustl(neuron_lines(start+3)%string()) call assert(line(1:1)=='}', "neuron_s(construct): neuron object end", line) line = adjustr(neuron_lines(start+3)%string()) - if (line(len(line):len(line)) == ",") neuron%next = construct(neuron_lines, start+4) + if (line(len(line):len(line)) == ",") neuron%next = from_json(neuron_lines, start+4) + + end procedure + module procedure from_components + neuron%weights_ = weights + neuron%bias_ = bias end procedure module procedure weights diff --git a/src/inference_engine/tensor_range_s.f90 b/src/inference_engine/tensor_range_s.f90 index ce022a3e5..1e7264a67 100644 --- a/src/inference_engine/tensor_range_s.f90 +++ b/src/inference_engine/tensor_range_s.f90 @@ -22,7 +22,7 @@ tensor_range_key_found = .false. 
do l=1,size(lines) - if (lines(l)%get_json_key() == "tensor_range") then + if (lines(l)%get_json_key() == "inputs_range" .or. lines(l)%get_json_key() == "outputs_range") then tensor_range_key_found = .true. tensor_range%layer_ = lines(l+1)%get_json_value(key=string_t("layer"), mold=string_t("")) tensor_range%minima_ = lines(l+2)%get_json_value(key=string_t("minima"), mold=[0.]) @@ -62,13 +62,17 @@ allocate(character(len=size(self%maxima_)*(characters_per_value+1)-1)::maxima_string) write(minima_string, fmt = csv_format) self%minima_ write(maxima_string, fmt = csv_format) self%maxima_ - lines = [ & - string_t(indent // '"tensor_range": {'), & - string_t(indent // ' "layer": "' // trim(adjustl(self%layer_)) // '",'), & - string_t(indent // ' "minima": [' // trim(adjustl(minima_string)) // '],'), & - string_t(indent // ' "maxima": [' // trim(adjustl(maxima_string)) // ']'), & - string_t(indent // '}') & - ] + block + character(len=:), allocatable :: layer + layer = trim(adjustl(self%layer_)) + lines = [ & + string_t(indent // '"'//layer//'_range": {'), & + string_t(indent // ' "layer": "' // layer // '",'), & + string_t(indent // ' "minima": [' // trim(adjustl(minima_string)) // '],'), & + string_t(indent // ' "maxima": [' // trim(adjustl(maxima_string)) // ']'), & + string_t(indent // '}') & + ] + end block end procedure module procedure map_to_training_range diff --git a/src/inference_engine/trainable_engine_m.F90 b/src/inference_engine/trainable_engine_m.F90 index ac3428af2..6c8b4e7cd 100644 --- a/src/inference_engine/trainable_engine_m.F90 +++ b/src/inference_engine/trainable_engine_m.F90 @@ -6,6 +6,7 @@ module trainable_engine_m use inference_engine_m_, only : inference_engine_t use differentiable_activation_strategy_m, only : differentiable_activation_strategy_t use kind_parameters_m, only : rkind + use metadata_m, only : metadata_t use tensor_m, only : tensor_t use tensor_range_m, only : tensor_range_t use mini_batch_m, only : mini_batch_t @@ -19,7 +20,7 @@ 
module trainable_engine_m !! Encapsulate the information needed to perform training private type(tensor_range_t) input_range_, output_range_ - type(string_t), allocatable :: metadata_(:) + type(metadata_t) metadata_ real(rkind), allocatable :: w(:,:,:) ! weights real(rkind), allocatable :: b(:,:) ! biases integer, allocatable :: n(:) ! nodes per layer diff --git a/src/inference_engine/trainable_engine_s.F90 b/src/inference_engine/trainable_engine_s.F90 index e1d5fe970..63845084c 100644 --- a/src/inference_engine/trainable_engine_s.F90 +++ b/src/inference_engine/trainable_engine_s.F90 @@ -262,8 +262,7 @@ module procedure construct_from_padded_arrays #endif - - trainable_engine%metadata_ = metadata + trainable_engine%metadata_ = metadata_t(metadata(1),metadata(2),metadata(3),metadata(4),metadata(5)) trainable_engine%n = nodes trainable_engine%w = weights trainable_engine%b = biases @@ -296,7 +295,8 @@ ! assignment-stmt disallows the procedure from being pure because it might ! deallocate polymorphic allocatable subcomponent `activation_strategy_` ! 
TODO: consider how this affects design - inference_engine = inference_engine_t(self%metadata_, self%w, self%b, self%n, self%input_range_, self%output_range_) + inference_engine = inference_engine_t( & + self%metadata_%strings(), self%w, self%b, self%n, self%input_range_, self%output_range_) end procedure module procedure perturbed_identity_network diff --git a/src/inference_engine_m.f90 b/src/inference_engine_m.f90 index 4c7178eab..5e373d165 100644 --- a/src/inference_engine_m.f90 +++ b/src/inference_engine_m.f90 @@ -8,6 +8,7 @@ module inference_engine_m use input_output_pair_m, only : input_output_pair_t, shuffle use inference_engine_m_, only : inference_engine_t, difference_t, infer use kind_parameters_m, only : rkind + use metadata_m, only : metadata_t use mini_batch_m, only : mini_batch_t use network_configuration_m, only : network_configuration_t use gelu_m, only : gelu_t diff --git a/test/main.F90 b/test/main.F90 index 7ab2c0859..e6ecdcf0b 100644 --- a/test/main.F90 +++ b/test/main.F90 @@ -4,6 +4,7 @@ program main use inference_engine_test_m, only : inference_engine_test_t use asymmetric_engine_test_m, only : asymmetric_engine_test_t use trainable_engine_test_m, only : trainable_engine_test_t + use metadata_test_m, only : metadata_test_t use hyperparameters_test_m, only : hyperparameters_test_t use network_configuration_test_m, only : network_configuration_test_t use training_configuration_test_m, only : training_configuration_test_t @@ -15,6 +16,7 @@ program main type(asymmetric_engine_test_t) asymmetric_engine_test type(trainable_engine_test_t) trainable_engine_test type(hyperparameters_test_t) hyperparameters_test + type(metadata_test_t) metadata_test type(network_configuration_test_t) network_configuration_test type(training_configuration_test_t) training_configuration_test type(tensor_range_test_t) tensor_range_test @@ -39,6 +41,7 @@ program main call random_init(repeatable=.true.,image_distinct=.true.) 
call hyperparameters_test%report(passes, tests) call network_configuration_test%report(passes, tests) + call metadata_test%report(passes, tests) call training_configuration_test%report(passes, tests) call tensor_range_test%report(passes, tests) call asymmetric_engine_test%report(passes, tests) diff --git a/test/metadata_test_m.F90 b/test/metadata_test_m.F90 new file mode 100644 index 000000000..b87b499b3 --- /dev/null +++ b/test/metadata_test_m.F90 @@ -0,0 +1,94 @@ +! Copyright (c), The Regents of the University of California +! Terms of use are as specified in LICENSE.txt +module metadata_test_m + !! Test metadata_t object I/O and construction + + ! External dependencies + use inference_engine_m, only : metadata_t + use julienne_m, only : test_t, test_result_t, test_description_t, test_description_substring, string_t +#ifdef __GFORTRAN__ + use julienne_m, only : test_function_i +#endif + + ! Internal dependencies + use metadata_m, only : metadata_t + + implicit none + + private + public :: metadata_test_t + + type, extends(test_t) :: metadata_test_t + contains + procedure, nopass :: subject + procedure, nopass :: results + end type + +contains + + pure function subject() result(specimen) + character(len=:), allocatable :: specimen + specimen = "A metadata_t object" + end function + + function results() result(test_results) + type(test_description_t), allocatable :: test_descriptions(:) + type(test_result_t), allocatable :: test_results(:) + +#ifndef __GFORTRAN__ + test_descriptions = [ & + test_description_t( & + string_t("component-wise construction followed by conversion to and from JSON"), & + write_then_read_metadata) & + ] +#else + procedure(test_function_i), pointer :: check_write_then_read_ptr + check_write_then_read_ptr => write_then_read_metadata + + test_descriptions = [ & + test_description_t( & + string_t("component-wise construction followed by conversion to and from JSON"), & + check_write_then_read_ptr) & + ] +#endif + associate( & + 
substring_in_subject => index(subject(), test_description_substring) /= 0, & + substring_in_description => test_descriptions%contains_text(string_t(test_description_substring)) & + ) + test_descriptions = pack(test_descriptions, substring_in_subject .or. substring_in_description) + end associate + test_results = test_descriptions%run() + end function + + function write_then_read_metadata() result(test_passes) + logical test_passes +#ifdef _CRAYFTN + type(metadata_t) :: metadata, from_json + metadata = metadata_t( & + modelName = string_t("Metadata Unit Test"), & + modelAuthor = string_t("Julienne"), & + compilationDate = string_t("2024-06-27"), & + activationFunction = string_t("sigmoid"), & + usingSkipConnections = string_t("false") & + ) + from_json = metadata_t(metadata%to_json()) +#else + associate(metadata => & + metadata_t( & + modelName = string_t("Metadata Unit Test"), & + modelAuthor = string_t("Julienne"), & + compilationDate = string_t("2024-06-27"), & + activationFunction = string_t("sigmoid"), & + usingSkipConnections = string_t("false") & + ) & + ) + associate(from_json => metadata_t(metadata%to_json())) +#endif + test_passes = metadata == from_json +#ifndef _CRAYFTN + end associate + end associate +#endif + end function + +end module metadata_test_m diff --git a/test/tensor_range_test_m.F90 b/test/tensor_range_test_m.F90 index 2796781f2..2b4259493 100644 --- a/test/tensor_range_test_m.F90 +++ b/test/tensor_range_test_m.F90 @@ -65,9 +65,9 @@ function write_then_read_tensor_range() result(test_passes) type(file_t) :: json_file #ifdef _CRAYFTN type(tensor_range_t) :: tensor_range - tensor_range = tensor_range_t(layer="input", minima=[-1., 0., 1.], maxima=[1., 2., 4.]) + tensor_range = tensor_range_t(layer="inputs", minima=[-1., 0., 1.], maxima=[1., 2., 4.]) #else - associate(tensor_range => tensor_range_t(layer="input", minima=[-1., 0., 1.], maxima=[1., 2., 4.])) + associate(tensor_range => tensor_range_t(layer="inputs", minima=[-1., 0., 1.], 
maxima=[1., 2., 4.])) #endif associate(from_json => tensor_range_t(tensor_range%to_json())) test_passes = tensor_range == from_json