write_read_query_infer Subroutine

subroutine write_read_query_infer(output_file_name)

Arguments

Type             Intent   Optional   Attributes   Name
type(string_t)   in                                output_file_name
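A call site passes the destination path as a string_t, typically constructed from a character literal. A minimal sketch of a call (the file name below is only a placeholder, and string_t is assumed to be accessible in the caller's scope):

  call write_read_query_infer(string_t("identity_network.json"))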

Calls

write_read_query_infer calls the following procedures:

  file_t
  inference_engine_t%activation_function_name
  inference_engine_t%infer
  inference_engine_t%nodes_per_layer
  inference_engine_t%num_inputs
  inference_engine_t%num_outputs
  inference_engine_t%to_json
  tensor_t%values
  identity_network (which in turn calls string_t)
  string
  write_lines

Called by

write_read_query_infer is called by the program write_read_infer.
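A sketch of how the calling program might wrap this subroutine is shown below. The module name is an assumption, and the actual write_read_infer program may obtain the file name differently, for example from a command-line argument:

  program write_read_infer
    ! Sketch only: the module name is an assumption; the file-name literal is a placeholder.
    use sourcery_m, only : string_t
    implicit none
    call write_read_query_infer(string_t("identity_network.json"))
  contains
    subroutine write_read_query_infer(output_file_name)
      type(string_t), intent(in) :: output_file_name
      ! ... body as listed under Source Code below ...
    end subroutine
  end program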

Source Code

  subroutine write_read_query_infer(output_file_name)
    type(string_t), intent(in) :: output_file_name
    type(string_t) activation_name
    integer i, j
    integer, parameter :: num_neurons = 3, num_hidden_layers = 2
    type(inference_engine_t) network, inference_engine
    type(file_t) json_output_file, json_input_file
    type(tensor_t) inputs, outputs 

    print *, "Constructing an inference_engine_t neural-network object from scratch."
    network = identity_network()

    print *, "Converting an inference_engine_t object to a file_t object."
    json_output_file = network%to_json()

    print *, "Writing an inference_engine_t object to the file '"//output_file_name%string()//"' in JSON format."
    call json_output_file%write_lines(output_file_name)

    print *, "Reading an inference_engine_t object from the same JSON file '"//output_file_name%string()//"'."
    json_input_file = file_t(output_file_name)

    print *, "Constructing a new inference_engine_t object from the parameters read."
    inference_engine = inference_engine_t(json_input_file)

    print *, "Querying the new inference_engine_t object for several properties:"
    print *, "Number of outputs:", inference_engine%num_outputs()
    print *, "Number of inputs:", inference_engine%num_inputs()
    print *, "Nodes per layer:", inference_engine%nodes_per_layer()
    activation_name = inference_engine%activation_function_name()
    print *, "Activation function: ", activation_name%string()
    print *, "Performing inference:"
    inputs = tensor_t([2.,3.])
    print *, "Inputs: ", inputs%values()
    outputs = inference_engine%infer(inputs)
    print *, "Actual outputs: ", outputs%values()
    print *, "Correct outputs:  ", inputs%values()
  end subroutine write_read_query_infer
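Once a JSON network file exists, the read-and-infer half of the round trip can stand alone. A minimal sketch, using only the constructors and type-bound procedures exercised above; the module names and file name are assumptions and should be adjusted to match your build:

  program read_and_infer
    ! Sketch only: reconstruct an inference_engine_t from JSON and run one inference.
    use sourcery_m, only : string_t, file_t
    use inference_engine_m, only : inference_engine_t, tensor_t
    implicit none
    type(inference_engine_t) engine
    type(tensor_t) outputs

    engine = inference_engine_t(file_t(string_t("identity_network.json")))
    outputs = engine%infer(tensor_t([2.,3.]))
    print *, "Outputs: ", outputs%values()
  end program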