#!/usr/bin/python
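"""Dump the weights of a trained Keras model as quantized C tables for rnn.h.

Reads the model from sys.argv[1] and writes a C source file (sys.argv[2])
and a header (sys.argv[3]), e.g. (file names illustrative):

    ./dump_rnn.py weights.hdf5 rnn_data.c rnn_data.h
"""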

from __future__ import print_function

from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import GRU
from keras.models import load_model
from keras import backend as K
import sys
import re
import numpy as np

def printVector(f, vector, name):
    # Flatten to 1-D and emit as a C array of quantized weights.
    v = np.reshape(vector, (-1))
    f.write('static const rnn_weight {}[{}] = {{\n   '.format(name, len(v)))
    for i in range(0, len(v)):
        # Quantize to Q8 (value * 256); clamp the top at 127 so a weight
        # that rounds to 128 still fits in a signed byte (training-time
        # weight clipping keeps the magnitudes in range otherwise).
        f.write('{}'.format(min(127, int(round(256*v[i])))))
        if i != len(v)-1:
            f.write(',')
        else:
            break
        # Break the initializer into rows of eight values.
        if i % 8 == 7:
            f.write("\n   ")
        else:
            f.write(" ")
    f.write('\n};\n\n')
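# For illustration, printVector(f, np.array([0.1, -0.2, 0.3, 0.5]), 'example')
# emits the following C (note the 0.5 entry saturating at 127):
#
#   static const rnn_weight example[4] = {
#      26, -51, 77, 127
#   };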

def printLayer(f, hf, layer):
    weights = layer.get_weights()
    printVector(f, weights[0], layer.name + '_weights')
    # GRU layers carry three weight arrays (input, recurrent, bias);
    # dense layers carry two (input, bias).
    if len(weights) > 2:
        printVector(f, weights[1], layer.name + '_recurrent_weights')
    printVector(f, weights[-1], layer.name + '_bias')
    name = layer.name
    # Recover the activation name from its repr, e.g. "<function tanh at 0x...>".
    activation = re.search('function (.*) at', str(layer.activation)).group(1).upper()
    if len(weights) > 2:
        # A GRU kernel is (input_size, 3*units), so divide by 3 for the layer
        # size. Integer division keeps floats out of the generated C.
        f.write('const GRULayer {} = {{\n   {}_bias,\n   {}_weights,\n   {}_recurrent_weights,\n   {}, {}, ACTIVATION_{}\n}};\n\n'
                .format(name, name, name, name, weights[0].shape[0], weights[0].shape[1]//3, activation))
        hf.write('#define {}_SIZE {}\n'.format(name.upper(), weights[0].shape[1]//3))
        hf.write('extern const GRULayer {};\n\n'.format(name))
    else:
        f.write('const DenseLayer {} = {{\n   {}_bias,\n   {}_weights,\n   {}, {}, ACTIVATION_{}\n}};\n\n'
                .format(name, name, name, weights[0].shape[0], weights[0].shape[1], activation))
        hf.write('#define {}_SIZE {}\n'.format(name.upper(), weights[0].shape[1]))
        hf.write('extern const DenseLayer {};\n\n'.format(name))
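# As an illustration, a hypothetical 24-unit GRU named 'noise_gru' with a
# (42, 72) kernel would produce in the C file:
#
#   const GRULayer noise_gru = {
#      noise_gru_bias,
#      noise_gru_weights,
#      noise_gru_recurrent_weights,
#      42, 24, ACTIVATION_TANH
#   };
#
# and in the header:
#
#   #define NOISE_GRU_SIZE 24
#   extern const GRULayer noise_gru;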


def foo(c, name):
    # Stand-in for the WeightClip constraint so load_model() can deserialize
    # the model; the constraint itself is irrelevant when dumping weights.
    return 1

def mean_squared_sqrt_error(y_true, y_pred):
    # Mean squared error between the square roots of target and prediction;
    # only needed so load_model() can resolve the custom-loss names below.
    return K.mean(K.square(K.sqrt(y_pred) - K.sqrt(y_true)), axis=-1)
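# Sanity check on scalars (illustrative): y_true=4, y_pred=1 gives
# (sqrt(1) - sqrt(4))**2 = 1.0.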


# Every custom loss/metric recorded in the model file just needs *some*
# callable registered under the right name for deserialization; the dummy
# loss above works for all of them.
model = load_model(sys.argv[1], custom_objects={
    'msse': mean_squared_sqrt_error,
    'mean_squared_sqrt_error': mean_squared_sqrt_error,
    'my_crossentropy': mean_squared_sqrt_error,
    'mycost': mean_squared_sqrt_error,
    'WeightClip': foo,
})

f = open(sys.argv[2], 'w')
hf = open(sys.argv[3], 'w')

f.write('/*This file is automatically generated from a Keras model*/\n\n')
f.write('#ifdef HAVE_CONFIG_H\n#include "config.h"\n#endif\n\n#include "rnn.h"\n\n')

hf.write('/*This file is automatically generated from a Keras model*/\n\n')
hf.write('#ifndef RNN_DATA_H\n#define RNN_DATA_H\n\n#include "rnn.h"\n\n')

layer_list = []
for layer in model.layers:
    if len(layer.get_weights()) > 0:
        printLayer(f, hf, layer)
    # Only layers with recurrent weights (GRUs) carry per-frame state.
    if len(layer.get_weights()) > 2:
        layer_list.append(layer.name)

hf.write('struct RNNState {\n')
for name in layer_list:
    hf.write('  float {}_state[{}_SIZE];\n'.format(name, name.upper()))
hf.write('};\n')
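# Continuing the hypothetical 'noise_gru' example above, the generated
# struct would read:
#
#   struct RNNState {
#     float noise_gru_state[NOISE_GRU_SIZE];
#   };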

hf.write('\n\n#endif\n')

f.close()
hf.close()