diff --git a/.gitignore b/.gitignore
index 46f4be9..160114d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,4 +10,4 @@ __pycache__
 *.h5
 *.obj
 *.sconsign.dblite
-.ipynb_checkpoints
+.ipynb_checkpoints
\ No newline at end of file
diff --git a/docs/index.md b/docs/index.md
index 2595f1d..7dfd590 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -5,6 +5,8 @@
 
 NNoM is a high-level inference Neural Network library specifically for microcontrollers.
 
+Document version 0.2.1
+
 [[Chinese Intro]](rt-thread_guide.md)
 
 **Highlights**
diff --git a/examples/auto_test/.gitignore b/examples/auto_test/.gitignore
index 4e831c2..6bbeb39 100644
--- a/examples/auto_test/.gitignore
+++ b/examples/auto_test/.gitignore
@@ -11,4 +11,6 @@ Debug
 *.sconsign.dblite
 .ipynb_checkpoints
 weights.h
-result.csv
\ No newline at end of file
+result.csv
+test_*
+_cifar.py
diff --git a/inc/nnom.h b/inc/nnom.h
index c2e9fd0..bbe75b4 100644
--- a/inc/nnom.h
+++ b/inc/nnom.h
@@ -25,6 +25,12 @@
 #define q15_t int16_t
 #define q31_t int32_t
 
+/* version */
+#define NNOM_MAJORVERSION 0L /**< major version number */
+#define NNOM_SUBVERSION   2L /**< minor version number */
+#define NNOM_REVISION     1L /**< revision number */
+#define NNOM_VERSION ((NNOM_MAJORVERSION * 10000) + (NNOM_SUBVERSION * 100) + NNOM_REVISION)
+
 typedef enum
 {
 	NN_SUCCESS = 0, /**< No error */
diff --git a/scripts/nnom_utils.py b/scripts/nnom_utils.py
index ff99444..af62b76 100644
--- a/scripts/nnom_utils.py
+++ b/scripts/nnom_utils.py
@@ -350,7 +350,7 @@ def layers_output_ranges(model, x_test, kld=True, calibrate_size=1000):
 
         # saturation shift, using KLD method
         # Ref: http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf
-        if(kld and not is_shift_fixed(layer) and "input" not in layer.name): # test, also do not use kld in input layer
+        if(kld and not is_shift_fixed(layer) and "input" not in layer.name and "dense" not in layer.name): # test; do not use KLD for input or dense layers
             import scipy.stats
             abs_max = max(abs(max_val), abs(min_val))
             small_var = 1e-5
@@ -359,7 +359,7 @@ def layers_output_ranges(model, x_test, kld=True, calibrate_size=1000):
             flat_hist = np.histogram(features.flatten(), bins=bins)[0]
             kl_loss = []
             kl_shifts = []
-            for shift in range(8):
+            for shift in range(4):
                 t = 2 ** (dec_bits + shift)  # 2-based threshold
                 act = np.round(features.flatten() * t)
                 act = act / t
@@ -701,6 +701,29 @@ def is_skipable_layer(layer):
             fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0]))
         else:
             raise Exception('unsupported layer', layer.name, layer)
+
+        """
+        # temporary fix for activations attached to layers during construction
+        def is_activation_attached(layer):
+            if(("Softmax" in layer.output.name and "softmax" not in layer.name) or
+               ("Relu" in layer.output.name and "re_lu" not in layer.name) or
+               ("Sigmoid" in layer.output.name and "sigmoid" not in layer.name) or
+               ("Tanh" in layer.output.name and "tanh" not in layer.name)):
+                return True
+            return False
+        if "input" not in layer.name and is_activation_attached(layer):
+            inp = layer.output.name.replace(':', '/').split('/')[0]
+            cfg = layer.get_config()
+            if(cfg['activation'] == 'relu'):
+                fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n'%(id, LI[inp][0]))
+            elif(cfg['activation'] == 'tanh'):
+                fp.write('\tlayer[%s] = model.active(act_tanh(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0]))
+            elif(cfg['activation'] == 'sigmoid'):
+                fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0]))
+            elif(cfg['activation'] == 'softmax'):
+                fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0]))
+        """
+
     # FIXME, test later.
     if('softmax' in layer.name or
        ('activation' in layer.name and layer.get_config()['activation'] == 'softmax')):
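The two calibration tweaks in `layers_output_ranges()` above (skipping KLD for dense layers as well as inputs, and searching `range(4)` candidate shifts instead of `range(8)`) are easier to review with the method in front of you. Below is a minimal, self-contained sketch of the KLD saturation-shift search, not the library code itself: `features`, `dec_bits`, the 2048-bin histogram and `scipy.stats.entropy` mirror the hunks above, while `kld_best_shift`, the explicit q7 clipping step, and the toy activations are illustrative assumptions.

```python
import numpy as np
import scipy.stats

def kld_best_shift(features, dec_bits, max_shift=4):
    """Return the extra saturation shift whose quantised-and-clipped
    histogram is closest (smallest KL divergence) to the original one."""
    small_var = 1e-5
    abs_max = np.abs(features).max()
    bins = np.arange(-abs_max, abs_max, abs_max / 2048 * 2)  # ~2048 bins
    flat_hist = np.histogram(features.flatten(), bins=bins)[0]
    kl_loss = []
    for shift in range(max_shift):
        t = 2 ** (dec_bits + shift)                 # 2-based threshold
        act = np.round(features.flatten() * t) / t  # quantise at this resolution
        act = np.clip(act, -128.0 / t, 127.0 / t)   # q7 saturation (assumed step)
        hist = np.histogram(act, bins=bins)[0]
        kl_loss.append(scipy.stats.entropy(flat_hist + small_var, hist + small_var))
    return int(np.argmin(kl_loss))

# toy usage: mostly small activations plus a long tail
rng = np.random.default_rng(0)
features = np.concatenate([rng.standard_normal(10000) * 0.1,
                           rng.standard_normal(100) * 3.0])
# dec_bits = 3 just fits abs_max (~9) into a signed 8-bit integer
print("best extra shift:", kld_best_shift(features, dec_bits=3))
```

A larger shift gives finer resolution but a lower saturation threshold (±127/t), so capping the search at four candidates limits how aggressively large activations can be clipped.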
diff --git a/src/nnom.c b/src/nnom.c
index c2a3480..87644e8 100644
--- a/src/nnom.c
+++ b/src/nnom.c
@@ -868,7 +868,8 @@ nnom_status_t model_compile(nnom_model_t *m, nnom_layer_t *input, nnom_layer_t *output)
 	if (output == NULL)
 		m->tail = find_last(input);
 
-	NNOM_LOG("\nStart compiling model...\n");
+	NNOM_LOG("\nNNoM version %d.%d.%d\n", NNOM_MAJORVERSION, NNOM_SUBVERSION, NNOM_REVISION);
+	NNOM_LOG("Start compiling model...\n");
 	NNOM_LOG("Layer(#)        Activation     output shape    ops(MAC)   mem(in, out, buf)      mem blk lifetime\n");
 	NNOM_LOG("-------------------------------------------------------------------------------------------------\n");
 
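For reference, the `NNOM_VERSION` macro added in `inc/nnom.h` packs the three components into one integer that compares naturally across releases, which is what the new `model_compile()` banner reports at run time. A quick sketch of the arithmetic (plain Python; the variable names are illustrative, not library code):

```python
# Mirrors ((NNOM_MAJORVERSION * 10000) + (NNOM_SUBVERSION * 100) + NNOM_REVISION)
# from the inc/nnom.h hunk: v0.2.1 encodes to 201.
major, sub, revision = 0, 2, 1
version = (major * 10000) + (sub * 100) + revision
print(version)                                  # 201
# the integer form allows simple ordering checks, e.g.:
assert version < (0 * 10000) + (3 * 100) + 0    # 0.2.1 < 0.3.0
```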