diff --git a/APP_Framework/Applications/knowing_app/Kconfig b/APP_Framework/Applications/knowing_app/Kconfig
index 1dec5ea1d..5090c42df 100755
--- a/APP_Framework/Applications/knowing_app/Kconfig
+++ b/APP_Framework/Applications/knowing_app/Kconfig
@@ -9,7 +9,8 @@ menu "knowing app"
         source "$APP_DIR/Applications/knowing_app/iris_ml_demo/Kconfig"
         source "$APP_DIR/Applications/knowing_app/k210_fft_test/Kconfig"
         source "$APP_DIR/Applications/knowing_app/image_processing/Kconfig"
-        source "$APP_DIR/Applications/knowing_app/cmsis_5_demo/Kconfig"
+        source "$APP_DIR/Applications/knowing_app/cmsis_5_demo/Kconfig"
+        source "$APP_DIR/Applications/knowing_app/nnom_demo/Kconfig"
     endif
 endmenu
diff --git a/APP_Framework/Applications/knowing_app/cmsis_5_demo/Kconfig b/APP_Framework/Applications/knowing_app/cmsis_5_demo/Kconfig
index b7e6fb884..dd09e11d9 100644
--- a/APP_Framework/Applications/knowing_app/cmsis_5_demo/Kconfig
+++ b/APP_Framework/Applications/knowing_app/cmsis_5_demo/Kconfig
@@ -1,6 +1,6 @@
 menuconfig USING_CMSIS_5_DEMOAPP
     bool "CMSIS-5 demo app"
-    depends on USING_USING_CMSIS_5_NN
+    depends on USING_CMSIS_5_NN
     default n

 if USING_CMSIS_5_DEMOAPP
diff --git a/APP_Framework/Applications/knowing_app/cmsis_5_demo/cmsisnn-cifar10/SConscript b/APP_Framework/Applications/knowing_app/cmsis_5_demo/cmsisnn-cifar10/SConscript
index 1f082bbfc..89010764a 100644
--- a/APP_Framework/Applications/knowing_app/cmsis_5_demo/cmsisnn-cifar10/SConscript
+++ b/APP_Framework/Applications/knowing_app/cmsis_5_demo/cmsisnn-cifar10/SConscript
@@ -13,6 +13,6 @@ path = [
     cwd + '/demo'
 ]

-group = DefineGroup('CMSISNN-cifar10', src, depend = ['USING_CMSIS_5_DEMOAPP'], CPPPATH = path)
+group = DefineGroup('CMSISNN-cifar10', src, depend = ['USING_CMSIS_5_NN_DEMOAPP'], CPPPATH = path)

 Return('group')
diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/Kconfig b/APP_Framework/Applications/knowing_app/nnom_demo/Kconfig
new file mode 100644
index 000000000..e898505c4
--- /dev/null
+++ b/APP_Framework/Applications/knowing_app/nnom_demo/Kconfig
@@ -0,0 +1,14 @@
+menuconfig USING_NNOM_DEMOAPP
+    bool "NNOM demo app"
+    depends on USING_NNOM
+    default n
+
+    if USING_NNOM_DEMOAPP
+
+    config USING_NNOM_MNIST_DEMOAPP
+        bool "Using NNOM mnist demo app"
+        default n
+
+    endif
+
+
diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/SConscript b/APP_Framework/Applications/knowing_app/nnom_demo/SConscript
new file mode 100644
index 000000000..f307e3f70
--- /dev/null
+++ b/APP_Framework/Applications/knowing_app/nnom_demo/SConscript
@@ -0,0 +1,14 @@
+import os
+Import('RTT_ROOT')
+from building import *
+
+cwd = GetCurrentDir()
+objs = []
+list = os.listdir(cwd)
+
+for d in list:
+    path = os.path.join(cwd, d)
+    if os.path.isfile(os.path.join(path, 'SConscript')):
+        objs = objs + SConscript(os.path.join(path, 'SConscript'))
+
+Return('objs')
diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/README.md b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/README.md
new file mode 100644
index 000000000..9d36ca344
--- /dev/null
+++ b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/README.md
@@ -0,0 +1,16 @@
+# NNoM Mnist-simple Example
+
+This example is from [NNoM](https://github.com/majianjia/nnom)/[mnist-simple](https://github.com/majianjia/nnom/tree/master/examples/mnist-simple) and can be deployed on both Arm and RISC-V CPUs. On Arm Cortex-M CPUs, CMSIS-NN can be used as an acceleration backend.
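For orientation before the long generated data files further down: at runtime the demo needs only three steps from NNoM — build the model from the generated `weights.h`, copy one 28x28 image into `nnom_input_data`, and call `nnom_predict()`. The following is a minimal sketch of that flow, mirroring the `main.c` later in this patch; it is not part of the patch itself, and the `run_one_image` helper name and its output format are illustrative only.

```
/* Minimal sketch of the inference flow used by main.c in this patch.
 * nnom_model_create() and nnom_input_data are emitted into the generated
 * weights.h by NNoM's model conversion script; img[] and label[] come
 * from the generated image.h. */
#include <stdio.h>
#include <string.h>
#include "nnom.h"
#include "image.h"
#include "weights.h"

static void run_one_image(uint32_t n)            /* illustrative helper, not in the patch */
{
    nnom_model_t *model = nnom_model_create();   /* build the network from weights.h */
    uint32_t predicted_label;
    float probability;

    memcpy(nnom_input_data, img[n], 784);        /* one 28x28 int8 (q7) image */
    nnom_predict(model, &predicted_label, &probability);
    printf("truth %d, predicted %d (%d%%)\n",
           label[n], (int)predicted_label, (int)(probability * 100));
}
```

On the target shell the same flow is triggered with `mnist_nnom <image index>`, as described under "To run this demo" below.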
+ +## Requirements: + +- NNoM in Framework/knowing/nnom +- To use CMSIS-NN backend, select in menuconfig "APP_Framework->Framework->support knowing framework->NNoM->Select NNoM Backend" + +## To run this demo: + +- Run demo by type the command + +``` +mnist_nnom num +``` \ No newline at end of file diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/SConscript b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/SConscript new file mode 100644 index 000000000..7f02d086b --- /dev/null +++ b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/SConscript @@ -0,0 +1,10 @@ +import os +from building import * + +cwd = GetCurrentDir() +src = Glob('*.c') +path = [cwd] + +group = DefineGroup('NNOM mnist application', src, depend = ['USING_NNOM_MNIST_DEMOAPP'], CPPPATH = path) + +Return('group') diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/image.h b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/image.h new file mode 100644 index 000000000..e46ba726d --- /dev/null +++ b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/image.h @@ -0,0 +1,36 @@ +#define IMG0 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 88, 126, 126, 126, 126, 127, 64, 56, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 81, 126, 126, 126, 126, 126, 126, 126, 126, 109, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 28, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 89, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 126, 126, 127, 126, 114, 23, 0, 31, 89, 126, 126, 126, 127, 126, 59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 100, 126, 126, 126, 111, 26, 0, 0, 0, 28, 116, 126, 126, 126, 126, 107, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 126, 83, 0, 0, 0, 0, 0, 37, 116, 126, 126, 126, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 126, 37, 0, 0, 0, 0, 0, 0, 84, 126, 126, 126, 126, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 81, 126, 126, 126, 126, 13, 0, 0, 0, 0, 0, 0, 84, 126, 126, 126, 126, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, 126, 126, 126, 127, 13, 0, 0, 0, 0, 0, 0, 84, 126, 127, 126, 126, 112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 89, 126, 126, 126, 126, 13, 0, 0, 0, 0, 0, 0, 61, 126, 126, 126, 126, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 126, 68, 0, 0, 0, 0, 0, 0, 30, 126, 126, 126, 126, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 126, 112, 29, 5, 0, 0, 5, 69, 112, 126, 126, 126, 126, 104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 110, 126, 126, 126, 126, 126, 88, 70, 70, 89, 126, 126, 126, 126, 126, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99, 126, 126, 127, 126, 126, 126, 126, 127, 126, 126, 126, 126, 127, 126, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 75, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 111, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 77, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 114, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 42, 118, 126, 126, 126, 126, 126, 126, 126, 124, 72, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 25, 56, 56, 64, 126, 126, 126, 70, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG0_LABLE 0 + +#define IMG1 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, 127, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 113, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 107, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 127, 127, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 98, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 127, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58, 126, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 126, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 127, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 98, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 126, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 127, 127, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 117, 126, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 85, 126, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 85, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG1_LABLE 1 + +#define IMG2 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 83, 87, 87, 47, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 78, 122, 121, 113, 113, 127, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 29, 117, 106, 64, 16, 0, 0, 67, 112, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 120, 75, 7, 0, 0, 0, 0, 0, 93, 73, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 124, 60, 0, 0, 0, 0, 0, 0, 0, 29, 120, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 111, 30, 0, 0, 0, 0, 0, 0, 0, 0, 12, 121, 43, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 69, 86, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 122, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 54, 121, 114, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 28, 70, 112, 127, 125, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 101, 126, 126, 126, 127, 117, 63, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 127, 127, 127, 127, 127, 127, 127, 127, 100, 53, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 40, 27, 6, 6, 6, 29, 69, 92, 117, 127, 122, 78, 34, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 45, 94, 121, 126, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 53, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG2_LABLE 2 + +#define IMG3 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 61, 120, 108, 108, 96, 37, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 127, 127, 127, 127, 127, 127, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 58, 70, 70, 115, 127, 127, 121, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 105, 127, 127, 119, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 102, 127, 127, 117, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 116, 127, 127, 71, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 127, 127, 127, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 82, 127, 127, 124, 85, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 22, 110, 127, 127, 98, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 76, 127, 127, 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 73, 127, 124, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 110, 127, 111, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 127, 127, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 102, 127, 127, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 69, 123, 127, 120, 63, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 44, 119, 127, 127, 127, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 69, 103, 52, 70, 125, 127, 127, 100, 63, 21, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 75, 127, 127, 127, 127, 127, 127, 94, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 98, 127, 127, 127, 115, 88, 38, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 76, 110, 47, 19, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG3_LABLE 3 + +#define IMG4 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 103, 126, 0, 0, 0, 0, 0, 57, 126, 111, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 123, 126, 126, 63, 0, 0, 0, 0, 56, 126, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 55, 0, 0, 0, 0, 56, 126, 126, 99, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 0, 0, 0, 0, 0, 19, 117, 126, 126, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 0, 0, 0, 0, 0, 0, 73, 126, 126, 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 126, 126, 126, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 0, 0, 0, 0, 0, 0, 42, 126, 126, 126, 81, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 100, 126, 126, 126, 0, 0, 0, 0, 0, 0, 9, 104, 126, 126, 126, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 126, 126, 126, 126, 14, 38, 84, 84, 84, 54, 9, 90, 126, 126, 126, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 107, 44, 126, 126, 126, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 120, 126, 126, 126, 127, 126, 126, 126, 126, 127, 126, 126, 126, 126, 127, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 50, 126, 126, 126, 126, 88, 83, 75, 53, 126, 126, 126, 126, 126, 126, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 27, 27, 27, 28, 3, 0, 0, 0, 28, 27, 27, 93, 126, 126, 116, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, 126, 126, 126, 101, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 94, 126, 126, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 127, 126, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 126, 126, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 126, 126, 126, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 118, 126, 126, 104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 79, 126, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG4_LABLE 4 + +#define IMG5 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 6, 9, 66, 95, 78, 83, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 12, 64, 95, 126, 126, 126, 126, 126, 123, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 92, 126, 126, 126, 126, 126, 126, 115, 96, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 36, 114, 126, 126, 126, 126, 126, 102, 67, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 126, 126, 126, 126, 100, 96, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 126, 126, 115, 69, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 94, 126, 126, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 126, 126, 126, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 82, 126, 126, 105, 54, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 124, 126, 126, 126, 126, 121, 94, 60, 61, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 98, 126, 126, 126, 126, 126, 126, 126, 127, 126, 80, 6, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 54, 54, 54, 54, 105, 114, 119, 126, 126, 126, 126, 87, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 51, 109, 126, 126, 126, 112, 58, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 50, 102, 119, 126, 126, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 90, 126, 126, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 48, 117, 126, 126, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 102, 102, 9, 20, 43, 76, 106, 126, 126, 114, 64, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 126, 126, 110, 117, 126, 126, 126, 126, 91, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 104, 126, 126, 126, 126, 126, 126, 115, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 65, 65, 123, 95, 65, 28, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG5_LABLE 5 + +#define IMG6 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 56, 88, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 97, 121, 126, 103, 121, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 100, 126, 99, 27, 12, 99, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 53, 126, 121, 96, 14, 0, 0, 14, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 126, 126, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 72, 126, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
65, 126, 121, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, 123, 126, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 53, 126, 121, 96, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 126, 126, 107, 70, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 85, 126, 127, 126, 126, 126, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 121, 126, 126, 88, 93, 126, 118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 126, 126, 28, 3, 6, 93, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 117, 126, 118, 0, 0, 0, 84, 126, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 126, 126, 56, 0, 0, 0, 84, 126, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 126, 126, 87, 0, 0, 0, 84, 126, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 101, 126, 126, 81, 28, 9, 98, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 126, 126, 126, 116, 104, 126, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 49, 111, 126, 126, 126, 116, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 126, 126, 110, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG6_LABLE 6 + +#define IMG7 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, 52, 122, 126, 126, 127, 126, 78, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 98, 126, 126, 126, 126, 114, 103, 126, 115, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 123, 126, 118, 105, 44, 26, 28, 123, 126, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 47, 66, 4, 0, 0, 0, 0, 82, 126, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 82, 126, 121, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 82, 126, 126, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 82, 126, 106, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 82, 126, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 89, 126, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 126, 126, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 126, 126, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 126, 126, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 78, 126, 126, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 126, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 15, 110, 126, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 126, 126, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 126, 126, 126, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 126, 126, 104, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 126, 124, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 64, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG7_LABLE 7 + +#define IMG8 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 107, 126, 127, 106, 66, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 81, 126, 126, 126, 126, 126, 86, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 126, 127, 65, 31, 51, 96, 126, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 126, 86, 5, 0, 0, 35, 126, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 127, 126, 0, 0, 0, 0, 25, 126, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 126, 85, 0, 0, 0, 0, 46, 126, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 101, 127, 45, 0, 0, 0, 20, 107, 126, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 121, 116, 15, 0, 0, 20, 121, 126, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 126, 117, 15, 0, 0, 86, 126, 127, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 126, 126, 86, 10, 91, 126, 126, 126, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 127, 126, 127, 126, 127, 86, 127, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 106, 126, 126, 126, 65, 5, 65, 126, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 126, 127, 25, 0, 0, 25, 126, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 126, 86, 5, 0, 0, 25, 126, 111, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 127, 126, 0, 0, 0, 0, 25, 126, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 126, 126, 0, 0, 0, 0, 46, 126, 81, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 127, 126, 0, 0, 5, 86, 127, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 126, 126, 20, 20, 86, 126, 106, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 117, 126, 127, 126, 127, 86, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 126, 126, 106, 45, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG8_LABLE 8 + +#define IMG9 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 59, 109, 120, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 49, 116, 121, 127, 127, 127, 120, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 80, 127, 127, 114, 89, 96, 127, 127, 83, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 78, 127, 127, 70, 20, 0, 19, 117, 127, 127, 59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 127, 127, 82, 1, 0, 0, 0, 36, 127, 127, 110, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 127, 123, 25, 0, 0, 0, 0, 18, 127, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 127, 120, 0, 0, 0, 0, 0, 83, 127, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 97, 127, 90, 0, 0, 0, 0, 0, 15, 127, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 124, 127, 75, 0, 0, 0, 0, 0, 14, 127, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 102, 127, 126, 103, 36, 22, 0, 0, 10, 112, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 119, 127, 127, 127, 115, 93, 93, 86, 83, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 121, 127, 127, 127, 127, 127, 127, 127, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 55, 84, 124, 124, 127, 127, 127, 127, 127, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 76, 93, 127, 127, 127, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 75, 127, 127, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 127, 127, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 127, 127, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 127, 127, 92, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 122, 127, 127, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 127, 127, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} +#define IMG9_LABLE 9 + +#define TOTAL_IMAGE 10 + +static const int8_t img[10][784] = {IMG0,IMG1,IMG2,IMG3,IMG4,IMG5,IMG6,IMG7,IMG8,IMG9}; + +static const int8_t label[10] = {IMG0_LABLE,IMG1_LABLE,IMG2_LABLE,IMG3_LABLE,IMG4_LABLE,IMG5_LABLE,IMG6_LABLE,IMG7_LABLE,IMG8_LABLE,IMG9_LABLE}; + diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/main.c b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/main.c new file mode 100644 index 000000000..55b6b3af0 --- /dev/null +++ b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/main.c @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2018-2020, Jianjia Ma + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-03-29 Jianjia Ma first implementation + */ + +#include +#include + +#include "nnom.h" +#include "image.h" +#include "weights.h" + +nnom_model_t *model; + +const char codeLib[] = "@B%8&WM#*oahkbdpqwmZO0QLCJUYXzcvunxrjft/\\|()1{}[]?-_+~<>i!lI;:,\"^`'. 
"; +void print_img(int8_t * buf) +{ + for(int y = 0; y < 28; y++) + { + for (int x = 0; x < 28; x++) + { + int index = 69 / 127.0 * (127 - buf[y*28+x]); + if(index > 69) index =69; + if(index < 0) index = 0; + printf("%c",codeLib[index]); + printf("%c",codeLib[index]); + } + printf("\n"); + } +} + +// Do simple test using image in "image.h" with model created previously. +void mnist_nnom(int argc, char **argv) +{ + model = nnom_model_create(); + + uint32_t tick, time; + uint32_t predic_label; + float prob; + int32_t index = atoi(argv[1]); + + if (index >= TOTAL_IMAGE || argc != 2) + { + printf("Please input image number within %d\n", TOTAL_IMAGE - 1); + return; + } + + printf("\nprediction start.. \n"); + #ifdef __RT_THREAD_H__ + tick = rt_tick_get(); + #endif + + memcpy(nnom_input_data, (int8_t *)&img[index][0], 784); + nnom_predict(model, &predic_label, &prob); + + #ifdef __RT_THREAD_H__ + time = rt_tick_get() - tick; + #endif + // print original image to console + print_img((int8_t *)&img[index][0]); + + #ifdef __RT_THREAD_H__ + printf("Time: %d tick\n", time); + #endif + printf("Truth label: %d\n", label[index]); + printf("Predicted label: %d\n", predic_label); + printf("Probability: %d%%\n", (int)(prob * 100)); +} + +#ifdef __RT_THREAD_H__ +MSH_CMD_EXPORT(mnist_nnom, nnom mnist demo and image number should be followed); +#endif diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/mnist_simple.py b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/mnist_simple.py new file mode 100644 index 000000000..1d3dc9d5e --- /dev/null +++ b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/mnist_simple.py @@ -0,0 +1,166 @@ +''' + Copyright (c) 2018-2020 + Jianjia Ma + majianjia@live.com + SPDX-License-Identifier: Apache-2.0 + Change Logs: + Date Author Notes + 2019-02-12 Jianjia Ma The first version +''' + + +import matplotlib.pyplot as plt +import sys +import os +nnscript = os.path.abspath('../../../Framework/knowing/nnom/scripts') +sys.path.append(nnscript) + +from tensorflow.keras import * +from tensorflow.keras.datasets import mnist +from tensorflow.keras.layers import * +from tensorflow.keras.models import load_model, save_model +import tensorflow as tf +import numpy as np +from nnom import * + +model_name = 'mnist_simple_trained_model.h5' + +def image_to_cfile(data, label, num_of_image, file='image.h'): + with open(file, 'w') as f: + for i in range(num_of_image): + selected = np.random.randint(0, 1000) # select 10 out of 1000. 
+ f.write('#define IMG%d {'% (i)) + np.round(data[selected]).flatten().tofile(f, sep=", ", format="%d") # convert 0~1 to 0~127 + f.write('} \n') + f.write('#define IMG%d_LABLE'% (i)) + f.write(' %d \n \n' % label[selected]) + f.write('#define TOTAL_IMAGE %d \n \n'%(num_of_image)) + + f.write('static const int8_t img[%d][%d] = {' % (num_of_image, data[0].flatten().shape[0])) + f.write('IMG0') + for i in range(num_of_image -1): + f.write(',IMG%d'%(i+1)) + f.write('};\n\n') + + f.write('static const int8_t label[%d] = {' % (num_of_image)) + f.write('IMG0_LABLE') + for i in range(num_of_image -1): + f.write(',IMG%d_LABLE'%(i+1)) + f.write('};\n\n') + + +def train(x_train, y_train, x_test, y_test, batch_size=64, epochs=100): + inputs = Input(shape=x_train.shape[1:]) + x = Conv2D(12, kernel_size=(3, 3), strides=(1, 1), padding='same')(inputs) + x = ReLU()(x) + x = MaxPool2D((2,2),strides=(2,2), padding="same")(x) + + x = Conv2D(24 ,kernel_size=(3,3), strides=(1,1), padding="same")(x) + x = ReLU()(x) + x = MaxPool2D((2,2),strides=(2,2), padding="same")(x) + + x = Conv2D(48, kernel_size=(3,3), strides=(1,1), padding="same")(x) + x = ReLU()(x) + x = Dropout(0.2)(x) + x = MaxPool2D((2,2),strides=(2,2), padding="same")(x) + + x = Flatten()(x) + x = Dense(96)(x) + x = Dropout(0.2)(x) + + x = ReLU()(x) + x = Dense(10)(x) + predictions = Softmax()(x) + + model = Model(inputs=inputs, outputs=predictions) + + model.compile(loss='categorical_crossentropy', + optimizer='adam', + metrics=['accuracy']) + model.summary() + + history = model.fit(x_train, y_train, + batch_size=batch_size, + epochs=epochs, + verbose=2, + validation_data=(x_test, y_test), + shuffle=True) + + # free the session to avoid nesting naming while we load the best model after. + save_model(model, model_name) + del model + tf.keras.backend.clear_session() + return history + + +if __name__ == "__main__": + epochs = 2 + num_classes = 10 + + # The data, split between train and test sets: + (x_train, y_train_num), (x_test, y_test_num) = mnist.load_data() + + print(x_train.shape[0], 'train samples') + print(x_test.shape[0], 'test samples') + + # Convert class vectors to binary class matrices. + y_train = tf.keras.utils.to_categorical(y_train_num, num_classes) + y_test = tf.keras.utils.to_categorical(y_test_num, num_classes) + + # reshape to 4 d becaue we build for 4d? 
+ x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2], 1) + x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], 1) + print('x_train shape:', x_train.shape) + + # quantize the range to 0~255 -> 0~1 + x_test = x_test/255 + x_train = x_train/255 + print("data range", x_test.min(), x_test.max()) + + # select a few image and write them to image.h + image_to_cfile(x_test*127, y_test_num, 10, file='image.h') + + # train model, save the best accuracy model + history = train(x_train, y_train, x_test, y_test, batch_size=64, epochs=epochs) + + # reload best model + model = load_model(model_name) + + # evaluate + evaluate_model(model, x_test, y_test) + + # save weight + generate_model(model, np.vstack((x_train, x_test)), name="weights.h") + + # plot + acc = history.history['accuracy'] + val_acc = history.history['val_accuracy'] + + plt.plot(range(0, epochs), acc, color='red', label='Training acc') + plt.plot(range(0, epochs), val_acc, color='green', label='Validation acc') + plt.title('Training and validation accuracy') + plt.xlabel('Epochs') + plt.ylabel('Loss') + plt.legend() + plt.show() + + + + + + + + + + + + + + + + + + + + + diff --git a/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/weights.h b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/weights.h new file mode 100644 index 000000000..7ec88be65 --- /dev/null +++ b/APP_Framework/Applications/knowing_app/nnom_demo/mnist_nnom/weights.h @@ -0,0 +1,420 @@ +#include "nnom.h" + +/* Weights, bias and Q format */ +#define TENSOR_CONV2D_KERNEL_0 {43, 49, 37, 63, 24, 74, -39, -26, 7, 86, 41, 20, 56, -10, -64, -4, 7, -78, 38, 74, 28, -16, 51, -1, -82, -87, -14, -26, -25, 61, -69, -19, 68, -55, -19, 66, 10, -34, -86, 58, 41, -36, 65, -22, 80, 7, 56, 16, -31, -31, 39, -43, -11, -76, -26, -5, 66, -14, -9, 43, 4, -32, 76, 2, -72, -70, 32, 45, 32, 44, -17, 31, 76, -2, 32, 64, 85, -60, 5, 21, -96, -24, -81, -61, 45, 69, -3, 14, 55, 69, 82, -44, -71, 83, -16, -66, 49, 55, 31, -30, 17, 46, -3, 16, 7, 19, 80, 72} + +#define TENSOR_CONV2D_KERNEL_0_DEC_BITS {8} + +#define TENSOR_CONV2D_BIAS_0 {-20, 13, 92, 22, -3, 83, -81, -4, -13, -3, -10, -16} + +#define TENSOR_CONV2D_BIAS_0_DEC_BITS {11} + +#define CONV2D_BIAS_LSHIFT {4} + +#define CONV2D_OUTPUT_RSHIFT {9} + +#define TENSOR_CONV2D_1_KERNEL_0 {25, 1, 56, -15, -11, 1, -38, 32, -22, -19, -3, -33, 16, -34, -34, 15, 25, -9, 4, -6, -16, 38, -13, 30, 41, -14, -22, 43, -24, -31, 29, -20, 45, 13, 20, 20, 19, -28, 0, 57, 5, -22, 48, -19, -35, 38, 18, 9, 21, -7, -27, 20, 2, -88, 21, -15, 39, 33, 1, 57, 19, 47, -13, -40, -34, -29, 3, -14, 41, -28, 33, -21, -1, 23, -61, -22, 20, -66, -16, 2, -1, 45, -38, 60, 3, 0, -24, -40, -16, -31, -32, -46, 33, -1, 25, 13, 19, 58, -8, -50, -9, -2, -54, -35, 46, -63, 37, -59, 48, 48, 48, -1, -11, 7, 30, 13, 55, 13, 16, 26, 37, 51, 30, 2, 0, 12, 38, -42, 60, -28, 21, 0, 12, 25, -5, -32, -39, 2, -40, -31, -10, -61, 12, -45, 36, 21, 89, 2, -48, 24, 21, -69, 11, -70, 9, -43, -1, 51, 24, -5, -43, 39, -28, -36, 21, -74, -13, -46, -41, -48, -34, 19, -5, -54, 40, -12, -64, 1, -25, 17, -35, -6, -25, -27, -16, -3, -36, 20, -32, -10, -7, -7, -55, -17, -67, 30, -31, -27, -4, 12, -59, -31, -19, 18, 17, -12, -20, 45, 27, -56, 36, -16, -16, -4, 22, 57, -33, 31, 29, 23, 6, -6, -13, -17, -12, -33, -8, -10, -28, -52, -44, 28, -20, -29, 37, -20, 2, -4, 12, -5, -22, -17, -63, -3, 14, -35, 48, 30, 18, 5, -21, 41, -30, -31, -14, 18, -25, -36, -48, -5, -29, -27, -3, -24, -30, -60, -44, 34, -35, -55, 37, -39, -3, 16, -8, 37, 42, -6, -4, 
4, -32, -47, 5, -4, 20, 26, 31, 49, -25, 28, -11, -32, -12, 23, -27, -37, 28, 7, -22, -35, -6, -57, -42, 36, -36, -10, 46, -17, -51, 26, -5, 21, 21, 0, -16, 27, -8, -14, 34, -42, -22, 21, 25, 15, -14, -1, 37, 53, -17, 9, -7, 15, 31, -48, 25, -3, -20, -9, -48, 49, -4, -52, 27, -16, -31, 30, -27, 16, 6, 45, -61, 8, 41, -57, -32, -4, 40, 16, -14, 1, -7, 18, -29, 40, -34, -28, 41, -42, -42, -32, 3, -20, -6, -42, -48, 37, -31, -54, -14, -28, -15, -1, -8, 50, -17, 26, -40, 23, -8, -5, -40, 33, 11, 11, 42, -27, -6, -53, -24, -6, -16, -28, 14, -32, -68, 7, -49, 13, -3, 5, -45, 23, -21, 6, 0, -5, -25, -33, -26, -12, -10, 48, -32, -1, 47, -16, 21, -16, 19, 23, 38, 21, -22, 20, 29, 20, -49, 61, -45, -56, 6, -38, -31, -27, -20, 12, 103, -27, -22, 49, -4, -13, -8, -39, 2, -40, -25, 31, 76, -18, 17, 10, -29, -26, -34, -42, -8, -9, -11, -47, 2, -44, 17, -60, 23, 50, 8, 47, 41, 44, -22, -43, -39, -25, 53, -10, -4, 42, -46, 49, -11, 39, -37, -56, -21, 20, 0, -27, -16, 26, 5, 15, 44, 53, 17, 46, 38, 14, -21, 67, 24, 17, 38, 35, -25, -10, 29, 3, 46, -13, 1, 15, 34, 31, 19, 27, -10, 20, 20, -2, 54, -1, -11, 36, -9, 20, -18, 25, -15, 31, -4, -8, 45, 21, 23, 56, 33, -32, 29, -29, -33, 9, 21, -14, 19, 38, -39, 54, 3, -13, 34, -45, -4, -8, 5, 24, 79, -26, -26, 54, 25, 4, -6, -14, -16, 4, -42, 9, 39, -12, 17, 43, -53, -4, -3, 23, -15, -21, -53, -19, 61, -6, 3, 45, 18, -14, -11, -21, 18, -26, -23, -20, 11, -40, -16, 37, -18, 11, -41, 32, -3, -27, -3, -8, -7, 6, 40, -33, 8, 20, 3, 51, -19, 43, 22, -37, -62, -6, 51, -26, 37, 39, -28, 17, 32, 40, -17, -35, -65, 31, 43, -29, 43, -2, -17, 52, 16, 27, 26, 12, -8, 43, -31, -26, 39, -27, -22, -16, -42, -7, -8, 21, 2, 33, 23, -61, 5, 21, 13, 9, 30, 10, 22, 0, -39, 37, 35, -25, 23, -32, 47, -36, 25, 7, -14, -57, -13, 36, -61, -28, -10, -48, -15, -32, -44, 46, 1, -9, -16, -8, 17, -22, 29, -35, -23, -10, 25, -14, 29, 37, -24, -6, 23, -82, 6, -6, 34, 28, 36, 7, 6, -57, -43, 9, -22, -15, 33, -9, -47, -14, -46, -5, 25, 12, -27, -20, -24, -32, 23, -15, 28, -23, -2, 36, -7, -15, -40, -38, 10, -20, 11, 2, -21, 23, 36, -5, 16, 56, -14, 12, 39, -33, 31, -39, 74, -43, 42, 1, 8, 8, 20, -41, 12, -22, -39, -15, 64, -64, 40, -81, -53, -37, -29, 13, -14, -1, -39, -5, -76, -29, -55, -15, 15, 42, 38, -73, 4, 5, -72, -47, 57, -40, 22, -45, -8, -4, -13, -15, -28, 10, -2, -7, 12, 14, -42, 17, -23, -41, 7, 12, -26, 7, 11, 24, -57, 45, -15, 8, -49, -33, -49, 20, 22, 3, -23, -11, -44, 37, 0, -19, 4, -46, -25, 41, 13, -46, 44, 22, -48, 8, -1, 47, 16, -17, 8, 38, -12, -8, 12, -16, 0, 55, 11, 15, -8, 15, 25, -4, 28, -31, -16, 10, 4, 43, 56, -11, 7, 16, -37, 31, -9, -31, -8, 39, 31, 46, 40, 42, 61, -10, -52, 2, 23, -13, 55, 12, 13, 48, 21, 20, 25, 41, 37, 15, -8, 77, 18, 20, 24, -18, -5, -6, 20, 34, 25, 2, -44, 14, 4, -32, 40, -37, -4, -24, 18, 22, 40, -7, -23, 47, 18, -5, 23, -50, -50, -2, -24, -7, 14, -28, -41, 15, -43, -29, -28, 5, -67, -8, -14, -8, 29, 18, 8, 8, 5, -11, -1, -43, -51, -41, -26, -10, 4, 13, 4, 13, -40, -19, -12, -19, -26, -22, -28, -24, -34, 43, -41, -84, 7, -5, -1, 38, -45, 53, 12, 38, -65, 28, 5, -52, 15, -28, 52, 8, -7, 38, 17, 70, 47, -67, -21, -7, -51, -25, 16, -10, 40, -37, -1, -25, -45, 24, -28, -73, 27, -20, 29, 26, 2, 8, 40, 45, -27, -29, -4, -67, 18, 9, 19, -23, 18, 15, -8, 48, 5, -57, -3, 11, -48, -2, 43, 9, -2, -25, 37, 9, -54, 12, 15, -65, 10, -43, 49, -17, 13, 41, 40, -13, -46, 20, -12, -3, -11, -9, 13, 1, 45, -23, -26, -4, 20, -47, 17, 36, -64, -3, 24, 9, 24, -10, -40, -53, -28, 38, -5, -21, 1, 9, -10, -9, -37, 37, -18, 6, -61, 36, 8, -25, 42, -24, 7, -17, -4, -5, -5, 
-15, -65, 30, 1, -1, -18, 29, -4, 8, -24, 16, 17, -19, 3, 33, 7, 11, 37, -15, 11, -12, -25, 23, 2, -19, -6, 14, 3, -51, 38, 30, 0, 22, 28, 33, 23, 46, -58, -44, -11, -2, 23, 4, -11, -28, 14, -19, 20, -21, -1, -17, -1, -27, -9, -35, -6, -30, -37, 44, 3, 29, -9, -5, -4, -11, 44, 12, 5, 15, 5, -4, -4, -11, -64, -7, -18, -31, 15, -21, 21, 12, 43, -14, 46, 24, 16, 48, 20, 5, -6, -9, 16, 36, 16, 0, -5, -15, 14, 32, -3, -6, -2, -3, 29, 29, 34, 16, 59, -6, 15, -13, 8, -2, 33, 28, -17, 50, 22, 53, -4, -54, 7, -13, -12, 31, -3, -43, -37, -55, -44, -21, 45, 40, 22, -11, -32, 4, 33, -39, -22, -11, -54, 20, 46, -13, 21, 42, -41, -15, 48, -34, -30, -1, 15, 20, -69, -53, 38, -62, -32, 3, -25, 7, -34, 22, -20, -48, -51, -45, 15, 33, 1, -26, -17, -6, -9, -19, 10, -34, 32, -36, 0, -16, -42, -24, 31, -19, -42, -16, -50, 41, -28, 25, 24, -59, 18, 36, -55, -9, 26, -36, 12, -21, -32, -1, 15, -16, 16, -15, -32, -21, -52, -42, -36, 18, 42, -49, -25, 46, -34, -60, 33, 16, -19, -20, -9, 52, -26, -2, 10, 15, -14, 34, -20, 30, -5, -30, 35, 2, -39, -27, 15, 21, 13, -15, 9, 12, -19, -23, -33, 5, 20, -38, -3, 54, -19, -31, 33, -34, 12, -18, -8, 46, -22, 47, 1, 3, -21, 15, 15, -14, -24, -21, -9, 21, -20, -34, -4, 32, -26, -15, 22, 8, 4, 31, 13, 35, 40, 40, -27, 49, -40, -17, 31, -41, 26, -8, 16, 26, -19, 2, -48, 18, 16, -7, 3, -43, -7, -42, -32, -23, -15, -11, -6, -7, 26, -4, 23, 19, 31, -30, -7, -16, 4, -18, -67, 30, -31, 1, 4, -8, -34, -7, 5, 30, 28, -7, -53, -30, 22, -3, 19, 39, 27, 48, 34, 18, 22, 9, -10, -18, 9, -42, 17, 27, 6, 24, -7, 39, 16, -7, 26, -49, 9, -35, 8, -1, 17, 1, -26, 12, 12, 12, 20, 5, 32, 53, -31, 13, -2, 5, 20, -29, 36, 3, 55, -4, -25, 55, 16, -29, 46, -7, -3, 4, 44, 46, 55, 13, -3, 3, 5, -34, -2, -44, -20, -33, -59, -37, 13, -57, 29, 39, -14, 41, -35, -22, 16, -3, -1, -41, 21, -6, -48, 73, -20, -35, -2, -41, -16, -32, 1, -27, 61, 54, -28, 62, 18, -18, -31, -55, -30, 31, 44, -11, -47, -45, 23, -52, -21, 48, 33, 59, 46, 6, -29, -50, -22, -72, 53, 11, -57, -1, -4, 15, 2, -30, -53, -43, -4, -3, 14, 43, 15, -13, -31, -29, 13, 8, 23, -2, -8, -6, -2, 39, 63, 17, 41, 43, 49, 9, -23, 22, -38, 37, 16, -6, 38, 37, 44, -10, 44, -15, -17, -44, -51, -30, 39, -14, 5, 32, -39, 4, 63, 8, 43, -16, -17, -25, 40, 18, 11, 22, 14, 8, 41, 0, 18, 26, 21, -5, 35, -2, 22, 57, -31, 32, 43, -24, 25, 15, -5, -7, 26, 28, 28, 18, -15, -4, 1, -18, 38, -2, 65, 3, 18, 35, -24, -2, -28, -14, -18, -29, 7, -8, 34, -12, 45, 28, -29, 36, -12, 33, 25, -29, 35, 19, 34, -27, 33, 55, -7, -5, 14, 9, 28, 1, -30, 12, 29, -7, -53, 32, -1, -32, -21, 7, -38, -52, -34, -24, 33, 6, -7, 6, -6, -58, -16, -10, -32, 13, 2, 21, 23, 19, -42, 19, -27, 10, 17, -27, -24, -7, 36, -49, -4, 20, -26, -35, -15, -48, -22, -50, 1, 15, -9, 31, -72, 1, -17, -68, 49, 4, -10, 20, -13, 5, -18, 48, -61, -13, 15, -60, -9, 30, 62, -13, 35, -15, -5, -55, -36, 30, -57, -32, -1, -23, -15, -1, -49, 31, 12, 18, -53, 48, -13, -56, 15, 1, -14, 19, -10, 10, -36, 47, -47, -37, -16, -51, -49, -2, 31, -20, 17, -4, 12, -30, -68, 20, -36, -4, 3, -39, -35, -22, -30, 25, -28, -16, -1, 49, -21, -35, 35, 15, 3, -22, -11, -5, 15, -5, -38, -48, 14, -17, 17, 3, 26, 2, 67, 2, -9, 20, -65, -13, 41, -79, -29, -12, 2, 14, 56, 13, 3, 38, -11, 31, 25, -46, 2, 20, -4, -40, -12, 16, 23, 33, -22, 11, 24, -26, 22, -24, 31, 4, 30, -42, -19, -2, -3, 50, 19, -23, 5, 39, -17, -30, 55, 25, -31, 12, -37, 3, 50, -27, 10, 39, -15, 3, 57, -8, -52, 21, -28, 25, -28, 42, -39, -28, -9, -21, -14, -14, 32, -34, -29, 10, -33, -6, 2, 5, -12, -6, 39, -28, 12, -10, 10, 42, 53, 12, 2, 7, -8, 19, 20, -18, 7, -40, -1, 
-18, 67, 8, 1, 29, 14, 16, 47, -16, 35, -3, 50, 18, 23, 18, -18, 18, -19, -33, 14, -19, 44, 18, -36, 41, 0, -4, 38, 25, -22, -26, -36, 24, -22, 24, -42, 32, 15, -90, 19, 17, -5, 10, 20, 18, -2, 29, -3, 22, -2, 14, 23, -14, 28, -40, -20, -13, -4, 7, 23, 51, 0, -56, 11, -2, -18, -38, -16, 26, 6, -22, -75, 54, 35, -79, 19, 20, 33, -28, 17, 39, -39, -3, 29, -11, -30, -51, -9, -6, -11, 15, -14, 23, -28, -33, -54, 21, -41, -29, 50, -44, -7, 39, -26, 26, 29, 15, -4, 56, 13, -64, -2, 30, 42, 11, 21, 4, -34, -27, -45, -7, -1, -16, -5, 49, -5, 44, 46, 26, -40, -15, -37, -34, 1, -46, -29, -5, -46, 17, 20, -13, -1, -53, -16, 0, 38, -4, -8, 7, -61, 53, 17, -10, 25, -5, 43, 6, -5, 29, 10, 15, -12, 46, 20, -38, -15, 6, 19, -22, 19, 36, -5, 17, 32, 49, 49, 4, 3, 24, 3, -29, 19, 4, 13, -6, 23, 46, 11, 30, 37, -22, 63, -29, -62, 68, 20, -77, 11, -46, -39, -43, -12, -23, 57, 23, -56, 40, -18, -25, -14, -31, -28, -45, 2, 32, 33, -10, 2, 20, 10, -12, 18, -28, -16, 21, -6, -35, -9, 43, -41, -37, 49, -8, -36, -39, 19, -2, 27, 19, -38, 38, -8, -73, 32, -26, 41, -10, 53, 24, 8, -16, -48, 30, 11, -48, -19, 26, -13, 7, 29, 14, -1, -13, -10, 40, -28, -19, 19, -54, -33, -42, 9, -31, -14, -6, -17, 5, 42, -49, 32, 2, -6, 7, 51, 14, -2, 15, -46, 13, 1, -11, 4, 28, -5, 3, 44, -14, -23, -23, -4, -30, -40, -25, -7, -35, -33, -22, -7, -32, 34, -29, -35, 2, -4, 24, 10, 4, -28, -48, 26, 2, 39, -34, 8, 27, -5, -7, 29, -5, -3, 32, 49, -3, 20, -13, 17, 3, 27, -59, 13, 30, -8, 57, 38, -18, -26, 23, -45, -53, -12, -40, -4, 3, 27, 28, 6, 4, -21, 14, 23, -5, -38, 1, 6, -14, -52, -16, -52, -37, 19, 18, 60, 32, 9, 39, 39, 42, -10, 22, -18, -31, 2, 1, 13, -48, 57, -27, 6, 69, -23, 48, 75, -31, -39, -5, -14, -30, 2, -4, -4, 29, 15, -12, 50, -33, 24, -18, -2, 2, -18, -3, -19, -67, -57, -10, -20, -17, 28, -22, 3, 49, 21, 13, 46, -17, 1, -36, -9, 5, 25, 17, -30, -45, -12, -19, -2, 32, -2, 12, 48, -34, -54, -3, 10, -43, -11, 25, -24, 18, -8, 37, -2, -27, -11, -16, 6, -14, -11, 4, -24, -6, -41, 1, -23, -5, -11, -39, 69, 9, -11, 16, -32, -2, -50, -3, -38, -6, 6, 6, -21, -26, 43, -17, -15, 63, 0, 48, 40, 33, 11, -2, -10, -35, 18, -6, -12, 35, 10, 49, 59, -19, -31, -49, -23, 11, 31, -22, -14, 6, 20, -23, 36, -25, -10, 22, 43, 26, -29, 56, 4, -10, 22, 25, 24, -7, -18, 27, 21, 41, 35, 33, -10, 8, -25, 31, 35, 34, 15, 27, -45, -25, 42, -18, -10, 31, -12, 47, 42, -21, -25, -9, -6, -3, 4, 1, 17, 8, -16, 8, 6, 4, 10, -54, -12, -6, -9, -34, 8, -15, -51, 28, -12, 31, -21, -47, -47, -20, 34, -22, 20, 53, -10, 32, 32, 8, 41, -23, 20, -59, 3, 22, 18, 28, -18, 43, 37, 1, 30, -14, -4, -45, 4, 2, -9, 47, 26, 13, -22, -21, -8, 12, 2, -27, 16, 34, 28, 25, -8, 6, 33, 1, 40, 49, 92, -33, -14, 33, -29, -56, 35, 20, -24, -61, 44, -4, 33, 28, -27, -6, -18, 7, 5, -27, 5, -43, 24, -17, 18, 22, -21, 15, -4, -52, 38, 5, -18, -34} + +#define TENSOR_CONV2D_1_KERNEL_0_DEC_BITS {8} + +#define TENSOR_CONV2D_1_BIAS_0 {-4, 6, 6, 20, -27, -40, -15, 21, -35, -50, -13, -25, 31, -28, 0, -53, -22, -71, -47, -38, -57, -65, -6, -32} + +#define TENSOR_CONV2D_1_BIAS_0_DEC_BITS {10} + +#define CONV2D_1_BIAS_LSHIFT {4} + +#define CONV2D_1_OUTPUT_RSHIFT {9} + +#define TENSOR_CONV2D_2_KERNEL_0 {-16, -10, -12, 39, 12, 18, 2, 11, -9, 7, 15, -12, -20, 6, -4, 12, -1, -6, 4, -6, 22, -3, -7, 3, 2, -16, 12, -3, 11, 21, -1, 16, -27, -12, 21, 0, -27, 2, 7, -7, -6, -19, 19, 19, 28, 20, 16, 13, 22, -22, 6, 8, -8, -9, 3, -17, -13, -22, 12, -5, 18, 16, -19, -7, -3, -15, 30, 5, 17, 4, 12, -15, 11, -8, -25, 11, 15, 11, 2, -20, 26, -20, 1, -25, -37, 30, -20, -5, -24, -14, -13, 3, 18, 16, -22, 1, -1, 26, 
-6, -30, 27, 19, 3, 16, 9, -14, 25, 6, -4, -7, 8, 34, -5, -14, -26, 47, -13, -6, -9, -8, 16, 19, -38, -9, -21, -14, -19, -22, 33, -12, 1, -5, 6, 27, -29, 1, -12, -10, 24, 34, -23, 7, -8, 40, -44, 27, -10, -36, 27, 60, -10, -20, 18, -26, -40, 13, -52, -24, 27, 27, -27, -34, -26, 10, -28, 1, -19, -17, -5, 70, 4, -33, 45, 26, 10, 0, 32, -39, -24, 28, -19, -15, 18, -9, -23, -17, -25, 36, -13, -7, 13, 5, 15, 48, -45, 4, 47, 28, 9, -2, -15, -29, -3, 7, -7, -24, -28, 14, -15, -10, -13, -32, -18, -12, -4, 4, 11, -6, 14, -23, 29, 21, -40, 38, -27, -28, -24, -15, -12, 11, -38, -34, -25, -26, 5, -5, -44, -52, -5, -21, -14, 20, 9, -9, 37, 16, -30, -4, 0, 13, -1, 3, -23, -9, 15, -20, -20, -4, 10, -8, 8, 18, -10, 21, 7, -27, -12, -3, 13, -9, 3, -40, -14, 19, -6, 19, -43, -15, 30, -11, 10, -5, -28, 17, -6, -1, 25, -6, 22, -5, -11, 7, 41, 25, 16, 32, 33, -5, 26, 16, 6, 20, -25, -1, -14, 11, -4, 33, 1, -19, 14, 37, 6, 30, -20, -11, 9, 32, -24, 9, 7, -5, -12, -15, -48, 38, 2, 16, 5, 30, -33, 5, 3, 30, -5, 13, 18, 24, -30, -46, 8, 4, -23, 22, 1, -25, -22, -1, -42, -6, 6, -11, -30, -18, 14, -25, -25, 3, 6, 8, -25, 6, -3, -11, -2, 35, -7, -8, 18, -6, 15, 21, -16, -32, -14, 15, -25, -28, -4, 30, 17, 13, -18, -25, -11, -32, -31, -13, 3, 0, 12, -50, 19, 0, 18, 31, -38, -4, 8, 7, -5, 15, 6, 33, -26, 25, 9, -11, -5, -11, -47, -40, -18, -20, -24, 14, -11, 6, -5, 11, -16, -7, 8, -8, 9, -12, -14, -14, 5, -20, 16, -18, 9, 12, -6, 17, -2, -16, -6, -15, 27, -35, 4, -4, 19, 35, 27, 27, 15, 24, -9, 39, 0, 0, 2, 36, -3, -32, -10, 14, -2, -7, 0, -1, 18, 36, 25, -14, -1, 13, -2, 13, 20, 10, 24, 6, -6, -10, -6, 19, 10, -13, -22, -8, -14, -11, -2, -28, 43, 16, 15, 36, -34, 8, -27, 48, 26, -32, 9, 25, -4, -28, -31, 30, 1, 22, -20, -10, 1, -18, -5, -25, -9, -8, -27, 24, 4, -35, -30, -14, 15, -34, 15, 3, -6, -5, -17, -17, -12, -12, -29, 18, -5, 21, 19, 27, -7, 21, -4, -11, 6, 12, 6, -25, 14, 11, 3, -41, -12, -8, 0, -16, -15, -7, -12, 35, -3, 8, -7, -6, -23, -15, -12, -37, 28, -11, -17, 1, 1, 1, 5, -65, -29, -54, 9, -49, 16, -27, 14, -16, 22, 37, -36, -9, -43, -23, -26, 2, -21, -14, -10, -32, -21, -3, -2, -22, 2, -15, 10, 17, -3, 18, 2, 4, 8, -9, 1, 41, -35, -2, 24, 3, 28, 18, 6, -9, -16, -11, -6, -14, -16, -4, 10, -29, 38, 24, 8, -17, 8, -17, 20, -10, 14, 25, 26, 8, 4, 14, 13, -29, -17, 0, 13, -16, -10, -19, 23, 0, 4, -12, -5, 4, -17, -38, -7, 7, -19, 15, -8, 19, 10, -23, -16, -8, 34, 23, 17, -50, 9, 7, -9, -16, 16, -1, 19, 23, -28, -33, 28, -6, -24, 26, -7, -20, 4, -26, 3, -5, 25, 14, 29, -27, -2, 20, -26, -34, 8, -33, 14, 46, -19, 10, 9, 32, -18, 13, -7, -17, -11, 5, -25, -14, 15, -2, 22, 3, 16, -29, 22, 8, -11, -11, 8, -15, 17, 18, -21, -11, -26, 14, -11, 1, -1, 26, 28, -31, -15, 22, -23, 10, -2, 34, -13, -2, -5, -35, 40, 35, -23, -40, 31, 3, -49, 23, 15, 5, 36, 5, -27, -25, 55, 26, 25, -15, -1, -5, 3, -26, -8, -62, 18, 33, -25, -25, 35, -21, 9, 30, 2, -21, 0, 18, -22, 16, 7, 23, -8, -21, 31, 11, 18, 22, 4, 36, -11, -7, -31, -37, -28, 18, -3, -24, 4, 25, 0, -8, 19, 41, -13, -29, -17, -4, -15, 0, 3, 19, -18, -37, -14, -16, -8, -6, -19, -7, 18, 6, 21, -4, 1, 14, -42, 19, 8, 5, -25, -22, 12, 4, -30, 12, -5, -43, -1, -31, -19, 6, -19, -21, 33, -7, 11, 27, -47, 14, 18, 23, -21, 22, -46, 32, 3, 26, 0, -7, 30, 4, 2, 2, 30, -15, 5, -23, -8, -20, 24, -20, 20, 7, -29, -23, -8, 34, 14, -10, 12, 10, -17, 28, -1, -7, 0, 11, -24, 12, -21, 17, 7, 30, -6, -25, -26, -6, 12, -7, -23, -2, -26, -26, -21, 13, 29, 12, -37, 31, 23, 35, 10, -9, 0, 35, -21, -33, -28, 10, -7, 0, 3, -9, -16, -29, 14, -5, 10, 6, -4, 1, -11, -2, -18, -21, 4, 12, 20, -1, 
10, -20, -9, 4, 27, -23, -37, 24, 15, -13, 17, -22, -9, 7, 10, 8, -16, 14, 4, -8, 23, 30, -24, 12, -22, 14, -10, 12, -1, 12, 20, -22, -14, 4, 5, 23, -3, 43, 8, -15, -21, -48, 0, 12, 7, -4, -21, -34, -44, 16, 19, 15, -27, -17, 34, 7, 19, 6, -18, -3, -21, 4, 19, -23, 4, -40, 14, -2, 28, -22, -4, -40, -13, -15, -3, -34, 0, 20, -40, 0, -16, -13, -1, 12, -12, -33, -13, -9, 23, 8, -8, -32, -27, -45, -11, 9, 4, 24, 6, 0, -19, -15, -4, -11, -21, -20, 16, 2, -6, 0, 7, -15, 14, -71, 33, 0, 3, 16, 11, -12, -5, 17, 18, 7, -31, -16, 25, 35, 5, 45, -8, 14, 28, 38, 35, 19, 28, 13, 10, 11, -33, 16, -11, 9, -11, -28, -6, 20, 40, -28, 20, 10, 10, -30, 40, -43, -20, -23, -30, -32, -8, -19, 4, 2, -24, -29, 34, -14, 1, -1, 3, -30, -12, 2, 15, -23, 30, 26, 4, -40, 18, 3, -35, 11, -15, 22, -20, -4, -13, -14, -18, 6, -16, 8, -2, 15, 11, -31, 10, 11, 10, 24, -24, -12, -23, 0, -31, 9, -2, -11, 9, 18, 8, 11, -10, 5, -45, 38, 17, 12, -6, 8, 6, -22, 8, 23, -25, -12, 9, 7, -6, -8, 5, 24, -15, -12, -19, 10, -1, 39, -2, 13, -14, -6, 9, 8, 10, -6, 18, 9, 13, 12, 8, -4, 28, 19, -28, -9, 28, 51, 26, 13, 9, -2, -18, 12, -20, -42, -3, -26, 3, 7, 38, 26, 21, -35, -10, 9, 15, 8, 11, 12, -2, -29, -45, -53, -11, 7, 27, -14, -8, 10, -6, -24, -30, -9, 0, 2, -26, 10, 10, 5, -26, 17, 16, -35, 4, -17, -20, -7, 19, 7, -19, 15, 5, -16, -27, -20, 22, 19, -17, -5, -22, 10, 21, 20, 2, 5, -20, 7, 1, 10, 9, -23, -23, 2, -1, -13, 11, 7, 21, -14, -10, 2, 16, -22, 5, -15, 20, 16, 13, 14, -20, 26, 5, 5, 36, 18, -33, 16, -12, -24, -25, -19, -21, -27, 12, -6, -15, -17, -24, -13, 1, -7, 19, -14, -24, -3, -16, -25, -4, 17, -34, -9, 7, -16, 2, -5, -5, -16, 15, -29, -51, 6, -28, 36, -42, 6, 34, -2, 2, -6, 10, -20, 17, 25, -26, -10, -1, -20, 28, 15, -13, -2, 35, -8, -32, 0, -11, 42, -36, 36, 47, -13, 23, -10, -1, 17, -24, -13, 31, -5, 0, -6, 22, -23, -3, 20, 5, -11, 14, -8, 21, -16, 26, 30, 15, -16, -22, -32, 3, -7, -11, -31, 5, -33, -6, 4, 7, -16, 0, -10, -3, -18, 1, 7, 24, 15, 19, 30, -5, -9, 34, 16, -10, 9, 4, 15, 20, -26, 22, 12, 13, -6, 9, 33, 2, 20, 2, -11, 12, -4, -23, 1, -17, 34, 11, -7, -18, 4, -5, 20, 18, -23, -8, 4, -19, -3, -37, 15, 16, -27, -18, -10, -16, 13, 1, 5, -9, -21, 2, -10, -14, 11, -11, 30, -5, -23, 46, 4, 3, 37, -7, 17, -35, -4, -4, -25, 5, 14, -14, -34, -1, 28, 31, 52, 2, 1, -10, -21, -14, 59, 24, 26, 16, -37, -6, 19, -75, -10, -26, -7, -20, -39, -33, -61, -31, -8, -8, -7, -23, -35, 19, 24, -38, -33, 15, -67, -36, 40, 22, -22, 51, 25, -28, -30, -18, 40, -28, 12, 28, -33, -16, -33, 10, -2, -33, 22, 13, -41, 29, -21, -9, 42, -29, 17, 5, 2, 12, 24, -18, 20, 55, 38, -23, -11, -27, -15, -36, 2, -26, 5, 22, -10, 35, -9, 16, 35, -51, -18, -21, 23, 27, 6, -2, 40, 9, 24, 25, -2, -39, -22, 0, -2, 26, 10, -15, -39, -6, -38, -7, -13, -28, 12, 40, 10, 6, -1, -23, 25, -23, 11, -3, 23, -14, -30, 6, -30, 7, 5, -27, -35, -40, -41, 2, -7, -12, -5, 40, -25, -27, 2, -19, 18, 2, 8, 38, -15, -7, 3, -17, 16, 35, 24, -31, -20, -28, 3, -33, -11, 12, -26, -8, -2, 21, 8, 20, 16, 1, -30, 30, -1, 3, -42, 28, 8, -18, -5, 25, -31, 13, -10, 1, -11, 0, -16, 15, -12, 8, 32, 21, -25, 8, 10, -2, 5, -22, -14, -26, -10, -20, -3, -10, -49, 21, -19, -20, 21, 5, -7, 23, -12, 7, 7, -12, 35, 4, -4, 0, 10, 32, 13, -7, 2, 9, -7, -14, -8, 40, -2, 20, -1, -2, -10, 15, 9, 17, -4, -26, 18, 2, -13, 1, 21, -4, 12, 7, 12, -19, 6, -5, 43, -38, 11, 1, -9, 0, 7, -16, -41, -21, -15, 3, -26, -13, -27, 0, 18, -5, 5, 42, -4, 13, 2, -1, 34, -28, 17, -3, -3, -5, -8, 3, -39, -16, 0, -15, -13, 10, -5, 18, 3, 20, 28, -20, -2, 27, 1, -28, 32, 5, -28, 14, -31, 17, -6, 4, -28, 4, -17, -3, -10, 
0, -34, -8, 5, -17, -33, -23, 8, -3, -10, 14, 1, -5, -14, 19, 13, 1, 1, 21, 9, -11, 19, -4, 25, 32, 30, 32, 39, -9, -38, 12, 17, 10, -14, 5, -41, -20, 1, -5, 37, 23, -18, 44, -13, 43, 40, 9, 36, 46, 10, 37, 14, 13, -7, 50, 29, -27, -20, 12, -28, -1, 14, -11, -5, 4, -8, 28, -25, 33, 19, 3, 6, 22, 20, 23, -22, 4, 19, -25, -23, -31, 23, 2, 33, -22, -4, -2, 1, -9, -13, -17, 23, -34, -21, -8, -18, 20, -21, -34, -28, 3, -36, 21, 11, -70, -33, 25, -33, -6, -6, -18, 8, 34, -7, -29, -12, 31, -30, 26, -51, -7, 2, -34, 6, -17, -29, -10, 13, -26, -42, -6, -6, -17, 1, -15, -1, -2, -4, 40, 17, 32, 23, -21, -9, -1, -8, -3, -10, -4, 5, -13, 8, -21, -8, 19, 29, 44, 40, 17, 4, 6, 6, -54, 23, 6, 5, -19, -18, 17, -36, -42, 30, -11, 33, -42, 3, -37, -5, -8, 21, 40, -5, -6, 18, -49, 21, -48, 45, -27, -29, -35, 8, -17, -22, -14, 3, -15, 22, -1, 9, 29, 2, -22, 11, -6, -27, 3, 33, -9, -2, -23, 32, -3, -30, -19, -14, -25, 25, -3, -13, -23, 8, 34, 16, 25, 0, 0, 38, -1, 13, -14, -8, -19, -3, -8, -21, 37, -9, 29, -17, 3, -25, -17, -25, -28, -19, -30, 5, 25, 7, -16, 1, -12, -13, 14, -8, -19, 11, 30, -18, 11, 11, -20, 3, 11, 8, 24, 6, -31, -20, -16, -28, -13, -6, -14, -41, -26, -38, -45, -20, -12, -29, -5, -29, -33, -7, -43, -23, -3, -36, -22, -49, -27, -3, 8, 35, -41, -1, 25, -18, 33, -30, -28, 46, -8, -50, -16, -1, 25, -29, -18, -3, 15, -1, -4, -15, 16, 7, 9, 44, 22, -4, 29, -10, 18, -2, 35, 52, 2, -16, 15, 36, 50, -2, 29, 15, 59, 73, 28, -4, -6, -24, -19, 18, -12, -3, 16, -28, -18, -27, 12, 20, -1, 0, 28, -7, 11, -10, 22, -11, 15, 21, 1, 22, -30, -14, -4, 3, 3, -13, -5, -11, 19, 15, -23, 34, 27, 27, -3, -7, -7, -17, -3, 31, 15, -3, 18, 15, -6, -28, 23, 25, 21, 6, 10, 7, 15, 14, 20, 7, -12, 23, 24, 18, -11, -21, -8, 13, 36, 22, 26, 12, 15, -38, 3, -8, -12, -4, -10, -23, 7, 2, -1, 21, -40, 0, 18, 11, 23, -3, -16, 3, -15, 30, -12, -24, -31, -3, 7, -21, -3, -2, -5, -11, 4, -10, -3, -3, -39, -10, 11, 12, -11, -3, -5, -25, -26, 5, 8, 17, -31, -8, 15, -21, -12, -18, -1, -13, 5, -35, -26, 24, 2, -38, 6, 5, -27, 8, 18, -6, 22, -24, -12, -33, 16, -18, 10, 4, -29, 1, 8, -1, -31, -3, 14, -3, 24, -18, 3, -8, 29, -17, 8, -12, 1, 18, -4, -16, -26, -4, -2, -16, 11, 35, 1, 47, 17, -21, -21, -8, -15, -3, -1, -19, -4, -39, 23, -10, -13, -4, 7, 19, 16, -40, 18, -27, 3, 21, 24, 15, -36, -6, 7, -10, 5, -15, 28, 9, -11, -35, 16, -32, -14, 7, 20, -12, 31, -37, 15, -3, -23, -29, 0, -39, 15, 31, 23, -27, -4, 15, 15, -3, 25, -22, 19, -30, -2, -26, -3, 14, 9, 2, 3, -29, 39, -11, -21, -20, -16, -4, 21, 29, -33, 25, 19, 35, 0, 3, -14, 17, 32, 48, 44, -4, -8, 7, 14, -4, -5, -17, -9, 15, -5, 13, 20, -5, -6, 28, 2, -23, -12, -9, 10, -20, 10, 19, 22, -20, -14, 13, 28, 18, -45, 1, 12, 16, -22, 12, 9, -45, 21, -22, -24, -32, -18, -7, -14, -40, -10, -6, 2, -24, 33, -19, -10, -14, -30, -28, 32, -39, -31, -7, 12, 23, 27, 11, -42, 6, 20, -4, 29, -9, 31, 12, -24, -19, 43, -16, 33, 13, 6, -7, -3, 22, -32, 42, -8, 10, 5, -27, -6, -12, 40, 21, 28, -2, 22, 28, 14, 22, -24, -10, -1, 12, -9, 19, -5, 15, -45, -26, 1, -26, 14, -12, 28, -37, 11, 5, 13, -31, 13, 4, -6, -32, 10, -13, 0, -35, -1, -39, 12, -19, 31, -4, 13, 26, -6, 18, -29, 6, -44, -33, -3, 1, 2, -3, -39, 42, 0, 2, 25, 20, -30, -27, 23, 14, 30, 16, 9, -14, -17, 7, -3, -1, 9, 4, -10, -19, 3, 2, -35, 30, -33, -4, 2, 0, -9, 10, -16, 3, -27, -18, 23, -6, 13, 29, 3, 20, -14, 3, 5, 24, -3, -21, 23, -11, -32, 14, 0, -15, 23, 8, 14, 15, 1, -9, -19, -31, -13, 20, -5, 4, -19, -4, 8, 9, -18, 39, 19, -11, -22, -13, -11, 1, 33, 33, -35, 19, -30, -34, -17, -8, -34, -22, 31, -20, 1, 16, -29, 2, -8, 7, 18, 5, -15, -25, 
6, -8, 32, 17, -9, -15, 2, -14, 0, 4, -10, 6, 18, -12, -16, -21, 14, 7, -3, 13, 25, 23, 15, 8, 35, 29, -24, 0, 20, -19, 22, 11, 9, -26, -10, -39, -39, -31, -8, -4, 3, -9, 16, -11, -7, 0, 12, 8, -17, -1, -26, 30, 3, 1, -12, -2, -20, 37, -11, -23, 18, -16, -33, 7, 27, 21, 24, 29, -14, -6, 26, -21, -19, -30, -8, -24, 34, -51, -23, 28, 3, 20, 1, -15, 16, 3, -11, -6, -6, 7, 25, 11, 2, 18, 4, 62, -13, -25, 2, -2, -27, 62, 24, -15, -15, -6, 19, -13, -42, 7, -5, -54, -11, 42, -35, -20, -50, 0, 32, 13, 8, -20, -3, 23, -14, 46, -4, -20, -2, 11, 12, 8, -30, 21, -41, -29, -7, -17, -15, -62, -31, -5, 14, 22, -19, -15, -3, -28, 22, -33, 6, 21, -15, -6, -23, -2, 16, -8, 1, -33, -11, -6, -28, -15, -25, 21, -16, -32, 17, 6, 21, 36, -35, 30, -27, -8, -28, -14, -17, 37, -17, -3, -2, 12, -30, 42, 8, -18, 10, 11, -23, -50, -29, -36, 54, 29, -18, -18, -4, 3, 5, -9, -9, 15, -8, -9, -22, 6, -47, 51, -11, -26, 3, 33, -21, 17, -1, -37, 47, 4, -26, -24, -10, -3, -4, 11, -13, 37, 26, -10, -36, -42, 8, 17, 6, -18, 7, 34, -25, 11, 1, 21, 26, 20, 18, -20, 33, -32, -22, 4, -42, -27, 17, 29, -24, 19, -17, 34, -4, -13, 9, -2, -4, 16, 1, -33, 29, 55, -27, 9, 31, -35, -10, 23, -31, 0, 2, -12, -5, -14, 28, 32, 4, 17, -12, -13, 27, -28, 42, -15, -8, -23, 14, 27, 17, 17, 16, -6, 29, 1, -18, -22, -10, 12, 24, -15, -4, -40, -26, 11, -1, -5, 33, 23, 7, 3, -9, 7, 1, 7, -6, -1, 19, -15, -7, -19, 10, -22, 33, 20, 21, 27, 4, 8, 26, -25, -3, -38, -14, -41, -19, -10, -40, -10, -3, 11, 0, 24, 9, -17, -14, -26, 14, -2, -8, 9, -8, -8, -3, -5, -14, 17, -4, -34, -5, 10, 24, 44, 20, -41, -11, 4, -34, -7, 1, 41, -9, -22, -7, 19, 7, -2, -24, 14, 23, -1, -13, -1, 1, 34, 24, -29, 10, -13, 0, -35, -23, -19, -18, -14, -6, 1, -1, -9, -20, -15, -17, -4, 22, 23, -33, -1, -9, 13, 34, -15, -6, 6, -25, -13, -9, 29, -9, 22, 15, 28, 42, 20, 5, -11, 0, 38, 17, -15, -48, 3, -4, -18, 27, -13, 0, -6, 1, 16, -16, -4, 18, -22, -20, 33, -2, -27, 5, 5, -27, -26, -10, -35, 16, 36, -15, 27, -24, -16, -18, -11, -47, 10, 31, -11, -40, -26, -10, 3, -29, 4, 15, 15, -41, 20, -5, -28, 36, 56, 0, -26, 0, -15, -5, -1, -11, 6, 38, 2, -31, 13, -23, 8, -29, 10, 28, 13, -8, 3, -18, 4, 42, 37, -17, -22, -10, -39, -14, -2, -36, -18, -5, 4, -3, 22, -9, 9, 1, 11, 15, -26, 22, 21, 15, 21, 5, -4, 1, -6, -4, -14, 20, 2, -19, 8, -43, 16, -4, -15, 2, -1, -20, 7, -12, 17, 10, -36, -10, 9, 19, -7, -12, -37, -19, 1, 28, 19, 11, 16, 29, 1, 9, -13, -10, 4, 26, 9, 29, -1, 6, -28, -21, -17, -3, 15, 20, 3, -10, -16, 33, -5, -8, -1, 45, 28, -12, 6, 11, 3, 31, 22, 11, 11, -5, -4, 24, 17, -8, -41, 22, -22, -32, 20, 34, -21, 51, 9, 0, -4, 13, 21, 16, 8, -4, 3, 3, 4, 12, -8, 2, 18, -1, 7, -2, 31, 16, 38, 12, -25, 3, 19, 44, -21, 17, -7, -21, -6, 25, 22, 31, -3, -29, 9, 18, 14, -3, 28, -11, 1, 11, -38, -23, 16, -14, -30, -8, 10, -11, -21, -36, 15, -15, -17, -16, -34, 16, -21, 10, -15, -67, -32, 8, -5, 1, 21, 13, 2, 0, -2, -2, -10, 3, -14, -2, -39, -11, 23, 9, -19, 4, -13, 34, 29, -16, -22, 30, 18, -22, 7, 17, 13, 15, -18, 18, 13, -1, 30, -11, 1, 1, 2, 2, -31, -21, -35, -4, -4, 18, 4, 9, 5, -57, -28, -4, -22, 6, -5, 10, -21, 4, -14, 22, -3, -18, 1, -3, -5, -24, -44, -3, 15, -52, -51, 20, 3, -36, 5, -24, -18, 7, 5, -3, -16, 25, 12, -10, -51, 2, -31, -17, -24, 20, -16, 38, -3, -38, -33, 30, -26, -21, 2, 30, 4, 24, -14, 6, -3, 7, 15, 5, -33, 50, -16, -12, -10, 34, -5, 25, -3, -1, -38, 12, -43, 17, 9, -5, -3, 2, 34, -11, 16, 9, -10, -15, -13, 5, 6, -21, 23, -15, 7, -1, 14, 6, -17, -6, 23, -2, 9, 10, -6, 24, 1, -17, -11, -14, -2, -1, 20, -14, -19, -7, 20, -5, 9, -7, 19, -23, 0, 13, 7, 11, 22, 18, 2, -13, 
22, -54, 8, -12, -19, 12, 1, -6, -24, -19, 25, 38, 11, 18, -6, -7, -50, 8, -6, -26, 9, 27, -4, 36, 11, -15, -21, 16, 18, 15, -31, 12, -16, -30, 14, 10, 17, 5, 1, 29, -1, -28, 34, 27, -21, 15, 17, -8, 9, 7, 13, -9, 8, 26, 3, -13, 6, -15, 2, -4, 17, 26, -6, -11, -26, 4, -29, -17, 4, 16, -4, 7, 13, -50, 19, 25, -27, 16, -26, 15, -8, -22, 3, 2, -35, 16, -7, -1, -22, 1, 19, 12, 34, 4, -4, -23, -8, -43, 8, 25, 23, -1, 9, -3, 20, -15, 24, 30, -6, -20, 9, 7, 37, 0, -1, 27, -18, -21, 35, -6, -29, -11, -2, 4, -17, -8, -22, -23, -26, -22, 16, -24, 12, -31, -13, -21, -16, 17, 9, 25, -7, 19, -22, -1, 21, -24, 8, -8, -16, -31, -25, 8, -24, -2, 22, -25, -31, -14, 3, 1, 30, -5, -36, 28, -9, -35, 1, 12, -4, -4, 38, 9, 0, 15, 15, 23, -11, -17, 20, -17, 18, 5, -11, -4, 0, -24, -5, 35, -18, -40, 18, -21, -40, -5, 19, -36, -22, 21, 12, -7, -1, -24, -29, -29, 33, 28, -34, -17, -1, -10, -3, 11, -20, 9, 1, 2, -8, -27, -5, -24, -30, 8, 6, 10, 0, -28, 11, 16, -39, 26, 41, -40, -18, 23, -16, -14, 43, -17, -1, -5, -28, -13, -11, 27, 25, 28, -5, 4, 22, -6, 8, -15, -48, -21, -13, 2, 29, -24, -16, -30, -24, -27, -33, -17, 23, -14, -32, -36, -4, 6, -8, 7, -16, 19, -8, -3, -9, -20, -23, 34, 32, -5, -39, -15, -1, -21, 3, -19, -14, 29, 5, -10, -4, -18, -20, 0, 1, -6, -11, -1, -17, 39, 11, -9, 8, 15, -10, -22, 17, -9, 25, 26, -18, -22, 10, 40, -8, 41, -22, 38, 9, 19, 25, 3, 45, -23, -3, -1, 9, -20, 36, 48, -4, -12, -21, -34, 37, -5, 20, -19, -24, -15, 14, 0, 11, -10, 11, -7, 17, 15, -2, 22, -6, -27, -14, 23, -24, 0, -4, 14, 16, -19, -9, -16, -29, -5, 25, -31, 1, -2, 7, -2, 34, 17, 0, -19, -43, 6, 15, -19, 21, 16, -29, 16, 8, -42, -22, 16, -35, -20, -43, -5, -17, -12, 21, -8, -2, 43, 53, -26, -58, 4, -5, -2, 18, -12, -11, 38, -41, -4, -12, 46, -6, 32, -51, -10, -18, -19, -9, 1, -6, 17, 1, 24, 9, 21, 32, 13, 1, 17, 1, 12, 33, 7, 12, -9, -15, 7, -15, 12, -25, -9, 9, 7, 36, -16, -4, -9, -10, -11, 41, 14, -10, -8, -14, -26, -28, -3, -26, -3, 0, -17, -24, -9, -23, -25, -13, -35, 2, -3, -14, 48, 25, -1, 35, -40, -36, -26, 1, 4, -22, 39, -33, -31, -18, 10, -5, 17, -27, -3, -12, -32, 2, -27, 3, -3, 13, -14, 5, -9, -1, -20, 31, 10, 0, -18, 8, -14, -1, -4, -22, 27, 13, 11, -13, 19, 39, 6, 27, 5, -26, 9, 15, 20, 3, -16, 9, 15, 3, -3, -8, 25, 1, -15, -9, 14, -3, -3, -8, -24, -3, 23, 6, -16, -13, -18, -5, -3, 1, -17, 6, 0, 27, 28, 21, 5, 8, -27, 34, -6, 19, 18, 31, -27, -28, -8, 9, 5, -5, 11, -51, -26, 17, 4, -9, 3, 3, 2, -5, -17, 16, 17, 40, -4, 27, 31, 17, 7, -22, 0, -19, -4, 22, 20, -13, -37, 13, 5, -17, -8, 25, -2, -27, 4, -10, 0, 8, -15, 5, 10, -21, 0, -11, 1, -24, -54, -37, 30, -45, 43, -5, 9, 23, -14, -32, 22, 8, 8, 0, 26, 8, -7, 7, -28, 15, -8, -33, -13, 1, -28, -22, 31, -71, 39, -13, 25, 11, -10, -11, 16, 49, -2, 17, 8, 44, 50, 0, 2, 26, 3, -25, 5, -9, -15, -31, -8, -7, 18, -1, 12, -11, -53, 16, 15, 9, 20, 17, 16, 30, -4, 6, -3, 27, -35, -18, -17, 21, -14, -11, -25, 45, 6, -2, -24, -3, 19, -42, -10, -24, -18, -14, 10, -38, 6, -43, -37, -36, 7, -25, 22, 9, 0, 14, -4, -9, -2, 9, -3, -12, 33, -32, -8, 12, 26, 19, 24, -12, -14, -20, -24, -29, -13, -5, -9, 27, -34, -28, 2, 21, -25, -16, 1, -29, -5, -10, -22, -35, 35, -16, -1, -11, -13, -9, -11, -17, 17, 30, 35, 29, 24, -1, 2, 20, -6, 2, 3, -24, 20, 9, 22, -15, 7, 7, 11, 0, -33, -8, -1, 22, 18, 35, -1, -10, -15, 12, -13, 11, -11, -16, 28, -7, 0, 8, 25, 15, 22, -14, -26, 21, 0, -19, -10, 14, 24, 49, 4, 22, 9, -32, 17, 38, 2, 16, -2, 18, 15, 7, -46, -5, -19, -15, 10, -28, 6, -4, -35, 13, 3, 40, 15, 0, -32, -4, -3, 26, -40, -11, 23, -26, 33, -6, 14, -12, 10, 14, 23, -38, 1, 9, -26, 10, 
-15, 2, 19, 33, -6, 3, -17, -21, -33, 26, 14, -44, 13, 6, 16, -30, 32, 6, 1, -21, -5, -23, 24, -27, -23, -22, 23, 7, 18, 7, -5, 39, -17, 22, 24, 14, 47, -7, -3, -22, 39, 7, 27, -11, 7, 8, -7, 6, 21, -5, -16, 18, 6, 25, 8, -5, -45, 2, -14, -9, 7, 1, -16, -40, 20, 10, 6, -52, -8, -26, 17, -26, -9, -3, 14, 21, -16, 6, 28, 1, -24, 26, 2, -11, -35, -23, -15, -43, 32, 5, -20, -37, 23, 15, -6, -30, -5, -14, -25, -8, 22, 19, -15, -6, -27, 2, -19, -22, -4, -20, -2, -23, -27, -13, -25, -10, 0, -13, 4, 16, 1, -32, -34, -13, 13, -4, 8, 6, 4, -6, 11, 8, -44, 27, -2, 15, -22, 18, 4, 10, -22, -3, 5, 17, -24, -53, -40, -14, 1, 4, -7, -21, 5, -23, 20, -10, -46, 0, 5, 38, -13, 11, -19, 41, 12, 35, 0, 20, 28, -7, -23, -6, 11, 21, 14, -9, 10, 9, 3, -11, -22, 24, -12, 26, -12, 9, 4, 24, -14, 23, 23, 19, 8, 28, 33, 6, -20, -3, 8, 0, 29, -10, 0, 26, -15, 15, -5, 8, -3, -19, -18, -13, 28, 22, -2, 25, 1, 28, -16, -4, -6, 7, -16, -23, 23, 4, -11, 22, -35, -28, 5, 39, 29, -14, 18, 41, 23, -4, -4, -18, 8, 8, -39, 3, -24, 12, 6, -21, 7, 14, -31, -9, -1, -1, -3, 17, -30, 19, 22, -8, -11, 28, 7, 9, -27, -5, -19, 1, 19, 18, 15, -31, -41, -18, 0, -13, -11, 3, 37, -4, -19, 13, 20, -14, 13, 7, 9, -25, 12, -28, 8, -6, -16, 11, -9, -3, -49, -4, -16, 2, 0, -14, -22, 5, 0, -9, 32, 5, 2, 1, -6, -2, 20, -21, -4, 23, -7, -29, 19, -12, -27, 14, -7, -47, 32, -1, 3, -19, -7, -1, 7, -5, 23, 0, -2, 7, 13, 33, -12, -27, 35, 9, -3, 39, -19, -28, -14, 2, 6, 12, 0, 19, -39, -31, 13, 2, -17, -1, -13, -1, 4, -3, 8, 27, -19, 4, 30, 2, -4, 35, 16, -23, -11, -7, -28, -14, 6, 1, 36, -30, -1, -8, -7, 22, 25, -14, -8, 43, -18, -40, 10, 5, -12, 41, 27, -29, -24, 3, -17, -12, 1, 34, 10, -23, 30, -7, 26, 31, 0, -12, -13, -1, 53, 43, 5, 23, -9, -22, -16, -3, -13, 34, 25, -22, -24, -32, -33, 9, 0, -34, 20, 30, 27, 0, -32, -31, 34, -6, -4, -21, 23, 3, 30, -2, -20, -15, 12, -11, -35, -3, -6, 15, -7, -35, 10, 24, -9, 12, -44, -32, -9, 1, 18, 1, -11, 4, -3, 1, -31, 25, -12, 9, -8, -25, -2, -7, 12, -8, 19, 27, -1, 25, -12, -7, -25, -16, 3, -13, -2, 9, -6, 8, 26, -7, -13, 16, -13, -41, 0, 34, -9, -20, -30, 14, -19, 21, -2, -34, 5, -15, -18, 26, 12, 22, -19, -16, -17, -4, -43, 18, 12, -15, 13, -4, -38, -10, -10, -17, 21, 22, 27, 5, -39, -37, -8, 18, -7, 23, 19, 13, 49, -14, -29, -3, 7, -21, 26, -11, 8, 7, -36, -10, -26, -2, 41, 48, 11, 30, 8, 9, -43, -27, -27, 21, 31, -25, 20, -29, -7, 16, 7, -23, -12, -25, -3, -7, 3, -29, -2, -19, -12, -2, -14, -21, -8, -6, 17, 11, 11, -9, -9, -13, -9, -8, 5, -7, 1, 6, -6, 25, -16, -6, 3, 20, -36, -12, 18, -28, -14, 19, 17, 0, 12, 0, -2, 9, 38, 6, 24, -24, 5, 43, 24, 2, 11, -30, -12, -17, 32, -5, -14, -43, 27, -23, -24, 36, -18, 31, 8, 1, 4, -32, 0, 34, 8, -31, -15, -4, 22, -31, -33, -38, -24, -2, -6, -14, 6, 10, 24, -19, -20, -17, -5, 6, 9, -23, -18, 11, 22, -7, -36, -8, 27, -35, -23, 4, -27, -39, -17, -19, 13, 19, 20, -29, 20, -18, -17, -27, -21, -11, -13, 2, 14, 2, -5, -16, 28, 2, 10, 11, 6, 5, 4, -30, -3, 11, 4, 10, 0, -27, 16, -31, 31, -4, 38, -28, 18, -24, 16, 8, 1, 8, 35, 52, -19, -15, 6, 11, 0, -8, -9, 20, 2, -15, -10, -38, 46, 15, 32, -8, 30, -14, 2, -29, 24, -16, -2, 2, -31, -15, 2, -8, -19, -11, -11, -37, 16, -10, -28, -27, 19, 25, 19, -51, 7, -6, -21, -14, 5, 10, -9, -2, -20, -8, 2, 24, 12, 31, 2, -30, 24, -21, 2, 19, 35, -1, 2, 7, -15, -4, 24, 17, -7, -27, 14, 45, -22, 16, 20, 33, 2, 27, 15, 6, 13, -9, -2, -15, 40, -3, 28, -29, 16, -20, -20, -33, 7, -5, 28, 49, 4, -19, 4, -8, -18, 22, 21, -9, -2, 24, -13, 6, 12, 16, -8, -5, -4, 24, 11, -21, 43, -22, 31, -1, 7, -25, 13, 21, -20, 27, 7, -13, 16, 5, 8, -2, -6, 10, 
-15, -11, -17, 21, -7, -6, -3, 32, -21, -15, -2, 12, 10, 35, 12, 22, -7, -27, -10, -12, -3, -27, 5, -19, -13, 6, 0, 26, 7, -20, -7, 5, 12, 8, -19, -9, -25, -3, 6, -28, -11, 23, -12, 13, -26, -5, -11, -20, -9, 29, -20, -10, -33, 1, 4, 3, -22, 20, 6, 12, 5, 27, 12, 18, 2, -2, 18, -11, -27, 14, -3, 6, 21, -18, -28, -11, -28, 23, 14, -28, -16, -27, 33, 36, -25, 28, -24, 14, 16, -6, -15, 20, 31, -11, -23, 15, 7, 2, 4, 29, 35, -3, -1, -45, -27, -6, 19, 8, -33, -28, -30, -4, 13, -33, -11, 9, 53, -18, -12, 10, -18, 28, 12, 20, 37, 8, 5, -21, -10, 7, 12, -10, -3, -15, 22, -15, 1, -3, -18, -12, -43, -21, -3, 11, -16, 15, 8, -1, 11, 11, -16, -21, -5, -8, -17, -32, 14, -11, -11, -16, 8, -3, -10, -22, -23, 10, 7, 24, 19, 3, 0, 14, -2, 12, -8, -34, 21, 21, -12, 10, 11, -33, 10, 22, 25, -2, -7, 15, -7, 20, 27, 30, 11, 11, 31, -4, -11, -11, 4, -28, -9, 25, 1, -36, 16, -38, -23, 11, -28, -14, -27, 5, -37, 23, 28, 23, -9, 17, 10, 23, -5, -6, 7, -22, -30, -4, 31, 36, -22, 18, 13, -22, -3, 6, -32, -4, -21, 12, -35, 2, -9, 35, -26, 8, 20, -18, 17, -15, -30, -38, 38, -3, -20, 26, -12, 15, -7, -12, -15, -6, -19, -16, -32, 11, 5, 21, 12, 2, -1, 14, -2, -33, 23, 17, 4, -13, 15, -1, 17, -29, 2, 22, 10, 4, 12, 4, -4, 1, 6, 18, 5, -13, -14, -9, 1, -26, -1, -37, 14, 13, -4, -16, 27, 9, 6, -1, 5, 0, 2, 20, -18, -25, 25, 15, -16, 15, 19, 2, 2, 17, -57, -49, 3, -2, -4, -49, 27, -7, -10, 17, -16, -14, 12, 24, -10, -11, -16, 18, -15, -18, -6, -8, -8, 41, 5, 10, -3, 25, -22, 0, 6, -40, -25, 3, 19, -11, 6, -6, -6, -35, -12, 0, -3, -40, -14, -35, -6, 5, -3, -26, -5, 2, -1, -26, 16, -17, 3, 29, 1, 12, -41, 15, -7, -36, 28, -20, -27, 5, -13, -23, 43, -24, -1, 15, 1, -41, 21, -23, -29, 19, 4, -10, -26, 3, 17, -9, 21, -11, 37, -3, 9, 40, 6, -19, -18, 20, -17, -6, 6, -20, -15, -8, 24, -35, -14, 52, -17, -18, 28, 13, 18, -11, -7, 55, 3, 21, 20, 24, -8, -9, 25, 27, -26, -2, 15, -34, -28, -8, 23, -19, 32, -7, 16, 22, 40, 1, 18, 20, 30, 15, 9, 5, 8, -52, 9, 3, 0, -20, 5, 28, -22, -19, -17, -42, -1, -3, -13, 11, 1, -4, -6, -46, 9, -16, 20, 17, -34, 16, 25, 18, -19, -20, 4, 6, 7, -46, 0, 17, 27, -51, 41, 5, 22, 3, 29, 3, 20, -17, -16, 8, 20, 22, 38, 35, -16, -34, 19, -11, -1, 21, 29, 19, -2, -4, -46, -30, 23, -15, 17, 0, -6, -30, -17, 0, 12, -23, 6, -9, 19, 11, -9, -20, -9, 12, 16, -7, -3, 10, 14, -13, 7, 0, 5, -23, 28, 15, -17, -16, 13, -31, -3, -2, -1, -16, -8, 6, -33, -22, -1, 5, 34, 7, 28, -27, 10, 0, 29, -23, -4, 5, 17, -28, -12, -59, 21, 37, -2, -37, 43, -39, -32, 4, 6, -13, -7, -23, -14, 6, 26, 17, -9, -13, 46, -21, 16, -13, -23, -20, 2, -20, 19, 14, -1, 7, 15, -19, -12, -29, -29, 32, -6, -9, 22, 29, 0, -1, 23, 19, -3, 8, 21, -32, 0, 8, 4, -15, 22, 7, -19, -13, 3, -13, -3, 25, -33, -4, -17, 22, 9, -11, 14, 2, 15, 21, -6, -15, -25, -49, 44, -1, -21, -12, 12, 23, 24, -4, -25, 1, -19, 3, -33, 11, -2, 23, -1, 17, 6, 29, 23, 5, -27, -47, -11, 2, -15, 20, -2, -16, 12, -7, 6, -1, 10, -10, -1, 28, 23, -9, -36, 15, 5, 2, -41, -44, -6, 24, -7, 28, -5, 2, 11, 9, -6, 34, 3, 0, -15, 34, 17, 20, -18, -1, 22, -9, 6, 5, 23, -41, -36, -45, 4, -10, -10, -42, -35, -8, 2, -13, -21, 21, 17, -10, -37, 20, 19, 45, -9, 54, 22, -4, 20, -1, -9, -36, -17, 9, 11, 6, -17, 8, -7, -57, 9, 8, 22, -31, -29, -8, -10, -17, -19, 30, 24, -17, -15, -32, -21, 15, 14, 21, 10, 20, -6, -18, -14, 14, 10, -3, -20, 0, -7, -7, 6, 4, -18, -7, -10, -7, 17, -15, 10, -14, 5, -30, -12, -45, 13, -3, 2, 20, -3, -1, -39, 38, -7, -30, -12, 6, 10, 39, -15, 35, 15, -15, -33, -34, 4, -11, 1, 11, -8, -5, 10, 4, -19, 5, 19, 20, -21, 18, -28, -22, -26, 3, 16, -11, -6, -24, 16, 26, 5, 15, 
21, 26, 1, 13, 25, -10, -12, -18, -23, -5, 19, -7, -25, -19, 15, -26, -26, -12, -23, -4, -5, -22, 10, 6, -35, -11, -17, -6, -5, -24, -17, 18, 14, -35, -25, 8, -15, -21, -30, -15, 5, -11, 9, -6, 29, 15, 33, -19, 0, -22, 19, -6, 4, 4, 8, 10, -2, 5, -20, 0, -23, 20, 7, -5, -9, 15, -6, 10, 15, 22, 11, -34, -12, -38, 36, 13, -7, 36, 11, 26, 12, 43, -1, -19, 16, 47, 7, -10, 18, -1, 16, -37, 14, -27, -8, 0, 27, -45, 10, -22, 1, 23, -5, 13, 20, 36, 22, 25, 15, 34, 7, 48, 34, 13, -10, -48, 27, -2, 21, 14, -12, -1, -10, 8, 14, -23, 9, 19, 0, 9, -9, -11, -29, 21, -13, -1, 19, 4, -42, 15, 18, 6, 6, 13, -17, 3, 9, -33, -10, -15, 8, -22, 25, -19, -40, -30, 0, -36, -39, -10, -23, -54, 27, -28, -15, 22, -12, 18, 2, -10, 9, 4, -8, -11, -1, -23, -31, -33, 26, -26, 21, 0, -29, -29, -60, -12, 24, 12, -25, -14, -24, 1, 6, -4, 38, -6, -5, 19, -16, 6, -21, 6, -3, -36, -23, 20, 1, -27, -32, -18, 1, 18, 20, -8, 19, 5, 20, 15, -9, -17, -7, 17, -11, -21, 3, 11, -16, -19, -7, 4, -28, -32, -8, -25, 2, -18, -22, -33, 16, -13, 0, -6, -2, 5, 22, -27, 1, 10, 2, 2, 15, 7, 6, 8, -37, -16, -9, -9, -8, -17, -19, -9, 20, 32, -41, 18, -10, -15, -10, -17, -5, -9, 19, 9, -9, -4, -16, 0, -27, -4, -7, 11, 34, 1, 16, 8, -18, -30, 2, -9, 1, 23, 4, -12, 0, -14, -5, -36, 28, -29, 31, -31, -2, -32, -16, -25, -6, 23, 19, 12, -1, -18, 11, 58, -6, 27, -14, 1, 8, 7, -1, -7, 1, 3, -23, 8, 11, 5, -13, 2, -17, 17, -28, -16, 6, -24, -11, -15, 18, -17, -22, -23, -28, 18, -7, 20, -2, 3, -11, 29, -10, 39, 21, -6, 11, -43, -6, 19, -52, -44, 30, -5, -16, 21, 28, -11, -8, 7, -13, -22, 37, 29, 6, -7, -3, 16, 8, 14, -30, 19, 0, -9, 0, 27, -10, 29, -21, -6, -22, -17, 12, -7, 24, 5, -2, -3, 1, -28, 13, -3, -6, -10, -15, -3, -5, -16, 20, -13, -28, -9, 1, -34, -19, -3, -12, -17, -22, -14, -33, 9, -1, -19, -10, 9, -25, -32, -16, 59, 2, -23, 4, 25, -24, 46, 35, -25, 4, 21, -13, 13, -42, 16, -12, -27, 24, 2, -20, -4, -28, -1, 36, -7, 28, 7, -7, -33, -8, 29, 20, 31, 14, -14, 22, -21, -19, -13, 11, -14, 29, -33, -13, -32, -39, 13, -11, -9, 6, -11, -46, -19, 31, 24, 25, 10, 9, -37, 10, -12, -23, 13, -15, -23, -12, -46, 17, -16, -14, -12, -6, -15, 7, -5, -11, -16, -14, 1, -43, -7, 14, -1, 33, -9, -17, -31, -8, 2, 3, -46, -8, -8, -19, -9, 34, 20, -18, 27, -55, -36, 1, 32, -11, 29, 29, 1, 29, -21, -10, -21, -3, 7, 5, -16, -12, -32, -33, -15, -15, 26, -29, 1, 1, -20, -24, 30, -25, -14, -25, -34, -1, 11, 16, -6, 23, 1, -23, -17, -23, -19, -12, -14, 25, -4, 15, -17, -38, -6, -21, 24, 15, 36, 12, 11, 8, 1, 0, 10, 6, 32, 4, -37, -21, -9, 16, 15, -17, 16, 4, -4, 9, -9, 4, -15, -27, 21, 2, -14, -5, 11, 3, -3, 25, -20, -18, 6, 3, -9, -18, -9, -13, -40, 1, 5, 3, 25, 6, -10, -2, -9, 16, 10, -14, 16, 33, 10, 0, 22, -8, 12, 32, 25, 22, -7, -5, 42, 24, 31, -15, -29, 28, 9, -2, -19, -18, 28, 44, -29, 9, -19, 26, 6, 14, -33, -12, -40, 0, -20, -2, -11, -14, 13, 9, -14, 16, 5, 3, 6, -12, -23, -5, 7, -6, -30, 4, -15, -16, -12, -2, 3, 1, -34, -22, -6, 4, 13, -22, -32, 24, 4, 17, -21, 2, 27, 51, 3, 32, -1, 25, 26, 23, -34, 28, -17, -2, -25, 28, 19, 23, 35, -17, 5, 11, 49, -42, 20, 19, -1, 28, -21, 7, -48, 4, 16, 6, -15, 28, -23, -5, -19, -12, 26, 9, 26, 28, 12, -5, 50, -11, 23, 9, 14, 1, 5, 19, -20, 33, -17, -3, 8, 32, -18, 9, 3, -36, -23, 2, 18, 30, 40, 9, -13, -13, -31, -6, 10, -13, 11, 38, -27, -38, -11, -16, 21, 1, -2, 26, -13, -1, 12, 20, -23, -3, -31, -21, 13, -2, 2, 17, -53, 5, 8, -22, -10, 8, 10, -22, 24, -27, 8, 7, 38, -23, 12, -32, -2, 16, -13, -12, -6, 7, -8, -23, -13, -14, -5, -11, 22, -8, -36, -40, 48, -2, -2, -23, 36, -22, -7, -9, -23, 8, -13, 8, 1, 41, -9, 12, 18, -10, 
8, 1, -13, 8, -16, -15, 15, 18, -5, -1, 5, 15, 12, 11, 4, 29, 21, -9, 5, 21, -23, 37, -7, 22, 47, -25, -2, 18, 7, -10, -14, 10, -13, -16, 37, 33, -30, -9, -9, -29, -23, 8, -22, 34, 3, -2, -9, -4, 8, -27, 6, 11, 1, -23, 20, 2, 16, -20, -10, -18, 22, -25, 10, -42, 2, 4, -10, 0, -25, 3, -11, 0, -18, -23, 7, -16, -22, 14, 11, 1, -2, -8, 8, -12, 48, 37, 10, 13, 13, 6, 12, 38, -5, -10, 24, 25, -5, -16, 9, 40, -26, 4, -11, 15, -31, -25, 6, -13, 0, 4, 26, -28, 0, -21, 31, -10, -14, -2, 7, -32, -10, 7, -37, -13, -21, 4, -50, -2, -25, -37, -43, -21, -8, 40, 28, -38, -13, -3, -8, -28, -19, 17, 24, 26, -24, 13, -24, 33, -22, -6, -53, 21, -16, -30, -26, -3, 3, 38, 19, -5, 28, 26, -3, 12, -18, 20, 20, 19, -26, 28, -13, 30, -27, 36, -2, 20, -6, 12, -36, -10, 3, 10, 22, -9, -7, 34, -1, -39, 27, 33, 3, 31, -15, 0, -26, 8, 29, -5, -14, -10, 1, -9, -39, 0, -7, 30, -8, -10, -32, 31, 35, -10, -1, -13, 18, 44, 9, 22, 1, 35, 2, 25, -39, 6, -8, -9, -10, 0, 4, -25, -21, 15, 19, -9, -28, 2, -10, -1, 14, -22, 17, 21, 1, 4, -8, -18, -11, -8, 20, 29, 6, 8, -35, 24, 23, 22, -7, -2, -9, -27, 5, 9, -29, 1, 16, 12, -10, 8, -12, 15, 23, -8, 0, 2, -32, 20, -42, -16, 8, -20, 4, -13, 0, -44, -1, 33, -47, 1, 10, -20, -46, -11, 17, -8, 6, -4, -26, 27, 21, 1, 3, -4, -15, 1, 4, -15, -8, 6, -26, 18, 18, 25, 9, -18, 0, 1, -14, 18, 29, 9, -21, 5, -20, 3, -10, 11, -37, -8, -40, -18, -24, -17, -2, 23, 14, -11, -23, 11, -18, -22, -32, -2, -9, -14, -8, -8, -18, -17, -45, -17, 4, 16, 7, 6, -15, 2, 33, 18, 22, -28, 18, -3, 29, 7, 42, 18, 24, 26, 71, 44, 25, 5, 37, -15, -14, -28, -4, -6, -6, 33, -9, 16, 21, -12, -13, -21, 0, 19, 3, 4, 8, 0, -4, 5, -16, 7, -1, -16, 12, -1, 34, -5, 31, 3, 8, -32, -16, -11, -10, -31, -3, -26, -20, 2, -7, -22, -20, -16, 1, -6, -21, -37, -6, -34, 23, 32, -12, -7, -1, -19, 30, 20, -32, -13, 19, 2, -7, 28, -6, -8, -35, -10, -7, 27, -4, 18, 23, -23, 1, 13, 26, -49, 19, 28, -18, 28, -20, 12, -3, 24, 7, -14, -13, -31, -28, -26, -30, -8, -7, -26, 3, -28, -13, 5, 2, 10, 3, -5, -5, 14, -26, -11, -17, -29, 5, -8, -9, 8, -13, 12, -21, -15, 14, -9, 10, -15, -18, 17, -4, 5, -4, -30, 40, 19, -49, -14, 4, -12, -6, 12, -43, -12, 13, -36, -41, 32, 29, 3, 48, 15, 23, 33, 59, 16, 45, 15, -33, 30, 19, -21, -11, 33, 27, -11, 0, 1, 17, 6, 23, -2, -6, 23, 2, 13, -10, -23, -5, -2, 22, 25, -9, 6, -7, -14, -25, -22, -23, 9, -10, 3, 15, 13, -11, 2, -23, 45, 24, -31, -23, 34, -43, -45, 25, -14, -15, 29, -28, -19, 2, 43, -11, 16, -8, -7, -7, -36, -31, 16, 26, 0, -21, 21, -8, -18, 53, 37, -9, -26, -6, -2, 28, -6, 24, -13, -2, -29, 17, -30, -7, -11, -20, -6, -23, -23, -43, 3, 13, -4, 17, -4, -13, -1, 9, 18, 17, -34, -20, -35, -23, 1, -29, -15, -11, -3, -7, 31, -69, 26, 47, 2, -22, 21, -4, -7, 31, 11, 25, 1, 7, -6, 8, 16, 2, 33, -13, -10, -5, -25, -2, -1, -6, -17, -1, -12, 35, 10, -24, 28, -5, -20, 41, -9, -12, 8, 26, -9, 35, -7, 17, 30, 12, -9, 20, 0, -31, -22, 9, 39, 22, 17, -14, 18, 19, 23, 13, -36, 25, -17, 30, -21, 25, 2, 54, 28, 33, -18, 18, -13, -6, -1, -18, -10, -32, 2, -37, 43, 31, -8, -26, 1, 20, 11, 0, -3, -28, -20, 47, -27, 11, -28, 28, -16, -48, -3, 0, -3, -6, 1, -39, -3, 7, -1, -27, 7, -2, 17, -15, -14, 28, -24, -18, 20, -7, -3, 4, -37, -8, -34, 5, -24, 15, -22, -54, 7, -27, -2, 5, -29, 17, 12, 17, -23, 27, 1, -19, -2, 19, -13, -15, 14, 13, -31, 11, 23, -18, 4, 6, -5, 11, 6, -16, 4, -13, -38, 1, -23, -20, -11, -1, -18, -34, -30, -18, 2, -3, 36, 35, -19, -20, 21, -11, -26, -17, 16, -12, -23, -8, -1, 19, 18, 12, 26, 20, 29, 39, 7, -17, 16, -5, -1, 43, -10, 6, -10, -17, -11, -1, -21, 13, -9, -7, 29, 10, 8, 7, -13, 7, 18, 23, 1, 
20, -4, 17, 15, -4, 5, 27, 9, 18, 12, -11, 21, 4, -25, -10, 6, 17, -14, 13, -18, 22, 9, -7, 2, 1, 29, 4, 2, 6, -4, 7, -3, 39, -24, 4, -9, 6, 14, -2, -17, 1, -5, -13, 2, -14, 18, -2, 19, -4, 17, -2, -8, 18, -7, -6, -4, 2, -11, -5, -10, -6, -8, 15, -22, -25, 20, 11, -25, -15, 12, 5, -22, -12, 1, 4, -51, -15, 39, -1, -39, -2, 14, -14, 10, 0, -1, -4, 8, 7, 4, 25, -31, 28, -15, 13, 15, 4, 24, -10, 32, 21, 12, -14, 36, 41, -29, 35, -9, 20, 49, -21, -49, -12, 38, 12, 19, -7, 25, -17, -29, -21, -14, -54, -22, 13, -8, 7, 23, 22, -1, 27, -5, -17, 1, 9, 11, -6, 16, -3, -24, -30, 20, 22, -6, 0, 9, 9, -25, -1, 16, -12, -26, -22, 0, -4, -2, 16, -56, 17, 7, 15, -7, 31, -21, -3, -2, 28, -12, 3, 10, -9, -24, -23, -15, -4, 3, -36, -3, 4, -17, 5, -16, -5, 7, 6, -19, -9, -24, 29, -32, -8, -11, 18, -24, -4, -7, -6, 3, 16, -21, -52, 29, 5, 11, 3, -8, 10, -16, 17, -3, 1, 4, 6, 17, 42, -21, 6, 17, 25, 12, -6, 10, 21, -13, -9, 24, 16, -27, -5, -31, -17, -3, 27, 10, -7, -12, 2, -28, -18, -25, -5, -11, -72, 8, 29, -46, -65, 17, -47, -8, 31, 18, -3, -9, 14, -42, 32, 48, 5, -6, -17, 31, 34, 15, -9, 15, -16, 1, -15, 7, -14, 33, -48, 22, 30, 2, -6, -18, -3, -1, 23, 35, 51, -1, -20, 9, 16, 14, 16, 34, -10, 29, 0, -4, -29, -1, -5, 8, -1, 3, 7, -1, 29, 8, 7, 28, 1, 40, -11, 43, -12, -29, 15, 1, -27, -2, 11, 21, 24, -24, -3, -13, -40, -39, 14, -5, -44, 5, 9, -27, -13, -23, -36, 20, 10, -11, 11, 7, -24, -26, 4, -28, -11, 26, -31, 14, -32, -6, -9, -2, -31, 17, 7, -27, 30, 15, -11, 35, 50, 11, -29, 9, -4, -8, -32, 13, 24, -8, -9, -5, -30, -23, -6, -30, 4, 20, 25, -42, 27, -10, -12, 5, 29, 11, 25, 36, 28, -1, -18, 36, -4, 17, 16, -43, 18, 28, -60, 3, 21, 4, -46, -23, -11, -20, -11, -39, -8, 11, 8, -6, 43, -7, 31, 11, 13, 2, 16, -25, 11, -1, -42, 19, 23, -41, -9, 4, 9, -6, 1, 17, -11, -14, -3, 14, 11, 17, 1, 14, 25, 10, 4, -11, -4, 0, -56, 0, 25, -19, -12, 21, 6, 5, -17, -23, -7, -23, -13, -5, -29, -34, 3, 25, -20, -23, -11, 38, 15, -9, 4, -18, -17, -23, 7, -30, -22, -21, 31, -15, 21, 23, 3, -21, -2, -33, -18, -4, 18, -34, -6, -24, 8, 11, -8, -4, -6, 25, 12, -45, -11, -9, 36, 2, 41, 26, 5, -35, -36, 15, 3, 37, 30, 18, -13, -23, -15, 9, 17, -40, -6, 28, -18, -20, 23, 12, 2, 9, 18, 21, 6, -14, 33, -16, -12, 34, 24, -14, -22, -21, 14, 22, 12, 7, -17, -14, -27, -11, -10, 14, -21, -3, -12, -15, -7, 30, -11, -20, -4, -12, -26, 11, 2, -19, -23, 5, 3, -2, -4, -8, -4, 0, -28, 3, 4, 6, 24, -24, 25, -23, -39, 13, -18, -26, -16, 16, -24, 24, -17, 1, 5, -39, -1, 10, 15, -10, 13, 19, -6, 4, 28, 4, 0, 16, 21, 24, 2, 19, -27, 10, 21, 5, 11, 31, 29, 3, 13, -17, 10, -18, -18, 7, -8, -26, -27, 15, -6, 21, 35, -19, -27, -23, -36, -27, 55, 48, -16, -4, -29, 20, 5, -13, 22, -20, -23, -6, 10, -1, -7, -28, 19, -31, 22, 24, -2, 1, -5, -11, 54, 4, 10, -7, 8, -14, -18, -27, 3, -16, -13, -14, 5, 22, -36, -12, -19, -12, 2, 27, -10, -16, 8, 25, -8, -30, 2, 22, 5, 34, 2, 4, -1, -7, -16, -17, -61, 22, 0, -2, 9, -2, -50, -2, 37, -20, -34, 14, -35, -20, 3, -6, 2, -18, -26, -15, 21, 40, 0, 31, 11, 25, 17, 6, -1, -34, -32, -32, -25, 17, 5, 21, -27, -10, 18, -10, -16, -52, -13, -14, 26, 7, 26, -21, 39, -16, 20, 11, -7, -7, -31, 14, -5, -39, -24, -3, 5, -21, 8, 17, -17, 34, 17, 20, -19, 30, 14, 0, -31, -8, 26, 4, -20, -24, -26, 8, 41, -54, -32, 7, -7, -5, 8, 22, -31, 11, 8, 12, -5, 10, -1, 26, -41, 24, 0, -9, 11, 6, 17, -26, -14, 16, 10, -29, -7, 20, -29, -2, -18, -4, -7, 24, -18, -29, -15, 19, -13, -31, -27, 23, -14, 31, -28, -8, -1, -42, -4, 16, 3, -22, 17, 4, -30, 7, 23, 18, -8, 40, 31, -14, 1, 1, -9, -1, 0, 3, -36, 34, 41, 9, 11, -16, 51, -13, -8, 13, -1, -8, 
-18, -17, -25, 33, -4, 36, -10, 8, -17, 20, -23, 44, 16, 19, 25, 9, 9, 31, 22, -4, 17, 27, -8, 27, -14, -12, -28, 3, -22, -1, -30, 16, 26, 14, 8, -23, 4, 17, -2, -39, 2, 11, 23, -11, -17, -11, -12, 10, 10, -38, -2, 11, 6, -16, -37, -21, -11, -30, -28, 17, 0, -31, -32, 1, 19, -22, 40, 20, -39, -35, -15, 11, -14, 4, -8, -40, -3, -10, -2, -31, -8, -10, -29, -23, -23, 45, 8, -50, -9, 23, -26, 8, 9, 14, 38, 42, -22, -21, -20, 41, 14, 27, -18, 27, -11, -23, -1, -16, 10, -18, -15, 33, 22, -25, 37, 24, 2, 14, 9, -40, 36, 26, 8, -8, -5, -31, 11, -31, 1, 28, 38, 3, -14, -32, -26, 31, 6, -27, 19, -22, 9, -9, -5, -42, 20, 7, -5, -45, -6, -30, 36, -24, -15, 23, 14, -16, 17, -27, 10, 38, 32, -6, 15, 12, 19, 2, -9, -5, -14, 32, -8, -34, -8, -32, -4, -9, -18, 37, 4, 15, 20, -10, -7, 9, 11, -29, -9, 16, -4, -24, -1, -40, -22, -20, 10, 3, -25, -16, 26, -10, 17, -17, -16, 2, 35, -3, -35, -15, -14, -7, 5, 18, -12, -25, 15, -18, 1, -5, -7, 13, -25, 14, -10, 11, -2, -10, 12, 15, -18, -4, -12, 12, -16, -24, 3, 28, 9, -22, 19, -22, 1, 18, 12, -30, -18, 17, 32, 4, -3, 18, 28, 14, -3, -22, -20, 7, 27, -27, 52, -19, -20, -17, -28, 23, 20, -23, -47, -41, -26, 3, -8, -5, -3, 20, -30, -10, -8, 10, -5, 13, 27, -12, 39, -4, 14, 19, -13, 27, -16, -12, 7, -2, -6, 25, 20, -34, -14, 9, 8, 10, 5, 27, 30, 25, 12, 7, 33, 24, -4, -1, -27, 25, 22, 17, 19, -4, 3, -8, -10, -17, 21, 17, -6, 26, 23, 5, 36, -12, -38, -11, 36, -26, 5, 19, -21, 2, 25, -58, -24, 27, 12, -19, -7, 9, -36, -4, -15, 6, 39, 1, -32, 15, -7, -9, 28, 9, 18, -3, -24, 6, -6, -56, -2, -5, -11, -5, 26, 8, 20, -28, -1, -15, 39, 14, -9, -28, -43, -13, 4, -2, 26, 3, -29, -31, 11, 10, -18, -11, -13, -19, -14, -9, -25, 1, 2, -40, -54, -17, 16, 25, 8, 9, 16, 13, 21, -10, -4, -37, -11, 17, 28, 14, 29, -25, -1, 14, 16, 12, 5, 13, 1, -36, -34, 17, 44, -8, -17, 17, -30, -18, 17, -52, 28, 14, 0, -25, -23, 16, 11, -3, 21, 33, 19, 9, 26, 1, -25, 10, 21, -10, -5, 2, 19, 20, -34, -24, -1, 9, -37, -7, -24, 4, -31, -18, -22, 19, 4, 5, -18, 17, 12, -17, 1, 3, 26, -33, 29, 6, 7, 17, -3, -47, -11, 24, -15, 37, -39, 34, -11, -7, -30, 8, -40, 26, 14, -12, -31, 18, -2, -9, 30, 24, 14, 19, 19, 26, -25, -19, 24, 18, -16, 2, -18, 17, -8, -4, -15, 13, 12, -7, 8, 22, -18, -2, -26, 23, -7, -10, 20, 31, 11, 24, -14, -2, 29, 0, -6, 13, -14, -1, -11, 3, 20, 5, 2, 27, -17, 20, -8, 21, -3, 5, 28, 4, 0, 15, 1, -18, -19, -7, -19, -31, 10, 4, 4, 15, 13, -18, -45, 25, -4, -14, 17, 5, -30, 1, 11, -1, -23, 19, -18, -31, -7, -15, -2, 9, 18, -40, 9, -2, 16, -35, -21, -19, 8, 14, -16, -34, 18, 8, 12, -18, -1, 18, -29, 21, -17, 33, 1, -25, -27, -25, 34, 16, 2, -44, 1, 12, 10, 21, 2, -5, 38, 33, 5, -44, -12, -9, -41, 9, -21, -25, 8, -17, -6, -24, 5, -13, -3, -29, 0, -7, 39, -15, 1, 17, 4, -19, -2, -37, -24, 3, -13, 6, -30, 22, 9, 10, -9, -12, -32, -23, -5, 6, -2, -36, 28, 1, -23, -14, -8, 14, -20, -15, -29, -36, -18, -30, -23, -8, 6, 1, -32, 20, -37, 13, 7, 28, -1, 19, 21, 9, -8, -21, -12, -15, -12, 0, 13, -9, 19, 4, -38, 22, -33, 9, 9, -17, 39, -34, -21, 0, 28, -11, -24, 9, -11, -5, 22, -25, -2, 5, 11, -36, -11, -21, 5, -27, -4, 5, -1, -11, 38, -21, 12, 18, 32, -2, 32, -20, 3, -13, 7, 25, -2, -20, -6, -14, 12, 1, -11, -20, 8, -1, 12, 4, 14, -12, -37, 9, 2, -27, 26, 28, 13, -4, -3, -8, 30, 6, 22, -2, -12, 5, 24, -16, -1, 18, 25, 4, 4, 3, 3, 3, -9, 32, -42, -14, -26, -13, 14, 9, 3, -39, -2, 28, -34, 24, 3, 4, -13, -23, 12, 33, -18, -14, 21, -18, -29, 21, -6, -19, 21, 17, -14, -5, -24, -3, -8, 37, 36, 18, -24, -5, -23, 17, -15, -42, 14, 16, -3, -12, 38, -37, -9, 0, -8, -25, 27, 13, -22, 21, 0, -23, -14, -15, 
-42, -32, -29, 11, -42, -19, 15, 20, -4, -52, -15, 16, -38, 0, 12, 24, 11, -4, -37, -22, 8, 33, -6, 10, -46, 37, 24, -10, -4, -3, -58, -9, 17, 18, -29, 15, -38, -2, 19, 19, -13, -25, -19, 19, 24, 34, 4, -9, 14, -5, 39, 8, 16, -5, -15, 21, 37, 2, 8, 39, 14, 6, 0, -5, 22, -16, 11, 0, 26, 39, 22, 38, -34, 41, -1, -30, -28, 7, -16, -4, -15, 24, -15, 19, -35, -25, -13, -2, 31, -19, 21, 15, 4, -5, 26, -12, -17, 8, -14, 21, -18, 11, 13, 1, -32, 15, -9, 11, -34, 6, 10, -10, 5, -12, 4, -10, 10, 13, 11, -8, 13, -12, -23, 21, -16, 24, -6, 0, -28, -26, -29, -1, -11, -20, 5, 30, 12, 7, 16, -10, -15, 3, -7, 10, -6, -4, -18, 11, 5, 28, 27, -21, -6, -38, -20, -15, 28, 22, 29, 15, -18, -29, 20, -9, -3, 11, 13, -12, -2, 13, 8, 12, 3, 6, 2, -33, 15, -33, -20, -29, 13, -16, -10, 11, 3, -11, -19, -23, -5, 10, -19, -17, -25, -27, 2, -20, -3, -17, 10, -17, -5, 16, 34, -6, 35, -1, 11, -8, -17, 11, -10, -16, 29, -14, 24, 11, -16, -34, -6, -7, 0, -36, 28, -7, 13, 39, 10, -34, 20, 1, -12, 3, -2, -9, -30, 22, -31, -42, 19, -36, -44, -39, -31, -5, -34, -21, 34, 21, 35, 39, 31, 20, 18, 3, -15, -12, 19, 38, -25, -17, -31, 1, -41, -1, -16, 11, -43, 8, -60, -23, 72, 6, 6, -16, 3, -21, 66, 8, -5, -27, 21, -10, -40, -41, 11, -2, -39, -15, -19, -25, 1, -13, -31, -15, 44, 34, 0, 19, 38, 17, 48, -29, 9, -14, -23, -12, -16, 20, -27, -17, -38, -7, 4, -1, -26, 10, 3, 17, -5, 6, -2, 12, 5, -4, 5, -18, 12, 21, -20, -1, -12, -13, -47, -21, -37, 0, -10, -24, -44, -11, -2, -3, -11, 21, 9, 2, 0, 20, 17, -39, 21, -15, -28, 9, 23, 21, -42, 3, -14, 14, -5, -17, 16, 20, 16, 15, 7, -3, 24, -36, -29, 33, -4, -33, 15, 16, 20, 27, -3, -19, -15, -13, 14, 19, 8, -3, 4, 11, 17, 31, 9, 31, 22, -4, -3, 33, 0, -12, 29, 31, 4, 30, 16, 15, -13, -3, 15, -13, -26, 18, -27, -4, -3, 0, -41, -14, -43, -30, -39, -12, -1, -22, 22, -2, -42, -11, 37, -14, 5, 15, 46, -13, 19, -6, -5, 19, 26, 9, -39, -4, -38, -49, -48, -2, -8, 6, 13, -4, -4, -25, 12, 8, 8, -11, 18, -21, -25, 16, -18, 0, -3, -25, -44, 14, 12, -8, -2, 17, -47, -11, 22, 1, -3, -32, -10, -14, 23, 21, 5, 16, -7, 3, 11, -2, -13, -25, 54, -17, 7, -41, -21, -14, 29, 20, 19, -12, -1, 21, 26, -26, 25, -20, 1, 23, -15, -25, 9, -23, -8, -17, 10, 38, 32, -42, -34, 23, -4, -15, -16, 8, -15, 19, -15, -36, -36, 38, -2, 10, -49, -14, 8, -8, -1, 28, -16, 2, 33, -21, -18, -2, 32, -48, 17, 16, -7, 57, -3, -15, -27, -4, -6, -23, -37, -27, 3, -8, -2, -38, 21, -25, -2, 18, 5, -20, 32, -8, -23, 1, 20, 32, -6, -17, 20, -11, 11, 26, -43, -1, 20, 13, -5, 14, 2, 42, 7, -27, -6, 35, -8, 0, 14, 14, 27, 44, 21, -32, -21, 9, -6, 20, -43, 35, -15, -8, -15, -1, -44, 10, 38, -10, -30, 37, 6, -12, 41, -5, -29, -5, 36, 3, -10, 10, -11, -25, -13, 16, -23, 9, -15, -13, -25, -10, -33, 14, 29, -11, 5, -11, -7, -13, -23, 25, 15, 21, 3, -29, -13, -11, -12, -6, 7, 25, 1, 16, -24, -11, 17, 6, -45, -7, -21, -12, 14, -3, 3, 4, 16, -23, -29, 22, -18, -1, -31, 5, -16, -3, 23, -22, 8, -34, 12, 16, 2, -12, -14, 17, 6, 14, -7, -39, 29, -2, 7, -15, -1, -42, -2, 13, 14, -1, 18, 10, -17, 12, 15, 0, -3, 17, 4, 10, 3, 8, -9, 25, 14, 8, -5, 19, 4, 27, 10, -18, 18, 2, 11, -25, -16, -3, 31, 2, 19, 6, -6, 13, 0, 25, 22, -7, 16, 22, -19, 37, 24, -13, 19, 20, 2, 22, -8, 10, -46, -17, 10, -35, -26, -6, -25, 2, -6, 10, -10, -3, 14, 2, 15, 21, 15, -4, -26, -18, 17, 11, 3, -15, 4, 22, -17, -25, 16, -14, 19, 17, -4, -26, -11, -1, 0, -3, 12, 17, 1, -15, 28, -13, -26, 0, 16, -5, -17, -12, -6, -3, -22, 32, -53, 2, -12, -1, 27, -34, 3, -17, -6, -6, -6, 25, 15, 31, -5, -24, -4, 22, -44, 16, 13, -11, -13, -5, -25, -9, 28, -10, 24, -16, 10, 44, 8, 43, 31, 38, 7, 43, 
45, 3, -8, -30, 5, 21, 12, 24, 61, -32, 9, -15, -7, -27, 14, -20, -21, 12, -30, -33, -9, 0, -13, 1, -11, -33, -13, -5, -8, -12, 1, 46, 44, 3, -14, 4, 4, -25, -6, -23, -2, -46, -3, -15, -28, -14, -13, -48, -22, -4, -12, 14, -7, -8, -24, -44, -4, -4, 16, 14, 26, 2, -10, 11, 24, -17, -21, 23, 12, 6, -13, 6, -15, -19, 28} + +#define TENSOR_CONV2D_2_KERNEL_0_DEC_BITS {8} + +#define TENSOR_CONV2D_2_BIAS_0 {-102, -7, -44, 32, 5, -12, -28, -2, 36, -17, -30, 31, -72, -69, -14, 12, 7, 6, -61, -61, 15, -29, -35, -15, -1, -64, -24, -1, -33, -4, 34, -12, -71, 1, 10, -116, -28, -20, -19, -9, -46, -73, 52, -21, 37, 20, -2, -66} + +#define TENSOR_CONV2D_2_BIAS_0_DEC_BITS {11} + +#define CONV2D_2_BIAS_LSHIFT {2} + +#define CONV2D_2_OUTPUT_RSHIFT {9} + +#define TENSOR_DENSE_KERNEL_0 {32, -23, 4, -25, -1, 5, 17, 9, 11, -17, 31, 7, -9, -9, -3, 28, 8, 3, 55, -12, -5, 4, 9, -19, 12, -4, 26, 0, -20, -12, 15, -26, 49, -16, -11, -3, -5, -14, -10, -14, -1, -9, 59, -20, 8, 14, 14, 7, 0, -5, -8, -11, 9, 4, -10, -9, -21, 8, 12, 7, 0, -1, -21, -8, 5, 5, 12, -8, 18, 22, 5, -16, 11, -1, -36, 8, -15, 14, -5, 3, -10, -6, -6, -7, 10, -1, -13, 15, -8, 4, -6, -12, -14, -7, -20, -10, 5, -8, 19, -1, -15, -15, -10, 10, -9, -4, 6, -8, 2, -10, 15, 3, 47, -13, 30, 7, -18, 9, -21, -31, 39, -3, 26, -10, 16, 21, -17, -2, -27, -10, 7, 12, 9, -1, -15, 9, 58, -6, 16, -1, 5, -26, 7, 4, 25, -2, 29, -5, -4, 12, 16, -13, 2, -24, -16, 11, 3, 12, -1, -4, 25, -5, 28, -1, 13, 17, -1, -11, 8, -20, -2, -7, -14, -15, 13, -6, -29, -3, 16, -3, 8, 16, 15, 7, -9, 15, 42, -5, -6, 18, 16, 7, 29, -12, -2, -22, 9, -20, -18, 7, -11, 0, 13, -21, 1, -7, -23, -19, -19, 13, 26, -4, -12, 9, 20, 7, 22, -1, 24, 8, -17, 10, -15, -8, 13, -11, -6, -13, 1, -13, -6, -19, 11, 7, 60, 4, 8, -24, -2, -35, 21, -15, -18, -24, -22, -17, -15, 12, 11, -14, 12, 7, -8, -16, 13, -3, -4, 0, -19, 5, 13, -8, 2, 7, -13, 0, 21, 4, 9, -8, 18, -10, -5, -2, -19, -5, 2, -21, -2, 0, 4, 13, 25, 14, 3, -5, -12, -19, -14, -8, -7, -16, 1, -24, -21, 7, 37, -12, -2, 0, 7, -28, -6, -18, 23, 9, 25, -16, 5, -15, -5, -11, -20, -19, -2, -10, -22, 15, -13, -17, 12, 20, 3, -20, -6, -24, -7, -13, 4, 3, 11, -20, -9, 9, 4, 3, -10, 10, -8, 3, 21, 16, 4, 0, -12, -16, 6, 3, -9, 28, 12, 3, 4, -4, 5, -9, 16, 22, -15, 31, -6, -22, -10, -20, -3, 2, 15, -15, 21, -21, -3, 12, -7, -3, -21, -15, -14, -14, 58, 1, 0, 10, 4, -23, -36, 12, 16, -11, 5, 38, -1, -6, -14, -9, -5, -15, 17, 29, -18, -5, -8, 1, 1, -11, 6, 8, -3, -6, 13, -2, 5, -5, 2, -18, 2, -1, 21, 9, 37, -21, 8, 5, 8, 0, 5, -11, 33, -20, -7, 11, -20, 13, 12, -3, 13, -22, 15, 14, -5, -16, 3, -13, 6, 7, -17, 17, 2, 2, 17, 9, -26, -20, 3, -8, 9, 3, -14, 13, -17, -16, 6, -16, 5, 15, -14, -11, 16, 13, -19, -1, 19, 25, 25, 7, 29, -3, -10, 3, 18, -15, 15, 4, 9, -17, 17, 4, 17, -10, -4, -23, -6, -14, -11, 20, 5, 21, 18, 0, 8, 2, -5, 19, -1, 11, 27, 9, -18, -22, 1, -7, -25, 29, 14, 13, -6, 17, -20, -37, -7, 13, -17, -5, 4, -7, 0, 14, 0, -13, -1, -12, 10, -4, 0, -17, -9, -9, -5, -27, 34, -7, 9, 6, -18, -24, -19, 0, 6, -21, -3, -1, -14, -13, 13, 2, -1, -15, -6, 29, 3, -19, 27, -14, 7, -8, -4, 39, -9, -1, -7, -12, 18, 11, -23, -16, 21, 8, 7, -6, 3, -26, -5, 26, 3, 21, 6, -6, 2, -13, -19, 4, -11, -5, 2, -22, 7, -5, -11, -22, -1, 11, 1, -2, -28, -1, 0, -7, -5, 24, 13, -26, 18, 6, -3, 7, -21, 25, 27, -9, -7, 16, -5, -15, 2, -18, 37, -21, -11, -11, 18, -2, -14, 9, 0, 14, -7, 18, -8, 21, 4, 2, -32, -2, -7, -15, -19, 10, 7, 8, -12, 14, 19, -28, -18, -16, 14, 10, -14, -11, -9, -22, 7, 1, 3, -1, 16, -4, -1, 2, -10, -29, -2, 29, 1, 0, -3, 3, -12, -21, -10, -11, 21, 20, 3, -19, 
12, 10, -20, 17, -24, -7, 12, -21, -19, 7, -19, 7, -7, 11, 5, 7, -7, 15, 1, -5, 3, -24, -13, 5, -9, 36, -19, 4, 15, 8, -15, 7, -19, -2, -3, 49, -15, -3, 19, -4, -24, -13, -12, -9, 15, -13, -29, 9, -6, -12, 12, 24, 4, 5, 7, -18, -1, 15, 9, -28, 1, -21, 15, 11, -4, 3, -19, 0, 37, -11, -4, -8, 10, -5, -19, 28, 8, -8, 5, -17, -20, 19, 1, -4, 18, -21, 10, -6, -8, -11, 4, 2, 3, 10, 1, 1, -21, -4, -12, 12, 4, -1, 70, -28, -13, 3, 16, -33, -11, -19, -2, -11, 0, -6, -14, 4, -23, -19, -14, -7, -17, -8, 3, 7, 40, 17, 29, 14, -8, 13, -13, 35, 28, 5, 30, 6, 0, -13, 9, -1, -9, 2, 9, -29, 13, 9, 18, -20, -27, 1, 4, 6, -9, 12, 4, 20, 6, -14, 39, -29, 5, 7, 14, 17, -4, 5, -18, -10, -2, 13, -18, 6, -6, -9, -16, 14, 16, 0, -21, 6, 14, 3, -33, 3, 7, -26, 11, 27, -2, 0, -28, 9, 10, -2, -14, 13, 1, 17, 5, 16, -19, -10, -11, 6, 21, 9, 9, 0, 18, -23, 5, -8, -5, -17, -16, -2, -8, 24, 12, 10, -15, 3, 28, 8, -21, 0, -6, 1, -29, 1, 4, -8, -13, 14, -6, -26, 0, -24, -2, 11, 2, -36, 6, -10, 14, -9, 24, 3, -15, 3, 0, 3, 39, -12, -29, -19, -16, -34, 3, 17, 10, 12, -38, -20, 19, 27, -13, -13, -31, -25, -1, -10, -7, 16, -19, 12, 12, -9, 5, -10, 8, -16, 14, 6, -24, -6, 9, 9, -17, -4, 6, 14, -7, -22, 0, 6, 1, -8, 6, 1, 14, -19, 31, -16, -9, 21, -18, -34, 4, -18, -7, -12, 11, -12, -13, 22, -7, -24, 21, 8, -4, 14, -7, 5, 8, -8, 12, 0, -11, -20, 4, -6, -8, -16, 4, -17, -8, 10, 17, 30, -15, 0, 19, 6, 6, -6, -19, 19, -13, -16, 19, 19, -16, -9, 16, 1, -16, 6, -4, -18, 0, 20, 10, 11, -18, -4, -8, 14, -8, 14, -19, 16, -13, -19, -13, -5, -22, -8, -23, 22, 11, -18, -22, -1, -13, -1, -17, 29, -23, -22, 29, -1, -10, -18, 2, 8, -14, -25, -2, -15, 4, 19, 12, 14, 30, 14, 7, 13, 4, -33, 0, -10, -10, -23, -14, -19, -19, -1, 4, 1, -12, -3, 3, 3, -8, 20, -6, 3, -52, -15, 10, -1, 4, 16, -9, 26, 31, -7, 17, 6, -2, -18, 14, 0, -16, 10, -6, -21, 4, 30, -18, -4, 33, -2, -8, 8, -17, -38, -11, -12, 3, 12, 13, -14, 10, -19, -7, 1, -17, 12, 24, -2, -11, 17, -10, -16, 13, -2, -3, -14, -5, 35, -19, -34, -4, 10, -6, -12, -12, 30, -12, 1, 6, -18, -24, 12, -6, -11, 18, -19, 9, -15, -24, 1, -22, 11, -25, -1, -6, -9, 19, -21, 16, 7, 6, 9, -8, -17, 2, 10, 11, 3, -21, 23, -27, -26, -7, 13, -3, -4, -19, 0, 30, -10, -4, 7, -25, -3, 6, 18, -31, 8, 2, 1, -2, 4, -5, -35, 43, -25, 10, 15, -18, -21, 15, -13, -3, -6, 2, 16, -14, -37, -10, 4, 5, -18, -17, -10, 12, -2, -17, -42, -21, -27, -6, 10, -12, 15, -12, 23, 25, -24, 27, -4, -10, 12, -21, 0, 5, 15, 27, -13, 15, -22, 18, -34, -3, 5, 28, -24, 7, 8, 0, -10, -14, -2, 15, -7, 10, -10, 14, 16, 6, -13, -6, -4, 14, 7, -12, 16, -11, 1, -21, 6, -11, -21, -19, -6, 25, -6, 10, -17, 19, -2, -25, 4, 11, 13, 3, -20, 7, 12, -15, -13, -21, -19, 5, 17, -4, -7, -9, -40, 13, -6, 15, 2, 16, 29, -21, 3, -9, -6, -6, -23, 17, -5, -20, 19, 9, -25, 11, -23, -7, -7, 11, 1, 10, -9, -16, 7, -1, 18, -10, 21, -8, -7, 12, -27, -22, -17, -9, 6, 44, -27, -3, 13, 4, -11, -12, -12, 20, -27, 8, 7, -9, 5, -18, 25, -6, -12, 19, 9, -21, 25, -13, 16, -12, 2, 3, -6, -7, 1, -6, -23, 13, -1, 16, 7, -10, -14, -14, -1, 27, -26, 13, 3, 9, -8, -15, -4, 34, 13, 15, 13, 15, 10, -23, 22, -5, -20, -9, -12, 12, -20, -10, 3, 14, -6, -7, -7, 9, 4, 10, 26, -20, 8, 4, 11, 12, -13, 5, -13, 9, -4, 21, 6, 2, -14, -11, 6, 3, 0, 21, 18, 14, -7, -17, -11, -14, -2, 4, 12, 9, 7, -23, 11, -14, 8, 5, 13, 15, 34, -15, -9, -27, -13, -3, -4, -19, 25, 6, -3, 18, -22, -5, -14, -18, 25, 7, -42, 34, -31, -11, 3, 4, -3, 3, 4, 5, -21, -2, 2, -10, -29, -4, 10, -24, 5, 32, 17, -17, -21, -13, -8, -13, 12, -10, -14, 17, 28, 12, 20, -6, 0, -17, -13, -18, 14, -8, -26, 6, 13, 1, 11, -18, 0, -18, 
30, -15, 8, 32, -8, 4, 11, 12, -25, -12, -4, -22, -8, 1, -11, -17, 1, 5, -3, -21, 14, 10, 21, 3, 24, 10, -4, -10, -18, -17, 1, -15, 26, 18, 8, 20, -16, -6, 5, 8, -2, 0, -18, 11, -19, 17, -12, 4, -10, -10, 8, 27, 13, -8, -7, -13, 4, 9, 4, -33, -23, -16, 5, -14, 4, -12, -24, -11, 10, -13, -8, -17, -15, -10, -16, 4, -8, 3, 1, -5, 29, -2, 20, 17, 5, 14, -18, 2, 12, -9, 9, -3, -14, -12, -25, -17, 12, 19, -14, 14, 8, 15, 19, 15, 8, 1, 10, -7, -20, 19, -1, -22, 20, -13, 9, -17, -4, -12, -18, -15, -3, -20, 5, -4, -24, -14, 10, -11, -5, -20, -7, -7, -20, -9, -8, -14, 3, 10, -24, -41, -26, -6, 16, 0, -19, -8, -7, 8, -5, -13, 3, -9, -8, 14, -23, -18, 19, -16, -25, -7, -15, -1, -12, -14, 8, 14, -1, 15, 14, 17, -21, -20, -23, -24, -2, -18, -6, -11, -30, -7, -13, -2, 34, -13, 16, 16, -2, -14, 13, -1, 15, -14, 9, -2, 12, -13, 9, -17, 0, 13, 9, -6, -10, -11, -10, 14, 25, 16, 15, 14, 10, 9, 10, -9, -27, -13, -4, 4, -19, -22, 20, 10, -14, 4, 12, 21, 12, -5, -26, -18, -19, -17, 1, 27, -11, 24, -7, -18, -34, -1, -2, 13, 14, -11, -8, -13, -14, -10, 0, 24, 6, -8, 4, 14, 10, -6, 8, -19, 5, 4, 0, -5, 20, -7, -22, -2, 9, 9, 13, -24, -5, 7, -1, 12, 15, -13, -16, 8, -14, -13, 18, 14, 9, 21, -23, -11, -5, 15, 26, -1, -7, 23, 19, 0, -5, 16, -27, 22, -14, 15, 4, 16, -2, 8, 5, 6, -19, -25, -1, 2, -14, -15, -17, 24, -4, -15, 7, -7, 0, 13, 28, -9, 6, 22, -9, 2, -5, -9, -6, 11, -19, -14, -18, -3, 0, -24, 17, -21, -5, 0, -10, 15, -7, 13, -9, 1, -17, 18, 13, -23, -9, -5, 2, -2, -22, 30, 3, 1, 2, 16, 10, 10, 17, -12, 9, -25, -37, -2, 6, 16, -1, -3, -4, 7, 28, 1, -1, -6, 14, -14, 0, -2, 4, -14, 5, 1, 3, 6, -24, -16, -1, 16, 23, -19, 0, -37, -6, 1, 1, 17, -7, 0, -20, 11, -10, 21, -5, 8, -16, 11, -2, -25, -8, -20, -32, -15, 33, 0, -12, 6, -14, -8, 15, 7, -3, 10, -22, -2, 10, -18, -5, -23, -16, -28, -23, 27, -18, 14, 16, -16, 4, -17, 6, -6, 11, -5, -16, -22, 1, 7, -8, 21, 3, 15, -23, 4, 14, -1, -26, 10, -8, -19, -1, -10, -10, -15, -16, -3, 18, -7, -18, -3, -13, -9, 7, 23, 1, 6, 8, 7, 1, 22, 16, 2, 16, -4, -36, -13, 10, 24, 2, 6, -11, -11, -19, 5, -23, 15, 4, 15, -5, -15, 27, 10, 9, 3, -2, 19, -14, 0, -20, 8, -3, 11, -8, -3, -15, -15, -19, -2, 33, 11, 2, 9, -12, 9, -10, -2, 2, 7, -6, 0, -25, -3, 14, -8, 28, 17, -9, 3, 0, 8, -1, -20, -2, 16, -18, 3, -16, -8, 12, -8, -9, 19, -14, 6, -8, -20, 23, -20, 20, 9, -12, 2, 6, -5, -6, -6, 10, 15, 11, 21, 15, -3, -54, 4, -12, 6, -15, 2, 10, -7, 1, -1, -8, 19, -14, 2, 11, 24, 2, 0, -11, 41, 10, 14, 4, 5, -38, 16, -14, 0, -5, 27, 18, 8, 0, 11, -11, -7, -24, 8, -20, 7, 15, -2, -28, 46, -30, 17, 16, 15, -72, -10, -8, 12, 12, 0, 2, -19, 10, 9, -4, 10, -9, 12, -9, 2, -51, 9, -7, -5, -9, 12, 4, -2, -2, 21, -23, -11, 11, 25, -19, -15, 19, 12, 10, -23, -20, 20, 11, 12, 15, -21, -28, -4, 10, -13, -25, -4, 7, 18, -23, 9, -19, -17, 8, 20, -39, 13, 21, -7, 2, -10, -2, 4, 12, -17, 43, 14, -19, 16, -12, 11, 12, -8, 0, 5, -20, 23, -11, -12, -15, 12, -33, -15, -11, 29, 5, 18, -7, 0, -17, -16, 9, -16, -15, 15, -14, 12, -14, 30, -3, -11, 5, -6, -64, 7, -7, -21, -2, 33, 8, -17, -29, 14, 1, -12, -12, -12, -1, 11, 13, 10, 37, -17, 5, 12, 2, -4, 17, 7, -1, 8, -13, -12, -11, -22, -14, -19, 12, 28, 0, 11, 16, 17, -28, 8, -1, -4, 13, 23, 11, -8, -24, -12, 8, -8, -7, -19, 14, 19, -13, 1, 33, 12, 18, -1, -21, -10, -13, 2, 20, 24, 4, 6, 10, 7, 23, -2, -36, 3, -8, 3, 2, 4, 3, -2, -10, 22, 11, 4, -13, 12, 16, 19, 22, -14, -9, -13, -11, -15, 6, 18, 7, 10, -15, 20, 0, -7, 4, -16, -11, 12, 1, 6, -6, -13, 29, 19, 14, -12, 3, -8, 9, -1, 13, -17, -22, 37, 7, 18, 1, -14, -12, -17, 17, 23, 12, 8, -21, 18, 20, 13, 7, 9, 0, 1, 16, 
-19, 17, -14, -35, 16, 15, 4, -13, 4, 10, 1, 31, -5, 5, 2, 9, -13, -22, -19, 12, 34, 3, 6, -8, 18, 6, 6, -13, 9, 11, 25, -2, 10, -12, 10, -5, 10, 7, -8, 10, -11, 19, 11, 27, 3, -8, -3, -7, -23, 37, -14, -12, 2, 4, -9, -7, 16, -21, -20, 2, -16, -6, 14, 19, 5, 10, 14, -23, 22, -4, 0, -5, 17, -8, -16, -3, 11, 9, -16, -24, -1, -2, 15, 5, 22, -2, 10, -22, -10, 24, -12, -16, -27, 8, 12, 13, -17, -20, -14, 13, 5, -18, -16, 6, -14, -2, 16, 16, -8, -13, -19, -21, -18, 12, -4, -7, -1, 0, 6, 20, -6, 15, 0, 6, 0, 17, -12, 11, -2, 21, -22, 33, -9, 3, 7, 18, -22, 6, -23, -19, 18, -11, -20, -15, 2, 19, 13, 16, 25, 6, -12, -24, -11, -12, 18, 3, 15, -14, 14, -23, -20, -9, -5, 22, 13, -21, -17, 5, 3, 28, -1, 22, -1, -11, 23, -22, -12, -40, 10, -17, -14, -6, 19, 0, 10, 15, 2, 13, 1, -9, 2, -16, -5, 9, 14, 0, 14, 10, 22, -7, -18, -6, -7, -23, 1, 8, 1, 18, -23, 14, -6, -35, -2, 2, -18, -24, 9, 20, -21, 23, 23, 5, -18, 0, 13, -12, -22, -5, 8, -16, -17, 16, -6, 28, -17, -27, 24, -5, 13, -5, 11, -21, -3, -12, -3, -1, -14, 5, -3, -18, 2, 21, -14, -1, 3, -3, 6, -2, -2, -5, 8, -15, -17, 15, 7, -22, -17, 18, 13, -24, 3, -20, 16, 9, 7, -5, -2, 15, -11, -20, 8, 15, -19, -8, -10, -21, -25, 16, -9, 26, 6, -32, 12, -16, -3, -20, 13, 3, 5, 1, 5, 0, 20, -18, -11, -17, -2, -16, 12, 9, 9, -19, 22, 34, -5, -1, 19, -9, 13, -9, 8, -11, -6, -49, -9, 11, 0, -2, 18, 19, 17, 20, -13, -6, 10, -16, -1, 9, -17, -44, -33, 4, 11, 1, -12, -14, -8, -19, -23, -5, -15, -12, 7, 10, -6, 23, 17, 13, -25, 11, 12, 32, 14, 23, -8, -5, -20, -10, 10, 22, -12, -5, -17, 16, -26, 12, -9, 10, 18, -12, -13, 19, 3, -8, 9, -18, -19, 8, 18, -12, -1, 1, 17, -18, 5, -10, 16, -2, -5, -9, -5, 11, -4, 12, -8, 15, -15, -8, 11, 0, 11, -20, 16, -8, 10, 6, -4, -14, -4, -29, -7, -6, 15, -17, -2, -26, -4, -22, 0, 2, 12, 9, 8, 9, -9, 10, 27, 20, -7, -20, 8, 18, -25, 8, 34, -3, -9, -30, 2, -7, 11, 31, -41, -4, -20, 7, -14, 20, -16, 7, -17, 12, -1, 8, -10, -20, 19, -2, -34, -11, -3, -20, -14, -5, -7, 19, -33, -11, 8, 7, -11, -16, 12, -14, 12, -10, -7, -9, 11, 13, -7, -18, -14, -3, -22, 15, -4, 9, 14, -14, 17, -19, -11, 13, 2, 10, 17, -29, 14, -6, 10, 5, 18, -16, 17, 36, -13, -18, -33, 13, 4, 15, -8, 15, 24, -8, -3, -15, 11, -13, -1, -11, 31, -2, 15, 16, 20, 4, -15, -32, 1, 4, 7, -19, 18, 14, -17, -11, 23, 19, 5, -17, 13, -5, 10, -32, -16, 8, 10, -22, -4, 9, -22, -34, -24, 7, -18, 12, 21, -13, -18, -10, -3, -7, 36, -4, -16, 22, -16, 26, 33, 2, 11, 13, 17, 30, -3, 42, -5, 18, -12, -1, 2, 11, -23, 25, 0, 3, 3, -14, -12, -23, -7, -21, -25, -8, 16, -21, 16, -11, -1, -47, -36, -15, -10, -14, 4, 9, -21, 6, -27, 9, -11, 9, -13, -1, 15, 20, -18, 17, -19, -2, -6, 15, -18, 24, 11, -22, 1, 2, -7, 24, -4, 25, 25, -14, -18, -1, 4, 2, -9, -5, 12, 5, 1, 20, -29, 6, -33, 5, 11, -21, 11, 1, 0, 3, -9, -13, -7, -28, 14, -15, -6, 14, -37, -8, 18, -37, 4, 20, -12, -12, -2, 2, -20, 5, -3, 10, 23, -16, 4, 20, 9, -43, 15, -13, 26, -3, -16, -6, 10, 6, 4, -10, -7, -15, -7, 14, 18, 9, -15, -16, -14, 7, -7, -29, -14, -7, -18, -9, 10, -1, 10, 21, -17, 5, 18, 28, 3, 1, -10, 25, -3, 1, 5, 22, 7, 18, 8, 32, -9, 7, 1, -13, 5, 3, -14, 10, -7, 6, -17, -6, 3, 19, -24, -19, -22, -10, -22, -34, 14, 2, -14, 2, -7, -5, -5, -44, -37, -40, 9, -14, -2, -15, -20, -32, 8, 15, 7, 8, 1, 8, -23, 13, -10, -12, 7, 18, -16, -15, -4, 1, 15, 8, 18, -1, -20, -21, -15, -23, -20, -31, -39, -10, -21, 10, 15, -8, 18, 2, 7, 31, 10, -37, -1, 6, -18, -14, 5, -3, 9, -33, -2, -10, 24, 18, 14, 4, -10, 27, 0, -26, -25, 12, -3, 2, -2, -16, -15, 16, -1, 5, 2, 10, -9, -3, -1, -1, -9, 6, -7, -21, -12, -28, 18, 13, 19, 11, 13, -21, 13, -4, 
-10, -3, 10, -2, 2, 7, -17, 4, -16, 4, 12, -3, 26, -14, -17, -5, -12, 4, -5, 0, 25, 3, -11, -4, 14, -30, 32, 29, 21, -20, -3, -27, -4, -6, -11, -19, -3, 4, 14, -5, -8, 6, -11, 14, 24, -20, -20, 18, -20, -36, -10, -33, 9, -9, -18, 13, -9, 2, 16, 2, -17, 15, 2, -12, 12, 34, -3, -8, -4, -14, -20, 9, -11, 1, 7, 6, 25, -11, -18, 0, -20, -6, 13, 15, -4, 4, 13, -6, -4, -13, -25, 20, 1, 0, 6, -12, 0, 3, 13, -21, 5, -14, -13, 36, -10, -12, 24, -16, 23, -2, 5, -9, 18, 0, 11, 17, 7, -13, -16, -2, -5, -3, 0, 12, -3, -13, 5, 0, 14, 8, 8, 6, -7, 16, 9, 3, -3, 15, 18, -7, 22, 17, -23, -20, -18, 16, 4, -2, 17, -25, -13, -28, -11, 19, 5, 8, 31, -18, 7, -11, -1, 23, 53, -59, 28, -22, -13, -9, -2, -14, 23, 14, -10, -12, 1, 69, -18, -11, 2, -38, -2, -28, -8, 23, 12, 8, -17, 0, 16, 2, -3, 14, 8, 19, 5, 5, 1, 12, 16, -21, -23, 10, 12, -13, -9, -6, 6, -1, -19, 8, -12, 23, -5, 1, 0, 10, 15, -7, 2, -5, -4, -8, 13, 2, 13, -6, -6, 6, -8, -11, -17, -30, -14, 18, 17, -3, 1, 16, -18, -10, -19, 2, -8, -19, 8, -21, -4, 17, 1, -2, 24, 33, 22, -26, -14, -3, -12, 2, 33, -7, 27, -11, -20, -31, 13, 20, 49, 6, 15, 11, 11, 6, 11, -22, 15, 13, -12, 27, -22, -1, -8, -14, 6, 8, 1, -14, -20, 10, -18, -11, 13, 13, 19, 8, 0, -2, -13, 25, 8, -8, 37, 4, -1, 15, -2, 33, -32, -3, -18, 20, 12, 9, -14, -2, 17, 14, 7, -2, 13, 10, 5, 6, -5, -28, 13, -3, -2, -25, -20, -8, -7, -19, -7, 10, 2, 5, -1, 27, 7, -12, -21, -24, -7, -13, -9, 35, 12, -17, 29, 9, -3, 4, -3, -11, -9, -56, 37, 15, -18, 8, -19, 25, 19, -5, 23, 7, -12, 42, 15, 24, 19, -33, 17, -1, -4, 29, -2, 5, 9, -4, 31, 0, 10, -1, 5, -8, 26, -3, -10, -14, -8, -14, 8, -4, 17, -14, -22, -8, 12, 52, 0, 21, -9, -14, 12, -4, 8, 31, -3, -7, 21, -26, 15, -5, 11, 18, -16, 22, 22, -5, 31, 11, 15, 10, 7, -17, 35, -16, 2, -4, 13, 50, 6, -11, -7, -19, -12, -6, -10, -14, 2, 5, 0, 19, 12, 7, -6, 5, -16, 43, 35, -16, -4, -16, -7, 6, 16, 2, -29, 11, 12, -21, -8, 9, -8, 20, 6, -13, 13, 3, -25, -3, 0, 27, 16, -26, 25, 2, 6, 9, 1, 0, -2, -18, 2, -9, 5, 0, -2, -9, 11, 4, 19, 31, 2, 21, -2, -8, 27, -19, 22, 17, -16, 4, -10, -1, 15, -17, 9, 25, -8, 23, 10, -22, 11, -8, 45, 14, 18, -2, 13, 2, 27, 6, 18, -24, -12, -4, 4, -9, 23, 22, 29, 22, -9, -8, -13, 36, 21, -11, 39, 2, 6, -14, -7, 8, 33, 8, 6, -12, -10, 2, 6, 8, 3, -14, 27, -20, -1, -18, -1, -16, 4, 9, 4, -11, 7, 3, -17, 26, 11, 6, -26, 14, 9, -3, 12, -50, -23, 13, 4, -23, 0, 0, 0, 12, 22, 7, -1, -13, -21, 2, -13, 29, 1, -15, 3, 4, 15, -10, -13, -17, 40, -7, -1, -33, 9, 10, 0, -6, -7, 12, 13, 9, 7, 6, 11, 21, -9, 5, 11, -28, -11, 10, -17, 23, -29, -7, -23, -24, -6, 2, 3, -25, -9, 7, 20, -8, 3, -1, 14, 24, -28, 8, 15, -17, 5, 8, 4, 31, -12, -10, -5, 1, -6, 3, 12, -20, 9, -5, -10, -17, -26, -27, -21, -7, 19, -16, -6, -7, -2, 17, 20, -21, -14, 18, 5, -38, -4, -25, -1, -11, 23, -17, 27, 13, -17, -7, -1, 31, 22, -10, -6, -12, -20, 11, -13, -27, -20, 41, -15, -30, -2, -21, 16, -21, -8, 2, -3, -17, -2, 4, -14, -5, -4, 54, 4, -2, 10, 1, -13, -28, -20, 14, -10, 3, -7, -10, 15, 26, 11, -29, 1, 24, 19, 5, 17, -3, 35, 3, 3, -16, 10, 13, 1, 22, -10, -8, 21, -3, -6, 0, -18, -30, -17, 8, 3, 5, -8, 14, -3, 1, -10, 1, 1, 2, -11, 21, 4, -11, 22, -6, -12, 8, 6, 17, -22, -24, 8, -6, 4, 4, -17, 16, 7, -7, -4, 30, 9, 13, 1, -18, 3, -18, 5, -11, -4, 11, 4, -7, -17, -2, 15, -7, -27, 13, -18, -1, -6, 19, 6, -20, 9, 14, 0, -2, -3, 14, 5, 16, -33, 12, -20, -27, -24, -18, 23, 16, 12, -10, 16, 24, -17, -1, -17, 37, 1, -21, -18, -1, 5, 7, 19, -21, 14, 16, -8, 6, -20, -8, 2, -7, 12, 2, 15, -23, -11, -5, -22, -6, -27, 36, 5, -3, -16, -9, -29, 4, 15, -25, -17, -7, -2, 46, 7, -18, -5, -4, 
-5, -32, -1, 0, 39, -44, 28, -18, -8, -29, -17, -19, -3, 4, 7, -9, -21, 14, -2, -18, -24, 5, 11, -18, -12, 18, 3, 7, -6, 36, 28, 0, -8, 26, 10, -12, -3, -7, 11, 3, -17, 50, -19, 3, -41, -5, 8, 6, 10, 1, 14, 18, 0, 13, 24, -8, 5, -11, -11, -19, 28, -26, -20, -7, -15, -5, -1, -13, -20, 4, 13, -19, 13, -17, -15, 20, -9, -17, -10, 22, -11, -21, 10, 6, -29, -2, 10, 7, -14, 5, 3, 24, -22, 14, 49, -41, -13, -27, -22, 17, 6, -13, -6, 0, 10, 16, -4, -31, 49, -31, -18, 20, -4, -22, 10, 34, -1, -12, -9, -30, -15, -1, -1, -13, 18, -23, 23, -23, -24, 1, -17, 6, -3, 26, -18, 17, 10, 5, -5, -16, -14, -2, 22, 11, 6, 13, -2, 11, -53, 33, 24, -13, -2, 6, -24, -10, 13, -19, 29, -12, 10, -9, 10, -29, 4, 10, 9, 29, 1, -17, 15, 11, 18, -31, 1, 6, 9, -13, -14, 9, 31, -16, -7, 5, -19, -7, -24, 15, -2, 0, 2, 25, -18, -23, 15, -12, 22, -44, -13, 16, -25, 13, 13, 20, -27, 14, 46, -6, -13, -25, 12, 5, -4, -19, 5, -7, -21, -20, -3, 6, -20, -23, 3, 13, -1, 12, -2, 13, -14, -14, 17, 24, 10, 18, 7, -7, -8, 9, 1, -27, 15, 4, 11, -9, -8, 11, 15, 4, -14, 37, -2, -16, -5, 18, -2, 6, -16, 19, -12, 10, 10, 1, 18, -4, 3, 7, -13, 27, 1, 1, 0, -30, -11, 0, -24, -11, -9, 1, -16, 13, -11, 20, 10, -11, -29, 6, -9, -9, -2, 20, 11, -7, -13, 10, 0, 15, -22, -10, -9, 1, 14, 11, 36, 4, -11, 2, -24, -7, 4, 18, -15, 1, -13, 12, -22, -21, -6, -11, 6, 22, -17, 13, 6, -9, 28, 19, -9, -15, -4, -2, 1, 15, -16, -2, 21, -30, -6, 10, 1, 22, -7, -30, 3, 22, 13, 4, 9, 20, 15, -39, 1, 8, 3, -15, -20, 20, -13, 15, -3, -23, 3, -3, -18, 6, -2, 15, -11, 6, -6, 23, -19, 11, -2, 29, 46, 0, -15, 9, -6, 10, 21, -11, -3, 18, 7, 25, 16, 9, 16, -2, -2, 5, -2, 25, -25, 6, 22, -24, -1, 6, 13, -3, -1, 9, 15, 20, 26, 10, 20, 9, -1, 8, 3, -24, 12, 11, -6, -7, 13, -12, -31, -14, 20, 12, -22, -15, 3, 13, -35, 19, -5, 37, -12, -5, 12, 23, 28, 26, -8, -1, -11, -16, 10, 4, 3, -16, 25, 23, 10, 1, -5, -4, 8, 28, -21, 31, -5, -14, -14, 25, -13, -9, -25, 15, 12, 3, -8, 7, 24, -9, 5, -4, -11, -11, -1, -13, 9, -9, -7, 17, -2, -1, 5, -2, 10, 9, -14, 5, -17, -19, 1, -32, 19, 25, 9, -35, 3, -21, 1, -9, 27, -5, 20, -17, 7, -16, 7, -12, -7, 32, 1, -16, 9, 0, 14, -14, 15, -13, 11, -9, 3, 12, 13, 17, -7, -2, 7, 25, -20, -28, 16, -11, 13, -22, 16, 36, -8, -10, -15, 3, 7, 18, -1, 6, -14, -34, 1, 10, 17, -46, -8, 12, -10, -13, -3, -24, 8, 32, 0, 2, 1, -15, -10, -33, 8, 11, -4, 25, 4, -8, -4, -14, 20, -5, -4, -19, 14, 20, -6, 14, 18, 12, -9, 32, 8, -5, -6, 25, -7, -7, -21, 13, 19, 10, 15, -2, -9, -11, -12, -18, 11, 3, -6, -3, -29, 8, -9, -5, -11, -17, 7, 2, 19, -31, -13, 13, -1, -1, -5, -6, 24, 10, -5, 20, -6, 2, 3, 25, -6, -6, 12, 18, -1, 7, 1, -5, -15, 23, 2, 18, -6, 2, 0, 19, -14, 10, -21, -15, 10, 0, -10, -19, -4, -6, 19, 18, 12, -22, -20, 4, 15, -23, -15, 2, 15, -12, 4, -23, 22, -8, -25, -17, 3, -13, -24, 7, 2, 0, -4, -16, -18, 28, 1, -2, 31, -5, 0, -11, 8, 19, -11, -13, -24, 15, 8, -23, 11, -2, -16, -5, -33, -5, 10, -21, -11, -11, -21, -16, -5, -6, 6, 4, 1, -30, 11, 0, 6, -9, -16, 18, -12, 0, 4, 4, 8, -22, -3, -1, -2, 18, -23, 21, 8, -5, -20, 8, -10, -16, -15, 11, 0, -3, 15, 3, -20, 12, -9, 22, -37, -3, 20, -17, -16, 18, 14, -24, -31, -16, 16, 1, -17, 18, 10, -20, -6, 19, 3, 11, 9, 17, 2, 0, 12, 18, -2, -3, -23, 3, -9, 13, -18, -16, -30, 22, 8, -6, 7, 24, 10, 11, -16, -7, 8, 33, 5, 30, -10, -10, 0, 26, -24, -15, -5, -10, 2, -7, -6, 2, -10, 1, 4, 25, -12, 25, -10, 14, -22, 26, -19, -18, -32, -20, 7, -5, 1, -25, -13, -5, 25, -25, 1, 20, 13, -3, -18, 20, -24, 10, -29, -1, 3, -10, 1, -22, -8, -23, 14, -24, 18, 8, -5, 16, 8, 21, 8, -10, 7, -3, 7, 9, -12, -1, 24, -48, 13, 13, -17, -2, 
-3, 16, -13, 11, -16, 17, 0, 28, -31, 20, -12, -1, -14, 22, 0, -9, 6, -1, -14, 12, -25, 1, 12, 25, -7, 8, 15, 2, 17, 16, 5, 12, -3, -2, -16, 9, 13, -14, 6, -4, -10, 0, 2, 20, 12, -22, -13, 17, -8, 11, -34, 22, 1, -18, -21, 4, 8, -9, -7, -20, 5, 17, -5, 7, -11, 2, 23, 17, 10, -13, -8, 20, -12, -8, -8, 5, 14, 11, 5, -3, 17, 21, 19, -1, -6, 2, 12, -16, 13, -14, -6, -12, -4, 6, -8, 8, -23, -8, 0, 4, -21, -8, -1, -7, -7, 2, -47, 44, -11, -7, 9, -17, 21, -12, -38, -10, 6, 14, -3, -30, -11, -15, 28, -21, -22, -9, 16, 0, -38, 16, 20, -26, -16, -10, -13, 18, -17, 26, -4, -16, 15, -17, -17, -10, 18, -17, 11, -18, -3, -4, 10, 18, -15, 0, -9, 10, -5, 19, -7, -22, -41, 7, -52, 42, -12, 2, 17, 25, -27, 11, -26, -11, -15, 8, -14, 16, 11, -15, -13, -2, 11, 28, -7, -23, -21, 15, 15, 2, 12, 5, 11, 11, -35, 14, -4, 29, -24, -14, 9, 6, 8, 28, -11, 4, 13, 16, -3, 13, -22, 23, -6, 18, -24, 19, -9, 0, -20, 64, -5, -11, 13, -9, -23, 28, -11, 24, -2, 21, -23, -2, 0, 43, 18, 36, -35, 10, -9, 28, -8, -8, -12, 16, -34, 38, -11, -14, -6, -5, -11, 13, 6, 1, -15, 19, -11, 2, -28, 11, -23, 32, -3, 6, 13, 26, -15, -2, -13, -12, 13, -23, 5, -7, -28, 22, -8, 49, 7, 19, 8, 16, 1, -2, -34, 22, -9, -8, -17, -15, -9, 23, 1, 1, -13, 18, -15, 11, -1, -13, -16, 23, -9, 2, 14, 27, -33, 1, -49, 40, -1, 4, -4, 29, -4, -6, 13, 1, 2, 1, 16, -15, -8, 17, -32, -22, 2, 15, 13, -32, -1, 7, -13, 12, 17, -19, -2, -4, -18, 14, -25, 5, -9, 31, -16, -21, -4, 11, -20, -10, 10, 3, 8, 7, 8, -16, 6, -5, 3, 0, -7, 18, 23, 26, -12, 3, 10, -17, -8, -2, 15, 13, -15, -19, -7, 15, 16, 30, 10, -1, -5, -17, -17, -6, 11, 0, -29, -31, 27, -21, 13, 14, -3, -13, 13, 6, 0, -2, 17, -30, -15, -21, -9, -23, -10, 5, -22, -27, -20, 31, 10, -14, 4, 14, -11, 7, 8, -26, 15, -9, 15, 6, 0, -10, -10, -7, 19, 33, 6, -4, 16, 18, -11, -24, 4, 12, 13, -7, -5, 11, -2, -10, 6, -14, 4, -1, -12, 15, 12, -3, -20, -12, 2, -18, -10, -20, -14, 10, -14, -2, 15, 17, 17, 5, -7, -13, -3, -19, -2, -13, 8, 6, 1, 26, -6, -7, -13, 21, -6, 18, -3, 16, 10, 4, 6, 29, 9, -8, 11, 8, -13, -7, -1, 0, -17, 17, -1, -20, 0, 12, -9, 17, 20, 8, 3, 6, 9, -12, 8, 39, 16, -12, -16, -5, 19, 16, -15, -13, -21, 7, -2, 10, 2, 7, -24, -18, -12, -6, -14, -13, -6, 3, -19, -7, -14, -23, -21, 9, -6, -15, -8, 3, -8, 1, -13, -3, -13, 1, 8, -7, -7, 13, -8, -6, -14, -17, 6, 9, 10, 4, -4, -19, -13, 18, -8, -26, 8, 11, -9, -5, -10, -4, -13, 6, -6, -14, -7, -21, -22, 14, 3, -4, -13, -27, -20, 3, 10, -31, 0, -21, 17, -10, -13, 13, 7, -6, 9, -33, 14, -14, -2, 0, -6, 11, 2, 19, 1, 0, 3, 24, 16, 20, 23, -5, 11, -11, -3, -15, -8, 11, 5, 10, -5, 27, 8, 0, 0, 35, 15, -21, 0, 8, -12, -15, 15, -5, 3, -8, 1, 4, 4, 5, 4, -10, -9, 23, -15, 25, -17, 2, -14, 26, -4, -12, -11, -15, -5, 9, -6, 5, -22, 16, -17, 17, -13, -5, 5, 1, -10, 1, -9, 12, 13, -4, -6, 2, -1, 18, 8, -16, -13, 5, -3, -6, 1, 18, -5, -18, 8, -5, -21, 9, -17, 11, 2, 0, -11, -10, 24, 3, 9, 39, 16, -18, 2, 12, -18, 15, -2, 8, 11, -20, -20, -23, -14, -15, -7, -40, 4, 19, -15, -11, 8, 17, 20, -2, -4, 1, -9, -1, 6, -1, 18, -17, -16, 12, -16, 5, 9, 27, -12, -15, -11, -8, -23, -5, -5, -1, 22, 26, 11, 9, 4, 17, 13, -10, 10, 6, -19, -17, 17, 13, -1, -5, -14, 3, 3, 19, -24, 10, -18, -5, -22, -11, 4, -22, 11, -6, -25, -32, -7, 25, 19, 1, -16, -22, -6, 6, -11, -39, 0, -2, -10, -10, -22, 37, 6, 7, -7, -21, -4, -6, 36, -3, -22, -11, -9, -26, -13, 19, 19, -13, -7, 21, -12, -20, -5, 12, 12, -7, 9, -4, -7, 13, 16, 28, -21, -27, 12, -13, -19, 15, -3, 27, -13, -19, -12, 19, -4, -16, 3, 24, -27, -5, 4, 16, 14, 19, 9, -14, 7, 18, -13, -25, -11, 2, -10, -5, 9, 18, 9, -3, -14, 0, -4, 14, 13, 
9, 10, 24, -20, 5, 11, 24, -1, 7, -8, 15, 14, 6, 4, -3, 12, 7, -22, 11, -12, -15, -13, 25, 14, -4, 6, 16, 9, 8, 16, 24, -13, -9, 16, 8, -2, -14, 4, -28, 20, -40, -3, 30, 9, 28, 9, -20, 25, 12, -8, -41, -4, -54, 4, -8, -26, -66, -2, 18, 4, 18, 15, 34, -16, 20, -33, 30, -7, -1, -19, 11, -25, 3, 3, 0, -20, 15, -15, -19, -4, 1, 22, 3, -12, 8, -18, 18, 34, -25, -18, 18, 6, -8, -12, 22, -2, -20, -26, 22, -17, -19, 16, 36, -1, -15, -35, 6, 18, 20, 11, -2, -7, -18, -17, -26, -3, 8, -11, 5, -13, -16, 10, -19, 17, -3, -2, 34, -18, 3, -5, -22, -3, -23, -19, 36, 0, -3, -15, -11, 6, -44, -23, 23, 12, -51, -13, 1, 4, 21, -10, 11, 3, 2, -4, -2, 1, 36, -16, 3, -10, -17, 7, 12, 16, 17, -21, 38, -26, -19, 2, 13, -13, 11, 16, -33, -3, -27, -15, 7, -11, -42, -2, -16, 13, -59, 16, -57, -23, -5, -11, -3, -28, 14, 37, -17, 15, 23, -12, -9, -22, 1, -27, -8, 3, 17, -20, 37, 10, -9, -23, 8, 11, -18, 13, 0, -1, 5, -2, -9, -20, -1, 4, -1, -1, 0, -16, -3, 5, -23, 1, 11, -15, -28, -6, 21, -11, 8, 17, -39, 18, -21, 23, -8, -21, -48, -17, 4, -1, -37, 0, 22, -3, -9, 15, -20, 14, 9, -1, -15, -31, -15, -22, 1, -3, -15, -2, 4, 4, -22, 5, -4, -19, -10, 3, -8, -4, -5, -12, -13, -7, 2, 21, 2, -33, 10, -7, 2, -1, -14, 4, -37, -17, -16, -37, 6, 32, -29, 29, -32, -27, 12, -1, -2, 3, 8, -6, 10, 13, -8, 20, -6, -13, -15, -10, 12, -1, 8, -11, -16, 10, -13, 22, 2, -21, -11, -8, -4, 6, 0, -10, -28, 11, -19, -12, 0, -8, -8, -23, -25, 12, -8, -10, 23, -8, -13, 19, -25, -7, -16, -9, 9, -2, -5, 31, -19, -9, -16, -9, -39, 19, -6, -7, 5, -20, 17, -34, 0, 5, 4, 46, 8, 3, 21, -24, -3, -5, -18, 6, 27, 16, 6, -22, 1, 17, -23, 16, -7, -27, -24, -15, -11, 24, 8, 3, 4, 1, 1, -23, 7, 29, 5, 13, -6, 4, -26, 1, 2, -5, -4, 5, -14, 19, -7, -27, 18, 0, -14, 20, 1, 20, 12, -14, 1, -14, -12, 12, -9, 27, 4, -1, 10, 5, 7, 10, -3, -10, 12, -31, 2, -4, 0, -19, 6, -8, -17, 6, -22, 29, 16, 1, 30, 3, -2, -4, -21, 18, -13, 3, 12, 3, 6, -14, 20, 19, 26, -3, -20, -13, -12, 18, -29, -13, 11, -12, 3, -6, 6, -31, 11, -15, 5, -10, 34, 0, -5, -40, -4, 9, -9, 15, -11, -32, 13, 33, -4, 22, -4, 16, 9, 0, -16, -2, -7, -12, -16, 12, 7, 12, 20, 5, 10, -6, 1, 22, 6, 4, 1, -24, -19, 26, 4, 1, 3, 27, 10, -12, 22, 2, 13, -19, 1, -5, 2, -22, 18, 8, 15, -25, -8, 26, 19, -13, 20, 13, 15, -10, 6, 21, 33, 28, -6, 18, -13, 4, -5, -18, 23, 6, -11, 11, 1, -20, -14, -29, 6, 35, 13, 13, -22, 14, -5, -20, 9, -8, 9, -48, -6, -16, -19, 16, 11, -20, -9, 1, 14, 12, -3, 15, 18, -1, 31, 13, -2, -22, 5, -10, -26, -27, 0, 24, -16, 3, -5, 18, -9, -9, -2, 3, -5, 2, -1, 10, 5, -6, 7, -7, -13, 5, 9, 0, 17, -5, -17, 33, 54, 17, -5, -2, -10, -12, -10, -4, -8, -4, -25, -1, -13, -17, -9, 27, -12, 28, 5, 11, 6, 3, -28, 5, 21, 11, -11, 13, -24, -10, 8, -6, 2, 9, -12, 9, -28, 10, 15, -4, 1, -19, 28, 27, -10, -7, -35, -2, 5, 6, -18, 14, 29, -13, -15, -15, -32, 21, 3, -17, -15, 27, -19, -15, -10, -9, 8, 16, 23, -1, -19, 16, -11, -30, -3, -5, -8, 30, 7, 24, 4, 5, -16, -26, -5, 2, -16, 5, -18, -6, 4, -1, 4, -27, -17, 21, -20, 5, -3, -32, -7, -26, -4, 6, 18, -20, -11, -11, -20, -18, 24, -11, -18, -23, -10, 19, 2, 4, 13, -22, 13, -1, -10, 4, -13, -2, -13, -2, -24, 0, -9, 7, -17, -22, -9, -13, -18, -9, 7, -42, 8, 11, 12, 0, 17, 28, -23, -4, -2, 13, 5, 10, 7, -28, 39, -6, -35, -5, -15, -11, 22, 11, -14, -3, 3, -46, 27, -11, -2, -25, 11, 27, 0, 3, 0, -11, 4, 0, -4, 13, -1, -34, -19, 6, -8, -8, 15, -16, 5, 10, -13, 34, -15, 18, -2, 21, -4, 3, -6, -11, 4, 23, -24, -32, -2, -6, 14, 27, 17, -14, 4, -32, -5, 4, -8, -1, -2, 46, -6, -30, 2, 16, 6, 35, 11, -23, -12, -2, 7, -3, -24, 28, -8, -17, 6, -39, -27, -14, 16, 18, 16, 6, 
-37, 9, -30, 7, -4, 26, 48, -16, -15, -30, 3, 16, 9, 15, -11, -26, -29, -19, 29, -23, 6, 14, -21, -19, -6, -16, 22, -5, -15, 53, -6, -17, -8, -29, -16, 18, -15, 39, 5, 2, -29, -10, -32, -29, -15, -11, -9, 5, -19, -19, -12, -3, -20, -6, -8, 42, -20, 5, 3, -7, -22, -11, -12, -2, 1, -15, 8, -11, -1, 22, 12, 32, 0, -18, -22, -26, -22, 11, -3, -6, -30, -3, -2, -25, -17, -6, 16, 7, 15, -28, -1, -14, -12, 0, 5, 18, -19, -12, 11, 5, 7, -5, 4, -33, -6, 11, -23, -16, -35, 1, 1, 17, -5, 2, -8, -12, -44, 15, -6, -16, -4, -17, -26, -20, -34, -8, 6, 12, 5, 10, 11, -16, -25, -9, -16, 55, 33, 9, -18, 11, 15, 6, 10, 2, 8, -17, -14, -29, 2, 26, -22, -1, -9, -7, -2, -10, 2, 15, 15, -24, 18, 0, 5, 7, -5, 13, -12, 7, 3, 16, -12, -46, 2, 13, -18, -4, 5, 7, 35, 8, -9, 1, -13, 10, -7, 20, 0, -14, -21, -4, -13, -8, 17, 1, 8, -13, 32, 3, -18, 15, -24, 2, 3, -9, 3, 10, 9, 3, 18, -13, 1, -18, 4, -16, 22, -11, 22, 8, 3, -7, 1, -9, -5, 13, 21, -17, -14, -10, -6, 0, 10, 1, 0, 13, 9, 20, -8, 3, -31, -12, -15, 21, -18, -3, 9, -30, 3, -29, 0, -9, 28, -10, 2, 12, -11, -20, -29, 3, -5, 10, 12, 24, 4, -23, -13, -6, 8, -22, -3, -3, -8, -11, -26, 13, 25, 0, 33, -2, 14, 6, -21, -10, 12, -2, 4, 4, 13, -29, 16, -3, -7, -15, -21, -14, -10, 12, 16, 14, 9, -23, -19, -30, -15, 2, 12, -6, 44, -1, -6, -27, -11, -20, 6, -11, 12, -10, -8, 7, 13, 8, -1, -21, -22, 6, 2, 24, 38, -30, 6, -4, 42, -27, 3, -25, 31, -15, 2, 6, 3, 12, 18, -18, 3, 1, -36, -8, -18, -5, -9, 33, -4, 21, 6, 26, 26, -22, 13, 2, 4, 14, -2, -15, 0, 45, 19, -5, 12, -9, -13, -15, 9, 4, -9, -14, -27, -17, 16, -5, 5, 8, 19, -7, 11, -5, -8, -4, -28, -23, 0, 5, 1, 8, -1, 27, 0, -12, 16, 22, -6, -7, -4, 3, 0, -4, 26, 6, -20, -13, -9, -2, -7, 0, 12, -1, -7, 12, 6, 8, 25, 16, -7, -28, -11, -20, -11, -19, 6, 23, -7, -22, -9, 26, 12, -4, 31, -8, -12, 10, -21, 22, -6, -22, 22, -13, -10, 11, 0, -8, -20, 27, -17, 23, 16, 0, -16, 10, -18, -10, -8, -27, 29, -20, -10, -19, 0, -5, -1, 11, 12, -15, -14, -10, -5, -25, 28, 0, 12, 7, 33, -20, -21, -9, 10, 24, -17, -10, -6, 20, 6, 28, -15, 30, 10, 22, 12, 19, 6, 35, 27, 17, -13, -4, 22, -9, -26, -21, 26, 5, -7, -15, 21, -10, 21, 8, 10, 17, 1, -3, -16, 2, -34, -26, -21, -16, -9, 13, -9, 24, 6, -6, 18, 28, -11, 11, 10, 17, 13, 25, -2, 6, -11, 14, 2, 22, 32, 9, -19, -27, 13, 38, 2, -33, 0, 7, -2, -8, 15, -16, -5, 18, -14, -28, 7, -8, 6, -8, -11, -18, 38, 10, 1, -13, -39, 27, 19, 2, 3, -35, 21, 6, 17, 26, 16, 0, -25, 5, -24, -21, 16, 13, 29, 6, -19, 12, -30, -9, -23, -5, 13, -19, 25, 5, 3, -29, 32, -21, -4, 3, 22, -9, -28, 13, -2, -25, -14, -16, 52, -8, 31, 0, -25, 4, 9, 21, -15, 5, -9, 4, -12, -31, 15, 12, 19, 11, 8, 13, 4, 1, -18, -22, 27, 24, -24, 3, 5, -6, -17, 9, -6, -10, 4, -6, 5, 6, 22, 1, 7, 0, 8, -32, 48, -18, 8, -45, -7, -2, 11, -8, 31, 26, 34, -19, 16, -4, -22, 13, 5, -10, 2, 27, 12, -27, 5, 36, 12, 1, 16, -7, 1, 11, 8, 15, -21, -9, 47, -29, 29, -18, 11, -15, -14, 26, 10, 12, -20, -18, 7, -17, -30, -8, -14, 21, -1, 33, 8, 0, -27, 9, 7, -33, 50, 0, 27, -4, -33, -15, -10, -11, -8, -19, 3, -9, -12, -12, 8, 33, -10, -21, 33, 27, 1, -14, 7, 16, 1, 7, -13, -11, -5, -13, -1, -14, 2, 3, 16, -5, 0, -2, 1, 10, -15, -46, -7, -34, -12, -4, 18, -3, 18, -14, -15, 8, -6, -1, 26, -8, 21, -3, 6, -17, 36, -28, -20, -17, -15, -5, -5, -6, -35, 19, 37, 9, -17, -10, -10, -23, 9, 31, -22, 19, -16, -30, 7, -9, 25, 7, 50, 37, 1, 4, 24, -20, 5, -8, 4, 34, 10, -19, 14, 8, -12, 35, 10, -15, -5, 11, 11, -13, -25, -30, -13, 23, -7, 18, -6, -13, 6, -2, -6, 6, 0, -25, -1, -24, -24, 10, 25, 47, 24, -27, 4, -27, 11, -6, -26, -25, 17, -1, 8, 13, -13, 12, 6, -15, -4, -17, 
25, 15, -16, -7, 12, 27, 16, 17, -5, -5, 6, -9, 16, -23, 3, 2, 10, 0, -16, -16, -23, -16, 25, -5, 17, 8, 3, -19, 19, -31, -12, -11, 15, -14, 3, -5, 22, 15, -19, 5, 11, 6, 15, -4, -22, 34, -18, 14, 22, 2, 13, 21, -25, 17, 7, -8, 12, -1, 2, -13, -3, 17, -19, -8, -3, 24, 9, 11, 30, -39, 23, 29, 13, 5, 12, 28, 1, 1, -1, -20, -11, 10, 8, 22, -8, 13, -16, 11, 31, 5, 17, -1, 20, 6, 1, -47, -30, -4, -3, 1, 18, -16, 21, -10, -13, -4, 2, -5, 6, 39, -13, 0, -1, -5, 21, -24, 3, 22, 21, -9, 13, 11, 7, -20, 13, 6, 0, 25, 16, -14, -10, 8, -6, -10, -8, 22, -15, -14, 21, 19, 8, -2, 10, -5, 4, -4, 4, -21, -1, -10, 2, -4, -37, -7, 5, 17, 6, -34, 14, -20, -13, 24, -27, -16, -4, -15, 6, 8, -18, -8, 23, 2, 10, -5, 7, 12, 7, -12, 0, -10, 12, 2, -21, -1, 36, 30, 20, -2, 7, -27, 17, 15, 19, -23, 14, -7, 16, -8, 1, 36, -13, 12, 8, -20, 17, 7, 14, -15, 16, 4, -26, 20, -23, -17, -8, -7, -6, 11, 29, 10, 18, -20, 10, 17, -11, 19, 25, -5, 15, 5, 11, -20, -11, 21, 4, -16, 12, -27, 10, 2, 18, -4, 11, -26, 8, -25, -17, 21, -21, 1, -8, 15, 19, -8, -6, 16, 25, -4, 20, 19, -8, -29, -14, -23, -22, -1, 14, 9, 5, 2, 8, 8, -17, 3, -14, -6, -10, 49, -15, 0, 0, -19, 12, 6, 7, -1, -23, -20, 10, 2, 9, -6, 35, -28, 3, -9, -10, 7, -39, 8, -4, 19, 9, -11, -26, -4, -15, 35, 5, -22, -6, -21, -3, 20, -21, 10, 8, 35, 12, -17, -26, 14, 2, 2, -28, 21, -5, 11, 13, -7, 11, -22, 19, 1, -3, -20, -10, 18, -4, -3, -20, -2, -18, 11, 0, 1, 5, -8, 10, -18, 0, -5, -11, 17, -11, -21, 15, 4, 18, 19, 19, -13, -2, 16, -21, -1, 18, 5, 2, 7, 4, 16, 6, -1, 9, -8, 4, -23, -25, -1, 4, -14, -23, 17, 9, 11, 12, -9, 0, -7, -44, 6, 11, -16, -4, -16, 1, -7, -2, 22, 40, 3, 1, -13, 4, -39, 0, -11, -28, 14, 20, -3, -17, 17, -6, 29, 25, -21, -10, -15, 10, 5, -25, 7, 20, -20, 8, -32, 17, -22, 3, -15, 17, 19, -12, -11, -2, 16, 15, 5, 25, 14, 17, -29, 5, -28, 14, 0, 15, 9, -27, -19, 3, -16, 0, 24, 19, 18, 2, 6, 2, 16, -21, 2, -17, 14, -18, 23, 15, 1, 40, 5, -11, 6, -3, -6, 14, -11, 1, -27, 7, 15, -19, -17, -5, -2, 2, -13, 21, 14, 11, 23, -13, 3, -4, -7, 7, -2, -22, 1, 11, -13, 15, -14, -16, 18, -20, -17, -11, -8, 19, 26, -19, 9, 21, -6, 11, 24, 2, -10, -38, -3, 14, 8, 19, -18, 1, 1, 4, 9, 7, 22, -17, 10, -19, 15, 2, -7, 3, 23, 1, 2, -2, -21, -4, 6, 23, -19, 0, -19, -29, -9, -7, 19, -40, 10, 9, 13, -3, 13, -12, -14, -30, 25, 11, 8, 24, 18, 14, 2, -7, -11, -23, 23, -13, 6, -3, -20, 24, 3, 16, 1, 4, -4, -6, -2, 13, -10, 11, 34, -15, 20, 13, 14, -47, -1, -12, 33, 11, -6, -16, -4, -13, 4, 26, 19, 15, -1, -7, 17, 23, 0, 24, -15, -13, 3, -17, 2, -11, 9, -6, 36, -8, 17, -18, -3, 12, -12, 17, -6, 17, 7, 9, -12, -1, -37, 29, -9, 4, -34, -12, -38, 20, -19, 26, 8, 15, -5, 26, -11, -12, -2, 19, 3, -26, -4, 13, 16, -5, -19, 13, 22, -20, -14, -4, 10, 24, -14, 21, -8, 17, 21, 0, 11, 28, -11, 10, -1, 10, -1, 30, -8, -2, -15, -14, -6, 1, -38, 15, -22, 31, -43, 40, 4, -1, -15, 11, 8, -3, -15, 23, -22, 26, -23, 19, -17, 15, -19, 5, -21, 16, -20, 10, -34, -8, -17, 17, 5, 29, 19, -8, 1, 28, -19, 4, -10, 11, -4, 6, -23, 1, -3, -3, -12, 13, 7, 15, -5, 3, -14, 17, 6, 1, 5, -28, 15, 12, -26, 27, -20, -27, 33, 36, -14, -1, -9, -27, -12, -4, 9, 19, -15, 8, -3, 2, -3, 4, -25, 26, 15, 32, -13, 13, -30, 25, 15, -13, -15, 19, 19, -3, -14, -3, 1, 34, 15, 5, -7, -13, -24, 5, -4, -12, -16, -25, -37, 18, 12, 3, -5, 5, -32, -6, -31, 26, 8, -2, 2, 14, -10, 6, -9, -2, -27, -30, 17, 21, 12, 19, -25, -16, -21, -3, -2, -24, -5, 6, -2, 18, -11, -2, -37, -7, 28, 4, -6, 7, -14, -18, -12, -4, 15, -32, 19, -13, 19, -8, 15, 32, -4, 3, -18, -14, -16, 24, 16, -14, 21, 15, 26, -25, -10, 14, 22, 18, 7, -15, -46, 55, -2, 6, 
-17, -16, 0, 9, 12, -17, -12, -1, -6, -31, 0, 24, 22, -12, -6, 20, 4, -18, 21, 3, -13, -5, -9, 10, 22, -9, -6, 4, -11, -16, -26, 16, -7, 10, -23, -7, -26, -11, -20, 18, -5, -17, -12, -7, 13, 20, -7, 23, -17, 20, 3, -23, 13, -2, 8, -6, -11, 1, -26, 18, 0, -7, -22, 14, -18, -23, -24, 32, 28, 4, -18, -2, 40, 6, -1, 7, 16, -1, -9, -8, 6, -3, -4, 2, 13, 7, -31, -9, -6, -8, 7, -17, 19, -18, -13, 11, 3, 2, 19, 3, 8, -3, -5, 12, 14, 1, 32, 22, -1, 25, -21, -12, 16, -6, 5, 21, -3, -15, 2, 9, -24, -2, 18, 37, -24, 7, 6, 29, 14, -24, -33, -14, 6, 1, 15, -30, -16, -7, -11, 14, 19, -12, 6, 17, 7, 1, -18, -15, 15, 20, -14, 0, -22, -5, 13, -22, 14, 13, 23, -32, -7, -21, 23, -8, 14, -1, 34, -8, -28, -28, -19, 28, 24, -36, -17, -8, 11, -3, 5, -7, 0, -26, -23, 8, -20, 46, 9, -18, -5, 31, 11, 5, -2, 5, -9, -6, 19, -8, -9, -7, -4, -21, -40, -14, 0, -20, 30, -14, -11, -13, 12, 3, 22, -13, -39, -15, -31, -19, -6, -2, 14, -20, -2, -13, -17, -34, 27, -16, -13, -31, 28, -12, 15, -1, -15, 1, -11, 17, 17, 19, -49, -18, 17, -8, 33, -36, 21, -7, 21, 14, 21, -12, -19, 16, 25, -14, -8, 16, -21, -12, 8, -10, 7, -12, -2, -32, 7, -33, 37, -16, 4, -21, 21, 12, -48, -57, -7, 10, 6, -54, 33, 3, -10, 1, 4, -6, -10, 13, 1, -17, 21, 5, -4, -13, 2, -17, 14, 2, -11, -1, -24, -22, -12, -29, 0, -30, 4, 11, -12, -31, 9, -16, 11, -4, 24, 7, 24, -6, -14, -6, 10, -16, 12, -35, -1, -6, 11, -23, -9, 7, -3, -14, 14, 0, 19, -1, 8, 2, 23, 14, -14, 5, -2, -7, 17, 26, 3, -5, 14, -7, 20, -3, 6, -1, -8, 24, 3, -12, 10, 29, -13, -41, 17, 15, 30, -54, -40, 18, 2, 17, -17, -4, 6, 2, -5, 10, -5, 17, 14, 9, 28, 22, 7, -30, 19, 10, -21, 15, -20, 24, -47, 24, -17, -15, 19, -40, -31, -1, 14, -30, 4, -22, 12, 6, -2, -22, -27, 3, 0, -5, -15, 1, -22, 32, -11, -7, 6, -5, -6, -10, 41, -1, 9, 17, -3, 18, 5, 0, 4, 0, 22, 16, 14, -17, -7, 8, 39, -47, -6, -30, -1, -12, 5, -19, -10, -13, -15, -14, -3, -15, 5, 24, -8, -37, -16, 15, 9, 6, 15, -22, 31, -17, 27, -18, 15, -5, 4, -25, -11, -4, 10, -45, 17, -21, -12, 11, -12, -1, -19, 0, 21, -41, 6, 16, -8, -37, -22, 11, -11, 20, 17, -44, 2, -7, -6, -34, -17, 5, 1, -27, 24, -8, 7, -34, 2, 14, 0, 8, 25, 4, 12, -10, 11, -14, -7, -14, 0, -38, 26, -24, 22, 19, -13, 10, 28, 6, 7, -18, -2, -24, -10, 0, -19, -13, -16, 5, 10, 26, 16, -27, 4, -2, 4, -13, 4, -16, -3, -8, 19, -21, -4, 23, -6, -24, -2, 51, 42, -3, 19, -7, 25, -3, 15, 13, 4, -11, -5, -1, 2, -6, 12, -5, 7, 1, -29, 20, 24, 8, 7, 24, -26, 4, 2, -15, -12, 2, -11, -2, -24, 17, -15, -2, 3, -14, -2, 11, -18, -13, 13, -9, 1, 2, 6, -9, 7, -12, -8, -4, 5, -1, -3, -16, 13, 26, 3, 33, 5, 3, -4, 17, -21, 14, -7, 15, 12, 18, -22, -3, -25, -29, -24, 16, -4, -12, -1, -5, -30, 2, -14, -14, 7, 8, -4, 15, -6, -27, 13, -3, 14, -23, 2, -14, -1, 28, 11, -20, 13, -18, -9, 48, 20, -17, -8, -36, 20, -19, 3, -20, -26, 7, -6, 3, 9, -17, -22, -9, 25, -13, -1, -7, 5, 2, 10, 18, -14, -19, 23, 0, 10, 22, 16, -26, 5, -42, 2, -12, 18, -6, 30, -4, 24, 25, -24, -17, 1, -5, -9, 15, 11, 16, 4, 17, -9, 19, 9, -7, 16, 2, -21, -19, 14, 19, 10, 9, -19, -2, 10, 28, 7, -15, -2, 1, 7, 0, -4, -20, -6, 8, 21, -11, -21, -16, 2, 14, 29, -1, 11, 9, -10, -17, -16, 18, 14, 13, 24, 5, 4, -8, -14, -21, 30, -3, 11, -2, 8, -21, 14, -52, -4, -8, 10, -13, 10, 3, -14, -26, 4, -5, 19, -5, 1, 24, -7, 4, -4, -21, -1, -7, 8, -7, -7, -23, 15, 17, -4, 5, -4, -4, -3, 43, -13, 2, -12, 18, -11, -4, -6, 26, -11, -19, 4, 4, -14, -4, -9, 2, 11, 16, 0, -2, 10, -25, 11, -13, 12, -15, 18, 7, -10, 8, -21, 8, 26, 6, 33, 5, -12, -9, 13, -53, 22, 10, -7, 19, 2, -11, -1, -45, 14, -5, 10, 8, -7, 18, -23, -12, 10, -14, 28, 2, -7, -43, 17, -25, 
8, -8, 46, -5, -1, 8, 6, -3, 24, -2, 16, 7, -13, -4, -20, 16, 5, 15, 0, -17, 6, 8, 20, 13, 2, 13, -6, 8, 10, -22, -19, -23, 20, -19, -2, -23, 5, 5, 9, -2, 15, 8, 14, -9, 4, 2, -17, -4, 1, 12, -14, 15, 16, -5, -4, 11, -14, -7, -13, 5, -13, -19, -17, -11, 20, -22, -14, -5, 8, 0, 7, 10, 3, 15, -21, 1, -9, -2, -25, -26, 16, 6, -13, -2, 11, -7, 8, -12, 1, -3, 14, 16, -5, -40, 8, -36, 24, 20, -10, 10, -20, -35, -27, 19, -10, 6, -13, -12, -6, 0, 11, 0, 9, -16, -13, -9, 11, -30, 10, 19, -8, 13, -6, 7, -14, 0, 8, 10, -17, -17, 1, -5, -2, 5, -2, 19, -4, 20, 25, 0, -9, 21, -18, 8, -5, 11, -2, -21, 14, 6, 7, 19, 15, 9, 22, -9, -1, -17, -24, 6, 25, -3, 12, -10, 17, -36, 17, -4, -17, 11, 13, -11, -14, 2, 3, 2, 24, 16, 10, -10, -1, -24, 13, -3, -5, -22, 14, 15, -7, 20, 12, -10, 19, -9, 2, -15, -3, 2, 5, 11, 2, -10, -5, -14, -10, -18, -20, 11, -7, -21, -5, 1, 4, 7, 9, 4, 22, 3, -12, -17, -6, 13, -21, -10, -9, 11, -3, -21, -11, -10, -14, -14, -12, 10, 11, 15, 7, 11, -9, -23, -16, 14, 3, -15, 3, -12, 5, -36, -15, 15, 8, -4, -1, 7, -10, 13, -7, 9, 2, 15, 6, -11, -11, 8, 11, -19, 5, 12, -17, -13, -7, 28, -6, -8, -1, -19, -12, 31, 7, 2, -5, 18, 15, 14, -3, -5, 14, -8, 5, 5, 14, 1, -1, 14, -8, 16, 12, -9, 9, 9, 15, 22, -11, 3, 7, -16, -22, -9, -20, 11, 12, -10, -19, -7, -5, 3, -7, -10, 6, 29, 0, -8, 24, -5, 6, 5, -18, -25, -13, -17, 33, 5, 8, -22, -18, -15, -23, -11, 13, 1, 1, 9, 8, -6, 2, 15, -15, 10, 10, -3, -1, 12, -28, 7, -24, -9, 19, 28, 18, -19, -7, -24, -31, -17, 0, -14, -16, 4, 14, 21, 15, -19, 7, -30, 8, 6, -15, 8, 19, 5, 8, 15, 6, 13, -27, -4, -6, -13, 6, -10, -17, -7, 6, -1, 20, -19, 15, 5, 7, -34, 7, -25, 18, -1, -18, -19, 14, 10, -18, -11, -8, -11, -21, 11, -12, 4, -14, -9, -6, 10, -6, -32, 7, 15, -17, -22, -20, 13, -18, 5, 9, -6, -28, 14, -21, 14, -6, 5, -6, -33, 6, -3, 0, -22, -3, -9, -5, -29, 8, 15, -9, 3, 10, -4, 11, -6, -30, -22, -2, 7, 19, -5, 5, 18, 6, 12, 2, -12, -1, -10, -8, -4, 12, 14, -14, 17, -18, -27, 3, 3, -26, -2, -4, 8, -7, 3, 6, 11, -23, -1, -8, -17, -21, -1, -3, -3, -6, 0, -10, 15, -12, -29, 19, 9, -26, -11, 27, -17, -3, 10, 20, 8, 7, 8, -1, 4, -13, -11, 17, -16, 13, -6, -35, -15, -21, -5, 9, -12, 17, 2, 25, -21, -18, -20, -7, 2, -13, -23, -7, 4, -8, -7, 15, -13, 7, -11, 27, -9, -7, -7, 0, -16, -17, -15, 6, -6, -14, 23, 9, 18, -11, 7, -27, -15, 3, -10, -2, 6, -10, 3, -19, 15, 9, -11, 7, -18, -15, -10, -9, 12, 6, 9, -3, 9, 7, 5, 19, -21, -5, 16, 3, 47, 0, -17, -22, -23, -14, -13, 6, -19, 13, 2, -16, -20, -3, -3, 7, -31, -13, 17, -5, -18, 11, 30, -8, 18, -3, -23, -3, -2, 17, 15, 10, -24, 6, 17, -13, 13, -21, -29, 2, -49, -11, -10, -3, 9, -8, -13, -5, 30, -7, -4, 50, -11, 14, 24, 3, -69, 9, 0, 34, -16, -15, 21, 8, 0, -5, 3, 12, 3, -20, 17, -14, -8, -10, 0, -2, 6, -1, -63, 20, 10, 8, -15, 17, 18, -17, -16, -1, -43, 15, -2, 10, -19, -8, -14, 14, -9, -4, -9, 5, 14, -20, 36, -7, 23, 15, 2, 18, -17, -9, 22, -1, -30, 12, 2, 7, -9, 7, -2, -10, 0, -12, -23, -13, -11, -6, -11, 16, 24, 0, -15, 18, -4, 10, 1, -1, 8, -10, -18, -17, -20, 14, -10, -9, -17, -5, 12, 12, 18, 8, 22, 13, -7, -3, -11, -18, -11, 5, -30, 3, -25, -8, -24, -1, -12, 2, 24, 0, 19, -19, 7, 7, -15, -10, 23, -1, -26, 2, -24, 0, 0, 1, -11, 19, -15, 5, -12, 6, -4, 2, 6, -2, 7, 7, -15, -3, -16, 9, -1, -8, -6, 25, -8, 0, -19, 0, -32, -12, 5, -5, 3, -2, 7, -5, -30, 3, 18, -20, 2, 9, -24, -5, 22, 9, 16, 9, 15, 9, -16, -19, 10, 5, -61, -19, 10, 5, -1, 3, 17, 1, -7, 5, 7, -5, 12, 3, -18, -5, 28, -14, -9, -2, -19, 0, -2, -2, -32, -32, 8, 9, 8, -13, 7, -2, 7, -42, -4, 3, -5, 5, -21, 12, 11, 2, -15, 13, -5, -17, -4, -6, -35, -6, -23, -13, 
-8, -23, 17, 4, 13, 19, -23, 7, -15, 12, -7, -15, 9, 2, 15, 20, 13, -3, 13, -11, -29, 14, -1, 1, -10, -22, 26, -24, 5, 21, 9, -16, -6, 2, 20, -12, -13, -16, 9, -13, 6, 13, 2, -11, 8, 10, -21, -15, -18, 6, 9, -13, 40, -12, -19, 17, 2, 5, 14, -4, -33, -10, 2, 3, 11, 3, 5, 9, 20, -1, -4, 5, -12, -15, -57, -7, -10, -19, 14, -10, -20, -20, -2, 14, -26, -15, 17, -18, -13, -16, 29, 18, -6, -11, 7, -7, -23, -16, 33, -2, -8, 12, 1, -14, -8, -12, -3, -20, -21, 10, 11, 10, -14, 7, 33, -27, 27, 70, -7, 9, -12, -16, -26, 4, 4, -11, 0, 23, 5, -12, -13, 6, -12, -33, -22, 9, 14, 10, -10, 1, -9, -11, 14, 31, 18, 13, 0, 8, -11, 16, -15, 1, 9, -20, 20, -22, -20, -6, -9, -20, -7, -22, -1, -13, -23, 3, 15, -3, 7, 18, -41, -17, 8, -43, 8, 25, -10, -13, -34, -19, 28, -9, -17, 19, 6, -16, -15, 11, -36, -19, 8, 3, -9, -21, -19, -26, 21, 5, -18, 13, -10, 1, 8, -20, -11, 14, -17, 8, -1, -21, -9, 4, -43, 14, 5, 1, -1, 0, 3, 16, -21, -7, -16, 20, -13, -19, -13, -5, -25, -4, -23, 35, 5, -6, 20, 0, -17, -5, 13, -1, -5, -14, 10, -7, -11, 6, -4, 14, -4, -11, -27, -5, -17, 18, 17, 27, -14, 3, -19, -25, -1, 11, 9, 15, -7, -13, -4, -11, 2, 13, 1, -11, -24, -12, -10, 3, -7, 22, -3, -20, -24, 2, 7, -2, -13, 12, -20, 21, -14, -19, 4, -7, -23, 16, -20, 25, 0, -16, -35, 11, 30, -13, -1, -12, 4, -11, -7, 5, 0, 3, 18, 22, -11, -23, 11, 11, 3, 30, -18, -9, -23, -20, 27, 3, 13, -3, 15, -7, 15, -13, -16, 15, -12, 27, -9, 13, 22, 9, -29, -4, -36, -18, 13, 22, 11, -13, -19, -13, -29, 18, -4, -10, -7, 6, -13, -5, 6, -12, -3, 8, -12, 11, -43, 3, 23, -26, 1, -11, -17, -9, 23, -18, -21, -12, -3, -8, -21, 21, -19, 7, -35, -5, -16, -11, 17, -13, 16, -9, 12, -13, 15, 16, -10, 0, 1, -17, -16, 19, -7, -31, -12, -18, -26, 6, -30, 18, -23, -22, 17, 13, -5, -10, 23, -12, 6, -38, -20, 18, -11, 17, -14, -14, -4, -11, 14, 3, 8, -21, -21, -13, 16, -20, 2, -16, 11, -4, -10, -25, -3, 1, -1, -14, 3, -16, -21, 5, 0, 26, 7, -7, -12, 1, 11, -6, -13, 1, -22, -6, 4, 9, 8, 6, 1, -14, 0, -4, -2, 17, -6, -21, -1, -1, 3, 5, 10, -19, -10, 19, -8, 38, 14, 4, -21, -12, 21, 20, -1, -14, -10, 11, -13, -9, 11, 29, -4, 22, 0, 1, 10, 8, -12, -23, 20, 5, -19, 20, 10, -2, -32, 30, 4, -14, -2, -15, -21, 14, 5, -6, 9, 16, 16, -14, 11, 5, 12, -20, -2, -15, 1, -9, -27, -22, -6, -8, 6, 9, 10, 13, -12, 4, -4, -16, 2, -11, 11, -7, -20, 2, 7, 17, -18, -8, 3, 21, -8, -8, 2, -23, 15, -22, 18, 15, 2, 19, -26, -18, -1, -22, 2, 10, -6, -15, 0, -2, -1, -3, -14, -22, -18, -8, -8, -1, 7, -14, -1, -13, 4, -16, -40, 6, 4, 14, 17, 14, -6, 11, 16, -1, -1, -16, -14, 6, -19, 17, -17, -22, 7, 2, -5, -6, 15, 1, -21, -3, 15, 19, 16, -15, -13, 19, -24, -4, -24, -7, 9, -6, 7, 7, -8, -1, 10, 23, 11, 3, 1, -11, -2, -6, 17, -3, -7, -2, -6, -10, 19, 6, -19, 3, 3, 1, 16, -23, -16, 21, -7, 8, -1, 0, -6, 15, -6, -15, 12, 4, 15, -9, 6, -15, 3, 46, 8, -2, 14, 8, -10, -9, 33, -5, 12, 21, -18, -2, 15, -15, -19, 9, -1, -7, 3, -9, -13, 20, -7, 28, 0, -2, -21, -21, 1, 8, -7, -16, -9, 5, -15, -19, -25, -18, 19, -21, 19, -6, -17, -18, -18, -15, 19, -17, -14, -21, -12, -12, 5, -11, 8, 20, -13, 5, -16, -7, -4, 15, -32, -7, 7, -7, -18, -18, -7, -4, 6, 6, -18, 9, 4, -11, 9, 4, 6, -4, 15, -4, 8, 19, 2, -8, 5, 3, 11, -21, 7, -16, 9, -10, 35, 8, 11, -13, -2, -17, 15, -4, -43, 23, -20, -7, -5, -15, -12, -11, 5, -3, 8, 0, -3, 11, -3, 9, 28, -5, 10, 22, -23, 7, -7, -19, 16, 19, -24, -12, -19, -12, -2, -26, 11, -26, 9, 5, 7, 10, -8, 4, 0, -9, -15, -16, -24, 15, 3, -11, 5, 9, 10, 28, -13, -15, 14, 8, -17, 26, -10, 15, -4, 5, 8, -6, -11, 13, -6, -1, -12, 10, 4, -16, -8, 5, 14, 14, 7, 0, -14, 4, 21, -6, -23, 0, -15, -27, -19, 
-19, 6, 3, -8, -9, -7, 8, 9, -1, 15, 3, -23, 7, 13, 2, -15, 7, 3, -23, 6, 34, 9, -23, -12, -9, -2, 30, -21, -12, -12, -17, -11, -16, -14, 30, 0, 3, 5, 11, -2, -10, 15, 11, -1, 28, 17, 1, -27, -7, 29, 8, 8, 15, -18, -12, -8, 15, -1, -23, 14, -12, 17, 4, -9, -10, 14, 6, 1, -33, 5, -25, -17, -1, -15, 10, 3, -12, 0, -18, -6, 13, -11, 57, 7, -29, 11, -2, 10, 15, -6, -7, -6, 6, -19, -19, -16, -20, -10, -17, -19, -26, -15, -10, -9, -1, -14, -15, -8, 12, -14, -21, 4, 16, -21, -14, 5, 18, -10, 11, 3, -4, -12, 16, 5, 16, -24, -9, -3, -9, -14, 0, 5, 13, -2, 11, -32, 2, 20, 7, -6, 23, 13, -16, -8, -22, 7, -4, 0, -25, -11, 14, -22, -20, 10, 0, 16, 36, -22, -18, 3, 15, 16, 1, -18, 12, 10, 9, 16, -5, -2, 3, -4, 17, -12, 8, 23, 13, -13, 19, 8, 26, -9, 6, -18, 3, 15, 4, 5, -24, 17, -19, 3, -11, -4, -15, -12, 41, -14, 17, -12, 11, -18, -2, -15, -7, 13, 14, -12, -22, 8, 17, -2, 12, -1, -15, -30, -17, 3, 0, 19, 14, -11, -5, -6, -8, 1, -20, -11, -9, -22, 0, 5, -23, -8, 17, 16, 23, -6, 3, 3, 11, -19, 23, 16, 26, -22, 13, -1, 19, 6, -12, 7, 16, 2, 6, 9, 10, -11, 31, -7, -8, -3, 19, 2, 10, 6, 27, -19, 18, 15, 1, 17, 21, -22, 15, -13, -19, -4, -7, 17, 2, -22, 16, -12, -12, -24, 11, -21, 15, 14, 52, -10, 0, -3, -7, -15, -12, 21, 25, -6, -7, -7, 12, -17, 8, -22, 6, 5, 26, 4, -6, 19, -13, -15, 26, 5, 41, 13, -17, 22, -10, -13, -1, 3, 3, 7, -10, 1, -2, -33, 20, 16, 56, -15, 20, 2, -18, 1, 8, -11, -3, 6, 2, 7, -12, -9, 31, 16, 4, -16, 14, -19, -9, 19, -23, -1, -12, 8, -17, 31, 1, 35, 27, -2, -11, 17, -20, -15, -15, -30, 3, 10, -7, -8, -16, -25, -1, -8, 9, -4, 12, 2, -1, 12, -18, -3, -3, -5, -4, -8, -12, -7, 2, -5, 28, -8, -21, -13, 8, -3, 11, 8, 25, -16, 5, -10, 8, 9, -11, -39, -3, 3, -13, 1, 3, -3, 5, 11, -31, -8, 8, -17, -16, 20, -13, -3, -15, 14, -7, 5, 3, -18, 12, 4, 9, 15, -9, 14, 6, -4, 20, 10, 9, -8, -2, 3, -3, 5, 12, 8, 4, 18, -27, 7, 6, 6, 7, 6, 23, -6, -5, 13, -3, 0, 18, -19, 6, -5, -9, 0, 12, -32, 8, 19, 2, -18, -5, 9, -5, 12, -23, 1, -1, -19, 9, -6, 0, 23, -10, -37, 6, 0, -36, -14, -4, 1, 4, 16, -24, 3, -24, 9, -15, 14, 2, -27, -33, -14, 27, 4, -7, 6, 2, 15, -12, 11, 18, -22, -14, -26, 14, -10, 13, 4, -14, 7, 8, -22, 20, 18, -1, -5, 0, 6, -15, -9, 15, -3, -3, -17, 20, 13, 9, 1, -15, -1, -4, 17, -37, 9, 6, 18, 9, 0, 38, 14, 9, -19, 14, -19, -13, -6, 3, -4, -30, -1, -8, 1, 17, 24, 14, 16, -23, -3, -20, -20, -13, -4, -10, 8, -3, -7, -14, 1, 12, 32, -15, -11, 0, 0, 8, -13, -19, 7, 25, -13, 22, -3, 2, -7, -12, -2, -13, -8, 38, 17, 1, -1, -11, 12, 44, 6, -10, -19, 12, -1, -4, -19, -4, 3, -26, -20, -11, -11, -11, -8, -1, 6, -3, 17, 13, 8, -9, -15, 41, 12, -1, -20, -4, 11, 13, -16, 34, 12, -12, 15, -2, 24, -16, 23, -8, -17, -10, -22, -13, -1, 10, 19, -24, -11, 28, 17, -6, -16, -6, 16, -6, 4, -16, 15, -16, 3, -12, -35, -30, -18, 29, 0, 16, -3, 3, 29, 20, 4, -20, -23, 0, 6, -17, -10, 33, -19, -5, 14, 17, 21, -12, -18, 58, -20, -3, 20, -7, 17, -17, 0, 8, 9, -2, 19, -19, 5, -22, 16, 54, -18, -4, -17, 9, 23, -17, 2, -19, 8, -17, 6, -1, 4, -2, 3, -26, -21, 11, -7, 5, -11, 4, 17, 8, -3, 48, -19, 0, 30, -13, 25, -11, -8, 33, -20, -7, 30, 2, 30, -16, -14, -35, 15, -2, 2, -4, -8, 18, -5, 5, -6, 5, -2, 10, 6, -15, -4, 18, -8, -25, 25, -13, 16, 25, 14, -12, 18, 2, -9, 0, 13, 18, -3, 2, 0, 5, 10, -6, -17, 2, -6, 41, 1, -21, -7, -21, 6, 9, 18, -18, -5, -17, 6, 11, 14, 9, -9, -25, -5, -23, 20, 6, -13, -17, -4, 8, 18, 8, -15, -1, 10, 1, -6, 2, -21, 6, 0, 19, 25, 29, 4, 20, 8, 4, 5, -21, 0, 62, 2, -10, 3, -13, 28, 12, 4, -1, -18, 3, -19, -9, 16, -2, -10, 7, 0, 38, -14, -18, 23, 18, -23, -30, -7, 2, -9, -9, 10, -14, -11, -24, -12, 
46, -19, 10, 10, 15, 8, -1, -6, 26, -20, -13, 9, -5, 16, 44, 0, -9, 19, 15, 1, 14, 23, 18, -5, -29, -1, -9, 1, 1, -8, -1, 0, 18, 7, -18, 3, 21, 13, -17, -4, 19, 5, 12, 14, -2, 2, 8, 17, -5, -18, -6, 17, 0, -18, 22, -14, 76, 10, -1, -7, -4, 23, -14, 5, 14, 7, -1, -5, 13, 18, 10, 11, -21, 14, 8, 26, 15, 9, 10, 14, 21, -14, -10, 11, -1, -3, -20, -12, 18, 4, -18, 22, -10, -1, 21, -16, -6, 6, 5, 10, -22, 24, -3, -2, -20, -5, -17, -14, -10, -2, 0, -4, 2, -17, -1, 1, 2, 34, 10, -14, 11, -24, -8, -2, -11, -9, 10, -11, -14, 5, 21, 32, -16, -8, 36, 16, 5, -12, -4, 22, -7, -1, 19, -16, -9, -19, 8, 6, -8, 13, 14, 12, 43, -6, -12, -5, 7, 28, 23, -19, -15, 5, 14, 13, -2, -6, -29, 7, 41, 9, -19, 5, -20, 28, -10, -18, 21, 11, -12, 1, 7, 13, 3, 17, 7, -14, -15, 37, -13, 16, -29, 13, 5, 7, -7, -6, 9, 24, 2, 8, 3, -20, 3, -14, 0, 2, -4, 19, 5, -4, -9, -8, 20, 24, 12, -3, -21, 19, 0, -6, -8, 4, -20, -18, 28, 5, -6, 8, 16, -3, 1, 9, 7, -10, -19, 37, -3, 4, -2, 18, 6, 19, 17, 20, 11, -44, -14, -16, 4, -14, -20, -40, 10, -31, -12, -11, 0, 3, 10, -14, -14, 8, 34, 0, 29, -14, 12, 12, 3, 23, 9, 3, 0, -12, 5, 2, 15, 26, 9, 3, -12, 2, 20, 54, -5, 16, 40, -19, -8, -8, 13, 38, -1, 12, 23, -14, 5, 17, -10, 11, 18, -7, 0, -6, -18, 10, 10, -16, -17, -27, 5, 15, 1, -24, -4, -32, -5, 8, -23, 47, -5, -26, -46, -8, 4, 11, 8, -3, 6, -7, -13, 19, -18, 9, 7, -22, -18, 72, 13, 30, -39, -2, 9, -20, 15, 15, 13, 3, -20, -5, -5, -16, -3, -26, 8, 17, -9, 13, 17, 2, -18, -23, 30, 42, 2, 47, -6, 14, 2, 9, 5, -9, -23, 29, 12, -23, -9, 22, -18, -14, 17, 0, 15, 23, 22, -31, 1, -2, 12, 14, 2, -28, -17, -7, 25, 7, -8, 11, -7, 6, -15, -31, -12, 12, 23, 13, 1, -30, 9, -7, 21, -18, 9, 6, 6, -9, 10, 18, -2, 27, 5, 1, 8, -18, 14, 8, 18, 3, 16, 28, 7, 16, 23, 8, -1, 15, 18, 33, 5, -10, -17, 10, 13, -1, -31, 37, -2, -32, 5, -22, 23, 11, -3, 15, -8, 11, -4, -8, 8, 14, 25, 25, -5, 36, -14, 34, 1, 20, -39, -7, -5, 9, -3, -36, -5, 16, 25, 23, -3, 19, 20, 22, -4, 10, -12, -15, -13, -11, -14, -15, -11, 7, 7, 8, 6, -19, -19, 19, -13, 12, -7, 6, 13, -26, 1, -10, -13, -5, 0, 24, 6, 14, 3, -13, 1, -2, 24, -16, -11, 0, 17, -9, -2, -2, -8, -15, 3, -3, -21, 16, 10, -12, -16, 21, 12, -3, 7, 5, 9, 22, -13, 2, -3, 21, -17, 15, -16, -4, 0, -14, -3, 15, -11, 30, 4, 22, 5, 22, -21, 42, -20, -19, -41, -12, 5, -13, -21, -17, -6, -18, -8, -7, 8, 18, 10, 6, -21, 5, -5, 6, -11, -14, -23, 17, 9, -8, 22, -12, 5, 35, -19, -12, -12, 20, 18, 4, 11, -18, 18, -17, -11, 9, 8, 7, -10, 25, -15, -10, 7, -29, -27, -5, 9, 25, -9, 31, -8, 20, -26, 5, -19, -6, 15, 27, -18, 10, 22, 5, 0, -25, -11, -12, -20, 2, 7, -39, -18, -5, 0, 14, 16, 23, 26, 57, 0, 21, 11, 21, -5, 2, -37, -18, 25, 25, 15, 3, -5, 10, 3, -17, -16, -21, -17, 4, -11, 17, 8, 10, 13, 1, -9, 26, 17, -22, -18, -9, 14, -7, -17, 11, 12, -16, 13, -3, 18, 14, -21, 9, 3, -2, 35, -17, 20, 15, 10, -23, -11, 32, -2, 20, -40, 13, -22, -19, 5, 5, 35, -26, 20, 9, -9, -1, 12, -17, -12, 6, 29, 19, -9, 12, 7, 6, -20, 16, -22, -2, -5, 16, -19, 7, -13, 6, -26, 11, -8, 17, -2, 13, -22, -17, -6, 30, -19, 7, -5, 17, -23, -14, -6, 22, -6, 31, -19, 9, -35, -36, 1, -6, -19, -3, -2, -8, -8, -19, 9, 18, -22, 7, 14, 4, 10, -18, -1, -14, -4, -11, 9, 27, -7, 8, -5, -15, 16, -5, 11, -10, -6, -10, 24, -16, -10, -20, 10, 6, -28, 16, -35, 24, 14, -17, -18, -13, -21, -25, -20, 2, 3, -12, -3, 23, -5, -34, 12, -12, -23, 19, 10, 16, -1, 0, 22, -10, -4, 9, 15, -13, -21, 0, 51, 5, -13, -12, 6, -3, 32, 33, -3, 14, 3, -2, -18, 14, -15, -14, -18, -2, -5, 10, -21, 15, -4, -18, -17, 32, 12, -14, -4, -1, -12, 27, 1, 28, -12, 3, -14, 31, 16, 12, 12, 3, -4, -14, 5, 3, 
4, -15, -1, -30, -1, 27, -11, -24, 52, -6, 27, 17, -10, 6, 3, 23, -1, -10, -12, -6, -22, -15, -4, -7, 34, 7, 12, 7, -4, -8, -2, 13, 30, 20, 9, 31, -12, -22, -20, -27, 23, 36, 17, -6, -16, -15, -1, 1, 30, 7, 3, -19, -10, -14, 5, 11, 8, -21, 19, 28, -17, -9, 2, -5, 16, 23, 15, 43, 3, -11, -18, -25, 37, 3, -11, 19, -2, -25, -21, 11, 23, 7, -13, 2, -14, 17, -16, -1, 24, -2, 25, 5, -22, 14, 12, 6, 15, 13, 10, 2, 21, -22, -11, 22, -3, 1, 27, -25, -2, 18, 1, 0, 8, 24, 25, 16, -2, 11, 6, 6, -11, 24, -4, -5, 4, -16, -23, -27, 23, -1, -6, -10, -17, 4, 2, -1, 24, -8, 9, 10, -5, 2, -19, -25, -8, 25, -23, 20, -18, 1, -6, -13, 58, 5, 23, -2, -20, -10, -2, -13, 14, 13, -30, -17, -3, 6, 4, -22, 10, 25, 26, 45, -19, -32, -11, -32, 2, 31, -11, -12, 8, -18, -1, -22, 9, 21, -7, 28, -14, -12, -4, 3, 9, 14, 14, 13, -13, 19, -14, 18, 36, 13, 0, -9, -19, -12, 16, -3, -7, -5, 24, -5, 15, -6, -21, 20, -18, 40, -29, 37, -20, -21, 14, -36, 5, -4, 13, 7, -20, -25, -1, 41, 12, 14, -16, 35, 6, -20, -10, 28, 12, -2, -10, 12, 11, -8, -20, 0, 23, -7, 8, 20, -8, -4, -6, -37, 24, 14, 8, -11, -10, 0, -12, 16, 8, 33, -15, 14, 4, -17, 18, 14, -8, -5, 3, -18, 13, -34, 12, 16, -7, -7, -14, 21, -12, -11, 18, 16, 22, 19, 29, 31, 9, -17, 0, -13, 6, 10, 1, 13, -22, -1, 6, -12, 11, -14, 10, -14, 5, 8, 0, -33, -8, 6, -7, -13, 19, -2, 0, -10, -21, 12, 25, -15, 16, -19, 8, -3, -11, 0, -1, -17, -6, -22, -23, -7, 38, 17, 14, -8, -21, -13, -12, -10, 4, -9, 32, 1, 11, -35, 14, 32, 0, -19, -21, 30, -16, -20, 9, 7, -6, 11, -7, 28, -18, -20, -8, -9, 0, 0, 25, -22, -7, -10, -11, 2, -13, 26, 3, 8, -12, 45, -5, -35, -1, 34, 12, 8, 12, 20, -7, -47, -6, -4, 14, -16, -4, 5, 13, -26, 16, -10, 25, -6, 16, 22, 7, 28, -25, -5, 29, 24, 1, -24, -9, -11, -17, -11, 11, 23, -1, -2, -1, -12, 8, -4, 1, 5, 11, 0, 14, 6, 10, 19, -20, -9, -20, -39, 17, -14, 20, -19, -21, -29, -15, -33, 6, 8, -2, -27, -29, 4, -5, 2, -6, 0, -9, 11, -1, 11, 0, 34, -20, 0, -9, -1, 5, -8, 3, 4, 6, -11, -31, 20, -23, -2, 13, -6, -3, 17, -26, -2, -11, -21, -5, -12, -20, 15, -18, -5, -23, 1, 14, 10, 11, 2, -15, -31, -6, -1, 9, -13, -5, -22, 7, 21, 7, -3, 3, -13, 8, -12, 14, -5, -26, -2, -8, -16, -8, -3, 6, 13, 13, 20, 20, -9, 1, -3, 41, 0, 17, -1, -8, 6, -21, -11, 18, 13, 16, -1, 12, -9, -17, -9, 15, 31, 14, 1, 1, -8, -11, 17, 42, -2, 18, -28, -14, -33, 15, -6, 13, -12, 38, 14, -20, 24, 4, -3, -17, 27, 29, -20, -7, -12, -8, -2, -31, -21, -14, -14, 19, 20, 10, 3, 15, 21, -10, -15, 5, -24, 10, -20, 26, -9, -8, 20, -5, 13, 13, 5, 6, 3, 10, 31, -15, 22, -20, -5, 26, -2, -5, 11, 10, 18, 13, -8, -10, -14, -8, -13, 4, 6, 12, -12, -10, -10, 62, -35, 17, 5, -11, -14, 37, 2, 5, 7, 16, 19, 2, 4, 22, 2, -23, -27, 6, -10, 17, -5, 19, -4, 25, 20, 0, 22, 3, 29, 5, -11, -9, -7, -16, -26, -18, 6, -5, -4, 12, 11, -6, 5, 17, 7, -3, 6, 33, 18, 8, 2, -2, 11, -41, 25, 25, 26, -18, -10, -10, 1, 6, -8, -6, -5, -21, 7, -5, 20, -20, 9, 13, 27, -20, -4, -22, -10, 21, 11, 5, 2, 12, -1, -9, 22, 25, 12, -28, -17, -14, -12, -9, 27, -1, -3, -28, 29, -8, 16, 14, 47, 7, -36, -25, 21, -6, -21, 11, -9, 17, 6, 21, -4, -4, 10, -9, -15, -15, 15, 19, -7, 10, 18, 13, 14, -20, 5, -22, -14, -17, -11, 19, 13, -1, 4, 2, -14, 10, 10, -9, 1, 6, 31, 17, 15, 13, 17, -1, -16, -10, -34, -5, 4, 1, -4, 0, 13, -7, -7, -11, -5, 5, -14, 0, -9, -13, -1, 7, 7, -15, -2, 19, 26, 14, 12, 21, -1, -10, 7, -22, -11, 0, -24, 19, 5, 1, 5, -10, 5, 0, -3, 19, -8, -10, 26, 15, 10, 19, 16, -20, 22, 16, 18, 19, -5, -6, 42, 19, -38, -13, -19, -18, 14, 15, 9, 8, 15, -15, 5, 17, 7, 28, 32, -5, -5, 17, 25, 17, -5, 22, 24, -14, 22, -6, 5, 16, 35, -13, -22, 17, 
-6, -15, 10, -22, 11, -21, 8, 14, 12, -16, 25, -7, -13, 26, 44, 8, -20, 14, -6, 11, 4, 12, 10, 43, 4, 1, -27, 8, 1, 11, 6, -1, -10, -1, -24, 2, 23, 0, 25, 39, 7, 20, -22, 5, 21, -11, 2, -14, -9, -2, 5, -2, -26, -14, 23, -1, -9, -20, 4, 3, -18, 31, -19, -8, 26, 13, -22, -5, -15, -11, -11, 13, -34, -7, -5, 17, 15, -3, -4, -13, 17, -9, -4, 6, -19, 12, 17, 3, -5, -6, 5, 1, 24, -5, 0, -32, 6, 11, -10, 10, 39, -23, 6, 1, 17, -12, 5, -21, -4, 38, 23, -6, 12, -17, -10, -4, -28, -5, 2, 10, 11, 8, 11, 3, -3, -3, 5, 18, -9, 16, 32, -19, 3, -9, 4, -21, -9, 17, 5, 14, -6, 4, 25, 0, 1, -6, -6, -10, -10, 8, -3, 14, -5, 7, -1, 17, -5, -11, 7, 23, 18, 18, 23, 8, 29, 10, 9, 4, 10, 2, 3, -7, 9, -23, -19, -14, -30, -10, -21, -10, 16, 3, 10, 1, 20, 8, -3, 5, -7, -4, -12, 25, 9, 2, -15, 8, -17, -12, -12, 2, 12, -5, 7, 5, -23, -8, -16, 1, 15, -9, 11, 1, -14, -16, 22, 15, 5, -11, 8, 19, 4, -15, 28, -13, 3, -9, -15, -17, -30, -16, -1, 27, 20, 10, -25, 17, -8, -16, -19, 1, 17, 16, -16, 6, 8, -26, -24, 3, -30, -20, -28, 21, -15, 10, 5, -16, 24, -7, -11, -14, -15, -2, -11, -9, 14, 6, 9, -5, -36, 6, -3, -7, -1, 7, 19, -9, -22, 8, 17, -17, 20, -3, 1, -9, 2, -4, 25, -17, -10, 10, 8, -2, -21, -13, -13, -2, -21, -2, 5, -4, -10, 9, -3, 2, -14, -15, 1, -6, 10, 13, -14, -6, 8, 7, 27, 16, 8, -14, -15, -12, -25, 9, -5, 1, 21, 24, 16, 12, -13, -3, 4, -13, 0, -1, -6, 4, 27, 17, -1, 11, -10, -7, -1, -13, 1, 16, 40, 0, -1, -11, -20, -15, 4, -10, 10, -20, 21, 15, 30, 9, -12, 9, 11, 16, 12, -6, 18, 0, -18, -11, -13, -21, 25, -10, -15, 1, 0, -11, 12, -16, 5, -6, -16, -3, 7, -1, 17, -4, -3, 0, -15, -21, -21, -13, -7, 8, 3, -18, -7, -15, 3, 17, -11, -8, 8, 13, -14, 13, -3, 14, -9, -23, -11, -24, -17, -8, 16, -16, 20, 9, -5, 16, 0, -24, 10, -11, 29, -6, -12, -4, -6, -10, -14, 1, 12, 17, -9, -5, -23, 11, -15, -20, 7, 12, 10, 19, -10, -38, -19, 2, -4, 4, 3, -8, -23, 10, -10, -19, 24, -23, 9, 5, -36, -6, -9, -21, -1, -20, 10, -10, 2, 0, -13, -13, 19, 5, -11, -3, -17, -27, 9, 11, 15, 1, 7, 18, -11, 19, -21, 5, 6, -19, 0, -12, -22, 13, 9, 18, 24, 15, -2, -3, -11, -15, -26, 11, 28, -14, -5, -1, -4, 21, 15, 9, -5, 3, -8, -6, 0, 8, -10, 13, -4, 17, -23, 20, -7, 16, -5, -7, 0, 7, 19, -42, -16, -21, 9, 9, -9, -9, 5, -37, -1, -14, 2, -24, 2, -5, 3, 9, 14, -12, -9, 11, 13, -13, -22, -31, 11, -28, 8, -19, 1, -20, 26, 14, 0, -4, -29, -11, -17, -15, 13, 20, 0, 7, 19, -23, -19, -9, 15, 10, -31, -16, 1, 12, -16, -8, -6, -18, 30, 3, -9, -2, 23, 19, -6, -8, 9, 10, 20, 13, -8, -6, 7, 7, 7, -22, -16, -20, 6, 4, -1, 3, -21, 34, 25, -8, -14, 8, -9, -3, 22, 13, -3, 9, 0, 0, -7, 8, 2, 14, -13, 11, -4, 8, 9, -6, -11, -31, -6, -5, -13, -6, 2, -15, 15, -1, 29, -13, 11, 8, 8, -8, -22, 27, 23, -23, 11, 20, -17, -8, 8, 5, -10, -16, 0, -22, 10, 26, 7, -6, 5, 3, 0, -13, -16, 12, 6, -6, 19, 8, 23, 12, -4, 2, 14, -2, 0, -18, -5, 12, -8, -25, -5, -15, -11, -1, -11, 19, 6, -26, -8, -19, -23, 20, -21, 17, 15, 6, -2, -27, 1, 7, 20, 8, 3, -5, -34, 0, -17, 16, 1, 9, 22, -44, 6, -16, 14, 8, -20, 20, 9, 1, 0, -11, 15, 5, 18, -6, -16, 8, -6, -50, 1, -22, 5, 12, 17, -13, 7, -22, 0, -6, 21, 1, 3, 4, 32, -15, -5, -7, 27, -2, 16, -38, -25, 5, -1, -18, 26, -22, -7, -29, -6, -9, 4, -8, -5, 7, -18, -6, 5, 23, -8, -22, -4, -16, -13, -24, -3, -36, 23, 21, -20, 8, -1, 6, 12, -4, 4, -17, 2, 6, -9, 8, -15, -14, 16, -22, 6, -20, 16, -25, -23, -9, -44, 17, 6, 4, 4, 0, 26, 17, -13, -6, 21, -21, -21, 11, 11, -11, 18, 10, -13, 5, -1, 5, 9, -28, -8, -15, 10, 20, 9, -32, -14, 0, 2, 16, 21, -10, -4, -55, 9, 10, 1, 14, 5, -18, 12, -42, -14, 27, 15, -8, -33, -8, 16, -8, 27, 1, -12, -5, -8, 0, -3, -11, 
5, 14, 26, -1, 6, -17, 2, 10, -2, -19, -24, 9, -5, 14, -19, -16, 2, -2, -17, 6, -14, 13, 13, -5, -2, -5, -12, 12, 14, -8, -23, 6, -25, 8, -22, -13, 10, -6, 11, -22, 29, 7, -14, 19, 0, -11, -1, -7, -11, -14, 3, -4, 28, -4, -23, 6, 0, 9, -27, -12, 33, -7, -3, -18, 8, 6, -43, 8, -7, 7, -12, -30, -7, -32, -46, -16, -18, 12, 3, -16, -6, 9, -17, 2, -7, -6, -6, 20, -12, 7, -16, 5, 14, -4, -5, -17, 13, -16, -23, 12, -17, 7, 18, 6, 24, -19, 0, 6, 23, -3, 22, 7, 10, 6, -16, -19, 12, 15, 9, 3, 13, 10, -5, 2, -1, -20, 23, -29, 15, 4, -18, -9, 19, -3, -6, -23, -6, 7, -26, -13, -6, -18, -2, -8, 1, -14, -34, 13, 27, 7, 5, -15, -13, 13, 6, 3, -9, 7, 24, 10, -15, 0, -30, 0, -8, -20, 25, -7, -31, 13, 8, 0, -3, 14, 33, -4, 12, -18, 2, -6, 4, 7, 28, -8, -13, -9, -6, -1, 4, -2, -4, -31, 19, -3, 25, -19, -11, -2, 7, 18, 10, 11, -8, 9, 27, -13, 12, 4, -5, 22, -5, -19, -4, -22, -18, -19, 25, -13, 8, -5, 29, 16, -18, -11, -8, -10, -27, 13, -20, 14, -4, 13, 0, -3, -16, 10, 13, -15, 7, 5, -43, -15, 5, -9, -23, 0, -5, 7, 25, 18, -16, -10, -21, -3, 5, -8, 0, -33, 37, 1, -21, 17, -15, -17, -9, 14, -13, -9, -15, 8, -5, -53, 18, 11, -24, 13, -20, 5, 2, -28, -9, 22, 10, -5, -4, 4, -9, 21, 8, 15, -6, 16, 14, 10, -3, -36, 33, -2, 21, -20, 1, -12, 17, -8, -10, 15, 4, 18, 15, 7, 12, 11, 13, 3, -8, 18, 17, 4, -15, -6, 12, -11, 7, -9, 10, 2, 18, -7, -2, -21, 33, 2, -5, -7, 6, 5, 20, -3, -20, -16, 26, 9, -17, 3, 19, -14, -14, 7, -21, 13, 5, -1, 13, -28, -3, -12, -24, -11, 18, -48, 15, 8, 20, 0, -11, -5, 27, 4, 21, 6, -30, -15, 14, 5, 19, 1, -2, -19, 1, 8, 11, -17, -8, -17, 3, -15, 22, -2, -8, -21, -15, 6, 5, -20, 3, 8, 23, -12, -11, -24, -5, -9, 9, -1, 9, 15, 11, -21, 32, 0, -22, 10, 9, 5, 13, -35, 3, -27, -20, 17, -9, 4, 13, 6, 2, 4, -12, -21, 10, -18, 3, 23, 13, -1, -11, -1, 6, -17, -8, -18, -6, -6, -16, 4, 10, -5, 1, 1, -17, -18, 2, -9, -7, -2, -37, -1, 17, -47, 13, -8, 1, 11, -10, 4, -22, -21, -32, -21, -12, -6, 16, 2, 1, -8, 8, 10, 2, -21, -19, -3, -40, 8, 7, 13, 16, -10, -18, -2, 16, -3, 14, 13, -9, 7, 17, 0, 6, 20, -4, -11, 9, 18, 22, 6, -14, 29, -17, 8, -15, -6, -18, -8, 5, -47, 21, -16, -3, 3, 13, -3, -10, -27, -1, -12, 0, -15, -12, 2, -23, 12, -15, -24, 0, -9, -21, -4, 14, -17, 0, -12, -2, -23, -20, -31, 14, -15, 23, -2, -5, -9, -21, -17, -7, -24, -17, -2, 9, -21, 19, -1, 34, -8, 17, 12, 17, 14, 13, -29, -11, -10, -10, 11, 9, -10, -14, 41, -24, -13, -9, -8, 6, 3, -24, -4, -12, -5, -11, 13, 6, -14, -1, -29, -30, 1, -10, 1, -30, -14, -11, -16, -7, -1, -7, -14, -6, 9, 40, 22, 17, -17, -41, 15, -18, -4, 8, 19, 8, -20, -2, 11, -14, 11, -6, -12, 5, -36, -9, 18, 16, 17, 7, -18, -11, 31, -15, 16, -33, 15, -37, -8, -12, -41, -18, 18, -18, -16, 15, -15, 1, -18, 7, -2, -5, -6, 14, 7, -21, -13, -20, -13, -3, -14, 19, 45, 14, 8, -16, 17, 6, 16, 16, -12, -30, -1, -7, 11, -30, 8, 28, -29, 7, -43, -3, -8, -1, -13, -19, -5, -4, -31, -21, 7, 3, 1, 15, -5, 7, 22, 13, 14, 17, -10, -3, 30, -11, 24, 9, 8, -10, -9, -33, -18, 19, 5, -1, 2, 6, 10, 15, -16, -4, -35, 1, -16, 5, 17, 12, -9, 10, -32, 3, 18, -24, -16, 16, -40, -26, 7, 1, -8, 10, -17, 0, -12, -28, 8, 15, -5, -1, -14, -18, 13, -7, -29, -12, 8, 20, -15, -17, 21, -3, 7, 2, 21, 33, -4, 11, 39, -15, -15, 4, -8, -5, -13, 6, -8, 17, -10, 18, -19, 19, 6, -2, -8, 4, 21, -13, -15, -2, 27, 11, -3, 18, -9, -26, -22, 13, -15, 11, -5, -13, -6, 14, 47, -1, -9, 12, -10, -32, 10, 16, 3, 3, 15, -9, 18, -31, 11, -5, 36, 3, -23, 5, -4, -22, -5, 0, 14, 21, 8, 10, 12, 0, -3, 3, 15, 0, -17, 2, -15, -16, 12, 11, 21, -1, -24, 5, 6, 11, -8, -12, -27, -14, -17, 20, -6, 3, 20, 9, 3, 8, 16, 28, -8, -26, 7, -9, 
9, -3, 17, 15, -9, -29, -6, -11, -18, -22, 9, -7, -2, 16, 0, -5, 31, -5, 3, -34, 2, 4, 34, -41, 4, -7, 10, 4, 15, 14, -13, -40, -2, 24, -15, 19, -1, -1, -10, 2, 22, -25, -4, -17, 10, -37, 21, -8, -23, 14, -5, -3, -20, -1, -9, -12, -17, 23, -3, 12, -10, 16, 8, 21, -10, 15, 42, -13, 10, 2, -8, -40, -33, 12, 13, -15, 21, 16, 25, -7, -36, 4, 16, 18, -22, 9, 24, -41, -7, 25, 26, -17, 12, -2, 12, -7, 12, 10, -9, -11, 12, -11, 12, 1, -5, 14, 10, 5, 26, -25, 4, -13, -6, -1, -28, 23, 5, -6, 6, -36, 26, -5, 10, -6, 23, -12, 16, -8, 11, -12, 30, -18, 4, -2, -21, -13, 15, -16, 24, -15, 28, 17, -36, -10, 30, -25, -40, -1, -15, -21, 10, 18, 8, 1, 9, 11, -38, -2, 5, -14, 14, 5, -6, 4, -6, -9, 4, 14, 1, 6, -1, -12, 11, -17, 3, -8, -16, -14, 3, 14, 7, 19, -16, 3, 28, 16, 3, 15, -14, 4, 1, -19, -17, -6, 15, 17, -1, 29, 21, -2, -7, 10, -10, -12, 6, -19, 11, 18, -8, -8, 0, 20, 26, -5, -14, -19, 10, 29, 8, 3, -13, -29, -5, -14, -10, 13, 12, -9, -20, 19, 1, -20, 5, 20, -10, -24, -18, 4, -1, -21, 14, 18, 30, 27, -11, 7, 1, -5, 10, -12, 25, 5, -8, 7, -22, 15, 11, 17, 26, 7, -8, -14, -24, -44, 15, 0, 19, -25, 15, 1, 6, -14, 42, -34, 20, -6, -24, -66, 19, -5, 0, -10, 13, -13, -5, 19, -25, -2, -13, 8, 38, 5, 19, -42, 12, -43, -15, -19, -18, -2, 3, 20, 8, -3, 11, 2, -13, 15, 0, 13, -2, 4, 5, -24, -7, 1, -16, 9, 11, 4, -22, 17, 15, 8, -23, 25, -28, -6, 10, 10, 3, 12, 3, 13, 8, -6, 14, -6, -27, 10, -26, 6, 5, -5, -2, 1, -8, -28, -13, -29, -1, 11, 31, -11, -12, -13, -23, 7, 23, -4, -33, 18, -7, 22, 29, -18, -2, 11, -16, -1, 16, 31, 22, -11, -8, -33, 22, -26, 2, 26, -18, -32, -1, -18, -14, -7, 11, -14, 7, -5, -9, -21, 8, 17, 9, -8, 7, -12, -1, 18, -11, 13, -2, 1, -14, -7, -18, 14, 13, 7, 18, 11, -8, 6, 10, -2, 24, 17, -7, 18, 33, -10, -15, -34, 15, 1, 35, 3, -24, 14, -19, 8, -18, -15, 12, 0, -14, -3, 1, -8, -9, -11, 12, 11, 30, 1, 0, -17, -32, -4, 20, 22, 21, -11, -6, 6, -9, 15, 19, -15, 7, -21, 4, -26, 30, 22, 12, -30, -31, -33, 17, -26, -36, -17, 24, 31, 9, 16, 8, -27, -2, -10, 27, 32, -8, 18, 1, -5, -21, -17, -24, 19, 4, -7, 0, -20, -19, -1, 3, -10, 2, 0, 6, 22, -9, -17, -18, 6, 13, -39, 2, 26, 9, -5, -20, 8, -44, 9, 11, -15, 2, -40, -40, -19, 4, 26, 19, -14, -11, 1, 7, -6, 0, -22, 15, 2, -26, 13, -21, 25, 23, -41, -23, 18, 10, 16, 11, 6, 6, -20, 11, -7, -5, 23, 7, 1, -13, 9, 22, 13, -12, -2, -13, 11, 3, -16, 19, -11, -26, 1, -1, 14, 0, -25, -8, -16, -4, 3, -4, 16, -3, 18, -11, 0, -2, 22, 16, 13, 15, 6, -22, 12, 1, 9, -2, 12, -4, 4, -5, -5, -21, 5, 0, 5, -18, -1, -16, -12, 17, 31, -8, 16, 0, -30, -1, -2, -8, -6, 21, 0, -35, 0, 8, -2, -9, 11, -21, 18, -21, 11, -17, -13, 18, 13, -28, 1, -17, 3, -18, 12, 2, 15, -20, -4, 14, 14, 1, -20, 4, -19, -15, -4, -7, 7, 1, -27, -10, 19, -16, 5, -21, 0, 0, -4, -7, -12, -14, -2, -14, -11, -4, -2, -37, 4, 16, 1, 13, 2, 15, 0, -21, -10, -23, -4, -1, 18, -7, 11, -10, 5, -1, -25, -3, 15, 11, 10, 15, 11, -8, 18, -3, 9, -9, 6, 1, -16, -20, -8, 6, -2, 5, -10, -5, -19, 17, 18, 27, -51, 9, -20, -4, -11, -17, -29, 4, 3, 2, 9, -21, -9, 18, 11, 20, 16, 14, 18, -15, 16, 18, 8, -22, -5, -13, 0, -1, 22, 35, 7, 11, 6, 20, -16, -29, 8, 12, -3, -13, 28, 12, 22, -4, 17, 7, -12, -10, 27, 4, -3, -22, 1, -13, 16, 13, -3, -4, 11, 10, -6, -9, 2, 14, 7, 10, 8, -23, -28, 15, -11, -17, 22, -42, -9, 7, 17, 20, -13, 37, -14, 45, -27, -5, -4, 2, -17, -21, 1, 14, 18, -8, 9, 15, -1, 23, 21, 9, -8, -11, 19, -12, -4, 12, 32, 4, -17, -8, 14, 36, 4, 24, -7, 3, -13, -11, 13, 21, 1, 20, -11, 9, 19, -1, -6, -14, -11, -11, -13, 18, 34, -12, 6, 11, 13, 23, -10, 2, 15, 7, -15, -24, 24, -15, 13, 5, 14, -27, -17, -10, -15, 
-17, 5, 29, 1, -29, 3, -17, 10, -12, 2, 13, 4, 20, -14, 3, -6, 19, -8, -9, 17, 9, -5, -15, 5, 11, 16, 8, 5, -12, 14, -14, -6, 21, 9, 13, -16, 19, 3, 32, -22, 8, -22, -15, -22, -13, -11, 17, 6, -4, 12, 31, -1, -2, -3, 44, 32, 2, -28, -13, -23, -10, 0, 7, 0, -16, 1, -20, -12, 9, -3, 15, 9, 11, -16, 12, 1, -19, 15, 14, -11, -17, 5, -15, -17, 8, 22, 18, -11, 19, -28, 16, 3, 5, 1, 17, 15, -8, 9, 0, 11, -4, 13, 11, 27, 11, -1, 6, 13, 12, 7, 9, -10, -3, -6, -18, -11, 22, -11, -2, -12, 22, 15, 36, 16, 7, -20, -6, -20, 22, -22, 2, -3, -6, -13, -10, -4, -12, 32, 22, -21, -17, -3, -11, -22, 23, 31, -5, -9, -4, 3, 14, 2, 35, 10, 1, -6, 19, 11, 6, 13, -1, -6, 19, 9, 1, 26, -15, 0, -27, 23, -8, -14, 30, -8, 10, 22, 23, -31, -5, -24, -2, 11, 14, 2, -28, 10, -9, 31, -1, 21, 13, 10, -4, 3, -6, 6, -13, 9, 0, -18, 8, -25, 4, 40, 13, -7, 29, 4, 9, -13, 17, -27, 15, -12, -1, -48, 2, 22, 20, 14, -20, 0, 13, -21, 13, 5, -4, 39, -6, 11, -4, 10, -22, -49, 2, 4, -19, 5, 20, -2, -2, 18, 16, -8, 11, -8, 23, -24, 16, -10, -9, 21, 5, -11, 10, 6, -6, 36, 10, 15, 34, -28, -3, -39, -8, -14, 44, 14, 17, -1, -14, -16, -13, -21, 33, -8, -18, -41, -27, -35, -7, 21, -10, 38, -8, 24, -16, -33, -17, -11, 26, 29, -3, -29, -31, 0, 0, -12, -6, 22, 13, -20, -13, -29, 3, 2, 7, -7, 14, -10, 22, 14, -19, -5, 6, 1, 6, 7, 25, 14, -4, -4, -18, 25, -2, 15, -20, -3, 19, -13, 21, -11, 9, 13, 14, 23, 23, -4, -12, 12, -2, -34, 17, -39, -8, 11, -20, 25, -21, 29, 7, -7, 28, -19, -14, 6, 16, 32, 6, -12, -24, -22, 1, 3, 20, 9, -23, -15, 6, -19, -10, 27, 23, 9, -25, 5, 8, -8, 9, 11, -15, 13, 18, 24, 13, -8, -3, -16, 11, 6, -18, 0, 3, -20, 15, 17, -1, -6, -3, 0, 4, -19, -7, 22, -7, 12, 10, 5, -9, 10, -15, 6, 5, 31, -16, 7, 0, -14, -19, 1, 12, 7, 26, -13, -18, 0, -9, -21, 12, -2, 30, 8, 16, 7, -4, -4, -6, -14, -9, -19, -2, 19, -6, 8, -33, -6, 3, -15, 13, -9, -19, -26, 8, -31, 31, -15, 7, -9, -12, -26, 34, -22, -35, 35, 14, -19, -4, 1, 2, -5, -3, -1, 18, -8, 32, -35, -11, 6, -21, 9, 2, -14, 5, 3, -11, -13, -11, -16, 0, -6, 5, 21, -30, -22, -15, -1, 13, 5, -13, -24, 27, -11, -6, -6, 1, -9, 15, 29, -27, 32, -30, -4, 22, -47, -2, 5, 15, 26, -25, -12, 13, -17, 1, 20, -19, 12, 38, -1, -3, -8, 28, -21, 32, 32, -15, -36, 6, -15, 2, 55, 13, -2, -18, 22, 25, -32, 24, 11, -23, 11, -12, 10, 3, 34, 26, 14, 12, 16, 11, 39, -3, -8, -13, -47, -20, 4, -24, -2, -16, -25, 7, -26, 5, -10, -28, -24, 7, -1, -13, 23, 13, -14, 0, -6, 0, 7, 0, -4, -16, 21, -6, -15, -3, -4, -10, -2, -14, -17, -4, 23, -28, 14, 17, 12, -14, -11, 2, -4, -1, -11, 1, -16, 20, -52, 1, -20, -6, 34, -34, 34, 8, -32, -22, -5, 4, -7, -26, 25, -9, 25, 5, -16, 1, 7, -26, -16, 20, -1, 5, -18, 14, -2, 21, 3, 9, -7, -39, -9, 9, 4, -15, 11, -14, -16, 5, 23, 8, 5, 13, -23, 2, 26, -5, 12, 21, 7, 23, -14, -29, 6, -15, -1, -23, 8, 18, -8, -2, -21, -22, -18, -29, 5, 9, 8, -15, -7, 8, -24, 20, 6, 2, 18, 11, -14, -2, 10, -14, -10, -37, -30, 8, 18, -11, 25, 29, -32, 33, 4, 0, -6, -12, -12, 14, 12, -32, 13, -19, -18, 12, -20, -4, -6, 6, -7, -13, 11, -6, -4, -23, 3, -26, 35, 2, -6, -16, -26, -2, 3, 2, -1, -3, -18, 3, 21, 31, 0, 17, -20, -3, -2, 25, 6, -19, 1, -9, 10, -8, 18, 5, 14, 6, -10, 6, -5, -3, 12, -20, -35, -18, 8, 2, 19, -19, 8, 32, -4, 24, 0, 4, -15, 3, -6, -16, 7, -15, 23, 3, -13, 23, 13, -24, 1, 8, -12, -33, -5, 23, -27, -21, 7, 2, -6, -16, 17, -1, -3, -1, 17, -18, -11, 19, -22, -4, -13, -19, -6, 6, 29, -18, 10, -25, -13, -13, -25, -36, 31, -5, -1, 15, 6, -11, -13, -10, -17, -18, -2, 25, -8, -7, 26, -16, -23, -35, 2, 6, 18, 10, -3, 14, -17, -3, 19, -13, 13, -13, -11, -13, -5, -1, 11, 23, 17, 25, 32, 9, 21, 
-1, -1, 8, 10, 1, 18, -30, 15, -17, 5, -9, 2, 22, -33, -6, 20, -21, 14, 9, 21, -11, -9, -20, 8, -7, 6, -3, -42, -14, 8, 15, 11, -26, 17, 13, -2, 0, 25, 3, -15, -24, 27, -3, -5, -21, -7, -10, -28, 10, 20, 22, 3, 8, -2, 6, 34, 17, 0, -22, -21, 11, 10, 4, 23, -7, 9, -19, 9, 12, 12, 24, -9, 12, -4, 23, -25, 32, -9, -8, 1, -2, -15, 9, -28, -13, 16, -15, -13, 10, 10, -3, -20, 10, 15, 13, -24, 10, 18, 9, -3, -17, -15, 0, -8, 1, 7, 16, 19, 7, 1, 1, -39, -2, 9, -11, -29, -13, -8, -22, 20, 13, 19, -3, 14, -16, -1, 22, -2, 22, -10, -8, -54, -20, -8, -17, 59, -5, -6, 11, 22, -7, -4, -8, -9, 6, -8, 25, 18, 23, 0, 15, -9, 8, -24, 5, 12, 6, 21, -6, 8, 16, 5, -12, 3, 4, -10, -30, 11, 24, 1, -5, -39, 22, 8, 5, 2, -5, 3, 3, -6, 1, -12, -2, -7, -22, 27, 6, 8, -38, -9, 12, -5, 1, 32, 8, 5, -13, 31, 4, -29, -7, -28, 2, 7, -21, 4, -8, 9, -11, 38, -20, -1, -1, -45, 0, 33, 10, 14, -1, -1, -12, -10, -2, 0, 8, -10, 16, 8, 17, 19, 10, 21, 16, 26, 16, -15, -20, 33, -5, -8, -14, -2, -16, -15, 6, -43, -8, 4, 36, 10, 33, 15, -27, -6, -7, 9, 19, -3, 35, -24, 2, -25, -10, -17, -26, 7, -34, -8, -28, -6, -18, 20, 17, 29, -9, 13, -16, -16, 13, 16, 15, 20, 2, -6, -11, -4, 20, 0, -2, 10, 9, -36, 51, -8, 22, 7, -19, 12, 23, 6, -6, 22, 2, -13, 0, -21, 24, -18, 2, 4, -6, -11, 5, -11, -2, -33, 4, -7, 1, 11, 8, 18, -23, 27, 11, -7, 7, 0, -4, -18, 14, 21, -6, -6, 6, -13, 15, 9, 15, -53, -6, -5, 20, 33, -4, -4, -6, -25, 17, -3, 23, 7, 5, 0, 2, -14, 37, 3, 16, -23, 38, 5, 30, 9, -8, -30, 18, -14, 18, 0, 10, -15, 11, 16, 51, -1, 2, 3, 8, 10, 3, -21, -6, 19, 24, 22, 30, -15, -10, -24, -13, 2, -7, -2, 22, -4, 7, -1, -12, 17, 13, -3, -7, 16, 8, 34, -9, -16, -4, -7, 17, -19, 35, -7, -7, 27, -11, -11, 10, -9, 7, -7, -7, 17, 11, 26, -9, 7, 20, -26, -15, 3, 34, 9, 18, 0, 29, -1, -27, 12, -20, 4, -2, -31, 10, -11, -17, 16, 18, 26, 32, -30, -10, -23, 15, 38, -5, 18, -2, 7, -8, 24, -14, 16, 2, 16, 4, 32, 24, 7, -2, -38, -5, -15, -10, -11, -13, 10, -6, -2, -10, 4, -5, -4, -25, 2, -32, 14, 23, 13, 32, -17, 3, -21, 0, -8, -30, 44, 1, -19, -30, 21, 3, 0, -4, -2, -26, 20, -18, -21, -29, 36, 20, -3, 1, -5, 12, 8, 0, -17, -28, -16, -13, 2, -8, 11, -39, 16, -12, 8, -39, 9, 2, -10, -23, 43, 23, -20, -4, 6, 2, -10, 15, -9, -14, -6, -23, 27, 15, 6, -4, -9, -3, -6, 5, 4, 2, 2, 21, -15, 3, 23, -3, -5, 25, -15, -3, -15, 8, 20, 32, -28, 16, 12, 28, -26, 5, -4, -25, -4, -9, -25, 12, -6, 7, -19, -4, 16, 16, -30, -41, 7, -3, -26, 12, 8, 33, 25, 1, 9, 23, -32, -17, -2, 17, -2, -14, 38, -2, -14, -30, -7, -11, -18, 4, -16, 0, -5, -8, 11, -10, 10, 13, 21, -8, -3, -8, -11, -4, 16, 23, -2, 23, -12, -18, 4, -19, 1, 20, -12, 20, -1, 24, -6, 3, -12, -31, 34, -7, 39, 21, -11, 15, -18, -9, -14, -29, -14, 2, -9, -21, 10, 3, 23, -3, -1, -30, 31, -9, -7, -4, -18, -1, 3, -2, 7, 4, 26, -24, -4, 7, 20, -8, 6, 4, 8, -36, 45, 5, 11, -12, 0, 37, 12, -27, 3, -9, 9, -16, 7, -15, -8, -5, -4, -15, -15, 36, -1, -23, -1, 20, -22, 1, 30, -7, -11, 18, -7, 3, -14, -20, -18, -21, 33, 14, -19, 17, -20, -19, 53, -18, -3, 5, 9, 5, 17, 28, -1, -5, -25, 14, 19, -19, 36, -14, -18, 13, -20, -16, -13, -27, 61, -18, 1, 20, -12, -8, 13, 15, -4, 16, 12, 7, -16, -13, 0, -51, 39, 6, 3, 6, 1, 12, 19, 15, 14, -17, -10, -17, 7, 18, -28, 14, -2, -25, 29, 5, -19, -12, -12, -16, 9, -22, 46, 2, -15, 16, -32, -8, 30, -39, 45, -21, 2, 16, -2, -4, 0, -15, -7, -3, -4, -1, 14, -12, 9, 20, -14, 4, 11, -15, 11, 8, -18, -17, -4, -6, 11, 13, 2, -13, 13, -12, -24, -5, -15, -1, -1, -49, 10, 7, 17, -13, 10, -1, -10, 7, 8, 21, 6, 1, 6, -26, -15, -20, 7, 10, 4, 2, 12, -13, 21, 5, 13, 1, -12, 12, 41, -26, -11, 31, -38, -1, 29, 
-33, 16, 7, -24, 14, -23, 18, 15, -18, -6, 50, -5, -8, 4, -15, -1, -15, -32, -1, 29, -7, 49, 4, -12, -17, 12, -7, -6, -27, 9, 22, 10, -7, -13, 73, -44, 15, 16, 7, 9, -5, 2, -25, 2, -4, 9, 21, 8, -10, 5, -18, -30, -2, -20, -17, 20, 15, -31, 17, -2, -17, 32, -29, 16, 9, -46, 19, 0, 1, 7, -14, -25, 14, 3, -1, 21, -7, 23, -12, 2, -37, 0, 8, 25, 14, 19, 21, -12, -19, 22, 6, 0, 15, -6, 4, -9, -6, -6, 22, -23, 14, -2, -1, -7, -30, -11, -29, -40, -3, -1, 12, 55, -27, 5, -28, 24, -22, -27, -6, -3, 21, 7, 7, 4, -13, -12, -3, 6, -25, -23, 12, -19, 1, 5, -12, 6, -18, 2, 1, -12, -24, -31, 8, 8, -10, -29, 16, 26, -11, 43, 16, -9, -16, 12, -26, 12, 22, -42, -1, 5, -6, 18, -4, -1, 25, -22, -27, 1, 3, -16, -10, 8, -22, 25, -17, -16, 5, -7, 18, -1, 22, -11, -5, 0, 8, -15, -25, 9, -2, 39, -12, 25, -33, 29, 17, -3, 5, 31, -2, 13, 0, 6, 9, -49, -18, 2, 7, 45, -11, -18, -8, 3, 14, 6, -11, 5, -4, 30, -1, -17, -6, -2, -11, 31, 8, 8, 32, 4, -21, 19, -7, 34, -10, 4, -13, 1, -8, 22, -8, -8, 22, 26, -21, 11, -11, -10, -6, -5, 31, -19, 8, 10, -2, 28, 14, 8, 25, -20, 11, -4, -17, -26, -8, 12, 0, -16, 22, -5, 23, 2, 9, 5, -20, 13, 10, -2, -8, -9, -14, 18, -13, 20, -16, -18, -14, -4, 4, 15, -25, 6, 22, -10, -18, 7, -18, -6, 4, -24, 9, 11, -5, 4, -19, 1, -21, 4, -9, 29, -22, 5, -20, -27, 21, -9, 12, -25, -12, -10, 0, 25, -20, 1, 6, 15, 8, -9, -6, 0, -18, -15, -14, -12, -18, 3, 7, -21, -4, 7, 6, 25, 12, 34, -6, -6, 18, -6, 1, 31, -5, 4, 9, 5, -1, -13, -12, 18, -21, 8, 9, -5, 13, -4, -14, 26, -25, 5, 15, 4, -5, -12, 5, 1, 18, 8, -21, -3, -3, -9, 12, 20, 11, -25, -16, 15, 6, -1, 7, 5, 11, -11, 19, -19, 28, -2, -6, 9, -10, 4, 8, -5, -26, -26, 1, 13, 5, -29, 17, -4, 36, -9, -12, -7, -7, -20, -6, -17, -4, -4, 28, -24, -11, 2, -16, 0, 32, 12, 24, 13, 14, 0, 12, 10, 2, 10, 8, -21, -10, -14, -15, 5, 2, 4, 6, -4, 8, -30, 0, 11, 16, -3, 28, -16, -8, 10, 14, -18, 33, -11, -8, -3, 18, -31, 9, -25, 20, -12, -21, 24, 12, -15, -8, -5, 3, -21, 25, -15, 9, -10, 4, 10, 11, 12, 6, 23, -8, 8, -1, -8, -9, 26, -9, 23, 20, -16, 17, 4, 28, -7, 29, 24, -6, 12, -2, -23, 2, 14, -7, 4, 3, -21, -17, 0, 13, -4, -17, 16, -5, 10, -9, 1, 31, -9, 16, -8, 8, 6, -4, 10, 13, 19, -5, 7, -21, 25, 19, -18, 2, 0, 18, 18, 13, -8, -22, 1, 19, -1, -15, -10, -1, 25, -11, -21, 9, 11, 15, -5, -7, -21, 14, -13, -14, 17, -8, 19, -9, -26, -13, 4, 12, 21, -14, -13, -1, -4, -16, 6, 7, 11, 13, 16, 7, 14, -14, -15, -15, 15, 15, -10, 19, -19, -8, 22, -34, -10, -22, -11, -15, 15, -18, 12, -4, 35, -26, -35, -13, 1, 17, -31, -42, 23, 12, 17, 0, -5, -3, 20, 3, -6, 16, 26, 10, 10, 19, -1, -21, 15, -6, -13, -4, -2, 14, 14, 8, -6, -16, -7, 15, -7, -17, -24, 17, 29, -13, 7, -12, -2, 9, 14, 5, -22, -23, -7, -4, 4, -6, -6, 22, -8, -5, -26, -9, 9, -2, -4, 0, 1, -21, 24, 13, 10, -11, 4, -2, 7, 1, 5, -8, -1, 15, 0, -6, 6, -17, 8, 14, 17, 9, 1, -10, -14, -20, 0, 18, 13, -4, -28, -4, 22, 1, -2, -9, 8, -3, -8, -1, 18, -10, -7, -3, -25, 15, -14, 12, -11, 4, 18, 10, -11, -13, -12, 1, -2, -2, 14, 18, 13, 13, -15, 26, -1, -20, -10, -8, -12, 21, 15, -51, -7, 19, 1, -4, 49, -13, -17, 5, 2, -20, 10, 10, -12, -11, -11, 30, 29, 8, 1, 12, -24, -6, 10, 19, 4, 7, -26, 3, 14, -3, -18, 10, -15, 5, -11, 19, -1, -9, -15, 19, -5, -1, 6, 1, 18, -9, -2, -17, -10, -24, -8, -11, 13, 14, -20, -20, -25, -27, -17, 6, -6, -11, 4, 5, -10, 1, 6, 9, 14, 12, 24, 3, -9, -17, 14, -10, 14, -21, -12, 8, -23, 17, 22, -21, 15, 15, 14, 10, 28, 27, 10, -23, -10, -6, -9, 24, 12, -3, 12, -10, 0, -7, -7, -32, -2, -16, -7, 6, -7, -23, -10, -12, -11, -6, -9, 13, 10, -7, 10, -24, 15, 1, 14, -12, 17, -2, 19, 6, -6, 2, 5, 7, -24, -21, 
2, -24, -17, -19, 6, -18, 6, -21, -20, -4, 20, 29, 6, 18, 13, -10, -17, -21, 17, -40, 0, 18, -7, -14, 8, 15, 9, 20, 0, -13, 0, 19, -23, 5, -6, -10, 4, 13, 18, -21, -30, 40, -5, 4, 15, -16, -10, -10, -18, -11, 25, -13, 19, 18, 20, 20, 0, 26, -28, 15, -10, 9, -22, 13, 16, 29, -8, -18, -6, 12, 18, 14, -12, 22, -16, -37, 15, -9, 3, 6, 2, 4, -9, 6, 35, -12, 5, -21, -46, -4, 14, -3, -11, -6, -10, 18, -3, 20, 12, 2, 16, -2, 9, -19, 27, -32, 11, 27, 23, -21, 5, 22, -22, 26, 22, 6, 13, 18, 4, 18, -23, -13, -26, 24, -26, -7, -34, 8, -11, -6, -13, -8, -8, 10, 2, -18, 7, -2, 1, 2, -21, 10, -26, 2, 22, 17, 9, 6, -22, -9, -15, -8, 1, 18, -22, 1, 3, -3, 5, 16, 10, 27, 16, -9, -6, -3, -7, 13, -23, 16, 15, 3, -10, 4, -5, 12, -4, 14, 17, 14, 2, -18, 5, -16, -18, -1, -1, 17, -12, -5, 19, 7, -22, 15, 13, 27, 7, 4, 2, 3, -11, -11, -15, 33, -4, -11, -2, 19, -31, 4, -4, -30, -48, 8, 1, -4, 37, 5, -20, 4, -13, 1, -5, -3, -16, 9, 15, 48, 12, -4, 5, 13, -25, 21, -9, 20, 0, -15, 3, -5, 2, -13, -23, 20, 31, 12, -16, -18, -34, 2, -12, 13, 14, 5, 8, -10, -4, -2, 7, 20, -13, 12, 8, 16, -18, 18, 22, -42, 26, -2, -24, 15, 3, 0, 1, 6, -4, 18, 16, 10, -21, -3, 16, -15, 42, -22, -4, -14, 32, -6, 2, 12, -4, -5, 6, 6, -24, 19, -9, -9, 7, 10, -8, 6, 24, -36, 54, 17, -19, 2, -26, -7, -10, 16, -3, -27, 28, -10, -6, 13, -27, 2, -11, -6, 13, 13, 11, -9, 14, 9, -10, 12, -15, 17, -11, -18, -4, 13, 8, -28, 18, 11, 25, 12, 13, -8, -3, -5, -11, -20, -12, -22, 24, 3, 7, 10, -22, -10, 13, -18, -19, 0, 34, 30, -2, -3, -14, -19, -18, -5, -14, -3, -31, 11, -8, -4, 6, 18, -21, -10, -16, -22, -24, -6, 16, -19, 18, -8, 16, 6, 23, -17, -26, -9, -8, 12, -5, -19, -7, -6, 19, 38, -32, 4, 16, -13, -3, -15, -22, -1, -10, 12, -22, 1, -15, 19, 2, 36, 32, -21, -25, -3, -24, -6, 21, -7, -7, 8, -10, 1, -26, 5, 17, -13, 18, 14, -11, 11, -11, 12, -8, -14, 14, 5, -16, -9, 25, 7, -32, 34, 3, -8, -3, -12, -16, -21, -4, -2, -6, 4, 12, -2, -17, -13, 26, 32, -16, 7, -19, 1, -21, -22, 0, -26, 15, 21, 20, 7, -17, -10, -16, -13, -4, 3, 16, 3, -6, -5, 2, 12, -16, 24, 3, 20, 25, 14, 8, -6, -12, -6, -31, -15, 32, -23, -11, -16, 22, -8, 22, 6, -6, 18, -7, -6, 16, -11, -22, 11, -3, 12, 6, -15, 10, 17, 4, 13, 1, 1, -18, -19, 13, -4, 7, -15, 3, -8, 3, 19, -5, -2, 11, -11, -22, 8, -5, 4, 25, 8, -5, -1, -4, -17, 12, 20, -10, -20, -15, -19, -28, 12, 14, 12, 17, 0, -15, -19, -7, 14, 3, 14, -38, -8, -29, -1, -5, 17, 5, 22, -11, 23, 0, 11, 12, -4, -19, -33, 45, 7, -6, 1, 20, 1, 10, 11, -3, 15, 24, -21, 19, -10, 15, 35, -8, -26, -1, -20, 8, -9, 21, 34, -16, -4, 17, -9, 5, -22, -25, -5, 17, 2, -19, 11, -13, 7, 25, 44, 1, 2, 12, 1, -24, 4, -27, -14, 47, -4, -14, -21, 11, 12, 15, 11, -33, 27, 6, -19, 9, -15, 9, 9, 18, 16, 13, -18, -10, -3, -4, 10, 23, 3, -2, -21, -30, -11, 27, 17, -19, -13, 17, 20, 18, 4, -21, -20, 14, -6, 5, -9, 0, 8, 32, -3, -9, -10, 24, 4, -11, 11, 26, -27, 2, 18, -4, -4, -27, 20, 7, 11, -2, -5, 22, -9, -5, 6, -38, 1, 33, -13, -19, -3, -17, -1, 32, -4, -5, -19, 10, -1, 6, 1, 3, -3, 7, -33, 2, 17, 30, 12, 40, 17, 18, 21, 26, 1, -20, -2, 17, -3, -4, -18, 11, -17, 22, -1, -12, -2, 7, 11, -3, -12, -1, 9, 15, 12, 13, -4, -5, -2, -7, -19, 11, -13, 28, -3, 13, -21, 0, 11, -13, 10, -4, -2, 12, -22, 4, -5, 6, 18, 24, 1, -4, -4, 27, -20, 4, 7, -1, 6, 17, 3, -9, 10, 1, -19, 13, 17, 21, 3, 8, -21, 11, 1, 11, -7, -1, -21, 5, -19, -2, 0, 33, 4, -12, 19, -7, 15, 2, -11, -2, -3, 21, 11, -14, 18, -7, -5, -4, -8, 17, -22, 14, -12, 13, -5, -18, 21, -2, -20, 4, 14, 8, 4, -13, 22, 8, -18, -13, -9, 11, 3, 15, 11, 30, -18, 19, -8, -5, -13, -18, -2, -11, -20, 18, 19, 10, -12, -12, 21, 
-18, 0, -9, -16, 3, 20, 5, 41, 16, 11, -31, 7, -2, -16, 8, -19, -12, 0, -19, -4, 26, 0, -20, -4, 4, -10, 12, 21, 3, 35, 3, -4, 19, 19, -16, 14, -12, 10, 15, -19, -6, 13, 5, 3, -15, -13, 21, -7, 14, -8, 2, -15, -15, -15, -2, 30, 16, -7, -26, 13, -31, 22, -7, -12, -8, -20, -23, -1, -4, -11, -1, -4, -1, -18, 0, 2, -33, 15, -19, 19, 26, 2, 15, -20, 9, 18, -6, -7, 29, -20, -23, 5, -3, 15, 12, 9, -3, 14, -6, -21, 11, 28, 26, 5, 22, 19, -18, -3, 10, -3, 15, 5, -7, 15, 6, -15, 9, -25, -5, 4, 27, -12, 14, 12, -3, -34, -3, -24, -4, -15, -6, -4, -11, 1, 12, 6, 11, 9, -22, 10, 21, 15, -14, 4, 20, 6, -32, 13, 7, 5, -3, 11, 38, -21, 31, 1, -48, -22, 10, 21, -1, 13, -46, -3, -1, -15, -8, 26, 10, -16, 4, 4, -31, -9, 37, 5, 7, -5, -25, -8, -8, -18, -14, 24, 20, 10, -15, 11, -5, 12, -20, 43, -9, -3, -1, -6, 3, -21, 15, -12, 3, 5, -10, -20, -2, -17, -23, -18, -8, 12, -5, -16, -2, -13, 30, -6, 12, -11, -17, -14, -32, 18, 13, 11, 7, 0, 6, 16, -28, -25, -6, -31, 25, -2, 19, 11, -9, 4, 17, 22, -9, -8, 6, -14, -3, -12, 5, -2, -20, 4, 26, 19, -2, 13, 11, -24, -9, -9, 26, -22, 28, -6, 16, -25, -20, -21, -8, -21, 9, -6, -12, -31, -5, 18, 6, -19, 23, -11, 22, -21, -6, 18, 5, -19, -19, -26, -22, 17, -24, -17, 18, 9, -24, -14, -9, -19, 15, -9, 20, 12, 25, 6, -17, 33, -12, -21, 5, -8, -20, -33, -6, 17, 14, 14, -2, -12, -29, -6, 0, 5, -39, -10, -39, 15, -11, 18, -7, 14, -6, -7, -30, 6, 3, -16, 8, -32, 16, -14, -7, 9, 24, 0, 7, 2, -9, -3, -11, -3, 2, -23, 16, 7, -20, 3, -1, -7, 23, -24, -17, -8, 4, -17, -19, 4, 16, 3, 5, -27, -24, -17, -13, -7, 16, -15, -16, -12, 31, 4, 7, 9, 20, -1, -16, -3, -17, -17, 13, 15, -38, -11, 28, -8, 13, -18, 2, 13, -28, 7, 4, 8, -4, 18, -20, -10, -2, 19, -8, 15, -22, -19, -15, 5, 12, 16, -2, -15, -18, 1, -29, -5, 27, -8, -11, 13, 9, -12, -3, -12, -4, -12, 0, 46, -8, -5, -8, -22, -36, -36, -2, 14, -20, -12, 26, 0, 1, -10, -11, 15, -32, 4, -16, -23, -2, -14, 9, 2, 5, 16, 12, 11, 6, -12, -4, -8, -2, -16, 8, 4, -19, -14, -5, 14, 8, 16, -10, 16, 33, 10, -16, 10, -25, -1, 17, -17, -5, -11, 22, -19, 1, -19, -4, -15, 12, -4, 20, 9, -15, -15, 5, 11, -40, -29, 2, 26, 10, -17, 3, 17, -11, -28, -6, -12, -26, -10, -25, 2, 14, -6, -27, 27, -14, 12, 10, -16, 0, -10, -2, -4, -5, 0, 33, 13, -14, 9, 7, -3, -1, 19, -10, 20, -7, -23, -5, 21, -10, 9, -22, -6, -1, -8, 3, -11, 2, -11, 15, 4, 16, -34, 12, -9, 4, -19, 19, 6, 12, -26, -15, 19, 14, -15, -2, -1, -19, -14, 22, -16, 16, -11, 8, -2, 23, -2, -10, 9, -4, 11, -19, 5, -6, 30, -1, -6, 21, -15, -13, -8, -16, 1, 8, -5, -8, -12, 3, -7, 33, -2, -3, -19, 14, -23, 13, -21, 15, -18, -28, 4, -3, -1, 8, -1, -11, -7, -36, 18, 23, 8, 36, -7, -22, -25, 6, -21, -5, 9, 24, -12, 2, 17, 2, 9, -19, -3, -22, -20, -10, -18, 4, -3, 7, -19, -4, 1, -4, -15, -11, -14, 20, -11, 14, -19, -22, 9, -10, 18, -18, -16, -5, 7, -26, 37, 10, -32, 9, 19, -19, 14, 24, 18, -19, 22, 12, -17, -16, -14, 2, -1, 16, 2, -5, -21, -7, -6, -12, 9, -9, -16, -6, -2, -32, -9, -25, -31, 11, 11, -11, -9, -23, 14, -24, -35, -29, -19, 10, -21, 35, -2, -5, -10, 26, 27, -15, 12, 16, -23, -7, -26, 10, -7, 2, -16, 12, -6, -20, -37, 8, -3, 14, 19, 5, -8, -12, -17, -7, -22, 8, -17, 16, -6, -3, -3, 17, -32, 0, -20, -12, -16, -24, -9, -26, 5, 4, -7, -12, 2, -4, 13, 20, 1, -16, 8, 18, -5, -28, 5, -1, -24, -16, 19, -32, 5, 25, -8, -4, 31, -15, -7, 28, -18, -30, -16, -14, 28, 14, 9, -11, 0, 8, 18, 18, 10, -9, 4, 1, 15, 6, 10, -5, -22, 9, -7, -9, 0, 22, 19, 17, -29, 33, -16, 17, -18, 15, 15, -11, 12, 15, -19, 31, 18, 19, -13, 15, -28, -5, 15, 9, -15, -3, 3, -14, 30, -9, 15, 16, -13, -17, -12, -16, 20, 0, -6, 9, -20, -16, -2, 
-17, -1, 1, -13, -7, -15, 4, 41, 1, 3, -1, 3, -19, -16, 2, -6, -15, -24, 11, 16, 23, -14, -7, 6, -6, 5, 19, -8, 10, 5, 3, 31, -22, -7, -4, -7, 8, -7, 9, 17, 11, 12, -27, -6, -2, 17, -18, -20, 24, -10, 8, -5, -14, 12, 16, 12, 12, -3, 13, -14, 23, 18, -12, 1, 2, -10, 27, -13, 30, 21, 14, 15, 11, -23, -11, 2, 19, -7, 3, -11, -2, -21, 25, 19, 12, -9, -8, -7, -6, 16, 7, 5, 5, 18, -10, -2, 7, 21, -36, -16, 10, -12, -29, -14, 1, -10, 22, -17, -4, -5, 9, 21, 1, -5, 20, -13, -18, 16, -1, -72, 18, 21, -28, -1, 18, -2, 17, 15, -16, -24, -8, -9, -16, 6, -11, 34, 21, -27, 18, 5, -14, 7, 0, -1, -10, -13, 15, 14, 8, -8, 2, 1, -19, -28, -5, -7, -1, 1, -6, -20, -12, -9, -12, 4, 37, -4, -6, -10, -10, 0, -7, 18, 0, 15, 0, -27, 12, -8, 1, 15, 16, 8, -10, -3, -18, -13, -7, -11, -14, -3, 3, 2, 13, -23, -11, 12, 4, 19, 10, 14, -23, 0, 6, 18, 8, 12, 14, 20, 22, 5, 8, -9, 11, -7, -9, 5, 12, 5, -2, 0, 4, -10, -1, 19, -27, -18, -1, 14, 17, 7, 28, -13, 19, 0, -14, -5, 14, -4, 8, 16, 7, 11, 13, 16, 16, 16, 17, 11, 9, -13, 17, -17, -15, 21, -34, -23, -17, 43, -24, 20, 9, 6, -3, 39, -15, 15, 16, -12, -27, 13, -5, -7, -14, 22, -11, -23, 9, -11, 12, -25, 11, 35, -19, -7, -18, -6, -7, 53, -30, -17, 2, -21, -13, 6, 11, 39, -27, -2, -41, -22, -33, -20, 5, 6, 8, 14, -26, 14, -14, -5, 6, -18, -5, 37, 16, 4, 29, -5, -34, -10, -14, -26, -24, -13, -6, 7, -15, 8, 3, 55, -10, 9, 6, 5, -18, 8, 8, -9, 9, -2, -28, -4, 23, 3, -11, -18, 14, 5, -48, -3, -8, 35, -14, 22, 4, -13, -2, 7, 0, -14, -12, 1, 9, 17, 5, 4, 4, 59, -6, 1, 12, 21, -21, -15, -30, 1, 15, 20, 7, 15, 21, 19, -3, 21, 4, 32, 1, 2, 16, -1, -43, 18, -28, 20, -13, -3, -23, 18, -29, -2, 17, 46, -30, -3, 16, -14, -46, 20, -14, -22, 5, 11, 16, 3, 10, 29, 10, 30, -6, -22, 4, -21, 1, -17, 17, -3, 9, -15, 21, 7, -2, 12, -9, -2, 19, 13, -42, 16, -12, 11, -8, -18, 2, 8, 10, -15, 8, -1, 3, 38, 2, -1, 4, -17, -34, -2, -6, 21, 18, -19, -50, -3, -16, 4, -1, 12, 18, -21, 16, -3, 17, -31, 16, 37, -1, -21, 20, -20, -8, 18, 12, 21, 9, -17, 17, 14, 26, 19, 26, -38, -37, -10, -51, -21, 2, 31, 11, 17, -7, -3, -24, 14, 15, 18, -18, -4, -10, -5, 1, 16, 0, -9, 6, -28, -15, 16, -5, 3, 12, -11, 11, 2, 3, 13, 18, -15, -36, 13, 20, 27, 6, -16, -13, 1, -42, -1, -12, -10, 5, -11, 24, 4, -8, 34, 3, -29, -6, 5, 8, -20, -8, -12, -10, -9, -20, -14, 21, 8, 12, 11, 23, 3, 6, 9, -26, 3, -5, -7, 2, -6, -16, 12, 9, 14, 19, 29, -6, -43, 21, -8, 9, 6, 16, 2, -3, 20, 11, -10, -30, -17, -36, 11, -9, 39, -1, -6, -17, 7, -16, -3, 3, -19, -17, -16, -12, -23, -16, 39, 5, -4, 20, -11, -26, 7, 22, -7, -15, 13, 4, 16, -26, -16, -32, 45, 10, 9, 10, 17, -28, -1, -30, -12, 37, 6, -8, 20, -24, -11, 12, 8, 29, 33, -2, -2, -11, 19, -10, -3, -34, -5, -17, -20, 10, 15, -19, 11, 9, -5, 8, -15, 5, -4, 14, 11, -28, 15, 6, -9, 21, -14, -10, 51, -12, -30, 6, 14, -27, -12, -6, -3, -10, 34, 17, 12, 11, -5, 8, 13, -14, 16, 10, -1, -17, 18, 16, -6, -27, 29, 0, 14, -12, 12, -14, -14, 19, -17, -7, -10, -9, 19, 19, 2, 15, 56, -1, 11, 6, 15, -16, -7, 7, 22, 17, -12, -29, 12, 18, -31, -2, 27, -8, -6, 20, -13, 3, -20, -23, 6, -11, -7, 4, 1, 6, -29, -14, 15, 3, -5, -6, 11, 8, 34, -15, -24, 26, -9, 4, 17, -31, 21, -4, 27, -11, 7, 4, -7, -16, 27, -18, -43, 5, 1, -9, 1, 9, -8, 24, 5, -2, -4, -7, 4, -9, -14, 1, -3, 14, 12, -27, 6, -49, -7, 28, 12, 3, -13, 15, -4, 17, 18, -12, 39, 5, -17, 0, -3, 24, 31, 21, 14, -10, 11, 2, -1, -21, 7, -19, -2, 17, 0, -22, -4, -2, -12, 10, -14, -6, -5, -2, 21, -24, -8, -7, -27, 1, -19, 5, -14, -21, 2, -19, 23, 8, 14, -2, 1, -14, 28, -24, -9, -22, 2, 41, -9, -10, -21, -15, -10, -5, 2, -18, -21, -2, 1, -23, 12, -4, -2, 
1, -8, 42, -26, 8, -18, 4, -19, -3, -18, 21, -32, -2, -2, 7, 14, -9, 2, 11, -5, 22, 2, 16, -13, -54, -12, -10, -16, -2, -9, 10, -7, -1, 5, -8, 7, 4, -19, 2, -22, -12, -6, 11, 4, -18, -22, -4, 17, -15, 1, -21, 6, 13, 19, 1, -13, -1, -6, 4, -11, 9, -11, -10, -6, -10, -5, -8, -19, -24, 7, -3, -27, -16, -25, 10, 3, -21, -18, 16, 20, 11, 0, -11, -25, -17, -20, 13, -4, -11, -2, -28, 24, 2, -17, -8, -20, -6, -3, -49, -12, -17, -9, 8, -24, -10, -5, -10, -23, -7, -10, 0, -12, 11, -21, 7, -12, 11, 5, -1, -3, -23, -18, 6, 16, 7, 21, -24, -17, 1, -9, 47, 25, -1, -18, -3, -4, 28, 12, -22, -9, 14, -18, 16, 10, -4, 9, -2, 8, 17, -28, -18, 17, 8, -26, -16, 20, -4, 1, -16, 1, 25, -27, -5, -32, 5, -22, -23, 16, -21, -1, -32, -6, -4, -3, -11, -10, -6, -2, 4, -12, 9, 5, 8, 8, 2, 4, -30, -6, 13, 4, -15, 1, 31, 7, 20, -8, -22, -15, 9, 9, -7, -16, -11, -2, -15, 5, -3, 2, -27, 28, -12, 1, -10, -20, -2, -5, 10, 22, -21, 16, 16, 3, 9, 13, 36, -11, 17, -1, 9, -33, 12, -13, 3, 20, -26, -14, -2, -7, 16, 13, -1, -10, -4, -6, -12, -6, -9, 10, -14, -1, -15, 28, -13, -12, 1, -12, -5, 7, 7, 6, 14, -10, -20, 11, -24, 3, 2, 24, 14, -21, 15, -3, 25, -6, -6, 17, -19, 16, 15, -1, 26, 3, -14, -7, 12, -1, -22, 12, 7, 14, -5, -20, -19, 20, -3, -6, -17, -28, -8, -7, 5, -17, 6, -6, -39, -21, -2, 14, -11, 3, 10, -16, 3, -23, -8, -22, 8, -24, 6, -13, 16, -9, 4, -12, -19, -20, -18, -5, -16, 26, -8, 28, -22, -31, 12, -14, 18, 15, -29, -13, 17, -12, 17, -26, 7, 17, 4, 57, 14, -10, 2, 13, -49, 1, 8, 7, -21, 18, 10, 23, -14, 11, 32, -44, 10, 2, -1, -15, -29, -16, -28, 19, -14, -14, 17, -9, 12, -4, -14, 25, 4, -4, -6, -15, 17, 22, -8, -7, -5, -12, -23, -11, -9, -6, -5, 10, 14, -8, -19, 4, 45, 9, -19, 2, 17, 24, -14, -16, 44, 7, -3, 9, 13, 32, -21, 10, 27, 9, -4, -7, 1, 0, 17, 1, 23, 4, -15, -25, 14, 8, -7, -17, 2, -8, 26, -30, 19, -7, -20, -15, 4, 2, 7, -11, 13, -34, 0, -24, 14, 9, -28, 2, 19, -17, -11, 32, -32, 23, 3, 9, -21, 5, -1, -9, 3, 9, -13, 11, -11, -11, 9, 12, 19, -5, 9, -2, -11, -13, -8, 5, 7, -5, -16, -5, -11, -15, 0, 23, -1, 1, -11, -32, 17, -5, 6, 12, -51, -15, 16, 9, -21, 3, 9, 16, 14, 16, 14, 7, -1, 1, -12, -18, -9, 1, -16, -8, -23, -33, -12, 17, -19, 9, -33, 2, -18, 7, 18, 2, -7, 13, -11, 10, 14, -28, -18, -22, -13, 0, -24, -16, -18, -29, -11, -36, -28, -20, -30, -7, -24, -7, 17, 3, 12, 1, 28, -25, 11, -37, 14, 5, -25, -37, -3, -5, 3, 5, 9, -2, -5, -27, -31, -26, 18, 12, -4, 0, 4, -20, 13, -19, -19, -13, -22, -27, 2, 5, 35, -23, 5, -23, 4, 6, -9, -31, -11, 6, 4, -25, 17, 8, -2, 7, -12, -39, -8, 5, 1, -12, -20, 5, -7, -33, -1, -22, -15, -6, -36, 14, 16, -30, -1, 0, -10, -24, -50, 2, -27, -14, 19, -7, -10, -1, -35, -8, -16, -8, -3, -31, -20, -20, -18, -45, -3, -22, -8, 19, -2, 27, -7, -5, -20, 11, 15, -33, -11, 0, -6, -12, 7, -19, -9, -23, -19, -34, -9, -3, -24, -10, -14, -10, -6, -3, -37, -24, 5, -25, 9, 14, -26, 13, -2, 7, -15, -12, -13, -15, -23, -43, -35, -6, -37, -2, -14, 28, 1, 15, -27, -11, 12, 10, -1, -35, -19, -20, -26, 11, -9, -15, -12, -4, -1, -1, -18, -16, 31, -16, -10, 5, -7, 11, -5, 4, -15, -20, 13, -4, -23, -6, -8, 2, -38, -20, -16, 2, 14, 5, -35, 5, 14, 9, -12, 4, 16, 7, -10, -25, -21, 4, 3, -8, 9, 20, 12, 24, 20, 2, -2, -4, -24, 3, -16, -14, -26, -2, 0, 10, -1, -6, -7, 21, 15, 14, -21, -20, 12, -3, -7, -31, -25, 3, 10, -19, 3, -3, 2, 15, 4, -8, 17, 40, -2, -33, -7, -35, -23, -16, 6, -17, 4, -4, -2, 16, -5, 21, -15, 17, 2, 3, 7, 21, 0, -12, -1, -1, -6, -5, -15, 10, 1, -16, -18, -42, 7, -22, -10, 2, -21, 13, -26, 6, 8, -5, 10, 2, 9, -20, -1, 9, 5, 8, 6, 3, -28, 23, 15, 21, 14, -18, 7, -7, -3, 21, 9, -15, 8, -7, 
-15, 21, -25, 2, -14, 23, 15, 31, 30, 8, 9, -16, -4, 2, -6, -8, -30, 19, -1, 19, 8, -6, -9, 11, -15, -15, -23, -6, 4, 17, 15, 1, -4, -12, 16, 27, -13, 1, -20, -2, -9, 24, -37, -34, -10, -7, 2, 12, 33, -15, -21, -21, -18, 1, 11, 0, 4, -4, -7, 5, -20, 11, 10, 4, -6, 9, 6, 0, -20, -17, -13, 10, -20, -15, -40, -8, -22, -23, -12, -18, 9, -28, -23, 6, 12, -18, -24, -11, 24, -3, -23, -2, 3, -20, 1, 18, 27, 21, -4, -24, 5, 25, -23, 11, -15, 21, -11, -1, -23, -18, -14, 12, -8, 22, 1, -14, -8, -16, -21, 14, -4, 0, -31, -3, 13, 8, -6, 5, -24, -7, 8, 20, 16, -25, -14, 1, 8, -5, 8, -9, 12, -9, 4, -16, 14, 1, -6, 18, -20, -7, 7, -9, 28, -3, 19, -17, 14, 2, -20, 21, -32, 16, -22, -26, -2, 12, 3, -13, 1, 1, 11, 0, 14, 1, -21, -8, 17, 1, -17, -8, -22, -9, 9, -15, 9, -3, 4, 17, -4, -6, -8, -9, -22, -5, 34, -7, 10, 5, -20, 27, -17, -11, -8, 6, 17, -29, -5, 1, -20, -10, -3, -3, 5, -25, -20, 12, -46, 0, -27, -4, -7, -9, -16, 4, -11, 4, -2, -9, -1, 13, -4, -1, -18, -4, 6, -30, -14, 10, -12, -19, -19, -6, -23, -16, 4, 7, -17, -4, 3, 20, 6, 27, -6, -11, -1, 17, 21, -23, -21, -48, 9, 1, -14, 16, -46, 24, -28, -5, -8, -17, 9, -5, -28, 8, -19, 26, -3, -4, -12, -33, -10, -21, 11, 7, -23, 12, -2, -7, -17, 11, 3, 22, 6, 0, 10, -8, 0, 4, 14, 3, 3, -33, -9, 4, 16, 8, -17, 19, -20, 11, -18, 2, 8, 18, 7, 8, 5, 32, -24, -9, -52, 0, 7, -12, -19, -11, 20, -4, -13, -18, 3, -10, -22, 12, 8, -9, -9, 42, -24, 16, 9, -30, 2, 31, 14, 3, -10, -20, -2, 7, -15, 26, -25, 29, -27, -16, 14, -9, -4, 12, 3, -17, -23, -6, 7, 17, -7, 28, -3, 21, -8, -6, -2, 2, -11, -13, 8, -4, -31, -15, -7, 22, 16, -14, -14, -4, -16, -3, -25, 10, -13, -12, -7, 9, 2, -9, 2, -48, -17, 2, 17, 26, 7, -18, 15, 26, -13, -23, -18, 4, 0, 5, 6, -5, -2, -2, -4, -5, -20, -15, 15, 5, 7, 20, 10, 0, 29, 31, 6, -15, 18, 6, -4, 21, 10, -17, -2, 25, -1, -30, -2, -6, -7, -11, -21, -9, -8, -23, 29, 12, -26, 22, -21, -33, 17, -3, 32, 47, -29, -13, 0, -5, -16, 10, 11, -12, -10, -38, 5, 1, 15, -3, 0, -21, -27, -8, 14, -11, -11, -14, -15, -41, 6, -14, 1, 5, -24, 10, -16, 12, -4, -19, 14, -5, 5, -19, 1, -22, -14, -22, -10, -25, -21, -21, -21, 15, 11, 12, 2, 1, -7, 23, 24, -19, -13, -6, -2, 8, 19, -18, 0, 12, 3, -33, 13, 7, 13, -7, -17, 5, 5, -5, -9, 19, -9, -2, -10, 8, 0, -13, -16, -28, -8, 20, -61, -1, -16, 15, -7, -18, -6, 6, -7, -28, 13, -31, -18, -10, -1, -3, -35, 6, 4, 3, -23, -17, -3, -11, 17, -31, -6, -18, 14, 0, 10, -10, -2, -4, -15, -9, 11, -14, 13, -5, -21, -27, -29, -7, 16, -23, -1, -16, 6, 12, -24, -28, 9, 8, 15, -16, -13, -10, 34, -11, -2, -2, 19, -26, -28, 5, -30, 8, 7, -37, -12, 20, -16, 5, 6, -12, 14, -2, -9, 2, 2, 15, 21, -15, -22, -4, -25, -5, 7, -1, 3, -18, 4, 0, 8, 5, 0, -4, 28, -10, 10, -6, 16, 5, -5, -38, -8, -3, -20, -9, 8, -24, -18, -4, -3, -3, 15, -15, 11, 13, 10, 27, 18, 1, 7, 0, -2, 6, 11, -13, 7, 2, -19, -16, 18, -10, 1, 22, -33, 11, 17, 6, -18, 5, 5, -14, 3, 3, 11, -16, 10, -18, 13, -2, -3, -7, 7, 15, 7, 2, -37, 3, -12, -16, -15, 11, -1, 16, 12, -1, -19, -13, 1, -10, -7, 23, 13, -2, 21, 2, 15, -19, 6, 13, 13, -3, 15, 13, 0, -12, -20, 3, -9, -4, -17, 24, 5, -27, 2, -10, -3, -2, 4, -3, 12, 1, -2, -11, 2, 17, -8, 28, -10, 15, 5, -23, 11, -6, -5, -16, 5, 19, 19, 7, -1, 11, -12, 5, -18, 11, 3, -22, 3, -11, -15, -7, 5, -1, -11, 3, 12, -12, -21, -7, 0, -4, 6, 5, -9, 0, 26, 8, -19, -7, 2, -10, 28, 2, 1, 25, -22, 23, -10, 26, 18, 2, -30, 26, 5, 18, 3, -24, 16, 23, -14, -14, -4, 15, -8, -3, 1, 9, 7, -5, 11, -28, 9, 16, -11, 9, 35, 3, -20, 14, 13, 33, -6, 21, -11, -18, 17, -5, 18, -2, 0, -14, -1, 16, -6, 29, -2, -7, 0, 32, 1, 5, -8, -25, 3, -7, -3, 18, -11, 0, 
-22, 20, 11, -14, -13, -35, -9, -9, -17, 16, -20, -11, -18, -10, 5, 11, -4, 7, -5, 0, -12, 19, 1, 19, -6, -5, 9, 47, -12, 19, 4, 19, -21, 43, -22, -8, 12, -14, 17, 4, -4, 2, 10, 8, 5, -15, 25, -20, 15, -7, -7, -5, 2, -11, 7, 10, 9, 10, -2, 20, 15, 19, 18, 0, 9, -8, 15, -19, 11, 38, -15, 8, 14, 28, 15, -5, 15, 27, 15, 19, 6, 13, 12, -19, 23, 24, -16, 0, -17, 5, 1, 19, -7, -8, -5, -5, 12, 23, -17, 11, -2, 19, -19, 14, 9, 4, -4, 11, -9, 9, 10, 8, -19, -7, -5, 22, 18, 19, -30, 12, -2, 17, 15, 7, -11, 7, 0, -4, 10, -16, -14, 5, 19, 8, -9, 22, 14, 2, 8, -10, 27, 3, -20, 18, 11, -15, -20, 19, -6, 25, -25, -36, -2, -5, 8, 26, 36, 28, 22, -2, -12, -25, 1, 24, 11, 23, -5, -21, 4, 18, -12, 22, 1, -12, 18, -14, 5, -9, 16, 9, 6, 3, 1, -20, -22, 24, -13, -2, 8, 1, 1, 23, -21, -15, -19, 10, 44, -22, 9, -6, -11, 8, 14, 4, -24, 4, -24, -11, -19, -8, 5, 34, 33, -1, -12, 10, 16, -6, 6, 39, -22, -27, 9, 3, 10, 4, 7, -16, 20, 0, 20, -6, -2, -7, 6, -4, 2, -5, -6, -12, -8, 20, -20, -11, -16, 13, 11, 55, -7, -19, -5, -4, -21, 8, 2, -5, 7, 21, 9, 27, -25, 5, 17, -17, -23, 20, -8, -30, -15, -1, 10, -5, -11, -21, 0, 0, 20, -9, -17, -26, -11, 11, -19, -14, 7, 15, 27, -21, 6, 17, -16, 19, 0, 22, 18, -5, 9, 20, -6, 0, -13, 22, 2, 30, 6, 10, 15, 6, 8, -21, -25, -13, -15, -4, -19, 4, 24, 2, -16, -10, 11, 6, -9, 12, -18, -21, 31, 3, -9, 5, 17, -16, 39, 2, -16, -5, 5, 26, -3, 13, 18, -5, -3, 0, -1, 14, 10, 42, 18, 9, -19, 4, -17, 28, 3, -32, -26, 15, -15, 11, -6, 13, -3, 17, -1, 14, 16, 4, 1, 10, -23, 29, -26, 4, -11, -18, -10, 2, 4, 26, -13, 36, 1, 1, -17, 0, -8, -5, 17, 2, 10, -22, 6, -24, -18, 26, 22, -14, -33, -36, 0, -2, -17, 48, 20, 17, -16, -13, 2, -13, 5, 5, 6, 11, 2, 1, 7, 13, 1, -19, 14, 1, 3, -55, -15, 4, 8, 40, 0, 30, -12, 7, 9, -39, -9, 23, -9, 42, 16, -4, 7, -23, 0, 46, 5, 20, 22, 5, -16, -23, 12, 14, 33, 33, -1, -1, -10, -14, -2, -1, 1, -2, 0, -17, 9, 5, 9, 2, -5, -7, -14, -7, -11, 11, 10, 26, -24, -4, -20, -15, 15, 4, -22, 35, -5, -21, 14, 17, 8, 12, 17, -8, 7, -5, 11, 16, 8, -22, -3, -14, -3, -12, 12, 8, -20, -33, -18, 5, 12, -1, 1, 24, -6, -19, 13, 2, 18, 14, -5, 12, 13, -4, -13, 25, 6, 11, -2, -8, 11, 15, -17, 48, -6, 37, -12, 15, 2, -15, 8, 29, 32, 19, -14, 21, 13, 19, -1, -24, 8, -12, 13, -23, -8, -11, -8, 5, 46, 6, -21, 21, -17, -3, -12, -2, -14, 9, 5, 17, -11, -22, -2, 25, -29, -17, 8, -31, -6, 36, 14, 4, 17, -16, -15, 15, -24, 12, -16, 9, -15, -1, 21, 48, 4, 19, -6, 2, -8, 9, 3, 2, -19, 6, 11, 28, 34, 1, -27, -2, 7, 4, 14, 14, 0, 0, 15, 27, 3, 14, 16, -20, -22, 15, 21, -7, -8, -18, 4, 24, -25, 35, -2, -28, -3, -23, 15, -6, 15, -7, 8, -28, 15, -13, -18, 22, -2, 14, 2, 8, -21, -1, -10, -18, 16, 21, -4, -22, -14, 21, -4, 27, 1, -17, -3, 6, 7, 19, -12, 22, 14, 42, 25, 0, 11, 3, 5, -1, 12, 11, 3, 10, 0, -24, -12, 25, 31, 0, 18, 29, 0, -18, -15, 37, 23, -18, -34, 14, -4, -13, -22, -9, 22, 20, 22, -2, -4, 7, 10, 16, 3, -17, 3, -34, 6, -49, 16, -18, 5, -17, 48, -24, -15, -20, 18, -4, 9, -14, 11, 2, -2, 42, -6, 4, 3, 15, 3, 18, -13, -6, -6, 9, 4, 23, -25, 5, -2, -1, -7, -2, 0, 13, -4, 14, -20, 1, 9, 2, 25, 17, -6, 17, -3, 8, -1, -10, 29, 26, 3, 33, -14, 13, -8, -11, -49, -27, 26, 8, 16, -16, 4, 21, -50, -10, -2, 14, 8, -17, -16, -11, 32, -14, -11, 20, -10, 38, -8, 31, -16, -16, 5, -12, 8, 4, 16, 15, -1, -32, -4, -2, -23, 12, -7, -18, -2, 14, -71, 6, -18, 14, 15, -1, -10, 4, -12, 15, 20, -7, -5, 38, -36, -7, 5, 20, -5, 13, -14, 31, -32, 4, -14, 23, 10, 21, 8, 1, 12, -19, 7, 27, -5, 7, 20, 25, -57, -2, 4, 5, 18, -6, -3, -17, 8, -14, -11, -12, -5, 26, -21, 18, 5, 24, -22, 21, -6, 29, 3, 14, 9, 39, -9, 8, -15, 
30, -14, -1, 25, 22, -58, 14, -12, -16, -6, -19, 25, -27, -7, 23, 4, -4, 19, 13, -5, -2, -8, -8, -8, 9, 20, -2, -1, 3, 11, 11, 21, -14, -14, 34, 8, 18, 15, 34, 9, 47, 11, 3, 17, -18, -9, 15, -9, 5, 2, -7, 15, 28, -16, 34, 17, 11, 2, 10, -1, -10, -14, -15, 8, 3, -20, 19, -5, -5, 21, -35, -8, 18, 7, 2, 26, -7, 7, -13, -20, 21, -2, -16, 27, 24, -5, 12, 20, 16, -2, 22, 26, 56, -19, 13, -15, -16, -14, -3, -19, -7, 21, 39, 12, 11, 22, -23, 12, 2, -3, -20, -14, 27, 18, 31, 3, 70, -23, -20, 15, 21, 9, 30, 21, -5, 0, 24, 17, -17, -22, -25, 15, 42, -8, 28, -1, -16, 9, 34, -24, 29, 6, 11, 14, -27, -11, 37, -21, -10, -2, 16, 1, 15, 0, -6, 18, -19, 22, 7, 8, 14, 10, -11, 0, -3, -5, -12, -14, -17, -23, 23, -8, -9, 13, 4, -1, 28, -4, -2, 2, -36, 11, 18, 9, 41, -14, 8, 25, 15, 17, 4, -19, 31, -18, -10, -7, -11, -16, -11, -23, -8, -21, -1, -9, 1, 11, 16, 9, 18, -8, 7, -3, 0, 11, 6, 12, 16, 22, 6, -11, 33, 20, 30, 11, 4, 19, -13, -35, 27, 39, -15, 16, 4, 18, -13, 28, -14, 17, -11, -22, 34, 15, 1, -12, -46, -13, 64, 15, -17, 8, -26, -21, -1, -9, 15, 7, -5, 13, 18, 16, 36, 14, 37, -11, 18, -4, 44, -9, 4, 15, 3, 1, -11, 18, 1, -23, -16, 7, -8, -19, -1, -8, -22, -9, 62, -25, 28, 17, -12, 12, -6, -5, -1, -9, -16, 9, -2, -21, -7, 6, 47, 20, 6, -7, 26, -13, -5, -14, -36, -34, -6, -21, -9, -11, -1, 20, -7, 19, -28, -5, 10, 6, 36, 14, -10, 18, 1, -17, -36, 19, 16, -37, 8, -4, 21, 4, 2, 10, -34, -27, 25, 11, 10, -9, -14, -7, 22, 24, 20, -3, -2, 8, -29, 4, -7, 14, 50, 13, 15, 4, -11, 20, 26, 9, 24, 21, 15, -12, -15, -8, 15, -14, -2, 12, 24, -12, 25, -2, -20, -21, -48, -10, 7, -18, 5, -22, 24, 11, -1, -5, -13, -20, 12, 17, 29, 3, 4, -26, -21, -3, 5, 12, -8, 1, 8, -5, 22, -15, 3, -1, 2, -9, 11, -7, 44, -19, -7, 7, 18, -22, -14, 8, 6, 4, -2, -22, -34, -43, 13, 18, 12, -11, 20, -10, -41, -1, -28, -3, 10, 14, -3, 8, -12, -9, 20, 5, -32, -9, 33, 2, 19, -3, -15, -23, -22, -8, 4, -17, 23, -27, -16, 9, 17, 7, -38, -11, 16, 19, 26, -3, -14, 14, -4, 6, -11, 18, 28, -10, 11, 17, 29, -18, -3, 3, -27, 5, 7, -16, -15, -19, 3, 30, 12, 16, 12, 3, -24, 8, 19, 1, 17, -12, 3, 13, -2, -16, -44, -14, -3, 1, -31, -2, 27, -2, -4, 9, 9, -12, 7, -23, 11, -8, -7, -6, -33, -27, 5, 3, 26, -11, 0, -4, -21, 11, 14, -19, 23, -23, -5, 8, -20, 15, -3, -12, -11, -12, -11, 15, -21, 4, 19, 15, -14, 1, -28, -8, 32, 6, -14, 15, -27, -2, 12, 27, 20, 39, -12, 8, -34, 6, -2, -24, 20, -2, 41, 6, 42, 12, -34, -20, -23, 5, -32, -2, -6, 2, 15, 41, -4, -16, 9, -13, -18, -17, 19, 11, -6, -44, 31, -2, 25, -11, -24, -32, 3, -11, -21, -6, -25, -19, -7, 24, -4, -4, -17, 17, 36, 7, 0, -23, -28, -2, 26, -9, -4, 0, -26, -31, 17, -8, -7, 8, 23, -6, -21, 12, 3, 28, -22, -23, 0, -2, 28, 24, 17, -10, -8, -1, 10, -11, -6, 3, -13, 4, -19, -3, -8, -7, -8, -17, 11, 2, 7, 11, -23, 1, 11, -13, 9, -14, -5, 18, -26, -15, 33, 21, -16, 16, -25, -1, -17, 16, -11, 14, 11, 17, -15, -17, -23, 10, -1, 0, -24, 11, 1, -16, 7, -10, 4, -16, -5, 13, -20, -7, -17, 16, 34, 14, -4, -14, 15, 9, -13, -14, 18, 7, 4, 11, 14, -2, -16, -20, 16, 22, -23, -20, 18, -12, -2, -5, 16, -15, -35, 16, -22, -6, 23, 6, 6, -21, -37, 21, -33, 2, -23, -13, 24, 8, 13, 18, -16, -18, 0, 0, 0, 3, 0, 0, 22, -9, -18, 17, -5, -2, -28, 16, -11, -14, 19, 5, -20, 5, -15, 18, -24, 16, 0, 14, 3, -16, 0, 28, -2, -11, 10, -5, 9, 8, -1, -1, 11, -21, -17, 4, 10, 9, -17, -2, 5, -23, -2, 9, 14, 5, 17, 9, 11, 18, -17, -21, 18, 6, -13, -9, -21, -2, -4, 8, 13, 12, -7, 25, 0, -13, -24, -5, -13, -11, -4, -20, 74, -19, 6, -6, 5, 20, -12, 8, -6, 1, -5, -14, -18, 1, -22, 13, 10, 11, -19, -16, -29, -29, 14, -24, 4, -21, 10, 6, -12, 10, 2, -9, 
-23, 3, -9, 12, 20, -18, -14, 22, -12, -6, -38, -14, 2, -46, 10, -21, -3, -3, -5, -24, 17, -11, -16, -14, -10, -18, -14, -3, -19, -32, -44, 25, -1, 17, 15, 13, 3, 10, -6, 11, 33, -16, 25, -23, -23, -3, -16, 30, 13, 15, 1, -16, -24, -12, -34, 25, -40, -4, -20, 6, -5, 9, -23, -6, 15, -3, 10, -4, 10, -25, 17, -2, -7, 6, -23, 11, -14, 1, 10, -1, -6, -14, 9, 1, -8, -4, -20, 24, -11, -11, 38, -5, -19, 18, -18, 9, 13, 18, -2, 9, -3, -5, -20, 46, 1, -17, -1, 17, 22, 14, -8, -3, -9, -4, -9, 14, -1, 17, 14, 7, -18, -18, -3, -5, -1, -4, 4, 15, 14, -16, 3, -1, -11, 6, -8, 3, 17, -23, 8, -10, -21, -6, 7, 7, 56, 4, -7, -17, -12, 33, -2, 1, -17, 7, 8, 12, -7, 9, 0, 16, 41, -19, 0, 6, -5, -2, -9, -10, 0, 9, 2, 23, 17, 17, -8, -9, 13, -11, -3, -3, 18, 1, -1, -3, 25, -20, 9, -15, -22, -11, -32, -8, 22, 8, 37, -6, 32, -9, 18, -3, 27, -11, -6, 0, -18, -1, 0, 27, 35, -2, 3, 4, -16, 3, -6, 4, 44, -16, 16, -17, -31, -16, -1, -14, 69, -19, 19, 12, -10, 13, -8, -9, 9, -13, -25, -13, -15, 7, -11, -15, 5, -7, 49, 7, -15, 8, -5, -4, -4, 15, 24, 15, 18, 15, 10, 2, -13, 3, 19, -11, 16, 10, -7, -12, 40, -1, -3, 7, 9, -4, -4, -7, 19, -19, 7, 8, -5, -13, 4, 13, 17, -18, -2, -22, -14, -16, 8, 4, 51, 7, 5, 14, -30, 0, -10, 30, 6, 12, -4, 2, -15, 11, -37, -17, 33, 16, 7, -13, -18, 26, 6, 13, -10, -2, 40, -16, -25, -14, -10, -34, 25, -11, 3, -12, -5, -8, -3, -3, -1, 6, 17, -20, -15, -5, 4, 19, 34, 3, -3, -9, -7, -12, -7, 0, 16, -1, 28, -14, -4, 5, -2, 17, -3, -15, -9, -16, -4, 22, 15, 0, 6, -19, 47, -11, -8, -31, -33, -26, 1, 8, -39, 4, -27, -32, -14, 6, 24, -10, -13, -10, -14, -22, 17, -7, 18, 16, 22, -4, -25, -1, -6, -9, -60, -19, 9, -6, 1, 6, -1, -1, 12, -1, -45, -8, -17, -17, 18, 8, 15, 2, 11, -1, -9, -7, 0, 2, 25, -11, -11, 10, -5, -22, -9, -10, 19, 11, 0, -9, -41, 10, -22, -2, 10, 14, 17, 5, -24, -6, -4, -8, -1, 8, 25, 8, -9, -12, -31, -13, -26, 13, 10, -18, -16, -3, 10, -19, -19, -9, 15, -21, -17, -18, -7, -41, 2, -13, 19, 8, -26, -43, 18, -37, -6, 18, 3, 7, -27, 14, -13, -14, 11, 12, 11, 8, -8, -18, -40, -11, 33, -15, -46, -17, -8, -11, 12, 35, 18, 2, 21, -19, 1, -46, 19, -12, -16, -1, 11, -5, 18, -19, -27, -27, 6, 8, -1, 17, 9, -10, 3, -21, -29, -17, 32, -10, -19, 10, -31, -20, -29, 23, 46, -5, -23, -26, -38, -28, 31, -2, 4, -13, -22, -17, -23, 4, -13, -24, -12, 10, -12, -13, -15, 21, 20, -3, -3, -22, 2, -16, -18, -7, 14, 4, 5, 12, -21, 13, 8, -10, 31, -4, -42, -20, -23, -22, 51, 2, 16, -10, 18, 16, -12, 3, 10, -23, -5, 10, -16, -23, 24, 39, 1, -4, 4, -7, -5, -9, -6, 6, 12, -15, -26, 16, 5, -15, 9, -1, -34, 26, -5, 3, -4, -6, -24, 10, -7, 25, 18, -9, -12, -18, -13, -7, 29, 3, 6, -4, 0, 9, -16, 2, -7, 28, -7, -3, 6, -12, -18, 12, -17, -1, 16, 11, 23, -4, 1, 9, 1, 13, 31, 2, -5, -11, -7, 9, -12, 15, 24, 2, 22, -11, -23, -14, 12, 6, 2, -23, 28, -12, -11, -15, 6, 10, 6, 12, 8, -18, -15, 12, -20, -23, -8, -2, -9, 2, -13, -16, -5, 22, 32, 3, -17, -15, -32, 6, -6, -9, 42, -5, 6, -21, -20, -1, -42, 4, 11, 7, 3, -9, -15, -28, -34, 21, 5, 18, -13, -23, 4, -10, -35, 4, 25, -3, 18, -3, 0, 17, 5, -3, -11, 10, -8, -24, -30, -18, -26, -14, 16, -3, 14, 18, 1, 10, -10, 13, -8, 20, -4, -12, -28, -3, -3, -74, -15, 12, 14, -14, 12, 2, -9, -10, -19, -19, 15, 12, 31, -26, 19, 32, -15, -5, -1, 17, -7, 8, -4, -2, 24, -19, 6, -11, 14, 20, -21, -36, 21, 15, 8, -3, 6, 4, -24, 34, 0, 5, 3, -21, 11, 11, -17, 0, 5, -11, 24, -23, -21, -23, 23, 4, 21, 11, -4, 15, 2, 9, 3, -12, 21, -4, 16, -8, 37, 12, -4, 26, 11, -1, 11, 13, -3, -32, -16, 6, 16, -8, -7, -3, -1, -3, -3, -18, 4, -22, 21, -4, -1, 23, -18, 1, -16, 10, -9, 1, -13, 20, 4, -17, -2, -2, 12, 
0, -28, -3, 40, -16, 23, -8, -10, -18, 8, 23, 28, -35, -19, -11, 26, -21, 6, 0, -24, 20, -6, -13, 22, -9, -3, -5, -23, 9, -11, -2, 1, 17, -6, 1, -13, -26, 20, 6, 20, 15, 15, 17, 3, -1, 13, 15, 22, 2, 28, 25, -7, 17, 27, -10, 11, 15, -16, -44, -12, -25, 27, -17, 21, -18, 10, -3, -4, -22, -11, 20, 15, -15, 19, -2, 7, 16, -13, -14, 16, 11, 29, -4, 13, -23, 8, 1, -14, -1, 10, 5, -3, -17, -12, -6, 17, -1, -1, -29, -11, -12, -2, -4, 4, 11, 28, 21, 22, 6, -16, 12, -9, -5, 15, 13, -27, -23, 7, -20, 2, -16, 6, -2, -11, -3, 44, 10, 29, -22, 19, -7, -32, -22, 0, 1, 8, -3, -8, 18, 22, 0, 29, 7, 22, -1, 14, 9, 4, 37, -35, 8, 36, -14, 14, -14, 25, 25, 32, 3, 2, 4, 20, -7, -14, -12, -12, -16, 42, 12, -7, 10, -4, 10, 19, -6, 7, -5, 12, 22, 9, -16, -8, 11, 5, 2, 0, 40, 14, 26, 3, -13, 4, -12, 7, 6, 27, -8, 24, -16, -6, 4, -6, 31, 8, -43, 10, -13, 21, 14, -11, -14, 4, 5, 11, 6, 9, -12, -18, -45, 13, -6, -21, 18, -38, -14, 17, 0, 25, -24, 26, 13, 0, 7, 22, 2, -5, -30, -16, 18, 10, 6, 12, -14, 5, -17, 7, 15, -14, 17, -33, -41, 7, 12, 8, 1, 29, -22, 0, 19, 19, -3, -9, -13, -11, 8, -9, -10, -27, -1, -31, 17, -7, -14, -20, 2, -27, -20, -7, 21, -15, 14, -5, 25, 14, 20, -17, 0, 15, -5, -23, -5, -9, 7, -5, -2, -12, 0, -7, -35, -25, -37, 1, 11, -5, 1, -30, 23, 6, 16, 13, 13, 11, -17, 3, 35, -16, 40, 11, 17, -5, 21, 35, -18, -6, -14, -2, 10, -37, -17, -24, -24, -34, 18, -16, 18, 3, -3, 7, -11, -5, -15, -4, 16, -14, -12, -26, -3, 8, -63, -4, -1, 4, 3, -39, -12, -33, -13, -1, 0, -5, -11, 5, 38, -2, -13, -14, -21, -13, -18, 0, 7, 5, 14, -2, -7, -32, 6, -25, 15, -22, -5, -6, -9, -4, -12, 13, -12, -6, 14, -10, -11, -17, 15, 6, -33, -22, -57, -10, 8, 32, -11, -14, -30, 12, -20, -2, 10, 19, 16, -40, -24, 21, 29, 19, -12, -6, 18, -8, 22, -8, -5, 9, -6, -2, -7, -18, 49, 0, -33, -13, -19, -21, -20, 0, -4, 8, -29, -1, 7, 27, -14, -24, -3, -10, -34, 10, -20, 15, -13, -40, -59, -34, -42, 11, 9, -13, 0, 1, -2, 4, 0, -20, -18, 10, 9, -8, -12, -1, 5, 5, -1, -10, -18, -30, 8, 2, 1, -17, -4, -17, 3, -17, -19, 3, 18, -9, 17, 16, 4, 22, 21, -18, -7, -28, -16, 18, -17, -5, 4, 12, 2, -6, -3, -18, -6, 22, 3, -1, 13, 11, 14, -32, 19, 2, 20, 9, -10, -1, 8, 16, 7, -28, -15, -8, 49, 6, -10, 24, -2, -10, 34, 9, -2, -3, -11, -12, 13, -8, -1, 12, 7, 8, -21, 20, -9, -1, 13, -8, -28, -1, 12, 4, 1, 15, -8, 7, -40, 4, -18, 8, -19, 14, 2, -5, -1, 4, -13, 13, 1, 2, -11, 5, -25, 11, 5, 20, -17, 7, -22, -17, -14, -27, 1, -9, -16, -41, -19, -8, -4, -11, 10, 35, -23, -22, 17, 22, -27, -8, 1, -7, -7, 15, -11, -21, -1, 14, -21, 0, -13, -47, -10, 6, -15, -10, 2, 24, -12, -29, 20, -29, -10, -10, -15, 7, 1, -7, -11, 2, -28, -5, 5, -13, -16, -41, -37, -4, -16, -23, -19, 9, 14, -9, -25, -25, 13, -26, 13, 7, 12, -26, -14, -14, -27, 14, 7, 2, -18, 18, -17, -39, 11, -9, -22, 18, -22, -25, 15, -1, 28, -6, -14, -12, 17, -17, 10, -13, 2, 8, 8, -17, 9, -25, 25, -19, 15, -12, -22, -2, -15, -16, -34, -19, 21, -4, 13, -14, -11, -10, 10, 27, 8, -3, -13, -7, 8, 13, 25, -24, -3, -6, -9, -5, -14, -8, -32, 18, -7, -10, -6, -5, 0, -3, 13, -18, 5, -8, -22, 16, -1, -26, 21, 10, 6, 21, -7, 19, 8, -3, 14, -25, -2, 1, -8, 17, -17, -8, 16, -12, -2, -27, 4, -16, -15, -5, -14, -20, 4, -14, 3, -13, -2, -30, -3, -14, -2, 3, 1, -3, 13, -16, 9, -3, 25, 0, -24, -14, 7, 10, 3, -15, -4, -5, -9, -4, 13, -29, 4, 14, -5, -11, -20, -13, 0, -2, 0, 16, 10, -24, 0, -21, 17, -6, 4, 2, -29, 21, -12, 18, -2, -29, -8, -18, 5, -8, -5, -3, 4, -7, -18, -2, 0, 12, -19, 11, 8, -29, 11, -22, -24, 1, -21, -20, 1, -27, -5, -5, 9, -12, -2, 2, -20, 14, -8, 19, 4, -9, -23, -16, 4, 8, 3, -15, 3, -11, -15, -6, 6, 
-22, -7, -26, -13, -13, 13, 27, -13, 3, -5, -6, 6, -25, 4, -3, 16, -18, 25, -22, -14, 12, 10, 1, 13, 21, -39, 21, 4, -4, 0, 5, -1, 16, -15, -33, -2, -18, 8, -7, 14, 9, -13, -5, 1, 9, -11, -22, 19, 4, 7, -39, 0, -9, -18, 5, 3, -14, 10, 0, 19, -17, -19, -1, 18, -36, -2, 0, 6, 3, 0, -21, 4, 6, -1, 9, 3, 13, -12, 0, -11, -19, -12, 18, -4, -10, 7, -15, 11, -4, -8, -33, 20, 18, -18, 6, -7, 12, -3, 20, -32, 21, 2, 28, -3, 21, 3, -4, -35, -6, 5, -20, -21, 19, -29, -4, 4, -13, -3, -4, -10, -9, 26, -8, -3, 27, -6, -4, 8, -7, -6, 19, 10, 7, -4, 24, -22, 1, 13, 1, -17, -15, -10, 0, 14, -14, 27, 13, 3, -34, 10, -5, 19, -41, 2, -3, 10, 8, 7, -4, -17, -24, -18, 10, -7, -23, 9, -3, -4, -14, 12, 14, 13, -10, 8, 20, 3, 13, -3, -21, -1, -8, 15, 2, 6, 7, 9, -11, 6, -24, -14, -11, -12, -12, 6, -13, 32, -24, 9, 29, -10, -9, -52, -17, 19, -14, -17, -10, 13, -13, -9, -27, 5, -37, -18, -18, -9, -47, -2, -2, -16, -32, -11, 6, 18, -32, 7, -13, 4, 6, 10, 10, 13, 2, 28, 24, -23, 5, 2, -5, -17, -13, -13, -24, -48, 16, -1, -12, 18, 12, -20, -12, -8, 18, -7, -22, 5, 19, -21, -24, 10, -22, 12, -4, 2, -9, 8, 3, -13, -3, -10, 2, -10, -5, 18, -12, 7, -11, -18, -30, -18, -43, -14, 1, 18, -25, -8, 5, 1, -17, 44, 3, 5, -31, -4, 15, 8, -18, -3, 37, -38, 13, 8, 14, -1, -17, -9, 17, -52, -34, -21, -18, -21, -16, 25, -5, 18, -16, 8, 5, -11, 1, -11, -6, 14, -11, -11, -19, -6, -10, -13, 1, -10, 4, -6, -17, 4, 9, -40, 1, -17, 7, -17, -9, -7, -33, 6, 6, -3, -19, 20, -15, 7, -34, -7, 5, -14, -8, 1, 4, 3, 8, -7, -18, 11, 3, 15, -10, 11, 14, 8, -43, 17, -45, 11, 6, 4, 10, 25, 3, -11, 4, 13, 5, -8, 2, -22, 0, 17, 16, -6, 15, 8, -9, -43, 12, -3, -35, 9, -17, -14, 13, 18, 3, 3, -2, -15, 11, -7, -40, -25, 25, 0, -9, -9, 26, -18, -17, -8, -36, -14, -7, -14, 18, -9, -23, 18, 15, -22, 6, -6, -4, 6, -5, 18, 2, -4, -15, 9, 12, 14, 21, -32, 17, -21, 6, -13, 0, -11, 30, 5, 9, 28, -12, -3, -6, -3, -31, -2, -13, 16, -1, -2, -1, 12, 34, -4, 13, -35, 12, 14, 2, -7, -19, 2, -13, -14, -26, 8, 3, 10, -17, -28, -10, 28, 9, 16, 5, -20, 20, 8, 24, -31, 27, 13, 8, 9, -33, 11, 4, -16, -5, 2, 22, 1, -9, 22, 11, 3, 2, -14, 16, -8, 15, -32, 8, 0, -2, 10, -8, -10, 8, -19, 40, 4, -35, 12, -12, 13, -7, 15, -14, -28, -6, -10, -3, 3, 20, 4, -31, -29, 4, 10, 21, 16, 5, 4, 24, -30, -5, 6, -4, 2, 0, 34, 21, -9, -11, 7, 10, -2, 18, 10, -8, -18, 8, -9, 21, -12, -6, -4, -13, -6, -5, 16, 11, -18, -17, -4, 2, -4, 0, -7, -17, 9, 16, -4, 4, 1, 7, 10, -4, 6, 21, -1, -12, -8, 13, 4, -2, -16, -29, 10, 27, -10, -5, -12, -14, 5, 26, 16, -6, 13, 7, -5, -17, 3, -8, -15, -3, 11, 3, -18, 11, 18, -28, 19, 3, -43, 1, -6, -9, -2, -20, -3, -29, 0, 27, 19, 11, -16, -11, -18, -21, 4, -6, -20, -19, 14, -20, 8, -17, -12, 11, -5, -1, -2, 9, 12, 9, -9, -8, -5, -8, 14, -3, -20, 9, 0, -14, -20, -9, 17, 6, -24, -42, 26, -25, 16, 21, -2, 6, -20, 20, -24, 9, -13, -10, 18, -17, 8, 7, 17, 5, -7, -9, -4, 6, 2, -25, -15, 9, 15, 18, -21, -9, -3, 10, -5, -7, 3, -27, -8, -24, -28, -8, 9, 37, 1, -10, 2, 21, 0, -7, 22, -29, -2, -2, -18, 5, 35, 8, 5, -5, -3, 6, 9, 15, -13, -33, 6, 5, 6, 1, -4, -9, 17, -10, -16, 17, 7, 11, -5, 11, -26, -9, -17, 6, -7, 14, -6, -4, -10, -6, -6, 8, -4, -12, 0, 3, -5, 0, 29, -12, 0, 1, 19, -2, 9, 11, -20, 17, -3, 13, -13, -18, -33, -10, -17, -10, 10, 30, -7, 16, 15, 12, 2, 9, -11, 23, 11, -30, -11, -18, -17, -1, -7, 23, 18, -7, 18, -13, -26, -22, -9, 24, 12, -21, 28, 2, -14, -16, 17, 23, 14, 11, -1, -21, 16, -31, -2, 19, 12, 4, -15, -11, -2, -5, -8, 15, -19, -24, -14, 8, 9, -1, -18, -20, 3, -10, -19, 29, 14, 25, 2, 24, -24, -19, 6, 13, 13, -3, 2, -25, -15, -12, 8, 11, 11, -6, 15, 2, 
19, -14, 32, -6, 8, 6, -4, 24, 12, -18, 17, -26, 14, -7, -1, 23, 0, -33, -2, -6, 13, -11, -3, 2, 17, -10, 4, 3, 23, 11, 2, 3, -15, 20, 1, 7, -25, -2, 11, -7, -4, -12, -26, -7, -14, -13, 17, -12, -12, -6, 6, -10, -22, -3, 4, -5, 11, 18, 13, -12, 22, -8, -19, 15, 7, 22, -2, -15, -13, -9, -15, -3, -23, 8, -14, 2, -25, 3, -10, -23, -37, -10, 27, 8, 11, -5, -20, 5, -9, 6, -23, -14, 1, 17, 14, 3, -19, -1, 5, -11, -18, -24, 16, 12, 5, 24, -9, -10, -1, -11, -25, 6, -26, 29, 8, -3, 21, -10, 8, -11, -20, 18, 7, -10, 18, 4, 23, 36, -7, 10, 1, -19, 5, -22, -3, -20, -39, 23, -3, 12, 21, -14, -28, 15, -17, 25, 3, -23, 13, 2, -19, -9, -21, -25, 15, 8, -7, 14, 13, 0, -3, 6, -18, 11, -11, 9, 5, -33, -27, -22, -19, -5, 20, -4, 13, 1, -23, -9, -17, -11, -11, 22, 22, -9, -21, -23, -10, 7, 14, 10, -19, -18, 6, 40, -18, -28, 23, 6, -30, -19, 4, 18, -20, 22, 9, -16, 17, -14, -22, 10, -5, -7, 4, 17, -35, -21, -15, -17, -16, 32, 20, 8, 2, -22, -23, -13, 16, 9, 16, 21, -16, 11, -21, -4, 4, -6, -1, 17, 1, 3, -24, -3, 17, -22, -29, -9, -8, 11, -34, -20, -10, -18, 6, -11, 11, -10, -1, -13, -19, 2, 17, -25, -18, 16, -4, -10, 3, 16, 3, -9, 21, 19, 4, -12, -4, -19, -8, 24, -16, -23, 7, -2, 2, -26, -5, -12, 9, 32, 17, 17, 9, -20, -4, -2, -9, 9, -1, 22, -13, 2, 19, -15, 0, -44, -2, -12, 5, 4, -1, -14, 7, 18, -6, 13, 15, -35, 16, 6, 10, -15, 15, 31, 28, -3, -14, -6, 6, -19, -14, 4, -11, -33, -12, 25, 19, -12, 14, -11, -6, 36, 2, 5, -17, -16, 2, -23, -8, 22, -10, -16, -9, -16, 8, 18, 15, 36, 5, 15, -21, -4, 9, -6, -8, 7, -18, 8, 9, 10, -7, -16, 0, -29, 15, 14, -1, -20, -13, -5, 21, 1, 8, 14, 5, 24, 12, 10, -15, -50, 6, -23, -13, -10, -23, 54, 6, 40, 1, 4, 21, 1, 1, -46, 14, 6, 32, 21, -8, -20, 7, -26, 11, -17, 15, 15, 4, -5, 14, -22, 16, 32, 6, 23, -11, -10, -4, -47, 3, 22, -1, 10, -40, 2, 5, -7, 10, -9, -32, 56, 25, -27, -24, 32, 15, 23, 15, -19, 0, -24, 18, 11, -14, 8, 35, 1, 3, -6, -9, 3, -2, 15, 28, 40, 7, -16, 30, -38, 2, 20, 16, 17, -23, 10, 10, 5, -16, 1, 0, 29, -23, 0, 21, -9, -10, -4, -6, 1, 3, -18, -24, -27, 2, 7, 19, 15, 23, 13, 2, 20, -10, -2, 21, 25, -16, 5, -4, 24, 26, -23, 5, 3, 10, -11, 24, -2, -6, 29, 13, -20, -3, 3, -8, 36, -15, -27, -14, 12, 20, -16, -11, 2, -3, 8, 28, 34, 5, 2, -20, -33, 11, 9, -3, -10, -7, -38, -7, 32, 27, 35, -19, 13, -5, -9, 12, -6, -2, 4, 19, 22, 8, 37, -6, -19, -7, -9, -12, 16, -5, 4, -7, -13, -13, 21, 4, 12, -18, -5, -19, 26, 23, 22, -14, -38, 42, -16, -10, 18, -10, 1, 15, 27, -2, 21, -13, -6, 22, 19, 22, 21, 18, -8, -8, 27, 7, 32, 33, -14, 16, -35, 8, -17, 12, 46, 18, 19, -10, 7, -22, -36, 10, -6, 28, 18, -9, 1, 10, -33, 16, -1, -35, 13, -2, -5, 8, 21, 39, 15, 11, 1, -17, -9, -12, -16, 12, 3, 23, 3, 4, 2, 8, 23, -14, 18, 41, 0, -5, 16, 4, 19, -7, -19, 39, -12, 38, 2, 11, 31, -8, -2, -8, -13, 20, 18, -5, 18, -10, 18, 14, 8, 20, -17, 3, 20, 22, 1, 23, 4, 9, -31, 2, 14, 8, 10, -11, -18, -8, 6, 1, 37, -14, -1, -13, 6, -13, -6, -3, 26, 12, 27, 10, 7, -6, 20, -14, 7, -9, 6, 10, -8, 7, -6, -6, -1, -6, 12, 6, -23, 13, 15, -37, 30, -16, 32, 5, 14, -28, -12, -10, -15, -13, 16, 39, -16, -8, -15, -16, -10, 12, -9, 21, 4, 16, -12, 25, 8, -8, -3, -8, -4, 18, -25, -7, -11, -3, 27, 25, -7, -12, -11, 19, -1, -15, 24, 2, -29, -44, -33, -2, 33, 20, 6, 17, 20, 25, 19, -12, 30, -8, 3, -12, 0, 35, -33, -23, 14, -6, 46, 4, 2, -4, 10, -15, 22, 12, -29, 14, -13, 13, -23, 17, -12, -1, -13, -11, -1, 5, -11, 10, 8, 17, 8, 9, 1, -7, 5, -17, 29, -13, 12, 16, -16, -1, -16, -2, 10, -1, 38, 11, 5, -21, -4, 15, -35, -23, 10, -15, 6, -26, 1, 18, -2, -2, 26, -33, -5, 17, -18, 7, -5, -7, 2, 26, -14, -13, -27, -8, 2, 
-9, 1, 12, -5, 0, 4, -33, 4, 5, 0, 17, -3, -11, 13, 11, -54, 16, 13, 8, 10, 1, -1, -14, 1, -8, -12, 20, 14, -5, -4, 24, 3, 15, 4, -3, -5, -17, -33, -7, 28, 8, 28, -11, -18, 0, -11, -1, 24, 10, -6, 16, 5, 22, 18, -9, 11, -30, 30, -11, 5, -18, -14, -29, 28, 1, 6, -1, -10, 7, 2, -12, 4, 25, 3, -10, -10, 3, -7, 12, -26, 0, -6, 2, 8, -26, 15, 9, -8, 12, -1, 20, -6, -27, 10, -12, 1, -2, -43, 31, 6, -21, 30, -1, -1, -18, 4, 6, -16, -2, 13, -37, 9, -18, -4, 19, 17, -17, 11, -12, -32, 1, -23, -11, -9, -12, -9, -17, -15, -40, 34, -1, 2, -19, 16, -37, 6, 7, -10, 8, -10, -7, -17, 0, -18, 4, 15, -8, 17, -22, 15, -4, 9, -23, 22, -15, -3, 5, -5, -33, -9, 9, 15, 7, -9, -15, 14, -28, -30, -8, -29, -11, 16, 3, 3, -17, -23, -23, 12, -23, -17, -23, 30, -19, 14, 7, -32, -19, -2, 4, 7, 4, 2, -7, -39, 8, 19, -2, 34, -14, 8, -13, -6, -6, 18, -3, 13, -24, -12, -13, 18, -8, -6, 6, 5, 9, -32, 21, -15, -24, 24, -30, -17, -3, -28, 7, 22, -10, -3, 0, -25, -3, -11, 13, 27, 8, 19, 18, 4, -14, -12, -6, -25, 17, -9, 0, 1, -14, 10, -16, -13, 23, 13, 10, 11, -33, -32, -15, -20, 1, -11, -14, 8, -31, 5, 3, 4, -2, -7, 10, -26, -3, -27, 3, -26, -31, -13, -19, 16, -7, 10, -20, 36, 24, 32, 2, -13, 15, -1, 2, 20, -4, 3, 0, 27, 15, -14, 14, 1, 1, -13, -32, 0, -1, 10, 21, -68, -16, -15, -1, 28, -21, -1, -16, -37, -10, 21, -16, -4, 9, -11, -67, 0, 9, -5, 31, 9, -9, 3, -6, 10, 9, -2, -7, -19, 6, 23, -21, 1, 23, 2, -22, 5, 13, -8, -12, 22, -6, -15, 9, -5, -13, 7, -13, -23, -9, 7, 5, 9, -7, -16, 21, 6, 11, -26, 10, -6, 6, 2, 21, 4, -37, -8, -8, 15, 13, 1, 21, 15, -4, -14, 2, 29, 28, -2, 11, -47, -5, -8, -4, 7, 1, -17, 9, -31, -10, 0, 10, 24, -31, -15, 55, -16, -14, 11, -17, -30, -10, 0, -6, 21, 7, 25, -14, 22, 0, -29, 10, -9, 19, -1, -36, -13, 21, 33, 5, 24, 13, 28, 29, 18, 4, -9, 11, -22, 16, -21, 11, 13, 5, 16, -1, -31, 26, 36, -7, 23, -36, 6, -3, -29, -15, -4, -19, 15, 5, 15, 24, -11, 11, 5, 9, -14, 8, -41, -15, 3, 14, -20, -19, 15, 6, -15, 38, 4, -3, 2, 7, -19, 1, 25, 8, 6, -2, -7, 23, 4, 15, -22, 14, 26, 23, 19, -26, -17, -2, 9, 17, -42, -22, -19, 22, 10, 0, 24, 21, 0, 17, 2, 12, 10, -17, 2, 1, 25, 6, -7, -18, 31, 8, 10, 37, 7, -12, -13, 0, 19, -18, 11, 30, -28, 21, 9, 2, -9, -6, -13, -3, -13, -35, 9, 2, 5, -10, 16, 9, -10, -5, 4, -4, -1, 18, -12, -43, -22, 21, -15, -12, 8, -2, 16, 5, 1, 40, 13, 30, -14, 7, -15, -7, 18, -12, 0, -13, 14, 9, 13, -3, -12, 29, -48, -11, -3, -8, 17, 16, -9, -12, -30, -12, 25, -21, 21, -1, 9, 9, 8, -17, 0, -12, 11, 9, 6, -8, 10, -14, 0, -5, -6, -33, 19, -5, -6, 7, -34, -17, 1, -5, 4, 32, -9, -21, -26, 23, 30, 28, -19, -14, 7, -5, -19, 10, -5, 27, -16, -19, -36, 0, -13, -14, 3, -5, 15, -19, 8, -21, -34, -11, 2, -9, -1, 11, -14, 15, 11, -10, 17, -3, 5, -17, -7, 5, -11, 5, 20, -28, 17, 26, -23, 16, -6, -21, -4, 0, 19, -8, -11, 17, -12, -22, -17, -4, 32, 23, -2, 15, -8, -8, 0, -15, -28, 5, 0, -13, -22, 13, -8, -17, 31, 0, -18, -18, -12, -3, -10, 26, 15, 20, -15, -9, -16, -2, 6, 12, -2, 11, -40, -28, 2, -23, 27, 4, -14, -10, 33, -26, -22, -13, 19, -14, -19, 22, -1, 5, 9, -3, 2, 10, -15, -10, -21, -21, 20, 4, -4, -26, -17, -25, -38, -18, -3, -27, 6, 1, 23, 0, -16, 9, -16, 7, -38, 1, -6, 8, -5, 4, -14, -28, -11, 28, -5, -15, -3, 7, 14, -8, 11, 7, 6, 19, -36, -12, -17, -26, 9, 0, 4, 37, 11, -2, -17, -3, 15, -20, -20, -6, 17, -17, -3, -9, -6, -12, 0, 13, -4, -27, 27, 20, -6, 13, 15, 27, -5, -15, 14, -29, 16, 12, 3, -23, -24, 16, -6, -23, -14, 0, 15, -23, -12, -31, 19, -4, -1, 10, -5, -15, -29, -1, 19, -34, -8, -1, -2, 23, 10, 10, 6, -32, 19, -35, -8, 17, -22, -13, 1, 2, -37, -15, 26, 11, 21, -14, -20, -16, 6, -30, 
-9, 17, 6, -7, 24, -6, -13, -13, -4, -5, -5, -16, 11, 4, -2, -26, 8, 13, 9, -6, 9, 2, 4, 11, 0, 17, -7, 20, 25, 18, 13, 13, 14, -22, 5, 2, -10, 12, 0, 6, 1, 20, 17, -5, 20, 20, 12, -16, -28, 11, 1, 12, 8, -28, 32, 13, 16, -2, 13, -6, 16, -13, -2, -12, -2, 14, -2, -18, -11, -18, 16, -15, -13, -27, -2, 3, 3, -12, -20, -2, 7, 22, -10, -10, -4, -19, 19, -2, -3, 20, 12, -29, -13, -2, -5, 15, -22, 5, -3, -6, -12, 22, 18, -16, -12, -10, -3, -11, -3, -25, -25, -8, 8, -2, -5, 13, -9, -13, 4, -8, -48, -12, -15, 16, 18, -28, 22, -9, 13, 36, 11, -24, -18, -5, -7, -7, -6, 22, 12, -30, -2, -26, -19, 32, 10, -23, -4, 17, -8, 4, 1, 22, 12, -22, -7, -10, 20, -30, 21, -12, 7, 7, -16, 15, 28, -26, -16, -6, 9, -20, -10, 9, 0, 10, 19, -24, 4, 6, -20, -26, 36, -3, -6, 6, 24, -9, -2, 3, -11, -5, -13, 8, 14, -13, 19, 28, 18, -30, 4, -1, 9, 7, -19, 4, -5, -1, -8, -4, -1, -2, -19, -13, -3, 6, 20, -21, 16, -24, -6, -19, -9, -6, 10, 29, -2, -22, 0, 8, 7, -6, -6, -10, 3, -5, -1, 16, 40, -2, -13, -11, 4, -7, 0, 7, 18, -38, 32, 18, -11, 27, 13, -19, -11, -23, 6, 2, 9, 20, -13, 12, 19, -18, -5, 5, 31, -27, -2, 7, -19, 10, 22, -14, -7, 7, -24, 22, -25, -12, -13, -17, -27, 21, 4, -11, 14, 5, -3, 15, -28, -15, -9, 23, -19, 4, -16, -14, 20, 7, 29, 5, -36, -1, 31, -23, -21, 13, -38, 1, -14, 1, 19, -14, -30, -21, 17, 16, 28, 1, 14, -2, -5, 12, -3, -33, -21, -14, 1, -3, 0, -10, 21, -10, -1, -19, -7, 7, -15, -17, 37, 19, 4, 21, -24, -2, 11, 4, 10, -3, 15, -22, 1, 30, 1, -1, -27, 3, -6, 10, 5, -11, -7, -7, -25, -10, -2, 6, 6, -17, -2, 4, 3, -4, -10, -35, -8, -31, -19, 1, 26, -6, 3, 13, 13, 26, 2, 16, -1, 9, -11, 4, 41, 20, -23, -7, -27, -12, -3, 6, 19, -10, -11, -14, -17, -27, -23, -4, -5, 8, 18, 5, -8, 10, 28, -23, -14, 10, -10, 17, -20, 25, 2, -24, -6, -16, -7, 26, 18, -8, -1, -9, -9, -1, 1, 18, -4, -1, 26, 16, 3, 27, -30, -7, -29, -40, -1, -16, -8, -2, 15, 5, -2, -17, 30, -18, -1, -21, 13, 2, -4, -23, 18, 19, 2, -33, -3, -24, 25, 17, 0, -2, 10, -18, -3, 15, 20, -26, -3, 1, 29, -6, -37, -10, -8, 10, 3, -4, -5, -32, 15, -24, 16, -11, 1, -22, -2, 19, -10, -11, -29, -11, 23, 12, 13, -14, -25, -29, 3, 10, -1, -24, 46, -47, -8, -3, -19, -10, 3, 6, 8, -4, -7, -8, 4, 20, -6, -25, -17, -52, 24, -22, 11, 6, 18, -14, 6, -36, 33, -5, 13, 13, -25, 5, 26, -13, -13, 21, -3, 5, 6, -24, 14, 15, 33, -16, 17, 13, -12, 18, 23, -1, -1, -21, 11, 21, 25, 6, -19, 10, 24, -2, 11, -3, -12, -8, -9, -7, 0, -15, -3, -27, 0, 1, -1, -5, -6, 29, -5, 25, -13, 14, -14, -16, 7, -33, -17, -15, 6, -8, 25, 18, -3, -22, 6, 9, 1, 24, 15, -4, 11, -2, 14, 7, -1, -11, 9, -40, -2, -15, 17, -18, -19, -17, -34, -22, -11, 8, 20, -21, 20, -17, 4, -17, -4, 12, -13, 22, 0, -4, -7, -25, 18, 6, 4, -4, 9, -5, -16, 17, 9, 10, -12, -10, 8, -3, -15, 4, -3, 13, 16, 1, 6, -11, -13, -2, -1, 15, 25, 5, -15, -16, -25, -14, 26, 17, -4, -25, 13, 9, 20, 20, -5, 8, 30, -10, 23, -5, 14, -16, -24, -5, -26, -1, 6, -15, -26, 27, 6, 20, 3, 10, 9, 21, -26, 13, 19, -9, 13, 21, 22, 4, -18, -9, 20, -29, 16, -8, -16, 6, 12, -12, -6, -13, 5, -29, 5, 5, 14, 0, -12, -5, -14, -19, -1, -17, -3, 9, -16, 5, -6, -9, 0, 8, 10, 9, -13, 9, 2, 9, 4, 13, -37, 15, -21, 23, 3, -35, 8, 17, -18, 26, -12, -4, 31, -2, 8, -12, 8, 25, 4, 16, 9, -5, 23, -12, 0, 6, -6, 6, 10, -2, -6, -27, -25, -9, -23, -9, 10, 0, -14, -2, -4, -18, 2, 25, -19, -7, 0, 0, -22, -21, -31, -2, 5, -24, -17, -11, 11, -15, 4, 10, 5, 19, 5, 20, 13, -22, 28, 8, 14, -11, 21, -10, -18, 3, 5, 6, 17, 24, 5, -20, 3, 13, 6, 1, 3, -6, 20, -6, 3, -3, -8, 6, -53, 22, 26, -15, -7, 26, 16, -4, 21, -14, -23, 18, 3, -31, -9, 23, 21, -31, 9, 10, -7, -18, 20, 21, 
-29, 13, 5, 5, -1, 22, -19, 13, 22, 15, 7, -14, 15, 1, 10, 14, -17, -11, -24, 9, -5, -10, -16, -1, 1, 0, 2, 7, -18, 13, 18, -14, -10, -6, 23, -38, -10, -7, 6, 6, 5, -4, -26, -19, 26, 17, 22, -4, -10, 0, -1, -17, 32, -14, -4, -15, 16, 5, 0, -39, -7, -11, -14, -5, -14, -4, 19, -3, 43, 23, 13, 9, -19, 18, -21, -19, 0, 23, -19, 13, 13, -5, -25, 4, -14, -11, 16, -6, 24, -20, 29, -13, -1, -19, 16, -6, 22, -23, 25, -14, -14, -25, 24, 16, -16, -5, -23, 6, -28, 16, -10, 12, 13, -31, -14, 9, -16, 14, -2, -17, 26, 6, 1, -3, 10, -7, 14, 1, -7, -15, 17, -18, 11, -3, -3, -10, -23, 14, 33, 5, -6, 13, -7, -1, 13, -25, 10, -35, 6, -9, 12, 23, 17, -21, -7, 12, 6, 8, -30, 12, 6, 18, -28, -7, -4, -8, -4, 15, -22, 20, 26, -22, -14, -33, 17, 21, 3, 4, -1, -16, -22, 30, -16, -18, -5, -1, -6, 14, 4, 10, 13, -19, 10, -6, -11, 17, -8, -1, -25, 16, -9, -3, 3, 9, 1, 29, -35, 1, 20, 2, 43, 34, -29, -26, 22, 8, -16, 9, 1, -16, -18, -14, -19, 31, -3, -19, 7, -9, -8, -5, 26, -28, 45, 6, 8, -4, -13, -4, 11, 1, 11, -14, -34, 0, -30, 15, -9, -7, 7, -23, -15, -10, 0, 6, -26, 0, 15, -20, -1, 23, -9, 24, -3, -25, 18, -11, -7, -3, -12, 5, 5, 15, -3, -5, 9, -26, -15, 11, 0, 29, 11, -10, 4, -17, -24, 19, 12, 10, -13, -5, 13, -14, 14, -5, 0, 1, 16, 19, -9, 9, -14, -10, 11, -8, 14, -11, -14, -7, -18, 3, -37, -4, 8, -12, 8, -10, -17, -1, 16, 19, 5, 1, 6, 7, 7, -7, 4, -14, -35, -3, 10, -17, 8, 16, -9, 2, 28, 16, -4, -8, 3, -1, 2, 9, 0, -4, 9, 5, 14, 11, -5, 7, -12, 31, -17, -3, 1, -4, -5, -22, 40, -21, 15, 14, 0, -19, -13, 35, 26, 12, 11, -32, -2, -9, 22, 10, 8, -18, 24, 13, 17, 9, 17, -26, 34, -36, -15, 11, -9, 41, -8, 0, -23, 6, -10, -5, -25, 5, -1, -60, -24, 11, 6, -34, 6, 0, -3, 17, -18, 9, 28, 20, -2, -21, 1, -15, 8, 20, 8, -2, 2, -16, -27, -1, -7, -31, -12, 17, 39, 20, 7, -41, 16, -9, 20, 15, 35, 24, 2, -20, -13, -19, -4, 9, -12, 8, -5, -38, 20, 29, 9, -5, -18, -31, -13, -40, -10, -11, -7, 6, -10, -3, -22, -37, 15, -6, -32, -7, -11, -12, 21, 38, -4, -35, 1, -34, 19, 38, -7, 32, -6, 7, 6, 18, -15, 1, -8, -3, 14, 11, 27, 4, -21, -7, -4, -4, 13, -38, 4, 19, -10, 16, -3, -35, 17, -8, 10, 2, -12, -8, -1, 19, 31, -41, 6, 11, -5, 21, 3, -2, -3, -40, -19, -12, 37, 32, -28, 9, -4, -4, -14, -5, -16, -10, -25, 15, -6, -43, -16, 1, 26, 38, -6, -2, -32, 7, 7, -14, 27, 21, 2, 6, -7, 22, -17, -19, -27, 10, 0, -2, -2, -19, -2, 14, -13, 31, 20, -31, 8, -7, 6, 6, -19, 13, -29, 17, -3, -11, -1, -4, -18, 0, -14, -4, -12, -23, -28, 11, -11, -7, 0, 20, 9, -18, -34, -1, -3, -8, 2, 15, -1, 1, -13, 9, -21, 6, 0, -3, 19, 8, -11, 5, -12, -17, -23, -1, 20, -14, -9, -20, 6, 10, 6, -5, -20, 12, -11, -24, -7, 22, -4, 22, -12, -19, -28, -4, -28, -13, -15, -13, -23, 10, -17, 7, -19, -2, -22, 0, 1, 1, -28, 8, -23, 34, -6, 18, 18, 0, -11, 4, 22, -15, -56, 36, -4, 10, 12, -1, -8, -5, 27, -21, 15, -22, -13, -7, -1, -15, -28, 9, 5, 2, -6, -16, -11, 0, -21, 17, -27, 5, -3, 25, 18, 28, 7, 5, -20, 11, 6, 3, -8, 9, -2, -15, -9, -19, -1, 2, -3, 9, 12, -6, -17, 15, -11, 10, -6, 13, -2, 0, 12, -8, -10, -16, 2, 17, -21, -13, 19, -18, -13, -17, 3, 11, -38, -9, 1, 9, 16, -16, 11, -2, 29, 5, 22, 4, -2, 2, -4, -9, 5, -1, 3, 9, 13, 11, 7, 12, 8, -26, -1, -18, -13, 6, 6, 16, 6, 9, -5, 18, 27, 7, 11, -12, 20, 8, -3, 25, 31, -17, 21, 56, -4, 9, 23, 24, 8, 0, 40, 11, 1, -1, -17, -2, 1, 11, 6, 3, -25, 6, 3, 13, -19, -19, -3, -15, 4, 13, -14, -18, -30, -14, 10, -8, -34, 18, 3, 6, 20, 0, 18, 26, 2, -8, 19, -9, 13, -13, 31, 5, 4, 10, -11, -8, 23, -16, -2, -6, -10, -3, -16, -6, 28, 16, 21, -19, 15, 22, 13, 3, 9, -15, -18, 28, -3, 3, 8, -22, 2, 15, -10, -22, 17, -17, -16, -14, -15, 13, 
-11, 22, 6, -9, 22, -13, 10, -20, 1, -22, -21, -9, 35, 7, -12, -3, -18, -32, 11, -26, 4, -2, 17, -18, 11, 14, -23, -18, -7, -7, -15, 6, -5, 14, 14, -29, -27, 7, 9, -13, -24, 7, 24, -18, 15, 15, -9, 12, 23, -5, -7, 14, -2, 12, -2, 0, 22, 2, 16, -11, 23, -28, -2, -2, -55, -3, -25, 5, -2, -14, -7, -1, 12, 16, -6, -10, -28, -14, -3, -19, 17, 22, 4, 19, 9, -16, -12, -2, 26, -1, 2, 5, -8, 11, -36, -13, 18, 5, -14, 3, -20, -18, -1, 24, 13, 8, 3, -10, -3, -24, 16, -6, 31, -17, -5, 4, -37, 4, -9, 18, -1, 19, 12, -1, -2, -14, -7, 8, -24, 21, -2, 14, -13, 16, 11, -21, -37, 36, -28, -21, 19, -7, 9, -1, -7, 12, -20, 10, -3, -17, -4, 15, 5, 21, -35, 40, 25, -8, 3, -8, 6, -21, 9, 12, 1, -19, 5, 16, -6, -25, 1, 13, -21, -8, 16, -16, -27, -2, -10, 10, -28, -14, -6, -28, -4, 5, -21, -7, 31, -4, 7, 11, -28, 28, -11, -5, -1, 3, 19, -10, -11, 11, 11, 13, 13, 2, -16, 12, 5, -15, -18, 20, -3, -10, -16, -7, 18, 2, -24, 34, -9, -16, 17, 1, -11, -1, -3, -9, 3, 0, 0, -19, -25, -10, 10, -6, -14, 4, 5, -14, -24, -7, -1, 6, -4, -21, -6, -1, 18, -6, -13, -4, 4, -5, -12, 19, -31, 4, -9, 1, 11, -20, -18, -4, -29, 22, -17, 5, -9, -5, 10, 2, -21, -10, -14, -9, -21, 14, -3, 17, -26, 36, -2, -22, 15, -14, 12, -12, -25, 11, -37, 7, 53, 1, 3, -20, 3, -18, -29, 44, 18, -5, -22, 18, -43, -22, 13, 24, -9, -15, 2, -8, -30, 7, 7, 23, -17, -16, 12, 1, -42, -22, -12, 4, 5, -5, -17, -4, -14, 5, -4, -8, 7, 6, -5, -3, -7, -6, -26, -12, 13, 5, -13, -19, -8, 2, 2, -3, -12, -4, -9, -15, -25, -4, 5, -3, 11, 7, -10, 15, -39, 7, -36, -7, -23, 11, -19, -9, -35, 5, -10, 16, -8, 3, 6, 2, -22, 14, -29, 40, -25, 13, -8, 18, -34, -13, -22, -6, 9, -19, 3, -1, 2, -8, 34, -1, 1, -3, -23, 8, -1, -6, -41, 29, -1, 17, -23, -7, -7, 2, 5, 5, 5, -20, -10, -14, -32, -15, -6, 11, 1, 1, 23, -18, 8, 6, -40, 18, 7, -11, -23, -24, -30, 32, 14, 10, -16, -16, -12, -12, -8, 5, -23, 4, 20, -16, -5, 6, -17, 2, -6, 5, -5, 9, 12, -13, 10, 7, 9, 14, -7, 22, -20, -20, 7, -9, -17, 35, 6, 4, -25, 13, 5, -4, -18, 13, -26, -1, -23, -8, -5, 16, -30, 31, 3, 15, 1, -13, -20, 26, 8, -20, 1, 7, -4, 0, -23, 27, -4, -9, -22, -17, -23, -26, -16, 15, 10, -8, -15, 3, -8, -14, -13, -18, 30, -30, -14, 6, 4, 11, -14, 25, -43, 31, -11, 8, -12, 18, -13, -10, 11, 5, -1, 11, -12, 20, 4, -4, -10, 15, 15, 4, -6, -12, -19, -6, 18, 11, -15, 12, -10, 12, -5, 8, 7, 18, -8, -16, 6, -13, -27, 6, 34, -2, -2, -12, 0, 15, -14, 2, 5, -13, 11, -2, 1, 0, -3, -24, -1, -1, 6, 8, -12, -19, -7, -1, 10, 3, -14, -8, -27, -15, 13, -15, 1, 1, 26, -8, -3, -17, -38, -2, -6, 13, 0, -10, -2, -8, 2, 11, 13, 22, 0, -3, -24, -6, -32, -8, 5, 7, -33, 2, -20, -7, -6, -2, -26, -2, 2, -7, -5, -21, 15, 11, -20, 7, 15, -11, 3, -15, 14, -32, -48, 25, 1, 11, -11, -19, 30, -45, -12, -15, -2, -10, 7, -17, 9, -24, 23, -4, 1, -2, -5, 0, 14, -8, -26, 5, -17, -21, 2, 15, -12, -8, -11, 19, 51, -10, -5, 5, 18, 7, 24, 5, 1, -20, 11, 11, 2, -16, -14, -17, 3, -13, -10, -9, 18, 15, -20, 4, -15, -16, 9, 5, 22, -24, -24, -44, 14, -2, -17, 14, 7, 2, -4, 9, -9, -5, 15, -3, 2, -45, -15, 6, -25, -14, 1, -18, -7, -8, -9, -17, 20, -22, -24, -16, -28, -17, 2, 15, 14, 2, 2, -20, 10, -10, 28, -6, -20, -14, -7, 2, 0, -24, 16, -5, -18, 13, 21, -9, -15, 33, -50, 21, 20, -4, 8, -4, -5, 3, 11, 2, -10, 8, 2, -18, -6, 0, -22, 26, 11, -25, 16, -12, -10, -9, -3, -2, 17, -14, -14, 4, -10, -18, -10, -14, 18, -15, 19, 18, -8, 19, -2, 24, 9, -20, 0, 2, 3, -19, 0, -12, 5, -14, 15, -14, -51, -1, -16, -18, 6, 18, 3, -22, 19, -4, 10, -17, 15, -3, -2, 8, -3, -18, -12, -7, 5, 10, 8, -7, 0, -18, -8, 17, 13, -23, -20, -12, -3, -13, 10, -9, 6, -9, 40, 0, 16, -9, -17, 7, -14, 
10, 18, -10, 2, 20, -21, 9, -6, -21, -11, -17, -28, 1, 9, -5, -5, 9, -7, 14, 7, 20, -16, -20, 14, 4, 11, -8, -16, 8, 24, 22, -9, -9, -2, -23, 20, 35, -17, -3, 15, -5, 31, -19, 8, 5, -30, 52, -34, -10, -7, 8, -33, 0, -10, 6, 34, 6, -3, 2, 11, 14, -5, 29, 3, 12, 16, 11, 9, -27, -16, 21, 2, 2, -14, -7, 2, 28, -7, -3, 6, 10, 4, -10, -4, 3, 8, 13, 8, -7, -21, -17, -16, -9, -23, 40, 9, -12, -24, -18, -29, -8, -3, -8, -30, -24, 21, -6, 1, -14, 6, 6, -24, -20, -12, 4, 8, 3, 15, -5, 9, 17, 25, 2, -9, 0, 21, 2, 2, -6, -16, 7, -33, 6, 4, 0, -1, 5, -2, 9, 6, -23, -8, 14, -21, -5, 32, -21, -1, -16, -3, 3, 7, 17, -1, 10, -35, -29, 10, 22, -15, 17, 18, 18, -7, 26, -17, 4, 0, -21, 7, -6, -31, 0, -1, 1, 0, 19, -14, 13, 7, -3, 3, -9, -13, -10, -13, 3, -6, 3, 0, 3, -1, -3, -2, -3, -38, -4, -13, 14, -17, 0, -12, -24, 10, 13, -11, -6, -10, -16, -3, 5, 16, 24, -24, -32, 2, 1, 9, -11, -29, -26, -11, 23, -16, -6, 20, -20, -19, 8, -7, -1, 14, 15, 27, -11, -27, -40, -20, -43, 23, -22, -6, 14, -19, -7, -30, -20, -5, -14, 7, -19, -44, -16, -28, -13, -22, 3, 27, -15, 15, 15, -20, 1, -7, 2, 14, 18, 10, 19, -19, 22, -2, -16, 20, 16, -39, 14, -37, -9, 14, -2, 1, -25, 10, 22, -12, 12, -14, 0, -13, 16, -13, 9, -18, 23, -6, 15, 27, -1, -23, 10, -33, 20, 16, -11, -13, 1, -14, 15, -21, 3, -16, 17, 9, -11, 33, -11, 40, -4, -35, -24, 8, 17, -9, -22, -1, 25, 14, -12, -12, 7, -13, 3, -8, -4, 16, -18, 9, -6, 24, 1, -17, 11, 2, 17, -13, 16, -34, -16, -8, -15, -16, 3, 8, 7, -5, -9, -15, 4, 7, -19, -18, -23, -21, 10, -2, -21, 39, 15, 33, -21, 2, -8, -17, -22, -11, -1, -23, -6, -8, -14, 0, -6, 13, -14, -11, 14, -20, -17, -15, -6, -10, -7, 18, -4, 15, 12, 8, -13, 14, 5, 42, -10, -8, -19, -9, -25, -26, -9, 14, -17, -18, 20, 16, -28, -10, -16, -10, -15, 0, 24, -17, 36, 14, 8, 7, -15, -14, -2, 17, -6, 29, -12, -22, 14, -3, 4, -23, 8, -16, -16, -21, 14, -10, 17, 14, 23, 23, -20, -8, -2, 10, -15, -10, 12, 21, -19, 13, 0, -5, -12, -5, 10, 8, 6, -13, -3, 23, 5, -9, 17, 4, -11, -3, 13, 22, -9, 20, -29, 6, -6, -4, 17, -10, 18, 20, -18, -23, 3, 22, -12, 7, 1, -5, -30, 30, -23, 22, -4, 3, -6, 16, -17, -2, 16, 17, -18, -9, 6, 25, -30, 14, -4, -13, 13, 9, 12, 11, 0, 5, 8, 15, 10, 7, 13, -4, 10, -15, -20, -31, -11, 2, -2, -3, 16, -22, -1, -17, 4, -12, 2, 0, 14, -5, 13, 5, -22, 30, -26, 30, -14, -2, 12, 1, -20, -24, -16, -15, -2, -23, -5, 35, 21, 19, -15, -8, 2, -29, 16, 33, -20, -7, 21, 46, -15, -2, 1, 6, -12, -3, 13, -4, -4, 20, 12, -20, 10, -11, 9, 20, 4, -2, -17, 50, -17, -16, -23, 34, -22, 17, -6, -21, -9, 12, -16, 22, 28, 9, -14, 27, -20, -12, -6, 26, 8, -3, -7, -5, -16, 6, -32, 18, -22, 25, -21, -5, -9, 28, -2, 17, -8, -9, -10, -27, -14, -17, 12, -14, 1, -16, 14, -21, -9, 15, 10, 0, 15, 11, -12, 12, -12, -5, -26, 8, -31, 1, 17, -4, 13, -18, -11, 20, 15, -6, -20, -4, -18, 23, 13, 15, 20, -10, -1, -1, -3, -9, 8, -4, 1, 16, -18, -22, -11, 9, -24, -4, -25, 6, -11, 26, -11, -2, 10, 13, -5, -4, 14, 10, 5, 8, -16, 8, 16, 20, -19, -23, -16, 10, 13, -8, 10, 0, -13, -35, 17, 18, 17, -6, -13, -7, -13, -24, 2, 8, 5, 16, 18, 21, 11, 15, -10, -23, -6, 2, -31, -10, 17, -6, -21, 5, 10, -9, 1, 26, 5, -12, -19, -3, 16, 17, 13, 17, -11, -6, -25, 11, -11, 11, 2, 13, 18, 35, -14, -24, 12, 30, -10, 6, -24, 1, -9, -20, -14, -19, 7, 19, 2, -3, 19, 23, 15, 14, -21, 28, 7, -4, -19, -23, -32, 2, -23, 14, 5, 11, -20, -21, -2, 12, 17, 21, 3, -12, -4, 2, -16, -14, -30, 1, -23, 8, 3, 7, 33, 12, 17, 27, -8, -13, 15, -4, 0, 5, 13, 14, -10, -15, 8, -4, 8, 21, -25, 18, -3, -9, 15, -14, 3, -2, 2, -6, 12, -18, 9, 9, -11, 8, -26, 27, 2, 4, -6, 11, -10, 10, 15, -1, -18, 20, -9, 
-14, 10, 27, -16, 15, 2, -2, -4, 11, 18, -2, 17, 26, -22, -13, 13, -9, 9, 5, 21, -16, -18, 2, -11, -1, -21, -9, -24, 0, 13, 7, 10, 18, -4, -43, 4, 7, 2, 5, -10, 4, 14, 0, -3, -18, 0, 12, -22, 4, 27, -2, -3, 11, 21, 37, -9, 5, -3, 17, 4, 12, 17, -15, 12, 2, -6, -6, -20, -21, 5, 17, 9, -18, -7, 5, 13, 2, -3, 23, -21, -7, 15, 18, -3, -1, -11, -14, -16, -4, 4, 5, 15, 13, -13, 23, 17, 6, 1, -24, 2, 13, -9, -18, 15, -34, -24, -2, 23, -34, -11, 7, 1, -15, 25, 4, 16, -20, 12, -3, -12, -20, 2, -4, 3, 8, -4, 20, 13, -47, 6, 10, 0, -21, -18, 5, -11, -15, -4, 9, -7, 1, -14, 40, -2, 4, -1, -10, -3, 22, -6, -24, -26, -12, -32, -23, -6, -15, -14, -23, -3, -32, 17, 12, -11, -16, 5, -6, 12, 14, 10, -16, 16, -8, -1, 26, -20, -20, -8, -5, 0, -8, -10, -24, 12, 11, 15, -11, -8, 7, -8, -12, -24, -6, -3, -7, -9, -32, -12, 14, -21, -2, 1, 12, -3, -5, 16, 15, -17, -33, 12, -35, -18, -11, 7, -3, -7, -7, -9, -21, 5, 44, -2, 15, -12, -5, -3, 5, -4, -7, -15, 24, 13, -22, -7, -1, 0, 38, -5, -4, 16, -1, -23, 3, 21, -15, -14, -26, -6, 13, 4, 20, 23, 0, 11, -24, 2, 3, 3, 12, 8, -3, -19, 7, 10, 1, -3, -4, -1, -13, -17, 12, 12, 18, 7, 3, 17, 33, -21, 37, -22, 0, 26, 26, -44, 15, -14, -7, -7, 7, 17, -11, 2, -18, 1, 27, -18, -4, -35, 4, 11, 31, 13, -21, -2, -9, 33, 9, 6, 39, 3, -6, -17, 18, -20, -13, -2, 14, 18, 13, 18, 6, 2, -13, 1, 2, 2, -2, -11, 14, 8, 13, 19, 0, 7, -12, 12, 9, 12, 5, 2, -16, 13, 1, -15, -24, 25, 19, -13, -6, 4, 15, 5, 17, -14, 16, -9, 0, 17, -16, 19, 3, 9, 20, -9, -4, -21, 28, -10, -5, -5, 13, 1, -4, 9, 17, 3, 21, 3, -9, 10, -18, -5, -8, 3, -14, 3, -7, -1, 22, 18, -45, 7, -18, -23, -20, 6, 5, -9, 20, 6, 4, -19, 8, -11, -30, 11, 0, 17, 18, -1, 12, 12, 4, 5, -23, -14, 3, -13, -19, 15, 10, -7, 14, 14, -3, 13, -2, -22, 2, 9, 15, -21, -21, -13, 3, -38, 16, 16, -24, 15, 0, 9, 1, 14, -22, 19, 2, 9, -20, 0, 18, 15, 16, -22, 6, -4, 23, -22, 13, -9, 48, -12, -19, -10, 46, -37, 8, -2, 6, -13, -3, -19, 1, 9, 3, 8, -44, -16, -17, -5, -1, 4, 7, 6, -2, 14, 14, 3, 21, 19, 8, -10, 13, -22, -10, 8, 2, 30, 22, -12, 17, 5, 13, 2, 20, 9, -12, -19, -3, 7, -32, -11, 30, 14, -6, -4, -1, -13, -26, 17, -13, -5, -13, 38, -37, 12, 8, -12, -3, 6, -6, 7, -21, 4, 2, -1, 14, 5, -4, 14, 1, -1, 18, 17, -17, -27, -11, 12, -37, 17, -27, 12, -28, 4, 8, -9, -2, 6, -9, -4, 30, 0, -13, 9, -4, -20, -17, -12, -6, 13, 7, 6, -22, 16, 0, -20, -4, -3, -2, 17, 4, 10, -16, -11, 18, -4, -18, -8, 10, 12, 20, -10, 28, 5, 26, 16, -19, 11, -15, 21, 14, -6, -16, -2, -12, -4, -24, -1, -13, 10, 17, 14, -1, 4, -3, 9, 3, -10, -14, -5, 45, -8, -3, 14, 7, -23, 18, -14, -11, 7, -33, 3, 16, 14, -22, 17, 2, 12, -3, 15, 18, -17, 18, -21, 6, -3, -8, 12, -6, -19, 10, -20, 20, -15, -6, -3, 6, -26, -12, -9, 1, -8, 31, 14, 9, 5, -15, 0, -16, -16, -5, -8, 9, -5, -17, -13, -1, 14, 9, -20, 6, 7, -14, -18, -1, -2, -4, 21, 17, -13, 10, 9, -15, 21, 7, 4, 14, -14, 14, 1, 16, -1, 16, 9, 26, -9, -1, -4, -9, -2, -4, -7, 25, 12, 9, -4, 9, -8, -27, 13, 5, 28, -2, -29, -12, -12, 24, -6, 7, 21, 1, -19, 6, -23, -7, 4, 7, -23, -5, -3, 16, -10, -13, 19, 12, 18, 10, -3, -19, -14, -3, 12, 15, -11, 20, 2, -17, -9, -9, -9, 25, 2, 0, 7, -5, 5, 1, 5, 30, 3, -6, 15, -10, -1, -32, 22, 10, -3, 14, 15, 18, -22, 7, 3, 15, -15, -16, -22, -6, 16, 18, 18, 20, -4, 1, -20, -20, 13, 5, -4, 14, -8, 13, 4, 5, -8, 13, 4, -6, -23, 17, -14, 0, 17, -3, 14, -1, 3, -12, -6, -20, 3, -12, -19, 0, -15, 21, 10, -11, -11, 12, 18, 27, 4, 16, -1, 30, 6, -6, -15, -14, 0, 13, -4, -21, 15, 18, -7, 28, 13, -6, 14, -9, 8, 20, 20, 7, -11, -10, 15, -19, -19, 12, 0, 17, -10, -1, 2, -21, 14, 18, -14, 18, 12, -15, -7, -30, -21, 
20, 0, 21, -21, -7, -10, 27, 2, 9, -13, -17, -27, 22, -1, -15, -11, -3, 17, -21, -44, -3, -11, -16, 7, 28, -2, 12, 0, 8, -4, -7, -25, 44, -19, 4, 18, -14, -13, 24, -1, 21, -10, 37, 31, -8, 0, -3, -6, 22, 15, 17, -6, 8, 1, 9, -8, 12, -17, 2, -14, -22, 11, -5, -8, 6, -21, -9, -22, -15, -12, 19, -5, -11, 16, 6, -13, 18, -21, 27, 3, -7, -2, -7, 26, -3, 1, 4, -5, -14, -19, 1, -9, 15, -5, 14, -3, 8, 17, -15, -32, 2, -2, 0, 10, 10, -1, 2, -16, -19, -3, 19, -17, -9, -15, -4, -9, 8, -15, -15, -11, 10, 14, 7, -5, 21, 1, 3, -10, -5, 8, 10, 18, -20, -8, 30, 16, -19, -7, -1, -17, 6, -12, -9, -10, -10, -7, 39, 7, -17, -27, 20, 11, -5, -5, 9, -23, -12, 8, -7, -22, 2, -7, 16, -14, 7, -7, 12, -17, 42, 23, 9, 6, 13, -3, 8, -16, 3, 33, -11, 5, -19, -4, 7, -24, 12, -11, 7, -35, 13, 7, -29, -28, -6, -16, -17, -36, -6, -31, 16, -20, 0, 3, 17, -16, -10, -19, -6, -10, 14, 26, 9, 14, -5, -14, 23, -24, 2, 11, 19, -25, -19, -7, 24, 22, -9, -3, 11, -15, 3, 2, 17, 21, -13, 11, -18, -12, -10, -9, -8, 12, 15, -3, -11, 4, 20, -29, 19, -8, -10, -12, 4, 3, -20, -23, -3, -33, 2, -22, 12, -27, 21, -25, -14, 29, 2, -20, 14, -17, -10, -15, -20, -13, 3, 15, -18, 4, -9, -32, -1, 4, 9, 3, 0, -7, -21, 9, 19, -28, -12, -20, -4, -16, -19, 14, -2, -13, 37, 2, 7, 13, -22, -15, 3, 28, 25, 7, -3, -22, 8, -9, 22, 33, 9, 2, -21, -1, -10, -11, 17, -7, 2, -10, -10, -6, -13, -28, 15, 16, -2, -2, -16, 7, 13, 10, 23, -17, -23, 2, 0, -19, 12, -35, 16, 5, -2, 4, -2, 5, -14, 10, 10, -6, 5, -21, 8, 19, -3, -14, 16, 4, 5, 12, -1, 1, -6, 17, 15, -20, 4, -14, 3, 9, 9, -27, -8, -20, 3, -42, 8, -5, -22, -14, -5, -36, -20, -8, 2, 7, 0, -1, 21, -2, 7, -5, -22, -11, -15, 16, 11, -12, -11, -18, 7, -3, -15, 14, 0, 9, 2, 25, -7, 23, 17, 23, -17, 9, -11, -8, 0, -7, 10, -31, 10, 35, -19, -24, 1, -11, 3, -20, 0, -22, 11, -27, -22, -7, -9, -23, 19, 28, -11, -16, 2, 3, -21, -28, 33, 8, 0, 4, 12, 22, -19, 16, 10, 20, -22, 15, -12, 16, -8, -7, 15, -14, 1, -1, 1, 16, 1, 7, -33, -37, -12, 2, 11, -35, -18, 10, 2, 25, -30, 9, -23, 11, 1, -14, 34, -7, 0, 2, -8, 7, 5, -18, 19, -14, -16, -8, 19, 4, 5, -17, 18, 16, -23, -15, -12, 10, -6, 5, -3, -10, -22, -20, -19, 22, -23, 6, -51, -16, 8, 14, -9, 0, 7, 12, 8, -20, 25, 15, -2, 17, -8, -7, -37, -42, -1, 5, 16, -25, -16, -12, 13, 33, 8, -34, -1, 30, 0, 3, -10, 8, 9, 1, -18, 22, -20, -7, -2, 2, 16, 24, 15, 21, 6, 2, 34, 5, 6, -2, 14, 21, 1, -14, -12, -8, -11, 3, -1, 2, 5, 6, 3, 4, -12, 28, -5, -6, -9, 7, 27, 21, -9, -11, 13, 34, -18, 19, -21, -20, 12, 15, -2, -11, -10, 9, -2, -3, 9, -8, 1, -29, -15, 6, -9, 0, 3, -32, -11, -8, 19, -13, -18, -9, -12, -37, 4, -24, -4, -43, 16, 4, -1, -26, -9, 23, -5, -3, 14, 24, -8, 33, 7, 13, 5, 11, 23, 27, -8, -3, -3, 2, -9, -8, -22, -4, -10, -14, 12, -9, -20, -7, -28, -29, 29, -11, 1, -64, -14, 17, 22, 11, -15, -17, -17, 21, -6, -20, -18, -20, 10, -19, 16, 13, -16, -5, 15, -3, 4, -57, -21, 11, -15, -64, -4, 11, -5, -13, 0, 25, 9, 16, 1, 23, -3, -14, -5, 10, 13, 3, -14, -26, -13, -1, 1, -8, 2, -26, 19, 19, 13, -25, -15, 0, -23, 17, -18, -21, -21, -15, -12, -13, -10, -33, 0, -18, -1, 26, 2, 8, -22, 5, -17, -27, -12, 4, 1, -39, -19, 12, -3, -7, -13, -5, -3, -3, -7, 20, -34, -28, 3, 13, -20, -7, 14, -17, 8, 3, 26, -26, -4, -4, -11, 4, -2, -38, -26, 6, 6, 0, -6, 19, -20, -28, -7, -1, 7, 3, -4, 0, -15, -16, -3, -10, 13, -9, -22, 4, 16, -6, -10, 16, -8, -27, 18, -21, -7, -27, -14, -7, 10, 13, 18, -15, 25, 16, -15, 10, -8, 8, -20, 31, -20, -28, -23, 21, -3, 10, -12, 6, 1, -29, -29, -20, -14, -28, 1, -31, -8, -23, 5, -15, 13, -24, -7, 1, 12, 5, 23, -15, 6, -11, -9, -25, 0, -17, 27, 8, 4, -18, -5, 7, 
-32, -20, -18, 25, 20, 19, -1, 9, -17, -2, 14, -25, -1, -27, -4, -10, 2, -4, 7, -9, -16, 5, -10, -15, -12, -1, 16, -30, -5, 17, 5, -13, -1, 16, 2, -24, 3, 14, 1, -20, -5, -10, 17, -8, 7, 6, 8, 21, 10, 5, -6, -29, -4, -11, 18, -20, 40, 12, -15, -18, 14, -37, 13, -9, -8, 7, -10, 15, -11, -6, -17, -24, 11, -16, -12, -24, -23, 8, 7, -41, 12, 13, -1, -2, -22, 26, -9, -25, -8, 9, -14, 1, 19, -14, 5, 2, 8, -9, -11, 6, 16, 0, -18, 30, -28, 7, 20, -3, -17, -28, -16, -11, -13, -17, -3, 4, -21, -29, -22, 5, 15, 10, -6, -7, -16, -11, 1, -10, -15, -2, -31, -26, -22, 14, 11, -43, 27, 26, -21, 7, 14, -14, 17, 19, 4, -24, -17, -3, -21, -21, 18, -5, -6, -5, -25, 0, 9, 14, -7, -2, -16, 24, -27, -7, 18, -17, 18, -14, -9, 1, -20, 43, 0, -14, 11, 31, -31, -19, 24, 6, 15, -24, 8, 17, 20, -22, -17, -15, -21, 19, -9, -7, -7, 13, -12, 28, 14, 4, -1, 2, 5, 0, -25, -10, 20, -6, -7, 14, 25, -13, -22, -21, -20, -20, -2, 18, 20, -15, -1, 5, 11, 2, -8, -11, -15, -2, -27, 1, -15, -5, 2, -18, -17, 4, -5, -11, -16, 24, -14, 14, -15, -14, 6, 5, -12, -12, -17, -1, 29, 0, -2, -8, -10, 0, -20, -25, 21, 21, 16, 12, 4, 3, 18, 4, -15, 0, -7, 39, -7, -9, -14, 37, -1, -21, 12, -3, 12, 19, 4, 8, 3, -32, -9, 7, 9, -5, -11, -11, -15, 19, -19, 12, -9, -1, -6, -10, 9, 18, -9, -25, 10, 21, -20, -27, -14, 13, -18, -14, 11, 24, -17, -24, 24, -2, 34, -21, 14, 22, -19, 0, -24, -19, -7, 6, -4, 34, -22, -15, -4, -2, 7, 18, 11, 0, -18, 8, -8, 24, -18, 18, 12, -8, 3, -16, -16, -11, -7, 5, -4, 3, 17, 29, 8, 7, 33, 20, 5, 17, -19, 9, 27, 9, 1, -31, 5, 11, -18, -7, -37, 7, -2, 14, -22, -23, -2, -2, -11, -23, -18, 7, -17, -11, -17, 13, 0, -20, -6, -3, 6, 10, -8, 13, 26, 11, -14, -14, -13, -2, -5, 8, 7, -8, -8, 5, -8, -21, -7, -5, 0, -3, -10, 4, -19, -32, 10, 0, 15, 15, -17, 18, -11, -12, -10, -6, 25, -56, -18, -27, -20, -53, 4, 22, 10, -18, 27, 1, 11, -10, -21, -20, 11, 16, 20, -15, -7, -8, 4, -38, 13, 8, 11, -6, 6, 16, 14, -28, 19, 14, -20, 1, 19, -21, -3, -3, -2, 13, -11, -49, -14, 4, 5, -19, 6, 2, -5, -13, 5, -27, -12, 2, 1, -2, 15, 23, -16, -6, -15, 32, -22, 18, -28, 5, 4, 11, 11, -30, -15, 19, 2, 11, -21, 7, 11, 17, -18, -6, 0, 4, 11, -14, -4, -39, -32, 4, -27, -7, -10, -15, -13, -3, 33, -9, -9, -18, 5, -23, 15, -12, -16, 4, -21, 7, -10, 0, -22, 3, 5, 7, -18, -21, -23, -13, -20, -7, 25, -10, 12, 11, 10, 8, -21, 17, -2, -5, 4, 8, -17, -23, 3, 8, 3, -10, -17, -35, -14, 21, 16, -6, -11, -5, 1, -3, 9, -8, 19, 6, 12, 11, 0, -20, 19, 15, 10, 6, 2, 13, 16, 14, -19, -3, 1, 7, -15, -23, -24, -23, -14, 6, -22, -5, 5, 7, -3, 16, -11, -19, 2, 26, 16, 0, -8, -27, -11, 4, -17, -10, 36, 10, -1, -27, -16, 26, -12, -39, 8, -23, 7, -12, 8, -25, -3, 10, 11, -31, 1, -6, -8, -26, -19, -14, -26, -26, -13, 9, 4, -32, 18, -12, -19, -18, -18, 1, -2, 5, -21, -18, 13, -5, -14, 16, -2, -24, 18, -9, -11, -4, -5, -11, 7, -23, -7, -2, 14, -13, -37, 5, -21, -16, -5, -8, 5, -5, 1, 4, -21, -2, -4, -2, 1, -27, -26, 0, -2, -29, 16, 5, 9, 16, -7, 26, -22, 8, 1, -32, -7, -18, -21, -16, -2, -13, 6, -19, 11, -8, 17, 0, -6, -9, -22, 7, 15, 11, -12, 0, -2, -38, 1, -13, -23, -1, 21, 8, -8, -20, -1, -6, 14, -4, 17, -34, 7, 19, 19, 9, 20, -16, -9, -23, -2, -25, 18, -8, -8, -10, 21, -24, -8, -13, -1, -19, 36, 1, 2, -19, 14, -13, 4, -12, -25, -19, -4, 10, 6, 16, -6, -33, 4, -3, -22, -8, 7, 18, -17, 4, 25, 24, 6, 3, -19, -17, 2, 2, -17, 19, 7, 3, 15, -7, -14, -23, 23, -18, -2, 13, -18, -27, -22, 15, 8, 22, 18, 17, -15, -4, 12, -4, -11, -14, 5, -4, 8, -29, -16, -3, -17, -18, 20, 25, -13, 10, 11, -15, 22, -2, -24, 1, -11, -31, 0, 6, -2, 12, 16, -15, 17, -4, 11, -19, 18, -16, 26, 11, -13, -20, 
11, -19, 15, -7, 8, -8, 17, -8, -21, -19, -7, -24, -25, -19, 17, 10, -17, 20, -6, -8, 54, 23, -4, -23, 9, -4, 5, 8, 34, 0, -9, 9, 13, 26, -3, -14, -12, -25, 1, -8, -19, -27, -23, 13, 11, -4, -20, 1, -8, 9, 25, 30, -1, -5, -9, 28, 19, 8, -20, 15, -17, 13, 13, -4, 4, 2, 19, -17, -7, -4, -12, -1, 4, -15, 19, -6, 10, 21, 12, -7, -9, -2, 11, 3, -6, -25, 4, 14, -16, -15, 1, -1, 14, 24, 12, -1, -17, 25, -3, -9, -14, -25, -8, -5, -16, 15, -8, 7, 16, -10, 11, -11, -16, -24, -10, -11, 1, -12, 18, 5, -10, -26, 18, 20, -14, 22, 14, -22, -2, 34, 23, 25, -15, -10, -17, 15, -9, 9, -7, -5, 0, -14, 1, 6, -11, 9, 15, 6, -11, 2, 19, -17, -2, 16, -2, -19, -9, 18, -5, -1, -23, -9, 24, -11, 10, -10, -7, 12, -6, -1, -17, 10, -12, 4, -12, -4, 6, -12, 9, -5, 15, 9, -22, -10, 0, -15, 13, -7, 43, 14, -2, 1, 11, 19, 7, -7, 16, -11, 7, -5, 6, 17, 20, -2, -1, -3, 13, 2, -21, 20, 16, -6, 40, 33, 13, -9, -14, 10, 33, 19, -38, -27, -11, -3, -17, 12, 10, -10, -2, 7, 15, -8, 8, -13, 31, -8, 5, 17, 11, 13, 7, 15, -7, -22, 16, 2, 14, 10, -21, -5, 14, 25, -1, 1, -1, -15, -16, 31, 0, -15, -36, 4, 16, 14, 16, 10, -19, -5, -11, 8, -11, -12, -10, -19, -8, -20, 11, 24, 9, 17, -17, 11, 0, 26, -12, -21, 13, 15, -13, 8, 23, -9, -28, -19, -11, -20, -1, -9, -8, -11, 14, 20, -8, -20, -6, 24, 7, -31, 32, 7, -11, -8, -3, 3, -6, 4, -9, 0, -15, -4, 11, 26, -14, -13, -24, -17, 7, -10, 2, -4, -20, 9, 18, 26, 12, -9, -15, 25, 6, -25, -25, -29, 18, -10, -12, -10, -3, -15, -10, -17, -21, 9, -20, -12, -29, 2, -21, 3, 14, 8, -12, 21, -15, 4, -20, -16, -5, -16, 13, -12, 25, 22, -4, 3, 19, -7, -1, -13, -8, 7, -15, -6, 1, -9, 6, -16, 10, -18, -4, -14, 0, 4, 10, -20, 21, 35, 26, 11, -11, 4, -15, 2, 32, -9, 14, -33, -22, -27, -12, -25, -13, 7, 30, 1, 3, -15, -19, 20, -14, 7, 13, -27, 17, 19, 14, -11, -25, -14, 18, -17, -2, 10, -5, -18, -15, -7, -6, 17, -19, -34, 2, 4, 24, 4, 37, -10, -22, 14, 16, 20, 24, 9, 3, 10, -10, -11, -4, -2, 9, 12, -7, -6, 8, -18, 2, -17, 14, 15, 28, 7, -13, -16, 0, 1, -13, 7, -17, 9, 11, -5, 18, -25, 15, 7, 13, -14, -6, 10, -14, 10, 14, -12, -19, -3, 9, 6, -23, -24, 24, 14, 16, 9, -7, -22, 19, -22, -3, 12, -10, 8, 12, -4, -12, -27, -16, -19, 0, -11, 5, -13, -19, 0, 4, -7, 5, -2, -13, -36, 11, 23, -3, -4, -1, -21, -5, 9, -9, 0, 5, -7, -6, 3, -15, -12, 10, -17, -22, -19, -27, -10, 19, -26, 10, 0, -2, -22, 17, 6, -8, -42, -20, -9, -23, -10, 23, -15, 14, 18, 0, 11, -3, -21, -12, -12, 1, -4, -18, -12, 4, 10, -14, -7, 10, -11, 2, -9, 7, 10, 21, 9, -7, 27, -21, -27, -20, -16, 11, -1, 8, -11, 7, -14, -10, -9, 18, -14, 11, -14, 7, 13, -6, -4, 15, -33, 14, -42, -7, -27, 31, 12, 3, -24, -1, -8, -16, -2, 10, -1, -15, -6, 14, -26, -18, -15, -12, -8, -25, -17, -12, 14, 12, -1, 13, -23, 9, 4, -11, -30, 18, -9, -12, -4, 3, 11, -8, 3, 2, -37, 3, -26, -27, 21, -5, -32, -21, 21, 0, -4, -9, 17, -20, 13, -3, 3, -23, -11, 4, 11, -9, -14, 1, -25, 4, -1, -20, 1, -22, -28, -20, -28, -18, -9, 25, -24, 5, 7, 5, -41, -8, 25, -1, -18, -16, 9, 5, -11, 5, 2, 13, -13, -13, -27, -19, -25, 24, 1, -18, 2, -7, 1, -11, -8, -1, 3, -11, -3, -19, 18, -8, 16, 4, 26, -34, -16, 5, 15, -5, -17, -7, 28, 7, -10, 8, 23, -8, -14, -6, 26, 1, -8, 6, 0, 9, 17, 39, -31, 42, 29, 11, -11, -22, 22, -11, 11, -25, 3, 9, 29, 8, -14, 27, 42, -17, -13, -13, -3, 9, 16, -49, -62, 24, 19, -10, -34, 11, 3, 11, -2, -14, -29, 16, 3, -19, 14, 46, 18, -2, 0, 6, 22, -17, -12, -5, 12, -1, 1, 12, 3, -10, 1, -2, 11, 1, -7, 9, 17, -11, 15, -19, -24, -1, 29, 7, 17, -16, -10, -5, 0, -11, 2, -12, 10, 3, 25, -12, -35, 16, 19, 15, -22, -5, -4, 31, 7, -5, -1, 3, -3, 10, -14, 16, 23, -4, -36, 13, 26, 19, 
-10, -9, 4, -33, -25, 3, 17, -1, 4, 32, -1, -33, 9, -21, 5, 7, -21, 15, -4, 5, -15, 11, 7, -12, 14, 14, 22, 8, -8, -13, -13, -13, 9, -13, -36, 8, 3, -17, -6, -20, 25, -27, -33, 14, 6, 16, 22, -15, -3, 9, 9, 24, 20, -13, 16, 18, 13, -3, -17, 11, -7, 19, -2, -6, 29, 23, 46, -2, -3, -19, 23, -13, 24, 9, -10, 5, 24, 15, -13, 4, -7, 55, -1, 13, 25, -12, 1, 14, -10, -16, 30, -5, 20, -4, 21, 14, -11, -21, 7, -53, -18, -7, 36, -2, 9, 36, 0, -12, 13, 13, 8, 13, -14, 1, -49, 16, 45, 11, 40, -8, 6, 8, 3, -15, -6, -18, -7, 11, 6, 16, 2, 0, 10, 0, 0, -3, 40, 31, 18, -32, 1, 6, -3, -19, 33, 24, 9, -1, 7, -19, 23, 19, 8, 7, 30, 11, -7, 14, 9, 6, 18, -18, 9, 15, 14, 5, -2, -3, 3, 15, 2, 33, 7, 17, 23, 5, 31, -16, 8, 2, -6, -1, 5, 6, -6, 13, 21, -10, 22, 14, 9, 6, -13, 8, 19, 19, 20, -13, -2, 10, 2, 31, 27, -6, 28, 14, -3, -13, 36, -5, -11, -15, -7, 8, 9, 8, -1, 9, 17, -11, -15, -5, 18, -17, 4, 5, -14, 16, 7, -14, 5, 11, 31, 22, 3, 4, 24, -21, 4, 19, 15, 39, 0, 18, 41, 2, 6, -8, 13, -15, 11, 11, 30, 13, -2, -23, 1, 47, 39, 6, 16, 19, 3, 1, 8, -5, 3, 19, 9, 8, 5, 14, -9, 16, -15, 2, 15, 5, -29, -1, -4, 8, 12, -1, 14, -18, 9, -2, 0, 34, 10, -16, -3, -10, -16, 9, -4, -5, 7, 0, -12, -10, 13, -21, -11, -12, -19, -7, 9, 10, -6, 4, 21, -12, 15, -19, -12, 8, -17, -14, 8, 18, -18, -3, 24, 14, -6, -7, 13, 9, 0, 27, 17, -6, 25, 12, 9, -38, -17, 11, -8, 4, 21, 0, -10, -16, 15, 2, -18, -5, 8, 11, -26, -5, -6, 5, 0, 0, 11, 2, -14, 28, 11, 35, -1, 14, 16, -4, 19, 2, -3, -26, -6, 15, -5, -16, 13, 16, 25, -10, -17, -7, 1, -6, 4, 19, -6, 39, 26, -4, 14, -17, 16, 13, 23, 44, 33, 13, 20, 18, 33, -2, 15, -2, -1, 3, 17, 3, -25, 29, 0, 8, 16, 4, 31, -5, -23, 4, -18, 21, 23, 0, -1, 3, 13, 3, 40, 31, 19, -22, 17, -22, 15, 24, 16, 32, 20, -11, -29, -2, 21, 33, 16, 23, 15, 12, -4, -11, 46, -18, 15, -4, 4, -13, 56, 8, -4, 19, -1, 2, 0, 15, -22, 12, -10, 28, 37, 14, 20, 10, 1, 18, -10, 7, -4, 1, -4, -9, 14, -9, -12, 7, -17, 18, 10, 7, 20, 20, -31, 25, 12, -5, -18, 11, 25, -21, -28, 7, -10, 34, -20, -21, -15, 14, -9, 21, -6, -7, 0, 9, -19, -15, 31, 7, 15, 25, 24, -10, -11, -9, 15, 29, 17, 12, 11, -16, -32, -12, -24, -17, 21, -6, -1, -16, -18, 16, 1, -33, -12, -19, -12, -17, 22, -5, -30, 10, -12, 17, 6, 8, -7, -16, 1, 19, 9, 1, -12, 19, -16, 14, -13, 24, 18, -6, -2, 8, 15, -14, -17, 7, 32, -3, -16, 19, -7, -6, 4, 3, -5, 19, 13, -1, -25, 14, -10, -10, 13, 14, -15, 2, -10, -18, 25, 31, 13, 6, 3, 19, -29, -3, -35, 14, -2, 15, -20, 18, 0, -5, -6, 29, 25, 10, -14, -18, -15, 5, 3, 17, 12, 16, 5, -16, 41, 7, -11, -19, 5, -7, 22, -1, -2, -13, -10, 13, -19, 5, 9, -16, -18, -8, -13, -20, 25, -9, -8, 13, -8, -10, -3, 30, -11, -20, 2, -13, -10, -15, -17, -11, -20, 17, -9, -21, 2, 10, 3, 1, -23, 38, -4, -34, -11, 10, 17, 16, -6, -15, 5, -14, 4, 0, -18, -3, -16, 9, 0, 19, 2, -17, 2, -34, -5, 2, 4, 6, -17, -28, -8, 0, 5, -29, 10, -4, -20, -21, 8, -5, 12, -10, 10, -22, -1, -22, 28, 12, -17, 16, -21, -7, -2, 9, 14, 7, -2, -14, 2, 11, -17, -25, -19, -7, -12, -14, 8, -14, 19, 1, -1, -11, 10, -4, 15, 5, -12, -3, -13, -25, -2, -11, -14, -10, -2, -14, -3, -8, -3, 2, 16, -15, -26, -12, 23, 12, -1, -6, -14, -23, 2, -13, 12, 20, 18, 12, -4, -18, -34, -5, 15, -14, 19, -2, -9, 12, -4, -36, -13, -4, 7, 30, 4, 39, 3, -18, 10, 6, 19, 16, -2, -25, 9, 0, -9, -13, 6, 5, -13, -39, 5, -20, 0, 27, -22, 4, 13, -7, -15, -21, -18, 4, -5, 2, 16, -28, -4, -30, -5, -12, 9, -16, -18, -6, 7, -22, 12, 13, -13, 12, -4, -14, 12, -6, 17, 43, -16, -25, -7, 4, 7, -10, -21, 16, -4, -5, -4, -11, 5, -15, 2, 8, -10, 5, -10, -7, 9, 20, 3, -2, -14, -23, 11, 10, -5, -6, 13, 18, 14, 11, -23, 
25, -7, 19, 6, -2, -7, 11, -9, 10, 9, 8, 8, 1, 11, 1, -1, -1, -15, 9, 3, -15, -15, 1, 11, 17, -2, 24, 22, 17, -13, -17, 6, -4, 9, -9, 19, -22, -8, 0, 3, 7, -5, 23, -31, 20, -19, -16, 27, -8, -1, 14, 6, -23, 8, 11, 22, 18, -8, 4, -19, -9, -5, -20, 7, -19, 6, -6, -28, 1, 21, 8, -11, 3, -3, -8, 11, 10, 0, -16, -15, 17, 10, -8, -1, -21, -15, -4, -16, -24, 0, -17, 17, -22, -16, -18, -13, 33, -43, 37, -10, 2, 5, -29, -5, -13, -2, -4, 33, 7, 8, 6, 7, -11, 14, 11, -11, -5, -16, 2, -6, -14, -9, 19, -16, 28, 8, -10, -13, -26, 4, 26, -11, 8, 7, 9, 10, -2, 6, 4, -11, -36, -15, -11, -1, 21, 36, 15, 15, -4, -18, 9, -10, 25, -8, 23, -2, -1, -12, -32, -9, 9, 40, -5, 14, 16, 18, -21, -7, -11, 4, -1, -8, -8, 13, -9, -8, 16, -16, 25, -20, 13, -1, -1, -8, -12, -2, -2, -17, 19, 5, -31, -8, -8, 2, -13, 3, -17, -17, 6, -9, 5, -48, 19, 4, 40, -27, 7, -6, 11, -23, 7, -7, 26, -13, 23, -7, -4, -24, 8, -23, 3, -23, 19, 19, 31, 30, -18, 9, -5, 6, -34, -6, 10, 3, -7, 8, 19, -16, -13, -13, 8, 1, -17, 8, -7, -3, -23, 13, 34, 3, -2, -31, -3, -19, 18, -11, -1, 7, 34, -24, 29, 14, 4, -10, 13, 6, -17, -12, -3, -6, -13, -15, 14, -7, -3, 12, 9, -5, -33, -8, 27, -2, 2, 7, -22, 14, -19, -22, 31, 7, -2, 9, -1, -10, 14, -22, -10, -22, 21, 3, -5, 2, -15, -1, 8, 3, 8, -9, 0, -13, 8, 17, 12, 0, -10, -27, -14, -16, -21, 18, -2, -7, -10, -12, -14, -11, -12, -6, 6, 18, 4, -5, -20, -14, 0, 7, -19, 3, 9, 7, -7, -21, 25, -11, -11, -12, 12, -13, -31, -21, -3, -18, 20, 36, -34, 16, -15, 5, -17, -11, 10, 16, 18, 25, 4, 20, 17, 14, 6, 12, 6, -18, -14, 0, 8, -27, 10, -13, -18, -12, -1, -3, -11, 5, 10, 18, 2, -6, -25, 6, -16, 16, 17, 42, 38, -23, -25, 1, 4, -9, -4, -5, -26, 4, -1, 4, 14, -13, -26, -25, 17, -27, 3, -1, -13, 12, -12, 52, 14, -16, 0, -9, 9, 5, 2, 20, -4, -2, -21, 17, -22, 8, -9, -14, 2, -26, -3, -6, -4, -15, -2, 26, -3, 7, -9, -1, 20, 5, 12, -23, -9, -2, -3, -11, -3, 14, 30, 24, 14, -9, -38, 14, 0, -6, 14, -4, 6, 6, -18, 4, -16, -6, -2, 7, 28, 12, -24, 9, 8, -8, -17, 23, 26, -13, -1, -3, -30, -7, -15, 12, -3, -45, -2, -8, -14, -10, -1, 11, 18, 6, -14, 1, 4, -18, 1, 1, -29, -22, -1, -5, -26, -4, 1, -21, 12, 7, -1, 14, 4, 22, 1, -7, 28, 33, -45, -6, -7, -1, 11, -31, -14, 10, -5, -20, 3, 19, -22, -13, 11, 20, 34, 14, 16, 4, 0, 7, 21, -8, 6, 5, -41, 15, -30, 7, -5, -9, 17, 9, 0, -12, 20, -18, -11, 5, -1, -7, -6, -25, -20, -6, -18, 9, -34, -7, -27, -19, -35, -22, -11, -8, 69, 15, 1, -21, 34, 40, 1, 27, -23, -17, 38, 15, 7, -14, -1, 20, -7, 1, -6, -7, -17, -13, -15, -10, -15, 5, -11, 6, 15, -2, 19, 6, -3, -14, -2, 19, 8, -16, -24, -22, -12, 7, -16, 13, 3, -23, -5, 5, -28, 2, -5, -15, 1, -18, -20, -26, 19, 17, -24, 5, -30, 9, 7, 22, 5, 12, -15, -3, -6, -15, -21, -10, -6, 8, -33, -12, -27, -23, -10, 17, -3, -4, 16, 15, -42, 27, -19, 22, -18, 2, -8, 5, 8, -2, 1, -5, -30, -23, 35, 20, -16, 2, 11, 26, -17, 17, -22, -3, -3, -15, -5, 3, 25, -11, 14, -10, 17, -22, -11, -14, 24, -7, 1, 10, -12, 1, -5, -6, 9, 2, -29, 7, -31, 31, -7, -25, 10, 10, -32, -16, -22, -35, -2, 7, -6, -25, -24, -2, -3, 4, 1, 1, 14, -9, -15, -21, 2, 10, -14, -7, 6, -21, 5, -1, 32, -8, 6, 13, -13, -5, -13, -19, 19, -10, -10, 9, -8, 10, -9, 10, 13, -19, -13, 25, -22, -9, 4, 22, -17, -25, 0, -21, -7, -6, -23, -24, -23, 19, -2, 26, 22, -17, 29, 19, 1, 24, 11, -12, 0, -18, -6, -11, -15, 12, -6, 6, -5, -7, 17, -17, 23, 11, 12, 8, 26, 0, 9, 11, 20, 26, -2, -3, -10, 3, 20, 3, 26, -2, 0, 11, -11, -1, -23, 9, -16, -19, 7, 17, -6, 11, -5, -8, -32, 0, 9, 7, -10, 17, 0, -16, 0, -1, -5, -8, -1, 3, -25, -4, 12, -20, 2, -4, -33, -23, -17, -13, -28, 21, -14, -2, -19, -8, -22, -4, 2, 33, 22, 
18, 6, -25, 24, 12, -19, -4, -27, 19, -16, 11, 2, -13, 9, 1, -16, -7, -12, -7, 17, -12, -1, -18, 19, 20, 1, -15, -22, -1, 10, 13, -20, 14, 4, 1, -14, -18, 25, 4, -6, -5, -19, -9, -22, 13, -22, 10, 11, 2, 8, -7, 21, 16, -4, 0, 16, 9, 13, -4, -1, -18, -3, -10, -20, -2, 10, -25, -6, -4, 23, 2, -6, -24, 0, -27, -2, -8, -8, 5, -15, -18, 13, 9, -26, -1, -11, 5, 11, 44, 6, 5, 12, 10, -34, 15, -1, 13, -4, -13, -9, -8, 35, 16, -17, -2, -14, 16, 13, -32, 3, -9, -9, 5, 13, -7, 14, 7, -22, 29, -11, 6, -13, 20, -2, -10, 1, 2, 9, 2, -25, 11, 13, 25, -12, 20, -50, 15, -11, 14, -13, 0, -22, -27, -21, 27, -16, -10, 21, 10, -25, 13, -26, 1, 9, 5, 6, -3, 4, 17, -9, -1, 3, -17, 10, -9, 11, -4, 0, 7, 6, 23, -3, 1, -10, 24, -6, -21, 2, 17, -3, 19, 5, -4, -32, -29, -9, 15, 12, 18, 4, 40, -8, -10, -2, 13, 24, -12, -20, 19, -13, 8, 17, -1, 17, -4, -14, -18, -11, 4, 8, 23, -23, 11, 25, 6, -14, -7, -2, 17, 4, -10, -17, -3, 24, 11, -5, 3, -16, -11, -13, -4, 15, 45, 6, 12, -23, 6, -4, -20, 11, -21, 6, -11, 25, -19, -6, -7, 12, -8, -4, -7, -14, 18, -19, -19, -3, 19, -2, 14, 17, 19, -19, 3, -16, 14, -3, 1, -11, -18, -21, 4, -7, 16, 1, 12, 10, 18, -3, -37, 13, 10, -4, 16, -5, -3, 0, 1, 8, 0, -3, -6, -16, 0, 42, 3, -15, -33, 0, 10, -20, 3, -30, -20, -27, 20, 3, -19, -26, 8, 28, 3, -31, 5, -2, 16, -14, -20, 16, -5, 5, -12, -6, 16, 28, 27, 18, 24, 29, -1, -13, -20, -19, -21, -20, 29, 22, 10, -5, 10, 5, -8, 26, 29, -24, -20, -13, -9, 17, -24, 6, -29, 27, 16, -24, -1, 12, 5, -16, -20, -18, -1, 20, 0, 0, 4, 7, -14, 32, 22, -16, 13, -2, 20, -6, 13, -4, -19, 3, -12, -4, -12, 17, 14, 1, -22, -3, -17, 8, -19, 6, -20, -17, 25, 16, 12, -12, -6, -16, -33, -7, 7, 23, 16, -5, 11, -23, -24, -17, -9, -11, 28, -2, 2, -8, 0, -18, -23, 7, 17, -1, -12, -2, -8, 10, -2, 0, -20, 11, -13, 17, -21, -11, 19, 20, -11, 5, -5, 4, 10, 17, -9, -19, -7, -2, 19, -9, 7, -4, -13, 17, 12, 3, -14, 24, -5, -7, -22, 14, -17, 22, 11, 13, 36, 49, 37, -17, 4, -5, -13, 30, 11, 0, 15, 34, 3, -20, 5, 5, -6, -14, -10, 0, -5, -20, -12, 21, -9, 21, 3, 14, 8, 12, 17, 4, 22, 46, 41, -11, 11, 23, -1, -15, -22, 37, -12, -14, -19, -11, -6, -34, -9, -9, -5, 1, 4, -34, 15, -25, 1, 6, 12, 22, 10, 7, -9, 2, 15, -37, -30, 5, -5, 21, -6, -22, -38, -9, -10, 20, 18, -5, 7, 23, 29, -9, -14, -24, -3, -15, -1, 19, 11, 14, 20, 28, -16, -17, -22, 24, -4, -7, 12, -31, -28, -2, 0, -2, 2, 58, 42, 17, 4, 8, 4, -13, -26, -33, -39, -3, 15, -4, -23, -1, -25, 39, 6, -8, 8, -2, -6, 5, 1, 8, 22, 7, -24, 0, 4, 15, 24, 12, 16, -6, -5, 10, 5, 22, 37, -7, 14, 14, 0, 25, -3, 18, -19, -6, -32, -18, 5, 14, 3, 17, 2, -26, -40, 18, -22, -7, 1, 10, -41, 4, -5, 13, -27, 11, 3, -6, -13, 23, 21, 0, 19, -7, -5, 3, 3, 1, -8, 10, 15, 16, 2, 18, 9, -14, 16, -16, -2, -4, -25, 2, -3, 38, 11, 23, 3, -15, -18, 14, 11, -5, 8, -20, 4, 10, -8, 10, 13, -18, 1, -13, 4, 1, -2, 10, 8, 48, 55, -5, -2, -2, 19, 45, 35, -26, 8, -3, -6, 2, -18, 7, -1, 22, 47, -9, 9, -16, -22, 50, 15, 17, 8, -1, -18, -1, -22, -5, -1, 10, -2, 16, 10, -4, -6, 6, 12, 21, -10, -10, -9, -15, 9, 18, -22, 10, -28, -18, 8, 22, 12, -4, -18, 9, 0, 6, -29, -15, -5, 15, -9, -4, -8, 16, 0, 4, 3, 16, -14, 8, -4, -3, 25, -26, 6, 4, 6, 12, 17, 20, 6, 5, 9, 26, -1, 11, -8, 23, 1, 31, -9, -19, -24, 10, 1, 1, -20, -24, -14, 32, -1, -8, -9, 7, -2, 17, -15, 18, 14, -8, -12, 31, 9, 10, 11, 6, -33, -16, 10, 0, -19, 3, 20, 32, -9, -7, 4, 19, -6, -10, -4, -3, 0, 1, 6, -17, 9, -16, -4, 5, -7, -15, -19, 8, 9, 9, 12, -16, -2, 10, -1, -8, 6, -13, -12, -2, -1, -19, 15, 0, -16, -20, 11, -9, 7, 22, 11, -1, 16, 21, -11, -8, 9, 25, 18, 17, 31, -1, -31, -10, 18, -2, -8, 0, 3, -5, 
1, 0, -23, 20, 16, 3, -15, 3, -17, 11, 2, 40, -1, -9, -12, 2, 7, -15, 18, -5, -4, -26, -12, 35, 2, 12, 9, 27, 1, -28, -3, -12, -9, -20, 22, 14, -1, -2, -5, 7, -19, -1, 22, 8, 27, -14, -20, 5, -15, -4, 9, -8, 8, 23, -10, 4, -22, 16, -23, 8, -5, -17, 0, -18, 12, -9, -12, -6, -20, -20, -18, -3, -3, 12, 27, 15, 23, 15, 12, 10, 13, -19, 5, 40, 6, 18, -26, -3, -6, -4, -12, -10, -18, -5, 6, 21, -2, 16, -3, 10, 18, 9, -23, -15, -3, 25, 13, -21, 8, -11, 3, 0, -27, -26, 12, -16, -9, -9, 18, 18, -8, -13, -6, -3, 28, -18, -25, -2, -13, -3, 30, 27, 15, -8, 11, 9, -17, 10, -6, 5, -18, 6, 16, -13, 21, -2, 10, -36, -21, 7, 7, -6, 22, 4, -16, 15, 16, -21, -20, 1, -10, -2, 3, 19, 1, -13, 12, -12, 23, 5, -11, 15, 19, 27, 19, -3, -12, 11, 13, -19, 2, -17, 13, 3, -3, -17, -18, 8, 13, 1, 19, 17, 7, -14, -6, 18, -3, 10, -2, 17, 4, -27, -9, 6, 1, -13, 0, 13, -13, -13, -25, -2, -2, -1, 5, 4, -7, 9, 1, -23, -8, 18, 23, -10, 7, 32, 9, -35, -7, -20, 3, -15, -18, -9, -29, -43, -5, -19, -10, -3, 11, 2, -6, 9, -15, 1, 16, 3, -27, 5, -22, -2, -5, -25, -3, 21, -7, -16, 23, -9, -40, 1, 8, 23, -12, 15, 20, -19, 14, 6, -16, -24, 5, -28, -7, -4, -25, -5, 6, 4, 18, -8, -11, -28, -31, -6, -4, -14, 1, 5, -6, -13, -33, 3, -8, -35, 12, 7, 31, -6, 26, -2, 19, -22, 4, 32, 7, -4, -13, 23, -11, -8, 12, 33, 10, 4, -24, -18, 8, 13, -1, -3, 17, -1, -35, 21, 19, -20, -17, 7, -31, -13, -24, -1, 0, -9, 2, 0, 1, -14, 9, -15, 14, -1, 6, 21, 9, -23, -1, -8, 14, -3, -17, 28, -17, 58, 45, -16, 20, -11, 0, 2, 5, 2, 9, -12, 3, 0, -19, 25, 21, 16, 5, 14, 3, -7, -16, -4, -13, -8, 37, 9, -5, -18, -15, 30, 17, 1, -22, -22, -5, -26, -11, 6, 55, -23, 24, 12, 12, 27, 20, 16, -21, -7, -11, 15, -12, 2, -17, -27, -11, -1, -16, 29, -2, 17, 9, -13, -20, -13, -7, 2, -13, 12, -18, 27, 1, 3, -3, 27, 21, -15, -3, -24, -3, 10, -6, -8, -17, -7, -18, -4, 18, -15, -4, -11, -17, 1, -7, 13, 29, 17, -23, -8, 8, 0, -17, -2, -3, -15, 0, 32, -20, -10, -15, 31, -14, -12, -9, 24, 25, -9, -8, 32, 5, 13, -18, -31, 19, 19, 19, -17, -8, 20, 24, 20, -13, 6, 13, 3, -32, -2, -12, 5, -22, 15, 1, -20, -33, 12, 7, 7, -22, 1, -6, 20, 21, 27, 52, -8, 0, -11, -6, 11, -12, -1, 14, 14, 9, 2, -6, -12, 0, 10, 1, 19, 7, 8, 11, -8, -10, 7, 11, 10, 6, -24, 18, 27, 26, -15, 22, -27, -16, -12, 10, 5, 29, 7, 23, -7, 18, -4, -19, 2, 1, -13, 2, 28, -21, -14, -28, -8, 22, -10, 10, 26, -24, -24, -9, 17, 7, -6, 32, 8, -8, 15, 3, 10, 25, -24, -14, -5, 16, -21, 3, 18, 16, 9, 42, 3, 11, -7, 5, 14, 16, -4, -4, -4, -20, -15, -22, -3, 0, -17, -16, -38, -2, -3, 9, 2, 20, -5, 22, 21, 1, -16, -28, 3, -15, 18, 10, -25, 26, -16, 1, 14, 47, 15, 6, -4, -14, 7, -5, -17, -13, -11, -9, 15, 23, -24, -2, 16, -12, 35, 1, 13, -2, -20, 0, 28, 29, 25, 4, 8, 7, -54, -17, 17, 5, 21, 61, -5, -16, -9, -14, 6, 1, 11, -11, -9, 13, 3, 7, 10, 28, -20, -13, -17, 10, -3, 14, -24, -12, -20, 23, 14, 9, 4, 3, 37, 14, 2, -3, 27, -16, 36, 24, 32, 44, -14, 9, -16, -13, -21, -20, -6, -17, -11, 30, 22, -12, 8, 14, 17, 17, -17, -1, 0, 10, 24, -9, 20, -19, -8, 3, 10, 9, 4, -3, 19, -11, -16, 14, 8, -21, -3, 26, -11, 3, -11, 12, -12, 1, 9, 9, -2, 3, 23, 18, -14, 0, 13, -11, 12, 38, 3, 12, 3, -26, -1, -2, -10, -12, -26, -11, 4, 6, -21, 16, -1, -19, 5, -10, -15, 8, 12, -13, 16, -25, 13, 15, 6, -6, -12, 0, 18, 7, 8, -4, -26, 12, 3, -4, -3, 7, 14, -32, -40, -18, 20, -22, 3, 16, 0, -9, -5, -16, 13, 4, 6, 3, 16, -11, 12, -23, 11, -14, -6, 24, 7, 17, -14, 4, -19, -13, -17, 8, 4, 11, -20, 2, 33, 9, 6, -5, 6, -1, 13, -1, -17, 1, 11, 25, 3, 1, 24, -8, -7, -5, 13, -14, 0, 16, 19, 18, 10, -17, 19, 1, 12, 4, 19, 6, -4, -9, -11, 8, -4, -2, -22, -11, 13, -23, 
9, -20, -10, -18, -3, 10, -13, -10, 13, 26, -1, -20, 27, -9, -10, -5, 23, 26, 7, 11, 5, 3, -5, -11, 15, 5, 28, 37, -15, 13, -12, 0, 25, 58, -4, 11, -4, -22, -7, -12, 13, -14, 8, -21, 13, 5, -25, -13, 11, 27, 1, 4, -17, -6, -12, 4, 28, 27, 2, -12, 25, 17, 3, -11, 8, 33, -2, 10, -4, -8, -23, 7, -8, 35, 4, -3, -4, -27, -10, -16, 11, -15, 41, -6, 24, 14, -3, -20, 28, -2, 7, -4, 7, 5, 4, -7, 13, -10, -5, -16, 10, -7, 1, 12, 35, -5, 20, -8, 15, -22, -3, -18, 10, -7, 6, 14, -6, -12, 11, -17, 11, -19, 4, -14, -10, -18, -13, -3, 8, 24, 18, 1, 4, 11, 12, -22, -15, -17, 5, 16, 9, -5, 16, 3, -3, -14, 31, -13, 11, -3, 16, -10, 13, 28, 12, -16, -3, 3, 5, -12, 3, 17, 1, 24, 4, 16, -11, -6, 12, -10, 14, 4, 5, -12, 9, 10, 16, 8, -3, 32, 2, 0, 1, -19, 31, -12, -3, -9, -20, 12, 10, 11, 22, 18, 17, 10, -28, 16, -5, 10, -21, 9, 14, -13, -21, -17, -4, -16, 21, 27, -17, -1, -1, -9, 6, -23, 15, 17, -13, -13, -13, 7, 10, -16, -12, -7, 12, 13, -31, -18, 1, -27, 10, 4, -24, 11, -12, 12, -18, 3, 8, -13, 18, -17, -7, 9, -48, 22, -22, -3, 17, -2, 6, -9, -24, 9, 4, -7, -12, 13, 9, -4, 6, -4, 29, -4, 19, 4, 21, 14, -22, 9, 0, 3, 4, 18, -5, 19, 14, -1, 5, 10, 2, -17, 16, -10, 8, -6, -12, -18, 22, 0, -14, 8, -4, 4, 3, -20, -11, -13, -2, 16, -16, 2, -3, -21, 10, 5, 13, 5, 14, 9, -6, -14, 8, 9, -5, -2, 20, 2, -9, 5, -5, -13, 4, 26, -14, 2, 12, -3, 15, 17, 30, 8, -19, -16, -6, 12, 1, -15, -10, 18, 20, 4, 10, -23, -15, -8, -3, -11, -11, -8, -2, 22, 0, 14, 21, 16, 1, -20, 3, 5, -23, 4, 11, 8, 0, -26, 9, -11, 9, -16, 6, 14, -40, -3, -12, -21, 4, 1, -17, -45, 9, -8, -15, -16, -20, 10, 12, 15, -40, -10, -9, -8, 7, -8, 3, -5, 5, 11, 10, -2, -18, 11, 6, -25, 1, 28, 1, 18, 23, 8, -6, -6, 11, -6, 17, -11, 9, 11, -21, 5, -8, 11, -12, 8, -15, 16, 3, -3, 24, 7, 9, -20, -12, 1, -8, -5, -27, 15, 18, -23, 6, -5, 6, 15, 3, 13, 9, 2, -5, 9, 25, 22, 26, 13, 0, -22, 24, -11, -6, -10, -1, 14, -3, -6, 12, 14, -8, -2, 15, -20, 19, 18, -12, -6, 22, 12, 23, 17, 14, 6, 14, 16, 14, 22, -20, -2, -5, -3, -15, 12, 9, 9, 28, -15, 0, -21, 10, 15, 1, 2, 7, 17, -3, -17, 24, -12, 26, 32, 17, -2, 19, -4, 11, 6, 0, -22, -3, 17, 10, 6, 11, -17, -18, -4, 11, -11, -17, 15, 12, -9, -21, 19, 9, 8, 8, 0, 24, -14, 9, 31, 2, 9, 25, -8, -11, -4, 10, -16, -6, 11, 19, -7, 14, 7, -19, -4, 23, 22, 20, -10, 4, 13, -15, 26, -17, 26, -14, -5, 12, -17, -5, -14, 6, 10, -4, 19, 14, -2, -23, -12, -10, 15, -8, -3, -1, 18, -20, 17, -16, -3, -3, -6, 23, 13, 54, 35, 19, 27, 39, 5, -8, -6, 13, 12, -35, -7, 13, -18, 0, -21, 17, 46, -3, 22, 16, -17, 21, -6, 4, 15, 3, -10, -3, 6, 0, 2, -12, 0, -13, 16, 1, -13, 8, -15, -10, -25, -14, -17, -21, -16, 1, -13, -37, -6, -11, 12, -34, -19, 32, -11, -23, 16, -10, -7, -7, 6, 0, -6, -12, -7, -17, -3, 15, 13, -5, -14, -36, -7, 5, 3, -11, 20, -12, -18, 35, 9, -19, -16, 1, 18, -18, -14, 0, -8, 0, 12, -19, -23, 8, -19, 21, -14, 11, 23, -26, 12, 4, 2, 29, 24, -22, 5, 22, 13, 14, -9, 17, -14, 12, 9, -29, 16, 18, 13, -8, -5, 2, -7, 26, 19, 3, -12, -25, 19, 10, -8, 24, -9, -1, 10, 17, -9, 25, 20, 16, -3, 15, 15, 19, -20, 3, 46, 2, 3, -10, 19, -21, 30, -7, 0, -19, -17, 35, 5, 14, 18, -9, 1, 18, -12, 19, -12, 24, -6, -1, -14, -22, 11, -5, -9, -9, 0, 16, 5, -34, 6, 22, 16, -4, 6, 19, 27, 22, -10, 9, -3, -9, 57, 33, -12, 39, 12, -6, -9, 1, -13, -5, 16, -9, 1, 53, 12, 18, 31, -10, 27, 3, 10, 32, -1, 35, -8, -2, 28, -10, -22, 1, -9, 7, 9, -6, -8, -3, -15, -10, 6, 17, 31, -29, -10, -19, 16, -7, 3, 3, 20, 2, -1, 29, -19, 16, 21, 19, -17, 38, 18, -13, 20, -11, 18, 34, 28, 17, 27, 12, -22, -10, -12, -12, 51, 13, -3, 3, 0, -1, 19, 24, 35, 6, 18, 4, 9, -12, 12, 29, 
24, -23, -3, -11, 14, -20, -21, -9, 0, -22, 50, -23, -5, 10, 0, 0, 5, -11, 57, -13, 1, 13, -22, -8, 4, -15, -9, -24, -15, 7, -18, 39, 26, -3, -4, 20, -8, 14, -4, 11, 18, 9, 13, -5, 12, 11, -6, 10, 18, 26, 0, -25, 6, -24, -2, 17, 23, 36, 46, 4, -15, -28, 6, -1, 32, -23, 0, 15, -17, -8, -5, -9, 2, 24, 21, -24, 1, 10, 20, 2, -9, -19, -17, 2, 19, -18, 6, 5, 28, 15, 24, -7, 3, -16, 5, 9, -11, 45, 47, 12, 10, 9, -11, 27, 49, -31, -21, -7, -1, 16, 3, -22, -8, -5, 8, -16, -18, 30, -5, 3, 17, -14, -13, -11, 11, 35, -10, 10, 8, 4, -3, -2, 13, -23, -12, 7, -13, -31, -16, -17, -18, 4, 14, 2, 5, 16, -15, 17, -1, 4, -19, 18, -8, -10, -7, -8, 2, -19, 1, -9, 45, -17, 10, -3, -16, 11, -9, 4, -13, 2, -11, -28, 10, 1, 14, 12, -23, -9, -19, -14, -21, -8, -13, 14, -23, -2, 7, 5, -19, -13, 18, -8, 2, -20, -15, 11, -23, 4, -18, 16, -1, 3, 6, 1, -14, 8, 9, -6, 3, -19, -24, -18, -6, 4, -5, -25, 6, 15, 2, -20, 13, 15, -10, -12, 11, 22, -11, -15, -22, -2, 15, -6, -11, -27, -9, -24, 2, -1, -19, 0, 1, -14, -1, 0, 14, -6, -10, 14, 33, -1, 4, 20, 13, -20, -18, -4, -6, 11, -7, -7, -20, -2, -25, 15, -27, 23, 13, 18, -7, 0, -7, -4, 2, -1, 17, 12, -4, 11, 5, -13, -6, -15, -7, -4, -20, -23, 18, 5, -22, 25, -4, 14, -2, 0, 16, -18, -10, -3, -17, -19, 4, -15, -18, -11, -24, -10, 10, -8, -22, -4, -20, 7, -14, -13, -17, -11, 15, 12, -11, 12, 9, -14, 3, -10, 9, -16, 0, -10, 23, -9, 18, 8, -3, -16, -23, -9, -19, -10, -20, 15, -14, 31, -1, 1, 4, 1, -11, -10, -15, 24, -18, 41, 27, 3, -7, -13, -13, 1, 13, -18, -27, -1, -21, 1, -21, -10, 13, -31, 15, 13, 39, -7, -2, 9, -24, -10, 11, 9, 22, -5, -6, 18, 4, 3, 8, -17, 9, 37, 7, 3, -21, -8, 10, 2, 20, 7, -2, 2, -9, -30, -5, -19, 22, 3, 9, -15, -1, -6, -12, 29, 28, 4, 6, 17, 10, -1, -12, 0, 18, -6, -2, -5, 14, 22, 22, 6, 23, -14, -14, 30, 15, 7, 8, -5, -17, 14, 8, 9, -17, 29, -21, 26, 4, 5, -2, 16, -11, -14, 2, 6, -17, -30, 13, -12, -16, 12, 9, -10, 12, -21, 13, -19, -9, -2, 6, 11, 8, 8, -13, 2, 3, 7, -21, 0, 7, 10, -2, -4, -12, -10, -12, 15, 17, -5, -10, -10, -14, 21, -15, -6, 6, 5, 4, 3, -14, 27, 1, -11, -3, -14, -20, -14, -19, 12, 17, 9, -15, -12, -1, 13, -21, -12, 5, 2, -27, 7, 19, -28, 4, 1, 12, -14, -1, 18, 2, 12, -14, -3, -9, 44, -4, 15, -19, 12, 5, -3, -13, 5, 3, -23, -8, -30, -13, 33, 53, -36, -24, 38, 5, -16, 4, 2, -6, 17, 34, 9, -15, 25, -25, 4, -2, 23, -24, -23, 14, -16, -16, -16, -39, 7, 18, -34, -23, -2, 9, 12, -35, 16, -60, 1, -13, -24, -10, -8, 7, 9, -19, 18, -8, -18, 19, 28, -50, 5, -37, -43, -6, -49, -6, -21, 17, 16, -16, 12, 0, -22, -2, -1, -6, -15, -6, -13, -11, 11, 12, 15, -9, 3, -20, 15, -3, -3, 6, -22, 22, -18, -23, 11, -16, 9, 3, 6, 29, 4, -13, 30, 9, 8, 2, -3, -19, 25, 40, 10, 11, -15, -9, -2, 11, -20, -2, 21, -19, -5, -18, 34, -48, 37, -32, -11, 13, -16, -10, 10, 20, -4, -20, -2, 11, 2, 8, 4, 43, 34, 20, 6, 3, 1, 5, -49, 8, 4, -50, 32, -21, -30, 16, -20, 12, 21, -26, 9, 4, -22, -21, 19, -3, 36, 47, 16, 6, 24, -11, 8, 17, -3, -18, -11, 8, 1, 13, 15, -15, 26, -9, 11, -5, -19, 1, -18, -28, -4, -10, 36, -23, 10, -8, -4, -6, -7, 7, -19, 16, 7, -20, 13, 17, 5, -17, 17, 14, 18, -9, -19, 8, -10, -16, -15, -13, 29, 13, 30, 1, 15, 2, -10, 12, 1, -13, 25, 39, -5, 13, 1, -3, 19, 4, 50, -15, 16, 14, 31, -7, 15, 11, -12, 3, 24, 22, 27, 7, 7, 9, 19, 32, 13, 24, -10, 12, 22, 8, 19, -26, 14, 40, 16, -5, -6, 2, 11, 23, 27, -21, -4, 9, -8, 7, 37, -3, 45, 39, 8, -22, 3, -3, 13, -16, -20, -26, 22, 11, 17, -2, 21, 44, -19, 2, 20, -9, -18, -5, -9, 5, 9, 14, 14, 10, 0, -21, 0, -21, 5, 6, 0, 12, 24, -23, -28, -6, 1, -34, 10, 8, -38, 4, 9, -19, 9, -1, 21, 8, -19, 15, 1, -1, -21, 9, -20, 10, 
-6, 18, 7, -14, 0, 55, 10, 14, 22, 12, -16, 16, 24, 31, 6, 9, 5, 11, 14, 22, 23, -16, 23, -6, -34, 2, 14, -2, -27, -19, -24, 6, 24, 13, 0, 10, 6, 21, 13, -15, 9, -22, 23, -12, 16, 2, -24, -7, -29, 7, -23, -11, 13, -5, 30, -10, 21, -13, -2, 9, -6, -2, 12, -8, -10, 15, -2, 12, 22, 33, -15, -14, -4, -24, -1, 8, -5, 37, 17, 14, 10, 6, 10, -15, 1, 1, 9, 4, -9, 7, -18, 7, -13, -11, 10, 16, -41, -24, -16, 32, -18, -1, -10, -4, 16, -18, 3, -2, 14, -15, -11, 9, 20, -10, -4, 41, -4, 13, -19, 22, 2, -9, -3, 4, 30, 12, -11, -19, -12, 17, 4, 28, -10, -30, -17, -11, -22, -4, 4, 3, -4, 58, 9, -13, -33, -8, -18, -12, -1, 14, 6, 0, 7, -15, 18, -3, 31, 9, -5, -13, -28, -5, -4, -12, 6, 12, -16, 3, -2, -2, 3, 9, -8, -24, 11, -10, 14, 17, 27, 22, -3, -27, -7, -14, -2, 12, -6, 1, -27, -2, -1, -10, 19, -14, -17, -12, -12, -25, -3, -20, 12, 15, -11, -25, 9, -15, 22, -7, 20, 15, -9, -11, 13, 5, 14, -13, -7, 5, 15, -15, 10, 31, 19, 14, -2, -1, 35, -14, 4, -4, 15, 16, -10, -20, 0, 14, -2, 50, 13, -19, -8, -4, -9, -15, 20, 17, -11, 5, 0, 11, 0, -17, 5, -12, -1, -18, 7, 14, 12, -23, 15, 18, 7, -15, -11, 14, 29, -10, 4, 9, -32, -12, 4, -12, -17, 26, 9, 9, 12, -7, -10, 7, 6, 37, -1, 8, -18, -6, -8, -16, 10, -5, -19, 16, 14, 13, 16, -7, -22, -15, 2, -9, -17, 13, 9, -7, 8, 20, -20, 4, 10, -12, 20, 10, 20, 1, 4, 4, -14, -4, -14, -17, 18, 6, 2, -5, 14, 12, 6, -19, 10, -1, -8, -21, 0, 2, -6, 2, -7, -29, 21, 9, -11, -4, -8, 18, -13, -11, 13, 27, 6, -19, -8, 8, -11, 9, 1, -12, 0, -6, -14, -14, 1, 2, 7, 20, 5, 2, 21, -18, 26, 5, 3, 9, -30, -20, 15, -21, -5, -3, -21, -23, 33, -3, -12, -8, -10, -13, -12, 15, 16, 11, 40, 6, 6, 15, -15, 9, 6, -3, -24, 16, 1, 16, 13, 11, 25, -15, 23, 8, 16, -34, 7, 7, -9, 7, -32, 16, 2, 2, -1, 32, 23, -17, -10, 8, 7, 7, 36, -1, -31, -14, 10, 16, -20, -7, -2, -3, -10, 2, 18, -17, 11, -64, -13, 24, -16, 7, -11, -17, 9, 20, -1, -2, 5, 4, -3, 14, -1, 15, -20, 6, -12, 5, 15, 2, -4, -7, -17, -18, -20, -21, 9, -19, 10, -8, 24, -10, -6, -19, -2, -4, 13, -11, 22, -19, -24, 19, 3, -16, -7, -14, -17, 22, 9, -1, -25, 0, 1, 6, 10, 4, 25, 4, -4, 17, -14, 17, 1, 6, 6, -26, -10, -24, 11, 10, 10, -2, 12, -3, -26, -21, 14, -19, 17, -32, -25, 10, -11, 0, 10, 1, 16, 6, -20, 7, -4, -20, 19, -9, -2, -27, 9, -22, -3, 12, -3, 16, 5, -1, -26, 6, 2, 16, -3, -7, 0, -24, 25, 19, -32, -18, 0, -6, 17, 12, -6, -21, -27, -11, -17, -22, -22, 0, -1, 16, -10, -10, -8, 36, 3, 8, -26, 14, -16, 9, 4, 16, -22, 24, 7, -15, -1, -15, -1, -5, 13, -29, -8, -4, 13, -15, 8, -48, 11, 7, -11, 11, 10, 16, 13, 1, -13, 11, 2, 1, 4, 6, -2, -17, 21, 3, 12, -18, -8, -11, 12, -22, 10, -22, 16, 0, 13, 15, -4, -33, 10, -5, 7, 1, 8, -22, 7, 1, -17, -20, 9, -14, 23, -11, -14, -73, 15, -20, -20, -16, 4, -17, -16, -16, 4, -4, -33, 11, 33, -15, 22, -30, 2, -12, 15, 13, 0, 5, -7, -28, -8, -40, 8, -9, -6, 15, -14, -26, -4, 16, 16, 3, 32, -6, -8, 6, -2, -47, 4, -11, -19, 14, 18, -30, 3, -24, 6, 13, 0, -7, 18, -9, -6, -6, 10, 2, 26, -17, -24, 4, -3, 18, -19, -5, 14, 0, 3, -31, -11, -13, -18, -15, -4, 19, 22, -24, 0, -12, -6, 14, 1, 3, -20, -24, 26, -14, 17, 6, 19, -18, 29, -19, 8, -11, 28, -17, 9, -17, 18, -38, 3, -22, 15, -3, 6, -13, -21, -7, 4, -14, -10, -13, -27, 6, -1, -18, -20, -23, -7, -7, 19, 0, -19, 7, 23, -19, 5, 19, -16, -3, -15, 17, -24, 25, 16, -1, -3, 14, -2, -6, -13, 2, -17, 6, 4, 13, -12, 22, 5, 39, 19, 0, -34, -10, -14, -16, 1, 30, -6, -2, 25, 12, -9, 27, -20, 11, -15, 1, -11, 12, 13, -7, 6, 10, 5, -10, 22, 6, 6, 19, 24, 3, -28, -22, 16, -22, -17, 4, 15, 4, -40, 11, 5, 12, -19, 9, 7, 8, -7, 8, 7, 5, -31, 13, -19, 9, -7, 14, 20, -4, -30, 4, -22, 
-21, 21, 2, 10, -17, 18, -15, 12, 1, -10, -5, -4, -11, -18, 1, -26, 12, 3, 4, 16, -16, -13, 6, 20, -9, -4, -18, 5, -6, -20, 13, -19, -2, -18, 0, 41, 14, -17, 15, 6, 5, -12, -16, 29, 5, -19, -11, -2, -22, -6, 6, -16, 0, -27, 31, 8, 16, -26, -3, -20, -22, 9, -9, -18, 14, -19, -8, 0, -2, -3, -7, -33, 37, 1, -15, -5, 0, 8, 14, 3, 8, -5, -8, -5, -11, -2, 4, -6, -32, 18, -24, -9, -14, 6, -13, 4, 7, -6, 12, -20, -20, -4, 8, 2, 1, -32, -9, -12, 17, -4, 35, 4, 9, 4, -17, 3, 18, -15, 43, -13, -34, -13, -10, -23, 17, 21, -14, 24, 11, 12, 2, -27, -15, -14, -28, -26, 9, 1, 2, 20, -20, -14, -15, -16, 13, 3, 7, -4, -12, 23, 2, 2, -1, 37, 10, -27, 16, -5, 25, -5, 6, -25, -7, 39, 10, -20, 30, -1, -12, -8, -18, -32, 18, -19, -11, 6, -6, -17, 0, 9, -3, -12, -31, -12, -11, -26, 9, 28, 14, -4, -18, 1, -1, 13, 2, -26, -2, 9, 4, -10, 27, -4, -21, 14, 3, 44, -31, -10, -21, -11, 8, 1, 6, 13, 31, -8, 9, -21, 9, 20, -21, -2, 23, -7, 15, 6, 7, 10, 10, 6, -7, 11, -7, -11, 6, -11, 5, -4, -2, -13, -26, 2, 4, 9, 15, -2, -14, -12, -5, -9, -10, -8, -4, -12, -29, -15, -8, 8, -11, 9, -2, -12, 13, -11, -14, -14, -13, -2, 16, -11, 11, -12, -14, -17, 14, -12, 20, -8, 10, -9, -11, 6, -14, 40, -5, -9, 0, -21, -33, -2, 16, -1, -13, -20, -12, -13, -9, -26, -17, -14, 1, -8, 13, 12, 22, 18, 3, 7, 19, 6, 27, -30, -32, 17, 10, -31, 16, -10, -12, 5, -30, -4, -7, -14, 3, -27, -28, -5, 7, -16, 2, 21, 14, -15, -7, -3, 3, -4, -14, 22, -21, -11, 6, -21, 0, -4, 8, -4, -2, -5, 8, 35, -13, 14, -12, 29, -4, -17, -3, 3, -12, 13, -16, -1, 4, -11, 24, -7, -36, -20, -17, 8, -12, 35, -4, -19, 3, 49, 5, 4, -3, 13, -6, 13, 30, 0, 5, -15, -17, -11, -8, -23, 8, 15, -4, 3, -21, -4, 3, -19, -20, -33, -3, 6, 10, 8, -22, 17, -19, -19, 1, 0, 1, -3, -7, -24, 25, -17, -16, -10, -21, -6, -3, 0, -34, -21, 13, -27, 19, 15, 17, 1, 16, -21, -14, -12, -15, 9, 22, -10, -5, 16, 19, 8, -16, -33, -19, 12, -11, -17, -2, 17, -20, -12, 14, -31, -19, 23, 10, -8, 13, -27, -27, -1, 1, -23, 6, 1, 3, -20, 47, 11, 27, 12, -15, -18, 13, -35, -13, -2, 42, 4, -2, -23, 4, 4, 3, 12, 13, 13, 19, 2, -22, 19, -3, -21, -9, 14, -14, 21, 20, -2, -27, 18, 14, -1, -12, 16, 14, -9, -6, 14, 9, -5, -19, -2, -12, 27, -6, 25, -20, 15, -10, 7, -7, -20, -4, -2, 2, 18, 2, -3, -22, -5, -10, 8, -11, -4, 17, -16, 17, -18, -16, -5, 14, 12, 2, 10, -2, -6, -7, -10, -6, 1, -16, -13, 20, 13, -15, -4, 10, -1, -17, -4, -8, 17, 16, 7, -11, 28, -6, -6, 7, 21, 1, -14, -1, 5, 5, 3, -13, 11, -16, -5, -20, 22, 15, 24, 4, 10, -11, -15, -35, 14, -19, -7, -15, 7, -25, -9, -7, 3, -2, -8, -13, 2, 10, -2, 4, -3, 16, 26, 17, -13, 7, -14, 21, 29, 4, 16, 14, 18, -10, 6, 6, 13, -12, -13, 7, -5, 2, -13, 4, -2, -15, 42, 16, 24, -11, 19, -10, -3, 1, 9, -16, -9, 14, 12, 3, -4, 4, 15, -10, -18, -4, -1, 9, -27, 7, -10, -11, 17, -6, -28, 4, 1, -9, 8, 9, -16, 12, -9, 13, 7, -16, 11, 10, 9, 7, -16, 18, 2, 8, 1, -6, 5, 7, 3, -13, -20, 25, 16, 6, 6, -2, -16, 21, 2, -28, 13, 2, -7, 1, -30, 31, -13, 10, 7, -4, 10, 3, -25, 27, -21, 1, 14, 7, 16, 7, -21, -16, -5, -25, 11, -17, 3, 12, -11, 5, 0, 29, 21, -3, -4, 10, -20, 9, -3, 29, -2, 22, 19, 3, -21, 15, -19, -10, 7, -6, 13, 9, -12, 17, -7, -19, 12, -13, -22, 9, -14, 22, -16, -12, 2, -34, 12, 23, 21, 15, 9, -4, -5, -18, -2, 3, 6, 8, 11, 7, -19, -4, 4, 5, -16, 21, -13, 13, 18, 13, -8, -15, -25, 23, 6, -3, -14, 1, -8, -4, -47, 28, 10, -11, -12, 15, -26, 4, -12, 17, -14, 20, -8, 21, 1, 8, 15, -15, -3, -21, -18, -8, -24, -16, 11, 4, -6, -11, -3, 8, -11, -1, -23, -1, 10, 15, 19, 0, 8, 33, 10, 9, 1, 10, -11, -17, -18, 3, -16, -12, 5, -1, -18, 15, -15, 18, 5, 14, 16, -12, 8, -15, 5, -27, -24, 18, 
-13, 12, 14, 7, 10, -8, 2, -15, -16, -2, 18, 10, -19, 4, 34, -18, 1, -3, 9, -21, -14, 16, -5, -19, -12, -10, -5, -7, 12, 5, -29, 3, -16, 26, -5, -23, -12, 4, -10, 17, 14, 25, 5, 2, -12, -4, -8, 4, 10, 5, -22, -10, -13, 1, 5, -10, -25, 21, 0, 0, -23, 8, 10, 25, 2, -10, -1, 22, -11, 3, 11, -18, 14, 2, 14, -11, -5, 15, -12, 27, 9, -3, -17, 7, 6, -19, -21, -22, 16, -13, -22, -6, -18, 23, 0, 14, -18, -4, 12, 9, 5, 7, 26, -23, 15, -16, -12, -15, -17, -4, 2, -14, 10, 11, -3, 13, 1, 20, -12, 11, -20, 7, -21, 11, -17, -5, 9, 20, -21, -25, -12, 15, 11, 25, -19, 30, 13, 15, 1, -1, 6, -9, -20, -9, -22, 5, 8, -27, 14, -3, 3, -3, -12, -32, -19, 14, 13, 22, 9, 20, 18, -11, 14, 3, 13, 15, 1, -9, -21, -21, -8, -9, -6, 6, 7, 11, -1, 16, 12, -7, 13, 27, 9, -9, -11, -5, -11, 8, 51, 13, 8, -21, -13, -2, 17, 9, 29, 2, -2, 20, -9, -26, -13, -10, -35, -29, -13, 32, -16, -1, -11, 7, 7, 14, 15, -7, 18, -29, -3, -25, 3, -10, 30, 8, 2, -20, 0, -32, -3, 19, 12, 5, -2, -12, 7, 12, 4, 24, -1, 24, -1, -51, -17, -9, -18, 29, 14, 6, -1, -27, -18, -45, 21, 6, -4, 22, 11, -7, -19, -16, 21, 16, 21, -18, -7, 2, -20, -19, -27, 1, 10, 9, 17, 14, 13, 1, -9, -13, -30, -22, -14, -27, 18, -13, -11, 21, 13, 8, -7, -19, -7, -21, 40, 15, 8, 0, 2, -3, 13, 2, 4, 3, 22, 5, 12, -19, 19, -14, 21, 25, 6, 29, 15, -18, -20, -22, 19, 17, 17, -2, -14, -21, -19, -15, -1, -21, -7, -23, -12, -3, 4, -28, -16, -3, -13, 17, 7, 4, 0, -33, 10, 14, 21, 1, 15, -25, -20, 14, 22, -16, -6, 18, -5, -45, -21, -26, -14, -8, -30, -42, 7, -3, 17, -10, -10, 6, 8, 8, -19, -32, -7, -4, -13, -2, 10, 18, 1, -18, 16, -3, -36, -24, 22, 3, 21, -2, -8, 4, -3, -16, 0, -6, 5, 8, -21, -6, 1, -9, -5, 5, -19, 17, 1, -10, 5, -5, -28, 15, 4, -20, 7, -43, -13, -2, -8, -10, 3, 7, 7, 5, -6, 19, 6, 10, -7, -14, -5, -36, 6, 6, -4, -12, -13, -26, 7, -3, -10, 2, -7, -8, -5, 15, 6, -18, -18, -23, 4, 3, 18, -28, 5, -24, 16, 8, -2, -12, -17, 1, -22, -21, 5, -13, -7, -20, 17, 24, 10, -16, -11, -12, -34, 27, 19, -10, -14, -35, -17, -15, -14, 5, 19, -47, 0, 19, -16, 20, 21, -6, 15, -39, -21, -17, -20, -14, -11, -19, -3, -17, -15, -4, 6, -35, 2, -14, -15, 35, -19, -11, 22, 16, 10, -2, -21, -10, -5, -25, -18, -8, -21, -4, -18, -24, -20, -28, -27, -9, 1, -4, 8, 4, -8, -14, 5, -21, 11, -14, -11, -37, 10, -29, 14, -24, -7, 16, 16, -9, 16, -34, -16, 16, 2, -15, 8, -34, 3, -35, -3, -9, -24, -16, 14, -11, 9, -32, -7, 13, -6, -19, -9, -22, 4, -26, 22, -21, -11, 13, -13, 5, 9, -24, -23, 9, -14, -20, -10, -39, 23, 14, -22, 23, 12, 10, -19, 15, 2, -4, 5, 31, -30, -28, 15, -3, -10, -6, -28, -19, -37, 15, 18, -26, 13, 40, -7, 12, -20, -28, 6, -25, 11, -2, -27, -26, -14, -24, -12, -6, -9, 8, -23, -4, -15, -16, -21, -6, 14, -17, -19, -8, -13, 3, -17, 0, -6, 7, 15, 4, -24, -9, -5, 5, 2, 0, -15, -22, -17, -11, 16, -1, -5, 2, 12, 13, -20, 12, 4, 9, 13, 48, -11, -9, 16, -21, -4, -7, -6, 8, 0, -17, -7, -21, -11, 7, -19, -1, -14, 32, 7, 0, -4, -15, 14, -10, 7, 15, -25, -15, 15, -6, 1, -8, -8, -15, -5, -4, 2, 13, 16, -16, 26, 24, -3, 8, 17, -24, -24, 4, 7, -14, -7, -28, 0, 19, -13, -21, -5, 5, 2, 37, -10, 7, -22, 25, -10, -2, -15, -22, -17, -13, 20, -11, -1, 45, 7, -6, 8, -10, 5, 14, -16, 20, -25, -9, 9, -16, -3, 12, -14, 18, -27, -31, 2, -1, -8, 0, -7, 13, -6, -25, -8, -17, -11, -17, 8, -7, 19, 11, -15, -23, 7, 14, -19, 9, 9, 32, -8, -20, 12, 1, 16, -6, 2, 0, -5, 27, -11, -8, 6, -9, -17, -7, -14, 5, -20, 12, -15, 3, 17, 25, 12, 29, 3, -14, 38, -13, 9, 23, 9, 5, 15, -9, 34, -15, 22, -9, 19, 16, 11, 28, -7, 25, -22, -6, -19, 19, 7, -11, -11, 30, 17, -13, -13, -6, -11, 18, 17, 22, 5, 11, -5, -1, 1, -7, 10, 41, -12, 
-7, -16, 7, -21, 7, -3, -18, -10, 3, -7, -10, 18, -14, 20, 32, 2, 19, -1, -1, 13, 21, 2, 9, 12, 1, 5, 24, -2, 11, -17, 9, -5, 31, 18, 11, -7, -1, 30, -14, 4, -19, 5, -11, 11, 5, 11, -4, 1, -9, 7, -11, -17, 1, 11, -9, -15, -16, 14, 11, -3, -31, -20, -18, 4, 18, 16, 17, -10, 2, 0, -17, 16, 13, -20, 24, -15, 37, 32, 18, -7, -17, -8, -16, -18, 10, 12, 0, 20, -12, 2, 11, 1, 17, -1, -18, 11, 8, -5, -28, 1, 18, 4, -3, -6, -8, -6, 10, 6, -19, -16, -17, 16, 6, 16, -21, 17, 3, -11, -20, -15, 17, -3, -17, -6, -9, -30, -31, -6, -7, -1, 4, -11, -21, -26, -18, 2, 17, -21, 4, -15, 2, 10, -16, 24, -15, -17, -13, 13, 6, -5, -28, -19, -11, 14, 19, 0, -33, -15, -24, -10, 9, -17, -8, 9, 20, -6, -21, -11, -28, 8, -22, -3, 30, -6, -7, 18, 2, 15, -10, -19, 10, 28, 10, -4, -6, 9, -1, -20, -5, 5, 0, -13, -2, -6, 22, -10, 5, -43, -12, -23, 15, -5, 33, -15, -20, -16, -39, -2, -4, 10, -14, -10, -20, 5, 17, -22, -21, 10, 23, 6, -32, 15, 3, -11, -16, 0, 9, 6, 25, -29, 7, 14, 17, -21, 6, -18, -23, 14, 22, -7, 10, 8, -3, 14, 29, -12, 23, -15, 2, -11, -2, -20, -23, -7, -3, -1, -15, -12, 1, -7, -27, 4, 0, -16, 4, 7, 6, -5, 8, -2, -31, 1, -17, -17, 12, -1, -12, -44, -9, 24, 0, -4, -1, -16, -23, -18, -6, 31, -15, -17, 3, -20, 1, 12, 54, 2, -35, 12, 6, 6, -1, 25, 11, 12, -25, 19, -33, 16, -15, 4, 27, 2, -12, 14, 23, -6, -8, -22, -14, -37, -50, 16, 16, -13, -9, 12, 9, -8, 1, 18, 11, 8, -32, 9, -37, -15, -19, -25, -3, 7, 22, -29, -17, 19, -4, -1, 16, -9, -13, 7, -15, -9, -3, -14, 0, -6, 7, 16, 28, -27, 6, -22, -11, 9, 5, 7, -16, 4, -4, -9, 25, -25, 4, -6, 17, 17, -22, 3, 7, 5, 5, -15, -26, 5, 7, 19, 0, -8, 20, 0, -11, 19, -15, 15, 20, -5, -28, -17, -3, 5, -3, 21, -5, -22, -32, 16, -1, -37, -49, -5, -16, -18, -2, 9, 8, -25, -10, 9, -13, -2, 5, 11, -35, -28, -28, 15, 30, 3, -7, -8, -29, 0, -6, 17, 3, -4, -15, -14, 21, 9, -5, -16, -28, -5, -5, -13, 16, -13, -30, -14, -16, 13, -3, 24, -3, -9, 11, -17, 6, -5, 5, -15, 25, 16, -16, -12, 42, 12, -20, -45, -12, 21, -18, -22, 18, -15, 10, -23, -5, -5, 8, -14, -13, 12, 20, 21, 6, 14, 20, 11, -10, -10, -25, -39, -5, 17, -12, -1, 11, 19, -2, -12, 14, 12, 3, -15, -8, -5, -14, 8, -3, 10, 11, 9, 1, -2, -11, 14, 21, -25, -7, 20, -1, 8, -25, -16, 16, 6, 5, -11, 16, -7, -25, 13, -6, 8, 4, -13, -2, 5, -8, 13, 17, -33, -59, -9, -34, -20, -22, -26, -3, -7, -10, 3, -32, -17, -15, 24, 32, -11, 5, -14, -35, 13, -1, -5, 3, -10, -9, 11, 24, 4, 8, 16, -17, 7, -24, -5, -1, 10, -2, -20, -23, 15, 30, -9, 8, 4, -34, 5, -9, -5, 3, -3, 1, 17, 8, -21, 8, -12, 6, 11, -23, -1, 6, 0, 14, 0, 11, 10, -2, -6, -19, -9, -8, 19, -12, -7, -6, 6, -19, 1, 16, -31, -34, 7, -12, -21, -24, 23, 40, 9, 7, 8, -1, 0, 6, -15, -13, -28, -17, -4, -10, -1, -28, -13, -21, -8, -14, -5, -4, -17, -2, -49, -23, -19, -2, 14, -4, -10, -22, 6, 5, -23, -19, -20, -3, -6, -31, -6, 8, 10, -11, -12, -5, 11, -4, -39, -39, -3, -15, 13, 6, -8, 10, -2, 20, -12, -18, -6, -15, -6, 13, 10, -24, -24, -19, 1, 5, -16, -13, -7, 8, 2, 10, -5, 3, 3, 1, 4, -17, -17, 8, 14, -18, 2, 8, 1, -6, -5, -21, -21, 3, -16, -30, -17, 21, 6, -12, 4, -35, -15, -14, -28, 0, 27, 22, -19, -3, -15, -29, -12, -17, -10, -32, 13, 13, 17, -16, -36, 7, 0, -6, -6, -22, 5, 8, 26, -4, 19, 12, 10, -7, -17, 18, -10, -1, -10, -27, -10, -25, -2, 2, 28, 5, -25, -17, -16, 15, 1, 5, 2, -5, 13, 22, -16, -24, 5, -6, 12, -19, 34, 27, 16, 12, -13, -20, -20, -25, 7, -3, -19, 7, -5, -23, -11, -14, -2, 28, 14, -20, -14, -31, 6, 0, 7, -12, -5, 15, 14, 18, -2, 30, 4, 14, 4, 15, 8, -12, 19, -2, -28, 14, -8, -14, 8, 2, 12, 18, -13, 13, 10, 11, 0, 6, -25, 9, 41, 30, 2, 13, -17, -12, -3, 4, 21, 10, 0, -30, 
-9, -5, 10, 24, 29, 31, -23, 14, 0, -32, -23, -35, 4, 19, -14, 22, -14, -15, -12, 2, -11, -10, -5, 1, 2, -10, 14, 1, -21, 3, -6, 23, 6, 24, 12, 12, 1, -9, 16, 39, 14, -31, 10, 17, -23, -29, 2, -6, 13, 20, 0, 11, 9, -62, 8, 7, -14, -11, 14, 27, -6, 4, -9, -15, 10, 7, -16, 19, -1, -26, -13, 9, 18, -19, -7, 11, 2, 14, -14, 10, -6, -3, -22, -10, 0, 14, -5, -6, 2, -19, -6, -13, -2, 14, 11, -26, -6, -25, 10, -3, -6, -14, 3, -6, 19, -4, 17, -6, 1, 8, -18, 18, 19, 14, -2, 12, -18, -28, 4, 14, -17, -8, 22, 15, -27, -26, 6, -32, -17, 1, -3, 25, 12, -7, -16, 31, 13, -17, -2, 8, -17, 6, -18, 13, 13, -25, -29, -26, 16, -15, -13, -17, -2, -3, 5, 8, 1, -2, -24, 21, -18, -37, -16, -1, -24, 7, -2, -3, -21, -2, 1, -16, -29, -4, 5, 5, -20, 1, -15, -30, -2, -19, 16, 17, -9, 2, -4, -11, 6, -11, -9, 12, 14, -14, 9, 23, -11, -13, 2, -15, 10, 16, -29, -29, 20, 17, -23, 14, -10, -45, 18, -26, 7, 11, -9, -11, -22, 20, -20, -2, -11, 17, 5, 5, 13, 30, 18, 37, 26, 44, 9, 26, 14, 10, -4, 4, 13, 10, 9, -12, -12, 3, -3, -11, 26, -6, 1, 11, 8, 2, 0, 39, -14, 4, -12, -36, -20, 22, 4, 3, -6, -21, -9, -13, -22, 2, -20, 4, -4, 38, 11, -12, -12, 15, 4, 11, -15, 5, 7, 15, -3, 13, -27, -11, 28, 15, 12, 6, -11, -18, 31, 32, -7, -25, -4, 0, -7, -8, 20, 43, 11, 24, -18, -25, -14, -3, 18, 15, 27, -2, 8, 12, -3, 4, -11, 8, -15, -12, -19, -4, 22, 12, 9, -13, 16, 1, -16, 30, -19, 22, 1, -14, 7, 2, -8, 22, -15, 27, -8, 20, 4, -8, -2, -17, 7, -45, 15, 28, 26, -8, -24, 19, -8, 6, 11, -14, -15, -20, 6, 21, 17, -1, 14, -20, -16, -9, 5, 4, 6, 7, -3, 17, -5, -26, 10, 17, -1, -16, 4, 18, 6, -13, -8, -14, -8, -9, 26, 41, -6, 7, 8, -23, 19, 2, -17, -18, -3, -1, -20, 15, 10, 23, 32, 23, -23, 11, 16, -12, -5, -20, 7, -2, 0, 2, 11, -23, 8, 20, 16, 7, -10, 10, 15, -40, 4, -26, -8, -8, 3, -1, 20, 32, -6, -5, 26, 29, -22, 7, -17, -3, -12, -32, -26, 2, 1, 2, 16, -28, 19, -4, -30, 1, 15, 39, -15, -20, 9, 1, 6, -21, -33, 15, -4, -26, 4, -13, -13, -6, -12, 24, -18, -9, -21, 6, -11, 0, 49, 43, 14, -44, -20, -30, 33, 8, 6, -2, -7, -26, -16, -18, 41, 29, 0, 28, -14, 8, -17, 0, 4, 2, -3, 30, -1, 2, 7, -16, 7, -8, 5, -4, 16, 20, 15, -8, -8, 13, -10, 26, 1, -20, 8, -12, -4, 6, 38, 29, 10, 11, 10, 26, 2, 3, 28, -5, -11, 17, 17, -13, -43, -7, 1, 29, 19, -21, 19, -5, 0, 27, 2, -1, -6, -30, -8, -9, -9, 17, -2, -17, -21, 4, -5, -12, 5, -19, 8, 33, 5, -10, 4, 5, 43, 34, 11, 6, -6, 5, -2, -7, 10, -4, -44, 4, -22, -26, -5, 4, -7, 33, 1, 29, -12, -15, 8, -6, -27, -17, 5, -3, -8, -7, -20, -19, 20, 3, -10, 11, -1, -28, -4, -17, -14, -1, 1, 26, -3, 14, -8, -25, 19, -5, -24, -12, -13, -12, -13, -15, 10, 20, -4, 6, 4, -41, -7, 15, -16, -24, 16, 29, -5, 3, 5, -1, -17, 29, -37, -38, 16, 5, -4, -13, 4, -26, -2, -3, -5, -18, -15, 7, 13, 15, -30, 19, -23, -9, 16, -16, -9, 16, 42, 28, 5, -35, -27, -7, -1, 26, -16, 4, 13, -26, 6, -33, -42, 11, -16, -2, -18, -50, -14, 18, 1, 7, 7, 1, 9, 9, -18, -21, -25, 12, 1, -6, -18, 3, -15, -20, 12, -16, 17, 17, -16, -13, -21, -21, 12, 7, -25, -6, -18, 9, 7, -25, -13, -1, -12, 1, -13, -29, -14, -55, 3, 18, -10, 13, 17, 21, -10, -43, 3, 2, 4, 0, 6, -11, -7, -24, -3, 36, 10, 9, -1, -12, 7, -48, 13, 20, 17, 20, 12, -22, 18, -16, 14, -6, 7, 10, -8, -41, 17, -18, -8, -22, 0, -5, 9, -14, 2, 5, 9, 6, -30, 16, 8, -6, -21, -24, -48, -4, 2, 4, -21, 16, -21, -30, 27, -14, 15, -13, 19, -32, -3, -5, 20, -8, 7, -7, 12, 3, -7, 5, -2, 11, 20, 13, 1, -37, 13, 9, -35, 4, 31, 30, 17, -25, -8, 27, -18, 17, 28, 6, -11, -9, 1, -16, 1, -23, 14, 23, 8, -10, -5, 1, 16, 5, 0, 34, -14, -11, -6, 18, 18, 4, -19, 7, -8, -3, 14, -11, 0, -11, -7, 37, 8, -21, -11, -42, 9, 3, 
29, 15, 3, 0, 11, -11, -1, 2, 13, -6, 19, 36, -18, -23, 21, -15, -7, -1, 15, 17, -9, -31, 7, 1, 25, 13, -9, -36, -12, -49, 16, 14, -2, 0, -14, -37, -16, -21, -15, 9, -31, -22, -14, -2, 14, -3, -26, -1, -19, 4, -9, -17, -1, -39, 7, -11, -23, 8, -23, 8, 1, -19, 12, 24, 12, 19, 3, -36, 6, -12, -21, 8, 30, 11, -18, -38, -7, -22, -7, 0, 25, -9, 7, -17, 10, -41, -4, -15, 5, 29, -2, -15, 3, 1, -7, -21, -8, 0, 15, 13, 7, 1, -2, 4, -6, 5, -9, -49, 10, 18, -32, -22, -12, 11, 9, 59, -19, -14, 6, -3, 24, 21, -3, -7, 7, -15, -28, 7, -2, 7, 13, 10, 15, -24, -9, -8, 1, 0, -8, -23, -18, 10, 11, -19, 16, 24, -17, -4, 19, 33, -5, 13, 7, -7, -1, 0, 7, -1, 6, -13, -1, 7, -8, -9, -2, 3, 10, -1, -38, 2, -12, -28, -12, -9, 19, -8, 19, -20, 7, 5, -16, -40, -1, 10, -14, 9, -12, -3, 18, -10, -18, -7, 14, 20, -5, 14, 8, 28, -28, -37, 13, 14, -20, -3, -17, -3, -1, 25, 23, 0, 17, 32, 15, -11, 31, 20, 26, 40, 7, -10, 13, -1, 8, -3, -7, -11, 20, -15, 10, 12, -7, 15, 17, -10, 12, 9, -16, 42, 3, -45, -18, -9, 14, 32, 9, 22, 13, -5, 12, 12, 11, -28, -16, -19, 11, 17, 6, -7, -16, -27, -6, 17, -18, -18, 38, -4, -21, -15, -18, -20, 26, -1, -26, 14, 3, -37, -18, 1, -30, -12, -20, -18, 20, -27, -8, 1, -18, -28, -2, -3, 15, -1, 5, -31, -5, -36, 18, -8, 7, -29, -20, -2, -28, -31, 19, 0, 4, -10, -19, -39, 43, -16, -11, 19, -22, -45, 5, 18, 19, 4, 11, -6, 18, -32, -4, 7, -23, -11, 56, 14, 7, -6, -19, -10, -10, 14, -14, 8, -1, -26, 16, -20, -21, -19, -14, -4, 3, -19, -11, -25, -4, -8, -14, 6, 26, 5, -4, -56, 19, 0, 5, 3, -16, -25, 1, 2, -23, -13, -15, 0, -10, 11, 15, -30, 15, 4, 2, 13, -13, -27, 25, -9, -21, -11, -1, -18, 21, -3, -7, 5, 16, -3, -45, 15, 26, -6, -1, 15, -14, -5, -11, 8, 11, 4, 13, -5, -31, -20, -18, -16, 30, -22, 10, -23, -16, -5, -8, 9, 44, -9, 13, -3, 35, 12, -14, 11, -10, -9, 39, -24, 14, -10, 12, 7, -19, -33, 15, -39, -23, -4, -40, -10, 3, 19, -4, 5, 13, 1, -6, 14, 25, -15, 6, -30, -7, -21, 46, -3, 29, -18, -15, -24, 10, -25, -5, 15, -22, -13, 25, 23, 13, 4, 8, -14, -10, 1, -13, -44, 31, -3, -2, 6, -9, 1, 20, 14, -1, -22, -3, 0, -19, 9, 12, -18, -24, -22, 2, 15, -3, -13, 8, 7, 20, -20, 29, 19, -23, -23, -18, -12, 7, 7, -26, -16, -15, -14, -10, 7, 17, 9, 17, -23, -19, -14, 21, -17, 17, -3, -9, -20, 21, 13, 30, -3, 19, -1, 24, 20, 8, 12, -9, 26, 21, 6, 14, 18, 13, 19, 26, -13, 31, 3, 18, 1, 6, 6, 10, 8, 11, -12, -14, -10, 4, 3, 5, -8, 8, -1, -17, -5, -4, 11, 4, -8, -4, 2, 6, 9, 2, 9, 20, -15, 19, -23, 1, 9, -13, -16, -6, 14, 7, 8, -17, 13, 10, -9, 26, 26, 20, -9, -32, 11, 2, -12, 0, 15, 25, 14, 25, 11, -20, -14, 2, -15, 24, 3, 9, -10, 9, -31, 8, 3, -53, -10, 4, 6, 14, -4, -12, 1, -10, 13, 16, 11, 46, 12, 2, 1, 18, 3, -17, -12, -39, -23, -10, -11, 11, -19, 18, -15, -15, 1, -34, -32, 27, 0, -4, -14, -6, -12, -6, 4, 6, -3, -17, 17, -6, -24, -3, -1, -11, 6, -18, 0, 25, -29, 11, 29, -4, -6, 13, 0, -5, 30, 15, -21, -6, -13, 23, -10, 24, 16, -3, 5, -26, -21, 7, 3, -8, 25, 17, -20, 6, 4, -19, -21, 13, -16, 11, 21, 10, -12, 19, 1, -14, 7, 13, -6, 25, -3, -13, 7, -5, -6, 0, 32, 18, 3, 11, -23, -16, -5, 30, -23, 23, 18, -4, -18, 2, 1, 9, -20, 6, -20, -4, 9, -36, -28, -8, -4, -23, -20, 0, 19, 23, -21, -12, 6, 12, -20, 4, 6, -3, 15, 15, 8, -11, 15, 10, -16, 22, 6, 22, -12, 10, 11, -3, 11, 4, -13, -11, -25, 14, -3, 9, 0, 41, 21, 8, -19, 9, 17, -9, 6, -13, -7, 17, 5, 6, -1, -2, -18, -12, -10, -1, -16, 6, -2, 0, -3, -24, -22, -2, -13, -27, 0, -22, 5, -5, 17, 37, 26, -27, -20, 14, -3, 24, 23, 20, -14, 22, 15, 9, 15, -26, -20, -4, 24, -14, -13, 25, 1, 28, 30, 23, -22, -1, -3, -3, -15, 8, 28, 12, -9, -7, 4, 7, 0, -17, 0, -4, -15, 
-12, -19, 18, 18, -6, -27, 7, 0, 12, -10, 0, -13, -19, -6, 20, -9, 2, -6, 5, -22, 2, 19, -34, -27, -19, -9, -23, -14, 0, 38, 5, -19, -25, 7, -31, 9, 38, 13, 17, 6, -35, 8, -9, -13, 41, 17, 3, -14, 7, -2, 3, 7, 15, 4, -12, 15, -20, -3, 16, 3, -1, 15, 1, -32, 30, 17, -11, 0, 33, 35, 53, -3, 0, 6, -19, -9, 0, -2, 24, 10, -35, 5, 7, -12, 10, 6, -6, -2, -25, 6, -26, 12, -34, -3, -10, 16, -38, 15, -14, -14, 52, 9, 31, 5, -7, -20, -36, -3, 11, 2, -15, 15, -20, 18, -15, 10, 31, 7, 8, -27, 5, 7, 10, 10, 18, 8, -16, -20, -5, -19, -11, -10, 16, 33, 24, 44, -32, 12, -31, 17, 31, 22, 22, -22, -3, -14, -32, 18, -4, 4, 47, 18, 4, 1, -38, -10, -3, 7, 10, 21, -11, 0, 14, -2, 28, 21, -6, 18, -26, -19, -2, -3, 4, 11, 39, 25, 11, 7, -13, 19, 35, 10, -20, 3, -15, -3, -31, 6, 31, 6, 47, 15, -16, 2, -26, -18, -5, 22, 28, 5, -1, -21, 11, -24, 35, 6, 10, -6, -3, 0, -10, -8, 22, -16, -25, -9, -1, -10, 8, -19, 31, -16, -19, -30, 28, 17, 2, -14, 13, 7, -9, -5, 18, -15, 19, 5, 4, 12, 20, -55, 18, -19, -7, -12, -8, -8, 3, -6, -15, -2, 14, -16, -4, -5, -15, 9, 31, -18, 16, -15, 20, -41, -18, 8, -27, -18, -10, 15, 8, 19, 31, -5, 17, -10, 13, -17, -17, -1, 24, -1, 2, -13, 3, -12, 23, 9, 2, 28, -11, -15, -8, -16, -6, 13, 21, -35, -5, 3, 7, -7, 15, 20, 0, -8, -4, 18, 0, 9, 9, 5, -22, 16, 8, -3, -24, -6, 10, -4, -23, -14, -5, 9, 2, 16, 4, -36, 13, 1, 18, 7, 32, 12, -16, 26, -6, -4, -6, 14, -10, -8, 5, -12, -7, -22, -7, 1, -29, 7, 11, 20, 1, 14, -27, -13, 20, -23, -17, 13, 4, 7, -17, 2, -23, -6, 7, 24, 11, 5, -12, 14, 0, -3, 6, 11, -24, 2, -16, -22, -15, -20, 24, 11, 1, -3, 16, -6, -2, -22, 25, -21, -14, 14, 6, -18, -10, 12, 8, 2, 21, 24, -23, -23, -13, -17, -8, 20, 16, -1, 20, -5, 5, 8, 2, 31, 8, -36, 7, 14, -36, -11, 8, 5, 25, 25, -17, 4, 12, 4, -15, 3, 4, -3, -20, 14, 7, 1, 21, -9, 35, -3, -30, 19, -5, 12, 19, 0, 15, 11, -3, -5, 7, -15, -9, -11, 16, -2, -3, -7, -5, -17, -3, -19, -4, -7, 5, 16, -20, -23, -9, -19, -9, -3, -2, 12, 14, 19, -8, 22, 17, 5, -13, -10, -24, -6, -5, -9, -3, -11, -7, 1, 19, -12, -10, -4, 24, -27, -19, -12, -12, 2, 2, 6, 23, -29, 0, -7, -21, 5, 8, 9, -13, -3, 8, -9, -6, 3, 8, 2, -28, 12, -18, 16, -8, -12, 0, 29, 10, 17, -31, 15, -23, 15, 28, -25, 14, 9, -6, 14, 6, 19, 31, -31, 32, 11, 6, -11, 8, -9, 10, 21, -3, -28, -33, 13, 9, -17, 9, 2, 7, 11, -25, 4, -9, 1, -4, -4, 1, -15, -3, -17, -4, 1, 18, -13, 27, 14, -5, 1, 7, 2, -9, 28, 0, 5, -35, -13, -8, -20, 40, 11, -11, 5, -21, 1, -2, -23, 28, 29, -12, 6, -20, -19, -11, 12, 8, 25, 29, -12, -18, 11, 20, -9, -2, -19, 29, 13, 1, -17, -19, -20, -22, -17, -19, -14, 0, -15, -31, -10, 10, -24, 23, 8, 22, 9, 15, -10, 0, -17, 9, 1, -18, -8, 6, -6, 44, -2, -6, 16, 11, 4, -25, 14, 11, 6, 6, -9, -15, 13, -7, -12, 2, 0, 11, 32, -11, -1, -14, 9, 25, -16, 16, -29, 4, -5, -3, -10, 4, -5, -2, -33, 18, -1, 12, -7, 17, -11, 36, -9, -8, 15, 12, -2, 25, 26, -12, -10, -10, -4, -3, -1, -12, -14, 10, -2, -8, -24, 2, 9, 29, 9, -2, -9, 21, 4, -11, -14, 26, -13, 9, -12, 7, 3, 12, -11, -7, 10, 34, -10, -25, -2, 31, 5, 12, 17, 11, 2, -10, 15, -11, -8, 20, 19, 13, -25, -9, -4, 19, -10, -10, 9, -26, -22, 11, 1, 31, -12, 14, 12, 19, 17, -11, 9, -19, -17, 22, -27, 13, 22, 27, -13, -1, -2, 16, -15, 36, 2, 3, -17, 17, -7, -10, 6, 13, 33, -24, -6, -4, -18, -18, 0, -6, -12, -31, 2, -23, 12, 19, 3, 29, 3, -17, 14, 17, 19, 2, 11, -6, -22, -31, -23, 4, -19, 6, -4, -2, -8, 5, -13, -13, 11, -5, -14, -17, -12, -2, -6, -12, -16, 44, -10, 21, 22, 16, 6, -3, 5, 21, 9, 6, -27, 11, 9, -13, -15, -1, -22, 4, 4, -41, -8, -15, -14, 32, 19, 33, 18, -6, -20, -7, -19, 38, 4, -13, -1, -14, 16, 1, -21, -6, 32, 
-14, -16, -15, -10, -21, -20, -8, -40, 32, 14, -23, -17, -27, 3, 28, 34, 4, 16, -15, 10, -14, 11, 7, -25, -3, 11, -10, 15, 12, -22, 15, -7, -2, -9, -4, -8, 4, -12, 10, 11, -6, 12, 27, -8, 7, -21, 3, -3, 34, -2, -2, 14, -14, 7, 12, 3, 11, -35, -5, -9, -40, 19, 1, -14, 26, 40, 2, 14, -5, 16, -10, 7, 29, -5, 16, -12, 9, -17, -29, -25, 9, 40, 14, -12, -21, -20, -6, -21, 8, -21, -6, -1, -1, 17, 21, 23, 15, 11, 9, 15, -4, 9, 5, -3, 18, -29, 16, -3, -8, 0, -16, -16, 21, -2, -3, -17, -15, -21, -5, -3, 25, 36, 15, 14, 25, 3, 5, -30, -27, 14, 20, -5, -1, -17, 7, 21, 25, 15, -9, 16, -24, -21, -15, 18, 14, 0, 22, -3, -14, 2, -16, 14, -12, -5, 21, 0, 33, 6, 7, 11, -5, 2, -16, 5, 12, -18, 22, 10, 28, 11, -6, 0, 9, -13, -5, 12, -2, -17, 18, -8, -4, 0, -5, -23, 5, -34, -10, -3, -13, 18, -20, -1, 0, -12, -10, -5, -17, 13, 15, 6, 33, 11, -13, -4, -17, 7, -10, -4, 12, -19, 3, 8, 15, -14, 1, 2, -7, 7, -22, 8, 24, -5, 48, 8, 13, 19, 20, 19, -18, 13, 10, 14, 16, -25, 14, -1, 20, -2, -14, 16, -10, 28, -13, 4, -11, -18, 4, -22, 4, -19, -6, 17, 11, 5, 10, -5, 9, 27, -6, -18, 12, 13, -39, -5, -3, -1, 34, -18, -18, -6, -9, -12, 17, 45, 12, -7, 21, -5, -2, -5, -9, 3, 19, 0, -15, 9, -5, -10, -26, -28, 22, -13, 5, 16, 12, -3, -14, -21, 2, -2, 17, 8, 5, -5, 18, -1, 10, -23, 1, 3, -40, 11, -12, 16, 15, 2, -1, -3, -18, -13, 4, 2, 24, -11, 25, 5, 31, 20, -18, -15, -27, 2, -22, 10, 12, 12, 24, 27, -15, 4, -1, 11, 8, 17, -14, 19, -2, 2, -10, 15, 36, 22, 3, 16, 21, 15, 12, 1, 12, 36, -5, 37, 7, 18, -4, 4, 1, 5, 14, -33, -18, 5, -15, 3, -16, 12, -14, 6, -6, -12, -32, 4, 8, 3, 1, 37, 11, -22, -5, -13, -31, -10, -15, 1, -3, -11, -4, -15, 16, 17, 7, -6, -13, 0, -37, 11, -6, 13, 9, -16, 4, -24, 12, -12, -6, 27, -17, 6, 16, -5, -39, -19, 9, 9, 27, 22, -6, 17, 14, 2, 9, 13, 21, -5, 10, -22, -13, -18, 14, 7, 15, 5, 3, -6, 6, -16, 13, -8, -3, 10, 17, -11, 30, 4, 8, -19, 11, -5, 23, -15, -5, 19, -41, 10, -11, 15, -4, -3, 2, -15, -14, 22, 12, -15, -17, 19, 3, -22, -13, -26, 8, 0, 49, -9, -9, 19, -22, 6, 2, -16, 26, -8, 3, 9, -15, -21, -20, 10, 17, -1, 14, -9, -25, -34, -22, 26, 23, 12, -7, 3, 2, -8, 7, 10, -2, -22, 16, 12, -5, -9, -2, -19, 7, 0, -3, 20, -14, 0, 20, -5, 13, 6, -25, -4, -23, 6, 16, 12, -12, 0, 18, -7, 31, 18, -30, -15, 6, 7, 21, -15, 7, -12, 4, 3, -24, -23, 20, -10, 6, -6, 0, -2, 14, 10, 15, 0, -10, 11, 13, 15, 7, -15, -2, -18, 43, 16, -15, 7, -17, -15, 22, -5, -18, 12, -2, 6, 11, -19, -13, -5, 11, -21, -9, 23, 15, -8, 7, -6, -20, 5, 20, 0, 3, -10, 13, 18, -11, -26, -18, -9, -13, 13, 2, -9, -3, -10, 1, -2, 6, 19, 15, 12, 19, 25, 3, 22, 33, -16, 0, -15, -29, 1, 12, 4, 19, -11, -16, 3, 15, -3, 21, 13, -5, 19, 18, 15, 14, -13, -26, 0, 9, 19, -17, 1, -45, -16, 15, -28, 1, -1, 2, 5, -6, 0, 8, 7, -5, 19, 9, -10, -12, -14, -13, 5, -6, -9, -29, -19, -17, 4, 25, 21, 12, -15, 3, -8, -30, -16, -11, 10, -8, 1, -21, 4, 5, 6, -14, -12, -2, -17, 39, 6, -1, 10, -27, -3, 14, 9, 10, -20, -2, -7, 12, 15, 28, -9, -19, 16, 3, 18, 17, 24, 3, 3, -13, 2, 38, -15, 4, -28, 16, 3, -25, -20, -5, -29, 6, -11, 20, -15, -6, -15, -11, -18, -23, 11, -9, 3, -9, 5, 27, 13, 16, -7, 17, -19, 36, -22, 19, -10, -44, 14, -21, -16, 20, 14, 5, 18, -16, -4, -3, 3, 13, -8, -39, 18, 26, 8, 1, 12, -24, -8, 15, 13, 17, 3, -13, 13, 2, 5, -16, -22, 5, -12, 19, -17, 12, 14, -10, 5, 8, -10, 8, 19, -21, 4, 1, 11, 15, -30, -15, 4, -2, 2, -8, -12, -34, -5, 8, -17, 9, -19, 0, -9, 3, -26, 6, 19, 20, 18, 10, -23, 20, 20, -5, -7, -26, -7, 10, 3, 12, -19, 25, -15, 11, -22, -29, 5, -7, 3, -9, -7, -5, 15, -2, 18, -7, 10, 18, 6, 19, -12, -8, 11, 14, -8, 25, 9, 22, 11, -2, -4, -30, 
-21, 4, 7, 3, 6, -36, -33, -13, -22, 29, 8, -10, -20, -10, 17, -27, -20, -26, -2, -18, -8, 24, 8, 20, -17, -13, -5, 1, 13, -21, 11, 22, 37, -15, 17, -22, 18, 11, 17, -8, -26, -4, -5, 22, 6, 8, -9, -1, -15, 53, -13, 3, -20, -23, -8, 18, -13, 7, 3, 14, -2, -11, -3, 18, -6, -20, -15, 36, -8, 27, 33, 4, -16, 11, 14, -7, 5, 15, 18, 3, 12, -33, -4, 22, -1, -8, -2, 5, -4, -3, -9, 1, 2, -11, -15, 26, -17, -9, -13, 25, 19, 5, -8, 15, -5, 18, 9, -2, -4, -19, -1, -3, -9, -12, 19, 12, -13, 4, 5, -24, -33, 28, 0, 1, -13, -3, 11, 13, -19, -3, -3, -31, 18, 6, -17, -11, -17, -3, -3, -22, 9, 1, 12, 2, 9, 28, -20, -1, -25, -6, 10, 11, 13, -1, 13, -7, 7, 6, 4, 16, 18, 6, -6, -30, 12, -14, -4, 6, -33, -27, 12, 19, -16, 7, 0, -32, 12, 4, 19, 28, -10, 14, 19, -45, -2, 13, -13, -12, -7, -1, -16, 40, 8, 8, -12, 14, -12, -6, 11, 19, 14, -2, 3, 21, -1, -5, -8, 13, -4, 51, -6, -19, 20, -40, 5, 17, 9, -46, -15, 11, 16, 27, 29, 4, 3, -12, -1, -12, 7, 4, -4, -21, -13, -6, -9, -5, -10, -6, 2, -3, -10, 9, 3, 10, -12, -9, 22, 24, 22, -16, -6, -1, -22, 5, 25, -46, -13, 9, -19, -45, 15, -1, 17, 42, 51, 19, -19, -9, 11, -10, 8, 7, 2, -10, -8, 7, 13, 21, 0, 3, -27, -25, 18, 4, 4, 27, 31, -8, 19, 18, -2, 3, -16, -11, 4, 17, 13, -40, -20, -34, -10, 34, 51, 23, 7, -15, 8, 15, -6, -32, 12, -23, -9, 20, 10, 2, -26, -5, -16, 28, 34, -5, 11, -14, -3, 1, 16, 43, 38, 0, 8, -58, 19, -30, -4, 21, 9, 10, 13, -6, -23, 7, 4, -5, 4, -20, -4, -3, 0, 13, 24, 12, -8, -23, -17, -23, -2, 13, -26, 15, -17, -20, 16, -1, -9, 4, 11, 17, 3, 7, 22, 32, 0, -18, 25, -50, -3, 8, 4, -42, -9, 4, -5, -1, 10, 26, 3, 15, -6, -24, -19, 19, 11, 17, 7, -10, 13, 6, 20, -18, -24, -2, -13, 19, -11, -36, -12, 19, 10, 10, -20, -10, -19, 1, -28, -23, 12, -12, 11, -25, -13, 36, 4, -28, -7, 6, -5, -4, -3, 4, 39, -16, 19, -13, 6, 10, 9, -23, 4, 12, 17, 17, -9, -11, -19, 16, 24, 28, 21, 9, -12, -10, 16, -15, 20, -10, -20, 19, -4, 26, 13, -9, 3, 23, 9, 15, -3, 15, -4, -18, 19, 15, 29, -32, -1, -6, 11, 41, -3, -29, 11, 31, -13, -16, 7, -40, -28, -14, 5, 19, -17, -8, 3, 24, -10, -9, 6, -3, -7, -20, 18, 5, 16, -12, -2, -26, 12, 16, -4, -14, 12, -8, -23, 13, 16, -19, -16, -5, 0, 2, 6, -13, -14, -6, 18, -20, -31, 7, -9, -17, -1, 16, 17, -1, 6, -16, -16, -23, 17, 5, -23, 18, -8, -13, -7, -2, -14, 3, 3, 3, 16, -3, 2, -3, -19, 19, 9, 0, 29, 5, -3, 40, -16, 9, 2, -42, -19, 22, -11, -12, -19, 4, 11, -2, -31, -16, 18, 15, -11, 19, -14, -29, 35, -11, 18, -13, -11, 6, -12, -23, 0, -1, 21, 28, -16, 1, 2, -40, 43, 2, 12, -25, 3, 12, 16, 8, 39, -25, 8, -38, -1, -13, -19, 16, 26, 2, 6, 23, -11, -6, -11, 5, -6, -14, 15, 8, -10, -3, 5, -11, -27, 5, 61, -2, 15, 9, 19, 11, 14, 5, -14, -12, -3, -3, 19, 7, 37, 9, 27, 5, -7, -5, 3, 7, -18, -21, 25, -9, 9, -4, 12, -4, -10, 13, -16, 1, -12, 16, 16, -26, -14, -12, 19, 7, 6, 3, 5, -16, -21, -18, 17, 22, 9, 24, 18, 9, -5, -4, 7, -3, 12, 21, -22, -22, 2, 11, 10, -13, -16, 17, -7, -38, -1, -34, -29, 24, 5, 25, 16, 3, -24, 9, -17, 1, -4, 11, 15, -19, -5, 7, 20, -3, -7, -10, -20, -26, 5, -22, 19, -6, -13, -13, -21, -28, -10, 25, 27, 22, -13, -19, 18, 8, 22, -14, 17, 10, -14, 13, -1, -20, 8, -27, 13, 27, -6, 16, 11, -2, 17, -30, 6, 24, 15, -11, 16, -22, 28, 3, -38, 27, -18, 17, -4, 3, 4, 4, -6, 42, 15, -21, 14, -18, -10, -16, -8, -7, 3, 8, -22, -4, -13, -17, -12, -25, -6, 14, -19, -10, 22, -11, -21, -7, -25, 12, -4, 4, -7, -27, -4, -20, 11, -2, -3, 38, -9, -5, 0, -21, -17, 18, -15, 39, -10, -7, -7, 9, -26, -27, -18, -16, 10, -7, -16, -19, -25, -15, 3, -8, -10, -18, -10, -10, 14, -6, 5, 23, 4, 15, 11, -11, 8, -10, 3, 13, 20, 2, 7, -6, 3, -31, -5, -18, -3, 
32, -16, 5, -22, -3, 0, 7, -15, -1, 5, -2, -13, 28, -7, -17, -6, 10, 19, -21, 8, -28, 16, 24, -2, -17, -11, -12, -8, -13, 11, 2, -16, 0, 30, 19, -16, 17, -12, -1, -11, -28, 3, 5, -17, -27, -2, 4, 16, 21, 29, 4, 2, -29, -1, -7, 23, -14, 30, -3, -12, 50, -24, -22, -5, -33, -5, -8, 1, 0, -4, -39, -15, -21, -6, -10, 4, 17, -12, -24, -18, 21, 1, 10, -13, -27, 0, -21, 25, 17, -5, 13, 18, -2, -23, -6, 15, 7, -17, 0, -18, -3, -25, 13, -5, 20, 22, 17, -4, 4, -3, -60, 0, 19, -22, -22, -8, -7, -13, -32, -4, 4, -2, -9, -4, 11, -5, -2, -7, -27, 14, -15, -3, -3, 13, 15, 3, 7, 24, -21, -12, -5, -6, 6, 5, 7, -23, -6, -15, 3, 9, 30, 29, 12, -7, 32, 14, 12, 10, 13, 1, -19, 5, -10, -3, 11, 13, -1, 11, 10, 10, -2, -17, -4, 8, -14, -10, 10, 7, -6, 11, -28, 10, 24, -20, 13, -11, 11, -16, 0, -16, -18, 16, 10, -15, 3, -11, -7, 6, 12, 4, -4, 9, 9, 15, 9, -1, -1, 8, 2, 23, 8, 19, 12, 10, -26, -11, -10, 1, -3, -22, -18, -21, -11, -9, 26, 15, 9, 7, -16, 9, 13, 19, -18, 12, 23, -8, -4, -16, 52, 7, 15, -20, 0, -16, 6, -1, -32, 0, -17, -6, 10, -27, 20, 16, -4, 6, 2, -4, 19, -14, -16, -10, 5, -9, 16, -8, -11, -10, 18, -17, -10, -21, 32, -6, -8, 14, 16, 11, -18, 10, -8, -11, 36, -2, 7, 11, 74, -34, 7, -15, -4, -13, 9, -7, -11, -2, 15, 16, 16, 13, 29, -26, -25, -6, 13, 6, -8, 6, 7, 14, -40, 0, 7, -11, 21, -6, 19, -21, -14, -13, 10, 1, -15, -21, 5, -16, -22, 8, -25, -15, -8, -14, -20, -1, -27, 10, 21, -8, 9, -8, -16, -1, 5, 6, 17, -13, 6, -17, -8, 7, 18, 17, -3, -6, -32, 9, 6, 24, 2, -23, -11, -4, 0, -9, 13, 1, 35, 11, -31, -8, -5, -11, 1, -4, -23, 34, 9, 19, 3, -7, -13, 9, -4, -21, -6, -13, -29, -10, -4, -14, 55, -17, 35, 1, 12, -10, 6, -1, 14, 6, -4, 2, 6, -4, -17, 9, -18, -19, -25, 20, 0, 18, 8, -12, 8, 18, 7, 13, 5, 1, -8, -13, 20, -11, 13, -24, 7, 25, -7, 18, 38, 39, -8, -4, 18, -6, 0, -23, 11, -3, 6, -14, -3, 7, -8, 1, -1, -31, 13, -19, -11, -1, 14, 8, -8, 4, 26, 41, -8, -16, 17, 9, -43, -5, 19, -10, -1, -10, 15, 12, 12, 0, -3, 8, 25, -12, 11, -15, 0, 5, 19, -10, -3, 15, 27, 1, 9, 3, 5, 0, 6, -10, 30, -11, 27, 3, 10, -10, -17, 1, -11, -7, 37, 1, -19, -13, 5, -9, 20, 13, 20, -3, 1, -5, -1, -12, 2, -2, 26, -24, -20, -8, 16, -4, -18, 12, 37, 1, -3, 20, -3, 22, -20, -8, 11, -17, 11, 32, 1, 13, -3, -20, 0, -3, -8, -17, 11, 12, -29, -2, 20, -12, 18, -3, 25, -13, -10, -1, 19, -18, 12, 0, 0, 14, -6, -23, -10, 25, 9, -5, -16, -14, 5, -10, 25, -5, 4, -25, -48, -15, -16, -4, -3, -9, 10, -11, -17, -19, -19, -6, -6, -14, -16, 8, 15, 22, 3, -10, 23, -13, 22, -14, -14, 18, -16, -15, 1, -10, -11, -4, -20, 2, -24, 2, -6, 3, 24, 24, 8, -12, -10, 9, 10, -18, -8, -12, -2, -11, 24, 7, 21, 13, 26, -44, 2, 1, -36, 5, 6, 10, 44, 6, 2, -7, 13, -16, 22, 3, -12, 11, -1, 15, 14, 12, 18, 1, 17, 5, 6, -18, 35, -15, 17, -15, 11, 5, 7, -1, -9, -5, 43, 5, -18, 10, -14, 11, 25, -2, -4, 4, -4, -24, -36, 6, 38, 12, -5, 5, 16, 37, -17, -20, -19, -6, -8, 14, 18, 0, 30, -1, -8, -15, -22, -5, 8, 19, 20, 7, -12, 11, 8, 17, 3, 13, -8, -23, 21, -24, 4, -12, 4, -6, 26, 6, -13, -1, -25, 44, 22, -14, -8, -11, -2, -17, 8, 4, -1, 10, 21, 5, 14, 9, 11, -18, 7, 21, -27, 12, 22, -5, 8, 0, 11, -11, 8, -21, 5, -4, 11, -15, 34, 39, -5, 12, 21, 18, 8, -10, 5, -22, 14, -27, 2, -7, 0, -6, 7, -11, 8, 6, -17, 17, -33, 24, 5, 3, -23, -1, 19, -10, -1, -28, -5, 5, -26, 3, 23, 19, 9, 18, 23, -38, -25, 14, -10, -17, -14, -16, 3, -14, 0, 4, -3, -2, 9, 3, 4, 12, -28, 15, 17, -24, 9, 6, -24, 7, -41, -21, -20, -3, 15, -23, 17, 0, -38, 2, 6, 17, -12, 36, 10, 24, 2, 5, 10, 4, 10, 39, 3, 7, 5, -22, 30, -5, -32, -8, 17, -20, -4, -20, -3, -7, -1, 34, 7, -7, 22, 10, 5, -24, -6, -5, -5, 6, 
6, -16, 14, -25, -12, 16, -30, 16, -4, -19, 10, -11, -24, 10, -4, -10, -10, -11, 5, -18, -4, 23, -25, -19, 13, -6, 6, -10, 14, 9, -8, 11, -4, 9, 6, -26, 13, -8, -5, 2, 13, -10, -17, 6, -29, -43, 19, -1, -25, -1, -16, -17, -14, 1, 5, 31, 1, -2, 5, 8, -22, -28, 41, -6, -12, -6, 14, -20, -8, -10, -17, 23, 4, -4, 12, -11, -4, 3, 7, 29, -8, -17, -6, 16, 14, 17, -27, -21, -15, 11, -23, 7, -4, 2, -5, -7, 19, -26, 0, -1, 1, 9, 8, 11, -27, -23, 5, -23, -16, -6, 9, 9, -30, 2, 26, 3, 26, -1, 33, 16, 23, -17, 5, -7, 7, 2, 0, 23, -15, -18, -4, 4, 9, 9, 5, 20, 1, 15, -20, 10, 10, -8, -55, 1, 17, -18, 26, -1, 6, -4, 8, -30, -8, -18, 1, -3, -5, 8, -4, -7, 8, 1, -12, -21, -16, 9, 28, -10, 19, -19, -16, 17, 6, 12, -16, -2, -25, 9, 2, 19, 11, 18, 26, -16, 6, -21, -1, -7, 25, -15, -22, 4, -14, -5, -2, 10, -7, -21, 4, 25, -7, -23, -10, -21, 11, 23, 7, 45, 28, -8, 22, 0, -4, -16, 6, -1, -5, -16, 42, 18, 8, 21, -9, -14, 19, 11, 3, 0, 30, -5, 2, 0, -5, 14, -16, -15, 21, -15, -11, 15, -7, 0, 1, -3, -2, -25, 13, -9, -11, -27, -11, 13, -23, -17, 22, 1, 21, -4, 16, 11, 43, -10, 16, 16, 2, -8, -6, 15, -17, 16, 7, -6, 7, 17, 11, -8, 21, 48, -13, -15, -18, 11, 3, -28, 7, 13, -4, -1, 10, -3, 12, -16, 22, -11, -21, 8, 17, 2, 15, 10, 12, 4, -10, -4, 7, 14, -5, -21, -20, -1, -17, 5, 2, -2, -13, 10, -2, 14, 2, 15, -14, -17, -4, -6, -23, -24, 16, 0, -11, -5, 17, -14, -14, 19, -3, -23, -2, -7, 12, -25, 23, 16, -23, 8, 35, -3, 35, 14, 8, 28, -5, 20, 24, -7, -18, 4, -22, 28, 6, -8, -5, -9, 4, 9, 17, 18, -8, 19, -23, 11, 20, 3, -13, -39, 23, 7, -5, -14, 14, 7, 17, 7, 12, 10, -18, -23, 30, -18, -3, 26, -4, -15, 2, 11, 67, -10, 0, 27, -10, 53, 24, -2, 24, 4, 2, -25, -11, 4, -13, 17, 32, -16, 27, 23, 18, -22, 9, 7, -13, 14, 5, 39, 2, 11, -19, 2, 16, -5, 49, 7, 24, -20, 15, -28, 29, 16, 12, 4, 10, 28, 10, 15, -20, 6, -12, 13, 32, -7, -19, -15, 14, -7, -15, 8, -21, 16, 13, -10, 19, -22, 21, 30, 4, 17, -13, 11, -17, -3, -32, 11, -14, -4, -9, -12, -13, -26, 19, 11, 14, -20, 13, 9, 16, -7, 1, -8, 9, -4, 10, -5, 5, 10, 4, -9, 9, 19, 20, -25, 36, 17, 13, 22, -1, 17, 4, 16, -15, 9, 24, -9, 11, 14, -22, 0, 11, -4, 8, 0, 16, 23, 5, -13, 3, -2, 15, -24, -10, -9, 9, -10, 9, 1, -15, -18, 21, 4, 17, 5, 2, -5, -31, 1, -8, -9, 10, -7, 19, 1, -1, 7, 1, -4, -34, -17, -6, 8, -9, 10, -13, -4, -24, 4, 3, -8, 11, 13, -5, 15, 23, 22, -4, -3, -18, 6, 15, 14, 14, -11, -39, 8, 14, 3, -19, 23, 28, 4, 21, -7, -1, 24, 8, 5, 7, -1, -12, 7, -27, -20, -27, 5, 25, -3, 7, -18, 15, -8, -1, -18, 42, 14, 8, -27, -18, 12, 15, 4, -18, 26, 4, -7, 35, -38, 20, 10, -5, -15, 15, -11, -25, 10, -16, -4, -15, 3, 26, -22, 5, 11, -25, -8, 2, -2, 22, 1, 30, -4, -8, 0, 35, 11, -21, -22, -1, -14, -18, -1, 19, 5, -14, -16, -13, -5, -4, -2, -9, 12, 1, -8, 4, 19, 0, 14, 25, 4, 16, 0, -14, -29, -10, 0, -20, 4, 9, 5, -11, -22, 5, -8, 31, -12, -6, -22, -20, 1, 28, 6, -2, 16, 8, 17, -2, -6, -8, -36, 7, 16, 32, -22, 22, -3, -1, 8, 0, 3, 19, -18, -12, -9, 15, 12, 35, -15, 5, -17, 1, 19, -25, -4, 16, 23, 12, -21, 13, 20, -19, 21, -12, 5, -7, 5, -15, -30, -1, 14, -13, -8, 1, 14, 6, 4, 18, 5, -3, 19, 13, 9, 12, 13, 4, -12, 40, -25, 6, -20, 30, 10, 22, -14, 43, 5, -12, -18, 2, -18, 12, 7, 38, 10, -24, -1, -18, -4, 25, -2, 0, 7, 10, 16, -3, -3, -18, 2, -17, -16, -14, 6, -12, -20, 11, -45, -16, 15, -22, -16, 18, -7, 17, -31, 47, -7, -2, 12, 18, -2, 10, -21, 15, 3, -10, 15, 8, 8, -36, -15, -8, 0, 11, 18, -14, 15, 26, -32, -12, 15, 1, 11, -7, 10, -22, -20, -29, 7, -40, 14, 16, -29, 14, -17, 29, -22, -22, 6, -2, -3, 14, -12, -11, 2, 9, -3, 3, 19, 26, 14, 28, 8, -13, 5, -3, 1, 10, -18, 24, -10, 24, 
-9, 11, 21, -5, 23, 11, 11, -29, 1, -3, 8, 32, -20, -7, -4, -23, 14, 16, -9, 2, -21, 9, 4, 3, -17, 11, 4, 7, -15, -7, 10, 13, 14, 8, 11, -18, 9, 4, -7, -7, -15, 15, 0, -17, 17, 46, -23, 20, -10, 14, 3, 24, -17, 6, -4, 25, 10, -10, 4, -9, 17, -4, -17, -41, -10, -11, 24, -3, -3, 10, 19, -11, 18, 6, 45, 11, 3, -6, -14, -17, 15, 19, 26, 6, 0, 33, -21, 4, 9, -20, -24, -15, -2, 24, -2, -13, -23, 4, 6, -14, 2, 4, 8, 10, 3, -17, -2, 12, 5, 12, 9, 15, -16, 18, 19, -18, -6, -30, 23, -4, 3, 5, 14, -13, 31, 8, 3, -20, 9, 17, 26, -2, 4, -4, -5, -9, -11, 4, 18, -23, -29, 2, 12, 6, -22, -16, 6, -1, -27, 9, 10, -5, -21, -11, 17, 16, 20, 22, -10, -4, -20, 14, -24, -20, 16, 19, -16, 26, -1, 1, -11, 0, -19, -1, -6, 4, -19, 10, 19, -10, -4, 6, -9, -8, 0, -20, 6, -6, -14, 27, -17, -19, -6, -13, 16, -8, -2, -17, -14, 17, -5, -26, -1, -15, -15, 0, -23, 5, 17, 20, -8, -6, 12, -8, 7, 10, -3, 13, -11, -4, -10, 9, -17, 9, -12, -15, 21, -11, 21, -24, -21, 26, 5, 1, 9, -29, 16, 23, 9, 2, -21, 4, -4, 16, -22, 3, -24, 16, -19, -20, -17, -10, -4, 18, 0, 1, 2, 28, -4, 11, 12, 15, -1, -29, 15, -2, -14, 24, -31, 1, 17, 3, 4, 16, 7, 18, 3, 1, -13, -3, 12, -1, 9, 1, 12, 14, -8, -25, -14, 3, 7, 2, 6, 10, 15, -4, 0, 7, 8, 2, 20, 9, -19, -4, 2, -17, 6, -8, -34, -3, 0, 0, -2, -24, 27, -17, 2, -14, -8, -8, -1, 8, -10, -22, -2, 23, -9, 6, -7, -17, 22, -27, -12, 14, -14, -20, 19, 16, 51, 17, 22, -1, -16, 23, -20, 11, 20, -19, 17, -16, 10, 29, -21, 17, -1, 17, 45, -9, 1, -13, 7, -26, 1, -22, 19, -12, 0, -14, -19, -12, 9, -11, 13, 26, 15, -2, 7, -5, -11, -17, 68, -12, 9, -17, -24, -17, 25, -4, 27, 21, -22, 3, 1, 3, 5, 13, 25, -27, -20, -10, 10, -11, 24, 10, -22, -4, -24, 19, -9, 19, 1, -8, -7, -6, -7, 21, -19, -23, 28, -7, 30, -7, 3, -16, -22, 15, -12, 7, 44, -3, 0, 1, -8, 2, 20, -7, -42, 12, 14, 14, -19, -33, 20, 17, 20, -2, -10, 23, -3, 3, -25, -1, 22, -21, 6, -11, -16, 4, 2, -20, -12, -11, -3, 18, 0, -3, -20, -4, 1, 12, 16, 16, -9, 0, 0, -26, 0, -53, -5, -6, 13, 30, -4, 4, -21, 10, -4, -42, -10, 22, 16, -9, -15, 10, 19, 19, 5, -5, -2, -17, -1, -29, -17, -6, -24, 6, -16, -12, -2, 54, 9, 0, 5, 3, -22, 32, -23, -10, -15, -2, 13, -9, 11, 22, -16, -30, -22, 26, 1, -29, -5, -17, 11, -22, 11, 10, -7, 0, 18, 24, 22, -9, 15, 23, -8, -11, -15, 11, -11, 4, 1, -36, -1, 9, 24, -13, -3, -14, 1, -3, -8, 1, 2, 13, 6, 10, 19, -23, 16, -12, 33, -3, -16, -19, -19, 4, 7, -9, -17, 9, -3, -8, -3, 25, 4, -13, -14, 13, 8, -39, -11, 17, -15, -20, -19, 15, 23, 16, -8, -16, 10, 13, 5, 28, -2, -33, -6, 8, 7, -10, 3, -20, 16, -14, 14, -2, 9, -22, 26, 10, -20, 29, -16, 3, 1, -1, -8, 6, 16, 15, -9, -5, 4, 23, 20, -18, 6, -30, 9, 35, -22, -14, 18, -20, 3, 12, 10, -48, -10, -20, 8, 23, -23, -31, -7, 9, 13, 8, 21, -4, -2, -17, -23, -1, -21, 6, 14, -17, 9, 28, 11, -9, 1, -29, -5, -12, -10, 3, -21, 16, -17, 18, 22, -22, -9, -51, -24, 9, 2, 7, 27, 22, 1, -8, -17, -1, -3, -3, -22, 8, 0, 38, 2, 8, 3, -2, 1, -15, 3, 6, 14, 58, 15, -8, 6, -34, -15, 6, 13, -48, 20, -20, 4, 9, 18, -8, 18, -10, -6, 6, 20, 10, 4, -24, 25, 21, 3, 1, -14, 7, 11, -23, -22, 9, -21, 17, 16, 5, -16, -4, 6, 6, 16, 17, 19, -1, 2, -37, 3, -36, -13, 38, 15, 4, -10, -8, -8, 28, -21, -10, -13, -1, -18, 1, -15, -17, -17, -4, 1, 0, 5, -35, -6, -47, 2, -9, 22, 28, 14, -5, -6, -54, -14, 12, 4, 31, 12, -13, -20, 19, 19, 2, -29, 20, -9, -36, -19, -28, -1, 25, -9, -6, -17, 13, 11, -10, -18, 9, 10, 29, -4, -20, -3, -24, -22, 14, 1, -2, -16, -11, -2, 32, -9, -8, 15, 16, 12, 17, -14, -46, 9, 0, 0, 6, -4, 16, -20, -1, 9, -20, 19, -11, 0, 16, 6, -9, 15, -10, 3, -25, -12, -16, 1, -31, -7, -44, -20, 8, 13, 0, -12, 28, 
1, -3, -11, 0, 15, -43, 0, 11, 0, 6, 21, 12, -5, -13, 11, -19, -13, -15, -5, -7, -11, 9, 9, 11, -6, 0, 2, -6, 17, -5, -2, -8, 18, 13, -24, 1, -7, 25, 13, -31, 15, 1, -32, -21, -12, 14, -18, -30, -13, -22, -5, 15, -17, 19, 3, -10, -11, 7, -21, 16, -1, 13, -12, 24, -4, -13, 7, -23, 0, 43, 9, -10, 1, -9, -13, -23, 14, -15, -4, -29, 10, 11, -6, 6, 2, -12, 14, 5, -6, 13, -13, 13, 6, -21, -15, 25, -16, -14, -22, -5, -2, -26, -12, -46, 25, -19, -22, 7, 16, -10, -18, -42, 8, 8, 4, -2, -1, -11, 12, 18, -3, 17, 6, -16, -22, -36, -6, 6, 18, -16, 16, -13, -13, 10, 11, 1, -19, 10, -14, -12, -7, -13, -17, -26, -21, -4, 27, -4, 4, -7, -10, -33, 4, 9, -6, -18, -15, -11, 13, 17, -17, -11, -2, 18, -1, 23, 17, -23, 16, -2, 9, -4, 4, 17, 10, 4, 14, -25, -7, 9, -12, 22, 18, -17, -20, 19, -13, -5, -15, -7, -4, 28, -19, 2, -7, 6, -3, 5, -2, -26, 4, 19, 27, 1, -8, -16, -12, -9, -14, -2, -3, -12, 14, 3, -20, -7, -6, -33, -2, -5, -3, 29, -18, -17, -10, -16, 8, -3, -8, 13, -18, -21, 7, -6, -8, 10, 7, -14, -15, -12, 2, -15, 3, -28, -2, 11, 17, -34, 12, 8, 9, 0, -13, 47, 10, 2, -14, 38, 11, -6, 0, -8, -11, -5, -2, -42, 16, -3, 0, -16, 8, -17, -18, -14, -9, 15, -12, -17, -22, -15, -2, -24, -13, 8, 3, 6, 0, 22, -21, -11, 1, -5, -6, 52, -2, 0, -15, 8, 19, 27, 6, -1, 1, -29, 9, -1, -21, -28, 1, 22, -12, 5, -15, -4, 3, -32, -17, 0, -17, -3, -4, 0, 10, -14, -13, -13, -1, 14, 10, -30, 1, -2, 17, -36, -6, -6, 13, -36, -31, 11, 15, 18, -22, 27, -8, -10, -6, -19, -8, -4, 0, 0, -20, -3, 12, -7, -12, 20, -16, -5, 9, 19, -16, -26, 2, -23, 1, 20, -14, 34, 30, 34, 9, 5, -10, 22, -20, -23, 1, -7, -7, 15, -8, -18, -13, 25, -4, 2, 2, 16, -18, -18, 15, 1, -14, 1, 16, 16, -8, -7, -13, -12, -30, 7, 6, -10, -18, -16, -11, -43, -6, -17, -10, -13, -5, -9, 6, 21, -8, 20, 29, 14, -20, 18, -16, 26, -5, -7, -3, 10, 17, -12, -10, -16, 1, 12, 7, -20, 15, -20, -17, 0, -18, 1, -11, -17, 19, 14, 1, -21, 11, 11, 7, 8, -8, -13, -1, 20, -13, 17, -8, -13, 14, 13, -18, -3, 15, -36, -16, -12, 4, -8, 7, -5, -18, 3, 23, -5, 11, 6, 5, -10, -3, -12, 7, -19, -7, 21, 7, 15, -5, -12, 13, -18, -14, -7, 23, -20, -9, 14, -24, 5, -1, 19, -5, -33, 11, 11, 31, 0, -8, -18, -28, 1, 23, 25, 10, 3, -11, 1, -24, 39, 27, 10, -11, -18, -20, -5, -10, 11, -9, 3, 5, 11, -10, -8, -3, 22, 22, 12, -2, 7, -15, -15, -44, 9, -11, 5, 8, 9, -11, -18, 24, 8, -11, 1, -22, -11, -11, 3, 6, -3, 8, -5, 15, -10, 13, -5, -1, 17, 26, -21, -1, 10, -3, 0, -3, 17, 15, 0, -3, 17, 10, -9, 18, 2, 18, -15, 27, 2, -8, -18, 7, -11, -2, -26, 1, -4, -17, 4, 7, -13, 10, -35, -3, -2, 13, 18, 25, -39, 29, 19, 29, -17, 6, 8, 29, -32, 2, 5, 0, -9, -20, -14, 3, -3, 6, 5, 20, -22, -11, 2, -28, 3, 17, -22, -14, -17, 9, -17, -5, -2, 6, 5, 27, -12, -21, 4, 0, -16, -1, -32, -20, 6, 13, -14, -9, 22, 1, 0, -14, -21, 17, -18, 4, -22, 14, -10, 0, -2, 13, -19, 9, 14, 18, 3, -21, 13, 10, -8, 7, 7, -16, 10, -4, -11, -10, 2, 19, -44, -8, -25, -20, -9, -10, -14, 3, 25, 19, -2, -17, 13, 0, -15, 1, 19, -20, 28, -23, 0, 8, 6, 3, -31, 15, -4, -19, -9, -3, 4, 0, 13, 10, -10, -34, 10, 13, -1, 9, 17, 2, -27, 15, -17, 16, 8, 6, -47, -45, 4, -15, -14, -20, -19, -5, -17, 15, -13, 27, 12, 5, -2, 11, -14, -40, -2, -1, -11, -16, 16, 36, 2, 7, -23, 15, -1, -22, 11, -8, -11, -4, 0, 13, -10, -11, -24, -1, -8, 5, 11, -5, 6, 7, -25, -1, 9, 45, -22, -1, -14, 15, -15, -6, 19, 36, -22, 2, -21, 1, 18, -4, 15, 8, 19, 25, -2, 9, 17, -6, -2, 38, -22, 7, 4, -21, 3, -4, -27, -29, 26, 28, -18, 2, -21, -14, -23, -3, 15, 2, -5, 16, 13, 8, 0, -10, 16, 10, -4, -12, -4, 6, -32, 14, -4, 5, 16, -17, 3, 13, 15, 21, -20, 11, -15, -13, 5, -25, 15, 10, 12, 11, 5, 
11, -14, 15, 19, 11, -16, -1, 3, -16, 5, 19, -8, -22, -26, -32, 9, 20, 4, 1, -21, -19, -6, -4, -19, -3, 10, 12, 4, 0, 15, 22, -17, -4, -13, 2, -12, -22, -37, 4, -2, 11, 6, 16, -32, 10, -15, -3, 14, 3, -16, 17, 16, -5, -22, -11, -8, -6, 11, -13, 13, 20, 15, -4, 21, -1, -18, 2, -6, -38, -25, -2, -7, 25, -12, 5, 15, -1, 2, 14, -14, 0, -3, 6, 5, -15, -5, -15, 4, 17, -13, -13, -11, 3, -27, -30, -3, 14, 8, -18, -2, 5, 17, -2, -5, -3, 18, 14, -36, -11, 24, -3, -15, -16, -14, 9, -14, -23, -27, -18, 16, 15, -17, 25, -17, -19, -29, 2, -14, 11, 12, -19, -41, -33, 21, 24, 2, -4, 11, 8, 16, -30, -4, -21, -5, 16, 15, -18, -24, -54, 11, -16, -9, 18, 14, -3, -31, -20, 18, -14, 15, -22, 0, -6, 2, -18, -6, -5, -4, -13, 18, 14, -13, 15, -58, -14, 8, 23, -14, 1, -1, 14, 15, -25, -20, 10, 17, 33, -27, -20, -13, 4, 14, 1, -13, 6, -4, -20, 15, -12, -22, 7, -14, 7, -17, -16, -31, -14, -7, -13, -3, -22, -29, -43, -25, -19, -8, 13, -7, 9, -7, -20, 20, 33, -6, 7, -13, 7, -7, -6, -11, 0, -14, -33, 9, 7, -12, 25, -5, -27, 2, 13, 7, 14, -23, -4, -27, -16, 10, -27, 10, 13, 7, 8, -46, -31, -8, -32, -20, -19, -8, -18, 10, -17, 5, -12, -22, -24, -17, -14, -21, -39, -2, 16, -11, -3, -3, -13, -24, 39, 20, 6, -1, -12, 21, 24, 9, -3, -26, -15, -15, -5, -9, -4, -3, 9, -7, 6, 4, 4, 23, 3, 13, 10, -12, -9, 9, -7, 26, 2, -12, 9, -5, 29, 16, -20, -54, 4, 3, -21, 13, -13, 3, -9, 17, 2, -16, -4, 19, -18, -15, 0, -9, -9, 1, 14, -3, -5, 3, -9, 7, 2, -28, 1, 12, -25, -3, 2, 19, 0, 10, 27, -23, 4, -19, -22, -6, 3, -11, 16, -11, -19, -9, 0, -7, -17, 23, 3, 7, 6, -17, -16, -7, 4, -6, -19, -20, 3, -1, -2, -19, 27, 18, -4, 13, -20, 0, -2, 11, -10, 18, -4, -2, 10, -3, 22, -5, -13, -13, -1, -21, -6, 13, -1, -7, -8, 28, -15, -2, -3, -22, -35, 8, 15, 10, 19, -10, 17, 6, -12, 33, -26, 12, 16, -5, -25, 0, -11, 26, 9, -16, 15, 15, 5, 19, -6, -9, 21, 12, 19, -14, -16, -10, -5, 18, -10, -3, 7, -23, -3, 12, 16, -9, 21, 3, -41, -19, -18, -11, 14, 6, -5, 22, 1, -9, -3, -14, -12, 3, 8, 14, 4, -20, 0, -23, -6, 3, 10, -3, -22, -3, -3, -10, -9, -15, -21, 27, -13, 6, 14, 15, 11, -19, -18, -7, 25, -6, 12, 16, -1, -18, 14, 18, 16, -18, 14, 16, 6, 34, 7, -24, 2, -17, 14, -20, -8, 6, 21, 24, 4, 16, -17, -13, 14, 4, 16, -3, -23, -3, -2, -13, -15, -23, -2, -4, -27, 3, 28, 15, 21, -15, -18, 16, 20, 17, 4, -4, 20, 1, -6, -10, 17, 17, -13, -15, 28, -5, -7, -1, -13, 11, 1, 16, 20, 25, -29, -9, 4, -2, 8, -15, -19, 1, 0, 8, -5, 22, 5, -19, 24, 18, -18, -11, -5, 16, -37, 6, -19, 18, -7, -2, -7, -7, -11, 11, -3, -18, 5, -4, -4, -6, 7, -5, -11, 4, 16, -9, 8, 18, -18, -10, 7, 2, -4, 7, -3, 5, -5, 3, 9, 20, 19, 11, -2, -11, -4, 11, 8, 13, 19, -26, 14, -19, -10, -8, -2, 7, 1, 11, -26, -2, -24, -5, 11, -14, 9, -8, 6, 3, 13, -10, 7, -12, -26, 8, -3, 1, -6, -8, 3, -21, -18, 6, -14, 3, 15, -28, -14, -7, -23, 22, 13, -13, -1, -10, -13, 13, -4, -5, -14, -8, 19, 9, -8, -6, 13, 1, 3, 18, 8, 7, -33, -22, 16, -10, 2, 3, 2, 7, 3, -46, -30, 31, 19, -1, -23, 2, -28, 7, -5, -17, -7, 7, -5, -18, 14, 32, -20, 14, -13, 14, -28, -4, -1, 26, 12, 16, -21, 3, -11, 9, 7, 38, 11, 42, 17, -3, -14, -18, -33, -8, -18, 14, -16, 19, 6, -1, 24, 20, -23, -3, 16, 14, 2, 3, -19, -22, -22, 32, 1, -8, -36, 18, 3, -25, -6, 12, -4, -9, 0, -15, 19, -20, 14, 2, 26, -25, 23, -1, 5, 1, 1, 0, 31, 8, -8, -1, -10, -8, -31, 13, -6, -21, -9, 12, -13, 15, 25, -1, 3, 2, 14, -12, 15, -35, 18, 32, 23, -8, -4, -13, -5, -2, -4, -21, 14, -11, -6, 16, 3, -11, -24, 10, -12, 2, -28, 17, -12, -4, -3, -26, -9, 14, 22, 17, 28, 13, 15, 21, 8, -14, 2, -6, 8, 13, -6, -9, -11, 3, 24, -15, -7, -16, 20, -24, -30, 3, -20, 2, 33, 9, -3, 13, 
12, 5, -17, 19, 6, 17, 24, 18, 7, -1, -31, 22, 12, 9, 8, -8, -34, 5, -16, -6, -7, 3, -31, -19, -18, 10, 19, 17, -22, -7, 28, 47, 5, -24, -18, -9, -14, 4, 17, 4, 9, -20, -6, -27, -16, -18, -9, -22, 3, -4, 6, 17, -6, 2, -5, -16, 19, -12, -13, 15, -16, -22, -27, -16, -6, 15, 9, -12, -6, -13, 5, -21, -16, 2, 20, -24, 16, -44, -17, 15, -2, 15, -20, 19, -3, 22, 27, -26, -23, 1, -6, 9, 0, 15, -17, 25, -12, 5, 11, 13, -26, -1, -7, -40, -20, 16, -21, 2, -12, 8, -28, 13, 12, -5, -14, -4, -14, -14, -20, 8, 19, -21, -6, -15, 0, 22, 3, -26, -16, 15, -29, 16, -4, -3, 3, 9, -14, -3, -3, 6, -11, 14, 7, 8, 2, -20, -4, -18, 8, -15, -5, 5, 3, -12, 14, -15, 1, -23, 7, -3, 35, -2, -19, 8, 13, 5, 6, -20, 10, -9, 8, 16, -11, 3, -26, -19, 15, -18, 1, 20, -6, -8, 9, 5, -23, 2, -2, -18, -18, -1, 13, 9, 18, -8, -9, 11, 9, -25, 21, -11, 2, -18, 3, -20, 10, 5, 15, -4, -8, 14, -20, -13, 10, -6, 6, 15, 14, -9, -15, 16, 1, -17, -2, -25, 11, 10, -17, 2, 24, -8, -18, -31, -20, -14, -17, 13, -7, 14, 37, -19, -30, -17, -17, -15, 7, 2, 9, 3, 16, -15, 6, -13, 7, 3, -1, 15, -4, -8, 20, -13, 2, -9, 3, 21, 7, -21, -15, -17, -2, 16, 20, 7, 13, -12, -6, -9, -24, -5, -7, 5, 0, 16, -10, 16, -28, -16, -1, -27, -14, -21, -9, 15, -5, -1, 19, -23, 16, 0, 1, -20, -7, -1, -4, -19, -21, -9, 9, -2, 5, 14, 29, 23, 37, 0, -23, -20, -23, 0, 6, -5, -10, 8, -25, -16, -29, 16, -16, -22, 4, 11, -19, 1, -8, -13, -18, -9, -29, -21, 6, -13, -8, -21, 10, 17, -21, -3, 1, 20, -23, -33, 0, -1, -4, -20, -24, -12, -7, 17, -4, 34, -4, -11, -16, 6, -26, -8, 19, -16, 19, -17, -4, -23, -13, 16, 2, 0, -35, 1, 21, 1, 15, -6, 24, 25, 8, 12, 2, 7, -19, -20, 0, 7, -10, 3, 0, 1, 12, -3, 30, -26, -9, 5, -7, 7, -3, 2, 5, -16, -4, 9, -3, -5, -9, -30, -31, 13, -6, 7, -10, 19, -3, -19, -17, -33, -5, -3, -11, -21, -9, -1, -17, -10, 27, 12, -16, 6, 11, 11, -12, -12, 11, -8, 3, -6, -11, -34, -23, -7, -25, 23, 4, -1, -3, 8, 3, 19, 5, -4, 19, 12, 7, -6, -23, 18, 6, 16, -9, -18, 8, -9, -12, 10, 11, -13, -27, 9, -16, 6, -3, -1, -14, -10, -21, -19, 2, 11, -5, 14, -13, -16, -20, 1, 2, 2, -22, 17, -9, -10, -13, -23, -14, -9, -15, 2, 0, -11, 15, -14, -19, 13, -21, 24, -25, 9, -15, 5, 16, 19, 14, -7, 0, 14, -10, 4, -25, 30, 1, 1, 34, 2, 17, -16, -18, 5, -10, 7, 8, 4, -15, -5, 2, 11, 42, 28, 30, -24, -6, 19, 12, -14, -21, 9, 17, 2, -9, 0, -12, 16, -3, -13, 9, 8, 22, 7, -20, 16, 28, 3, 23, 6, 5, -7, -11, -10, -9, -4, -2, -4, -4, 5, -9, -12, 13, -27, -10, 2, 14, -3, -18, 4, -8, -4, -4, 17, 1, 5, 0, 7, 18, -22, 10, -22, -2, 17, 7, 25, 2, -16, -13, -8, 17, 14, 25, -18, 4, -15, 19, -20, 9, -19, 21, -6, 5, -4, 17, -2, -14, -20, 17, 16, -25, -11, -11, 3, -14, -12, -10, -18, 25, -2, -11, 18, 15, -17, -3, 18, -24, -3, 5, 13, -31, -16, -15, -18, -16, -2, -10, -11, 17, 15, 11, -21, 10, -10, -31, -13, -16, 11, -6, 3, 29, -22, 20, 8, 6, -14, 3, -7, 22, 14, -14, -9, -6, -7, 13, 11, -20, 11, 4, 2, -3, 3, 7, 9, -19, -9, -11, -17, -23, 18, -11, -19, -16, 7, 32, -10, -15, -19, 15, 7, -20, -6, -6, -2, -8, -12, 19, -10, -6, -16, -19, 13, -4, -13, 17, 13, 8, -20, 1, 0, -17, -18, -28, 18, -2, -14, 7, -9, 23, 20, 11, -27, -7, -4, -9, -14, 19, 9, -11, 12, -18, 7, 12, -15, -18, 4, 11, -9, 15, 13, 23, 19, -8, -12, -16, 8, -9, -10, -6, -9, 5, 19, -2, 10, -11, -14, -10, 18, -36, -8, 0, -5, 5, -30, 16, -14, 17, -13, -20, 18, 21, 9, 39, -12, 13, 19, -26, 26, 6, -4, 2, 6, -3, -12, 7, 4, 15, 15, -5, -11, 14, -10, -23, 11, 15, 18, 9, -9, -18, 25, -29, 12, 32, 26, -5, 14, -23, 3, 0, 4, 13, 23, 27, 11, -28, 7, 25, -7, 8, 0, 9, 7, 14, -18, -6, -1, 25, -10, 16, -6, 3, -21, 12, 4, -16, 25, 19, -7, -24, -1, -11, 12, -2, 7, 
-16, 7, -25, -11, 8, 20, -7, 20, 26, 4, -17, 6, 7, 0, -7, -4, 7, 0, -9, -3, -18, 23, 33, -1, -3, -10, -12, -22, -3, -23, -1, 5, 22, 3, -33, -5, 6, 22, -17, 20, 6, 18, 6, -10, 3, 10, 1, 1, -23, -22, -11, 2, -16, 12, 6, 7, 13, 13, -18, 3, -13, 9, -3, -12, 26, 19, -12, -13, 15, -13, 4, 11, 23, -22, 7, -2, -6, 26, 21, 3, 12, -18, 9, 3, 8, 5, 13, 15, -13, 8, -32, 19, -5, -19, -10, -5, 24, -7, 15, 9, 10, 1, -10, 3, -38, -27, -8, 11, 1, 7, 5, -21, 7, 9, -12, 15, -5, -8, -39, 15, 16, 1, -21, 13, -16, -6, 55, 2, -29, 15, 14, 0, -11, -12, -34, -1, 35, -17, 14, -10, -27, -19, -10, -5, 10, -21, -11, 7, 2, 25, -14, -12, 4, -7, 8, 17, 2, 8, 17, -1, 18, 22, 12, -12, -12, 12, 20, 1, 25, 19, -26, 20, -11, -10, -6, 20, 24, 16, 7, -17, -16, 9, 30, 11, 17, -12, 3, 10, 9, -8, -11, -11, 30, -9, 15, -12, 10, -16, -17, -1, -4, -1, 14, 18, 4, 7, 37, 3, 17, 7, 3, -18, 8, 25, 21, 5, 27, -10, -2, 18, 11, -18, 4, 13, 13, 0, 20, 2, -23, -10, 15, -17, 6, 18, -16, -2, -6, 21, 5, 38, 13, -4, -10, -19, -19, 3, 7, -15, 21, -23, -9, -12, -17, -11, -3, 3, -24, 12, -2, -18, 5, 17, 1, 22, -4, -12, 13, -5, -3, 2, 0, 0, 14, -14, -15, -24, -13, -10, -1, -3, 0, 13, 17, -6, -5, -22, -11, 13, 7, -30, -23, -17, -12, -18, -4, -22, -25, 17, -3, 3, -8, 30, -31, 17, 4, -19, -12, 1, -7, 1, -6, -19, 9, -10, -7, -2, 14, -6, 28, 19, -9, 4, -2, 6, 12, -15, 5, 4, 18, -7, 10, 19, -7, -21, 13, -11, -34, 12, -7, 10, 17, -20, -15, -15, 34, 16, 10, 5, 6, 21, -10, 20, -10, 15, 13, 1, 16, 28, 1, 1, -1, -5, -7, 18, 14, 4, -13, -19, -17, 11, 11, 19, 15, -11, -33, 10, -21, -20, 18, 6, 24, -4, 23, -5, 28, -8, -9, 15, -11, 10, 12, -30, -30, -6, -14, 14, -38, -9, 0, 23, -13, -24, 13, -7, -17, 25, 29, -5, -25, 19, 20, -1, 25, -21, -13, 13, 5, 5, -3, 14, -12, 7, -5, 21, -5, -20, 9, 0, -1, -6, 25, 27, -13, 16, -8, -10, -16, 8, 1, -3, 0, -9, -4, -18, -18, -7, -23, 6, -4, 0, -19, 2, 3, -5, -4, 38, -29, -14, 5, -3, -11, 9, -22, -7, 14, -21, -5, 3, -18, 11, -10, 4, -11, -18, 3, -17, -6, 20, 10, 14, -21, 5, -6, -6, -1, 7, 6, 14, -4, -15, -24, -21, -6, 20, -3, 27, 14, -9, -26, 18, -6, 26, -14, -10, -6, -6, 0, -16, -18, -12, -3, 0, 17, 7, 27, 12, 1, -5, 11, 14, 14, 8, 23, -11, -20, 15, -17, -21, 12, -17, -23, -1, -3, -28, -20, 21, -17, -17, -16, -18, -19, 11, 22, -7, -4, -32, 9, 2, 23, 13, -34, -21, 19, 0, -5, 9, -3, -3, 9, -2, -9, -10, -1, 10, 6, -15, -3, -9, -2, -17, -10, 9, -12, 22, -5, 15, 10, 0, -20, 17, 0, -3, -22, 6, -6, 4, 2, 6, 10, 0, -31, 20, -19, 4, 12, 4, 3, 4, -17, 4, 10, -17, 22, -4, -20, -19, 8, 15, 20, -7, -4, -17, 19, 32, -24, -12, 0, -16, -33, 6, -1, 3, 7, -9, -25, -19, 23, 7, -5, 18, -36, 4, 0, -16, 11, 4, -14, -22, -25, -29, 5, -3, -9, -23, -15, 22, 17, -9, -33, 9, -14, 7, -3, 0, 3, 6, 17, -13, 25, 16, 17, 5, -2, -7, -16, 11, 10, 3, 7, 43, -8, -12, -27, 5, -18, 0, 7, -15, -10, 20, -10, 18, 9, 0, 11, -16, -34, 30, 0, 13, 10, -9, -5, 2, -24, -12, 15, -3, -4, 7, -5, -23, 16, 18, -12, -14, 20, -7, -6, -1, -17, 3, 12, -10, -20, 24, 2, -23, -12, -21, -2, 5, -22, 15, -1, -27, 0, 5, 4, 12, -3, -12, -7, -8, 8, 4, -11, -15, 0, 5, -12, 1, 6, -3, 6, 14, -17, 14, 4, -1, 19, 10, 12, 15, -24, -15, -19, 15, -35, 2, -21, 3, -15, -16, 13, 20, -27, 1, -9, 1, -8, 14, 4, -8, -34, -15, 16, 6, 2, 20, 4, -20, 24, -29, 5, 3, 7, 9, 11, -10, -2, 7, -21, -20, 10, -20, -7, 22, 10, 25, -6, 1, 9, -17, 9, -25, -25, 10, 13, -17, 15, 12, 12, 10, -5, -28, -23, -10, -16, 8, 3, 41, -16, -13, -25, -13, 13, -17, -9, 12, 13, 19, 2, -13, -17, -17, 3, -12, -5, -35, -10, -19, 3, 1, -7, 29, 12, -9, 8, -1, -11, 27, -5, 12, -3, 15, 23, 5, 13, 5, 12, 7, -26, -15, -9, -17, 6, -28, -4, -5, -2, 
19, -13, 20, -24, 0, 13, -17, -20, 0, 20, 4, -17, -10, -23, -14, 3, 28, -2, 22, -24, 5, -11, -15, -10, -13, 2, -5, -20, -7, -7, 8, 12, 10, 7, 10, -20, 8, -8, -21, 14, -18, -19, 5, -25, 24, -13, -26, 0, 16, -15, -2, -9, 18, -18, 24, 10, -14, -7, 31, 12, 18, -26, 6, -12, 9, -3, -9, 16, 20, 1, -21, -4, -3, 12, 25, -16, 18, -24, -23, 8, -10, -43, 25, -18, -16, 12, 10, -24, 28, -25, 18, -11, -8, 0, -12, 1, 13, -34, -3, 6, -1, -9, 26, 0, -36, -10, 7, -8, 27, -9, 12, 7, 11, -12, 0, -11, -19, 8, 3, -16, -10, -6, 14, 10, 19, -15, 17, -17, -2, -1, -17, -1, -5, -22, -1, -1, -2, 26, 2, 6, -11, 1, -8, 10, -24, 9, 3, -24, 25, 2, -17, 15, 5, 17, -2, -8, 4, -11, 4, 30, -11, -20, 9, -15, 13, -4, -21, 10, 10, -12, 19, -9, 8, -9, -13, -21, 14, 0, 10, 14, -10, -2, -13, -1, -9, 9, 0, -13, 33, 8, 16, 18, -4, 0, 13, 6, 23, -19, 1, 8, -19, -9, 8, 1, -9, 14, -5, -28, 3, -17, 8, 6, -9, 29, -23, -3, 10, -28, -18, -7, 0, -6, 8, -24, 0, -1, 26, 16, -4, -9, -17, -9, -25, -2, -24, 1, 13, -22, 3, 23, -15, 15, -11, -20, 2, -15, 18, -15, -19, -13, 15, -12, -10, 14, -5, 16, 0, 10, -22, -14, -21, 10, 0, -7, -9, -7, 8, -7, -4, -3, 8, -4, -1, 16, -5, -11, -11, 13, -12, 11, -14, 2, -18, -16, 16, -2, 1, -19, 11, -30, -1, -25, 0, -24, 12, -26, -2, -19, 21, 1, 29, -14, -25, -18, -2, -25, -17, 24, -29, -4, -28, 4, 11, -14, -10, 16, -11, 0, 4, 7, 5, 5, -13, 5, -17, 1, -8, -33, 23, -31, -7, -16, 28, -13, -10, 8, -2, -29, 25, -1, -26, 2, -1, 2, 16, 8, -30, -23, -6, -15, 27, -23, -9, -11, -16, 9, -3, -6, 6, 26, 7, 6, -8, 16, -9, -2, 8, -2, 15, -38, -21, -15, 0, 8, -5, 34, 12, -1, 2, -6, 24, -22, 16, -12, -16, -4, -18, 9, -11, -13, -7, 1, -16, 24, 4, 2, 0, -2, 4, 8, 12, -27, 16, 6, 2, 6, -9, -5, -1, -15, -17, -19, -9, -22, 9, -2, 1, 21, 30, -4, 10, 16, 1, -24, 0, -7, -9, 5, 13, -1, 9, -32, 18, 13, 7, -5, 14, -20, -17, -15, 28, 10, 7, -18, -8, 11, 2, -6, -14, -5, -10, 7, 38, 1, 22, -42, -27, 2, 17, -19, 40, -16, -5, 7, -8, 4, 14, -14, -22, 3, 0, 23, 4, 14, 16, -6, 8, 16, -12, -20, 12, -18, 21, -6, -17, -13, 2, -10, -8, -24, 21, 12, 15, 14, -10, 6, -8, 7, 6, 2, -13, 0, -15, -10, -8, -13, 0, -25, 3, 14, 11, 10, 15, -8, -22, -5, 21, -9, 17, -2, 15, 13, 24, -25, -21, 14, -1, 3, -11, -10, 6, -1, -9, 4, 8, 11, -3, -15, -12, -12, -10, -10, 20, -29, -11, 19, -7, 13, 11, 7, 15, 18, -27, -4, -10, 4, 6, -10, -13, -29, 11, -20, 5, 10, 18, 14, -5, -10, -4, -3, 4, -7, -12, -2, 2, 12, 16, 24, 8, 12, -3, -11, -21, -4, -2, 8, -40, 23, 16, -17, 14, -53, -14, -13, -23, 5, 2, 6, -21, -4, -34, -20, 11, 9, -4, 12, -14, -12, -11, -8, 1, -16, 19, 4, -17, 9, 8, -15, -11, 4, 15, -5, 17, -36, -12, 18, 27, 3, 0, -22, 4, -17, 14, -19, 24, -19, -34, 12, -6, 4, -17, -21, 27, 0, 38, -5, 33, 8, -6, -25, 11, -11, 20, 14, 8, 4, 17, -8, 3, 26, 15, -4, 9, -13, -16, -11, -19, -7, 22, 9, 9, -6, -13, 3, -8, -8, 17, -6, -3, -2, 22, -30, -14, 8, 20, 18, -10, 2, 12, -30, -2, -21, 14, -9, 19, -29, 16, -5, 0, 13, 50, 19, 2, 7, 14, -4, 26, 6, 24, 1, 15, -10, 2, -10, -3, 17, 3, 16, 8, -7, -11, -1, 21, -8, -9, -13, 4, -7, -17, -29, 8, 4, -6, -26, -1, 2, 5, 3, 5, 9, -10, -9, 5, -8, -20, -21, -9, -18, -11, 14, -14, 8, -32, 21, -12, 14, 2, 15, -9, -5, 37, 10, 17, -17, -27, -8, 23, 5, 3, -12, 15, -7, -4, -2, -6, 13, 20, -7, 0, -5, 4, 12, 28, -21, -7, -10, 14, -12, -12, -6, -13, 0, 51, -27, 7, -4, -17, 5, -8, 14, -25, -13, 12, -18, -11, 7, 13, 3, 19, -14, 8, -20, 4, -13, 10, -4, 13, -30, -9, -15, -20, -1, -5, 16, 0, -17, 1, -15, -20, -29, 10, 23, 10, 25, 4, 7, 7, -9, 7, 26, 11, -13, 1, 9, 1, -16, -8, 4, 50, -1, 2, -9, -5, 11, 4, -22, 8, -16, 20, 4, -5, -9, 8, 7, 25, -36, -4, 2, 25, 11, 19, 
-9, 7, 18, 11, -22, -12, -22, 2, -15, 22, 7, 4, 4, -5, -12, -4, 36, -10, -24, -7, -20, 13, -18, -1, 4, 14, 4, -8, -12, 6, -19, -43, 43, 8, 12, 3, 14, 7, -9, -12, 13, 18, -27, -26, 12, 10, -18, 8, -25, 29, -1, -17, 2, -32, -7, -13, 4, 48, -19, -6, -2, 19, 1, 24, -16, -11, 8, 5, 10, -21, 9, -13, -19, -15, -5, -7, -22, -1, -17, -27, 13, -10, 12, 12, 0, -23, 8, -4, -17, 6, 21, -1, -21, -7, -6, 0, -26, 10, -5, -6, -15, -6, -20, -12, 17, 16, 1, -17, 8, 16, 7, 2, 15, 9, -19, 14, 9, 3, -14, 0, -24, -3, 11, 17, 13, 18, -19, 10, 7, 4, -19, -18, 20, 13, -1, -9, -2, -11, 2, 14, 14, -15, 0, -11, 23, 3, -12, 14, 7, -19, 2, -25, 10, -9, 9, 10, 17, 17, 17, 13, -3, -2, -18, 11, 7, -2, 7, -16, -17, -12, -22, 16, -5, 34, 13, 14, 8, -30, 15, 2, 12, 2, 9, -12, -17, 12, -30, 1, -21, 1, 3, 27, -4, -14, -14, -24, 3, -6, -11, 12, 2, 3, 15, -18, -14, 4, -18, 16, -8, 13, 20, -20, 16, -25, -14, 5, 8, 15, 17, 4, -18, -15, 3, -32, 58, 19, -15, 6, -13, 12, -2, -12, 2, 8, -22, 21, -7, 11, -2, 10, -9, 17, -2, -5, -18, 10, -16, -2, -2, -23, 8, -25, -13, 6, -22, 18, -2, 23, 12, 32, -25, -13, 6, -4, 18, -2, -10, 10, -12, -21, -13, 12, 0, 18, 30, -9, 19, -13, -8, 4, 2, -1, -4, -1, 9, -45, 11, 2, -12, -2, 16, -7, 5, -13, -15, 6, 2, 33, -14, -8, 13, 12, 15, -2, -29, 10, 16, -9, 3, -14, -27, 6, 18, -1, -10, -17, -5, -36, -11, -8, -17, 14, 9, -23, -18, -7, -23, 32, -13, -5, -31, -31, 6, -33, -19, 30, -11, 27, 7, 27, -23, -13, -10, 15, 0, 19, -17, -41, -13, -19, -14, 16, 12, 16, 17, -20, 1, -38, 3, -15, 3, 14, 6, -24, -3, -8, -23, 12, -3, -8, 8, -3, 13, -1, -26, 9, -1, 24, 20, 13, -17, 10, 7, 0, -11, 19, 24, -41, -11, -15, 11, -1, 2, 3, 2, 6, -24, 25, -26, 24, -15, -9, 7, -4, -15, -8, -21, 12, -7, 19, 0, -7, 9, -23, -4, 14, 17, 12, 14, 20, -11, 4, 11, 18, -25, 22, 15, 19, -18, 9, -9, -37, -23, 12, -23, 13, 10, -33, -23, -21, 18, 30, 21, -16, -12, -7, 13, 24, -12, -12, 1, 35, 17, -4, -7, -30, 22, -29, -30, 22, 1, -1, 5, -17, 27, -12, 9, 18, 11, 2, -6, -10, 26, 20, 11, 4, -1, -22, -25, 17, 6, 4, -6, 11, -21, 22, -11, -19, -34, 12, -10, 35, 21, -17, -7, -24, -9, -1, 30, -7, -7, 13, 6, 13, 24, 18, -8, 25, 20, -17, -8, 0, 0, 31, 0, 35, 10, -21, 13, -28, 9, 3, 9, -39, 9, -27, 3, -27, -53, -33, -26, -36, -12, 41, 8, 3, 2, -1, 9, 10, -13, 14, 4, 13, -29, -26, 3, 23, 6, -7, 14, -12, -9, 2, -9, 2, 17, 14, -24, 7, 24, -7, 13, 10, 9, 10, -8, -12, 8, -13, -19, 18, -3, -24, 12, 26, -25, -5, 11, 39, -11, 21, -13, -35, -6, -10, 12, -8, -23, 15, 6, -12, 12, -10, -6, -4, -2, 16, -23, -7, -1, -25, 27, -10, 0, -21, -11, -13, -2, 15, 22, 21, -25, 25, -14, -4, -15, -12, 16, 4, -4, 22, -21, 4, -16, -17, 24, -17, 4, 17, -21, 1, 29, -49, -9, -5, 6, 15, 0, -10, -29, -7, -17, 0, -1, -13, 2, 9, -8, 18, -19, -9, 12, 2, 2, -38, -1, 25, 17, 20, -7, 15, 2, -22, -7, -1, 4, 0, -27, 24, 8, 1, -17, -15, 17, 16, 30, 17, -19, -21, 8, -9, 13, 1, 7, 3, 17, -11, -34, 7, -23, -1, 11, -7, 17, -26, -16, -26, -57, 2, -9, 8, -18, -2, 1, -29, -17, 17, 5, -6, -17, -5, 16, 5, -1, -17, 6, -8, -17, -4, 26, 22, -17, 7, -19, 4, -1, -8, 7, 16, 3, 28, 4, -17, -3, -5, 28, -6, -5, 7, 18, 10, -9, -26, -26, 8, -4, -6, 2, 20, -14, -9, 11, 2, -15, 26, -14, -9, 4, -45, 14, 33, -18, -19, 7, 19, -10, 7, 6, -28, -24, -18, -16, 22, -23, 21, 6, -14, 6, -16, -3, -13, 16, 11, -13, 16, 4, -9, -16, -19, 17, 16, -2, -12, -24, -37, 16, -29, -12, 40, -24, 36, 12, 20, 12, -18, -18, -10, -25, -9, 5, -23, 1, 4, -18, -4, 9, -21, 17, 6, 6, -19, 7, 16, 1, 0, 2, -14, -17, -6, 13, 2, 34, -2, 18, 13, 17, -29, 0, -20, -19, -8, -26, -19, 1, -7, 8, -24, 4, -3, -13, 5, -19, -8, 0, 6, 6, -23, -12, -6, 1, 15, 6, -4, 
27, -27, -14, -22, 3, -16, 15, 23, -4, 5, -28, -9, -12, -18, -11, -12, -24, 23, 21, -15, -24, 11, -17, -12, -27, -18, -12, -21, 18, -4, -7, 41, -13, 10, -1, -15, -4, 9, 16, 0, 18, -26, 0, 23, 17, 22, -12, 1, -20, 15, -28, -15, 6, -10, 11, -18, -6, 22, -2, 4, -14, 16, 6, -6, -19, -5, -18, 3, 14, 11, -23, -13, -11, -2, -7, 10, -7, 7, 5, 11, 18, -21, 9, 7, -10, -23, -3, 6, -3, 22, -2, 10, -19, 0, -22, -1, 12, 4, -13, -4, 4, -5, 7, 19, -6, 40, 3, -6, -1, 16, -18, -9, 4, -11, -6, -5, -11, -13, -10, 1, 10, 17, 12, -4, -11, -17, -5, -16, -8, 0, 30, -21, -13, -10, -10, 0, 8, -7, 8, 7, 7, 10, -19, -21, 43, 19, 16, -24, -8, -2, -15, 19, -21, -6, -22, -34, -10, 10, -9, 7, -6, 1, -2, 12, -2, -1, 16, -6, 6, 26, 21, -21, 10, -4, -23, -21, -16, 30, 4, -5, 13, -3, -11, -45, 23, 0, -6, -10, 10, -17, 22, -9, -22, 4, 3, -14, 1, -1, -18, 1, -5, 17, 21, -39, -2, 20, -3, 31, -18, -34, 6, 27, -19, -2, -10, -18, 25, -10, 13, 3, -16, -15, 5, -23, 9, -1, 17, -20, 5, -31, 8, 37, -15, -2, 17, -3, 8, 19, -4, 45, 14, -29, 10, -44, -23, -15, 10, 7, -9, -11, 21, 14, 14, -15, 12, 2, 0, 22, -2, 10, 9, 8, 16, -17, 13, -7, -7, -14, 12, -5, -9, -26, 8, -10, -10, 10, 6, -17, 15, 9, 12, 27, 1, -29, -20, -44, 17, 17, -28, 29, -10, -15, 20, 9, 16, 39, -16, 17, 9, 10, -11, -3, -9, -5, 13, -2, -2, 2, -15, 32, 13, -16, -23, -43, 6, -4, 5, -29, 7, -10, -2, 34, -24, -19, -7, 16, -12, -6, 9, 16, -13, -31, 11, 19, 0, 23, 24, 5, 9, 16, 15, 9, 6, 4, 8, 10, -20, -5, 23, -16, -26, -9, 11, 11, 10, -12, 8, -26, -1, 6, 8, 28, 24, -11, -3, -11, -9, 15, 18, -9, -20, -20, 14, 16, 11, 13, 22, -33, -12, 8, 6, 11, 17, 1, -15, -33, -33, -13, -6, -11, -2, -15, 29, -6, 5, 2, 6, 22, 11, 18, 18, -5, -11, -2, -23, -18, 8, 17, -22, 4, -7, -28, -23, -38, -23, 14, 25, 30, -15, 2, -12, -11, -1, 15, 1, 1, 25, 6, -21, -26, -5, -3, 15, -12, 24, -24, 7, -24, -16, -5, 32, -14, 14, 8, -12, 15, -3, -4, 19, -8, -7, -18, 6, -2, 12, -12, -17, 21, 18, -26, 8, -2, -12, 1, -8, -10, 16, -12, -7, 3, -3, 16, 11, 3, 23, 13, -22, 4, -25, -15, 26, 22, 5, -14, 6, -11, 6, -1, -11, 18, -19, 4, -17, 2, 6, -18, 2, 10, 19, 12, 9, -2, 3, 2, 1, 9, 15, -27, 0, -25, -15, 3, 4, 26, 23, -30, 10, -20, -17, 17, 1, 19, 19, -38, -12, 1, -1, 8, 20, 6, -7, -15, 15, 1, -23, 29, 12, 2, 30, 25, -25, 5, -26, -30, 7, -5, -9, 21, -14, -17, 7, 7, -12, 0, -3, -13, -8, 32, -3, 35, -23, -19, 23, 4, 2, -4, 12, -7, 9, 19, -42, -8, -22, 11, 15, 14, 8, -32, 2, 8, -12, -3, -6, 12, -43, -10, 23, 9, -24, -21, -14, -26, 52, 27, -3, -6, 27, -10, -12, -13, -9, 8, 8, -12, 13, -1, 24, 2, -55, 7, -2, 4, 11, -13, 3, -12, -11, -23, 41, 10, -18, 4, -18, -20, 22, -17, -4, 6, -13, -19, -23, -44, 38, -11, -5, 20, -6, -16, -14, 7, 10, -13, 12, 11, -7, -18, -21, 3, -1, 4, 7, 12, -5, 3, 15, 2, -15, 0, -2, -19, 12, -36, -3, -14, -39, -4, 14, -3, -4, -2, -3, 15, 11, -11, 5, -2, -27, -17, -18, 9, -5, 33, 1, 16, -13, 14, -12, -10, 2, -19, 1, -13, -7, 24, 22, 2, 2, -35, 20, -2, 0, 33, 14, -17, 19, -3, 56, -5, -20, -5, -17, -19, 16, -27, -6, -23, -11, -28, 3, 10, 8, 18, 1, -17, 15, 6, 10, 0, -10, 13, -12, 22, 13, 17, -22, 36, -40, -17, -3, -2, 15, -2, -11, -46, 16, 9, -12, 6, 13, -7, -4, -36, -12, 25, -23, 9, -3, -11, -4, 42, -7, -11, -21, -17, 4, -13, -14, 36, -32, 47, -13, 15, 3, -14, -24, -8, -22, -11, 28, -5, -26, 4, -10, -13, 4, 24, -1, 22, 5, -6, 9, -20, -10, -21, 15, 7, -1, -24, 1, -25, 13, 52, -62, 1, -19, 32, 11, 15, -7, -2, 34, -9, 0, 0, -4, 49, -54, -25, 22, -6, 3, -3, 11, 46, -43, -24, -13, -2, -25, 9, -8, -16, -7, 2, -19, -35, 5, -16, -8, 68, -52, 27, 4, 37, -13, 19, -4, -24, 2, -16, -19, 1, -19, -4, 19, 8, 22, 22, -35, 
-19, 8, -2, -9, 25, -21, 43, -38, 15, 11, -10, 8, -8, -6, 63, -61, 15, 9, 10, -11, 4, -8, -4, -7, 17, 4, 9, 22, 24, -14, -12, 10, 15, -11, -16, -14, -25, 16, 43, -8, 5, 4, 21, 15, -4, -12, -12, 14, 6, -8, -19, -7, 13, -11, 43, 5, 18, 17, -5, -2, 13, 10, 11, -28, -14, 2, 18, -23, -7, -8, -27, -7, -2, -13, 9, 14, -3, 27, 3, 13, -8, -5, 4, 13, -66, 53, 11, 15, 16, 0, -34, -5, -11, 2, -11, 24, -26, -4, -31, -11, 3, 3, 8, 33, -48, 10, 13, -20, 48, -3, -15, 2, 7, 9, 14, -17, 1, -10, -14, -3, -27, 4, -58, 0, 4, -3, 39, -3, 24, 13, 20, -19, -21, -9, 18, -4, 29, -8, 1, -5, -12, -6, 33, 9, -9, -21, 9, 1, 3, 32, -10, -13, -16, 17, 14, 14, 50, -5, -47, 13, -15, 5, 29, 9, -7, 21, 9, 11, -1, 10, -24, 3, 24, 3, -15, -21, 1, 4, 3, 9, -5, 17, 13, 16, 2, 15, -12, 9, 29, 6, -4, 17, -13, 12, -39, -17, 22, -2, 37, -24, 10, -20, 6, 5, -25, 13, 14, -32, 19, 12, -3, 18, -24, -3, 7, 6, 7, -3, -6, -7, -6, -10, 3, 25, 15, -5, 2, 20, 33, -5, 5, -3, -3, -1, -7, -5, 50, 5, 7, 0, 8, -4, 2, 0, -52, 10, -14, -10, -14, 2, -13, 15, 8, -3, 1, -26, -34, 7, 8, 1, 34, -13, 0, -1, -16, 0, -17, -4, -6, -2, -33, 0, -10, 16, 23, 4, 42, 14, -4, -18, 23, -21, 8, 15, 13, -10, 23, 2, -15, 20, -30, -3, 22, -41, 20, -6, -13, 13, -5, -6, 34, -14, -3, 23, 18, -29, -12, -18, -2, 13, -6, -11, 12, -3, -1, 19, 7, 13, 25, -8, -12, 16, 15, 5, -13, 4, 7, -2, 14, -6, -34, -15, -28, 27, -22, 23, -19, 1, -17, -17, 17, 7, 15, -6, -22, 19, -1, 7, 31, -33, 8, 5, -3, -22, -17, 0, -14, 8, -3, 21, 21, -23, -32, 14, 0, 11, 12, 10, 0, -17, 14, -6, 18, 8, 10, 9, -4, 7, 17, 13, 16, 3, 10, -21, -30, 0, 13, 8, 5, 8, -18, 2, -10, 13, 18, 14, -18, -2, -13, -15, -10, -3, -32, 12, -6, 2, 9, 16, 7, 2, -30, -6, 1, 9, 6, 3, 13, -14, 6, -5, -13, 13, 14, -4, -49, 5, -14, -2, 25, -35, -12, 11, 11, -23, 3, -13, 30, -13, -25, 6, -7, -21, 31, -24, -5, 5, -5, 15, -15, 3, 16, -2, 31, -31, -19, -5, -7, -2, 9, 10, -16, 14, 12, 7, 11, 21, -19, 17, -29, 4, -15, -15, -15, -12, 17, -6, 22, -18, -3, 8, 14, -14, -2, -6, 4, 5, -9, -21, 18, 9, -14, 20, -3, -23, -5, -4, -8, 5, 8, 33, -49, -1, -27, 0, -7, 5, 2, 5, 5, -4, -19, -4, -7, -14, 10, -10, 11, -7, 13, 13, -9, 8, 13, 19, 12, -15, -33, 13, -10, 7, 15, -12, -9, 2, -7, -17, -21, 2, -8, 23, -6, -20, 15, -28, -10, 13, 18, -4, -3, 11, 15, -10, 1, -9, -11, -14, -14, 16, 2, 10, -22, -1, 5, -15, -9, -9, -7, -14, -19, -7, 4, -6, -29, -16, -7, 7, -11, 2, 4, -5, 15, -21, -28, -25, -4, -2, 0, 3, 13, -1, 19, -16, -8, -8, -17, 3, 1, -25, 3, -26, 12, -6, -17, 19, -23, -1, 20, -11, -23, -11, -21, -20, 4, -16, -9, -17, -24, 11, 17, -1, 5, 10, 13, -4, 5, 8, -23, 11, 1, -18, 0, -19, 15, 15, -4, 4, 22, 6, 7, -3, -4, -16, -10, -6, -13, -26, 2, -2, -16, 24, 10, -24, -12, -16, -14, 19, -14, -17, 12, -19, -3, -30, -2, 1, -18, -12, -10, -7, 5, -3, -12, -22, -6, 7, 8, -14, -10, -13, -4, 14, 7, -24, 16, -19, -1, -8, 1, 7, 2, 26, 11, -2, 5, 6, -3, -20, 11, 0, -15, -8, -8, -1, -2, -15, -10, 12, 6, 3, 9, -15, 7, 7, -4, -26, 3, -18, -4, -10, 22, -10, -18, -13, -1, -14, -27, 13, -14, -11, 13, -12, -21, -30, 11, -18, 24, 16, -4, -30, 2, -16, -12, 17, 24, -17, -15, -12, -1, 22, 11, -5, 12, -7, -10, -19, -9, 9, 10, -17, 21, -19, 11, 14, -9, 28, -30, -25, -2, 10, 13, 5, -6, -1, -8, 5, 21, 12, -9, -14, -23, -23, -3, 13, 12, 10, -9, -9, 15, -3, 19, 11, -28, -2, -10, 1, 7, -6, 5, -3, -4, 10, 15, -3, 1, -13, 15, 24, 7, -5, 9, -18, -1, 19, 6, -22, -20, -17, -7, 28, 15, -32, 1, -34, 18, -11, -7, -20, -4, 19, -15, -5, 15, 10, -19, 11, -10, -9, -11, 1, 19, -14, -8, -1, 13, -24, 18, -8, -7, 19, -19, -24, -2, 7, 27, -7, -20, -9, -16, -4, 2, -2, 18, -15, -13, -8, -2, -14, -15, 
-11, 11, -20, 3, 8, -5, 1, 8, 1, 3, -16, -17, -1, -15, -15, 1, 2, -2, -5, -16, 1, -18, 7, 1, -11, 14, 10, 5, -10, -11, 13, -16, -18, 15, 10, 10, 30, -8, 0, -19, 12, -30, 30, -10, 12, 7, -5, 5, -25, 25, 14, 9, -15, 10, -12, 22, -17, -11, 2, -7, 4, -1, -19, 13, 0, -15, 35, 15, 10, 1, 13, -21, -2, -4, -16, 17, 7, 10, 5, -13, -8, 15, -6, -6, -14, -3, 5, -13, -30, 30, -8, -10, 14, -5, -9, 11, -24, -19, 3, -19, 8, -17, 18, -9, -20, -4, 21, -5, -11, -5, -1, -9, -6, 12, -6, 2, 11, 17, -20, -14, -15, 1, 27, 0, 15, -1, 13, 11, -24, -30, 13, -33, 8, 6, -3, 1, 24, -5, -12, -15, -15, -24, 2, 12, -22, 2, 15, 24, 13, -4, 2, -6, -31, -39, 22, 5, -15, -20, -2, -9, -14, -26, 15, 6, 8, 11, 4, 12, 17, -28, -10, -22, -18, -1, 7, -22, -22, -9, 9, -50, -15, -20, -6, -23, -21, -15, -16, 15, -22, 12, -32, -14, 16, -26, 6, -4, 1, -10, 8, -15, -1, -32, 0, 10, -12, 3, -3, 15, 20, -12, 27, 11, 10, -9, -1, -21, -11, -35, 8, -12, 13, -4, 16, -15, 6, 1, 11, -16, -22, 14, -19, -17, 7, -35, 1, -10, -3, -24, 14, 10, 4, -22, 16, 10, 35, -11, -1, -14, -5, 15, 14, -18, -29, -19, 11, 19, -32, 6, -13, -8, 20, 17, -17, 7, -20, 6, 4, 17, 14, 4, 4, -5, -6, -19, -12, 18, 19, -11, 6, 14, -9, 17, -14, 2, -11, 7, 27, 17, -9, 13, 27, -12, 8, -3, -8, -12, -18, -39, -19, -7, 26, -17, -6, 5, 5, -6, 22, -18, 7, -11, 23, -2, -6, -10, -11, -9, -6, -1, 2, -6, -7, 11, 12, -11, -11, 10, -8, -14, -12, -2, 2, -25, 4, -7, -5, 7, 2, -15, 8, -39, -6, 21, -26, 4, -1, -3, 14, -20, 3, -22, 6, 13, -4, -19, -8, -9, -25, -4, 6, -13, 14, 16, -27, 3, 15, 0, 5, -23, 1, 17, 3, -17, 1, 21, -11, 9, -8, -2, 5, -21, -8, 7, -26, 10, -10, -21, -20, -30, -14, 19, 3, 8, 11, -25, -23, -1, 26, -14, -1, -11, -11, -25, -21, -32, 6, -20, -1, -11, -17, -22, 9, 2, -9, -14, -23, 16, 5, -5, 19, -16, 20, -4, 0, -23, -19, -8, -9, -21, 14, -24, -2, 10, 22, 3, -15, -18, 21, 0, -4, 9, 10, -13, -14, 2, 2, -19, -10, -3, -9, 0, 19, -27, -8, 0, 14, 3, -16, 22, 11, -9, 16, 9, -7, -12, 8, 6, 7, -9, 17, 10, 24, -16, 0, 3, 0, -23, 12, -15, 10, 18, -5, -18, 4, 1, -20, 9, -11, 8, -20, 7, -12, -7, -14, 12, 5, -15, 18, 6, -22, 0, -14, 11, -19, 7, 18, -11, 12, 7, -23, 10, -2, -22, -15, 69, 15, -15, 31, -16, 17, 3, 7, 8, 4, -8, -2, -23, 7, 11, 23, 4, -4, 11, -8, 6, -24, 24, 8, 28, 4, -30, -12, 1, -10, 3, -16, -10, -20, -6, -2, -14, 4, -4, 21, -6, -30, -7, -8, 17, -12, -2, -22, 22, -17, -6, 4, 12, -13, -5, 12, 14, -6, 9, 0, -22, -12, 6, 19, -17, 21, 32, -14, 6, 23, -15, 12, 18, -27, 14, -7, -14, -10, -15, 2, 6, -16, -7, -23, -22, -25, 14, -13, -2, 8, 13, -17, 11, -11, -5, 33, -3, -21, 1, -21, -19, -9, 10, 16, 7, 10, 14, -20, 14, -21, 12, 14, 14, 17, 21, 8, 15, 15, -8, -16, -6, 19, -11, -5, 8, -25, -15, -19, 17, 9, -7, 14, 14, -14, -18, -2, 16, -25, -12, -17, 0, 1, -3, 12, -2, -5, 42, -28, 18, -17, 8, 5, 26, -36, 0, -12, 5, 0, -20, 23, 20, 12, 0, 17, -10, -9, 17, -1, 7, 2, 12, 16, 14, -23, -22, -21, -14, -11, 12, -29, -17, -8, -7, -13, -14, 6, -15, 2, -22, 29, -19, -24, 36, 4, -25, -17, -14, -16, 18, 1, 7, -20, 6, 16, 4, 14, 14, 3, -17, -15, -11, -20, -16, 0, -21, -7, 26, 19, -11, 7, -21, -9, -13, 0, -25, -15, 27, -14, 11, -18, -15, 15, 6, 8, 2, -24, 12, -18, -10, 15, -26, -19, 23, 3, -12, 3, 0, -8, 7, -9, -9, 12, -20, -21, -6, -4, -1, -14, 8, 12, -9, 18, -8, -8, -33, -4, -16, -12, 16, 0, -22, 14, -37, 8, -14, 11, -18, -30, -23, -11, 5, 12, -28, -6, -14, 4, 6, 18, 12, -10, -8, -18, 16, -7, 15, 14, 11, 15, -26, -43, -4, -15, -17, 3, 1, -9, -21, 14, 19, 30, -17, 16, -12, -26, -20, 8, -16, 11, -10, -3, -16, 17, -9, -19, -18, 11, -5, 17, -22, -15, -8, -17, -22, -1, -1, 4, 13, -19, -21, -12, 11, -8, 
-19, 11, -6, -19, 5, 9, 9, 6, -1, 36, -11, 3, 21, 3, 13, 5, 2, -7, -10, 3, -7, 11, 15, -20, -24, -5, 20, 4, -11, 7, -21, -8, 7, -22, 12, 21, 23, 13, -22, -28, 4, 11, -13, 15, -16, -2, -8, -23, -9, 3, 5, -6, 13, 19, -20, -13, -11, 14, -15, 1, -6, 32, 0, -15, -14, 8, 47, 1, 6, 12, -17, 7, 0, 10, 19, 10, 12, -17, -4, 9, -16, -10, 27, -5, 2, 10, 11, -3, 5, 23, 15, -17, -26, 3, 9, 8, -19, -21, -3, 28, -4, 3, -17, 9, -2, -12, -14, 17, -22, 0, -16, -8, 9, 1, -33, -16, 15, -15, 12, 5, 13, 4, 7, -7, 18, 38, -12, 13, -19, -6, -11, -13, 1, 1, 10, -26, -6, -8, -6, -7, -32, 22, 12, 9, -23, 1, -7, -22, 9, -22, -23, -11, -6, 21, 3, -25, -12, 12, -24, 10, -25, 9, 2, 12, 20, 1, -18, -27, 6, -28, -31, 5, 6, 20, -4, -13, 14, -42, -18, -12, -11, -18, -4, 16, 12, 10, 7, 17, -11, 8, -1, 2, 10, 5, -26, 10, -40, 3, -18, -2, 15, 30, 7, 23, 17, 9, 8, 6, 19, 17, 7, 24, 9, 6, 5, -30, 0, -7, -27, 4, -20, 7, 18, 17, -1, -12, 0, 26, 1, 30, 2, 17, -18, -10, 3, -15, -8, 21, -18, 7, 7, -2, 3, 4, -8, 15, -19, 36, -7, 8, 17, -5, 0, 17, -5, 20, -21, 21, -11, 32, -8, -16, 13, -23, -13, 9, -49, -14, -18, -7, -3, -12, -24, 9, -13, -12, 3, 15, -6, -7, 19, 18, 3, -11, 12, -3, 12, -4, -23, -1, 11, 16, 9, 22, -13, 9, -16, 16, 7, -4, -12, -2, -13, -12, -6, -9, 19, 19, -17, -12, 17, -21, -20, -18, -28, -6, -48, 18, -11, -4, -14, 1, 19, 9, -26, 9, -7, -18, 18, -35, 12, 12, 3, -2, 15, 13, -8, 6, 6, -5, 12, 18, -22, -19, 15, -14, -13, -17, -24, -15, -9, 3, -21, -33, -12, 2, 1, 5, -21, 7, 4, 6, 29, 16, 15, 16, 4, -37, -3, -22, -27, -15, 0, 28, 0, -29, 0, -1, 2, -4, -8, -17, 2, 2, -14, -9, 11, 22, 5, 9, 7, -24, -11, -8, 35, 22, -3, -3, -15, 11, -12, 7, 38, -5, 16, -9, -22, -26, 9, 12, 16, 23, -23, 8, -21, -19, 6, -13, 12, -17, -31, 9, -2, -10, -4, 4, 8, -6, -16, -4, -11, -16, 13, 9, -16, -11, -2, -5, -24, 13, -19, 0, 14, -9, 29, -5, 8, -17, 23, 4, 14, -2, 7, 39, -12, -24, 4, 14, -5, 18, 1, -12, -5, -15, -4, -23, 21, 18, -8, -15, -9, 5, -17, -8, 23, 12, 2, 1, -21, -18, -23, 3, -19, -17, -7, 15, 6, 12, 11, 0, 6, -7, -9, -29, 13, 3, 9, -26, 16, -24, 4, 12, -5, -11, -21, -23, 11, -14, -1, 12, 16, -31, -11, -25, -29, -12, -8, -35, -10, -8, 7, -27, -9, 6, 20, -12, -19, 7, 11, -29, -3, -4, -10, -29, -20, -28, 10, 5, 18, 1, 5, -5, -19, -23, 2, -15, -5, 22, -9, 6, -11, 10, 4, 6, -11, 0, -15, 11, -7, -6, -20, 14, -21, -9, -4, 16, -4, -18, -14, -16, -22, 9, -10, 7, -12, 5, -13, 2, -11, -7, -18, 12, -12, 10, -7, -2, 15, -2, 4, 5, -3, 8, -12, -14, 9, -6, -13, -1, -11, 4, 17, 10, 15, -11, 4, -3, -18, -17, 0, -12, 10, -1, 6, 0, 2, 15, -20, 7, 5, -21, 11, -8, 16, -25, -23, 9, -7, -5, 8, 16, -11, 18, -20, 2, 41, 17, 7, 3, -17, -14, 14, 12, 9, 15, -4, -13, -18, -4, -23, -2, 6, 21, -22, -28, 16, 7, 0, -11, -14, 0, 20, -4, 15, 3, 11, -17, -3, -12, 0, 8, 5, -2, -2, -23, 29, 8, 7, -19, -6, -9, -17, -19, -22, -5, 10, -19, 13, 13, -4, -18, 16, -23, 7, -13, 10, 23, -12, 0, -5, -3, 24, -3, -9, 8, 12, 15, 26, 7, -1, -12, -3, 17, -17, -10, -18, 3, 24, 11, -9, 8, -2, -12, -14, -9, 6, 1, 3, 13, 17, 6, -25, -18, 3, -19, 18, -4, -16, 7, -10, -6, 1, -2, -15, 16, 5, -4, 23, 19, 19, 1, 10, -8, 11, -19, 26, -4, -4, 12, 1, 7, -13, 7, 5, 9, 2, -26, -7, 2, 1, 11, 9, -16, -21, 4, -6, 10, -2, 7, -34, -28, -12, 2, -11, -23, -11, -30, 17, 14, -23, 13, -16, 16, 12, 11, 3, -19, 3, -7, -21, -12, -18, -16, 11, 14, -14, 10, -10, -5, -4, -12, 8, 12, -9, 2, -2, -9, -17, -8, 16, -12, 1, -5, -10, 5, -9, -22, -18, -13, -3, -4, 12, -6, -18, -17, -30, -9, 14, 4, -13, -11, 17, 17, -22, -20, 2, -22, -17, 33, 15, -3, 7, 11, 22, 7, 8, -4, 20, -1, -4, -23, 31, -13, 13, -5, 9, 21, 11, 17, 15, 7, 0, 
20, 2, 1, -15, -10, 7, -19, 13, 5, -1, 3, 33, -1, 19, 13, -9, -8, -12, -13, 18, 14, 20, -4, -23, 20, 10, 6, 0, 4, 24, -4, -14, 7, 17, 19, 32, 2, 27, 21, 0, -11, -12, 20, -13, -3, 15, -18, -19, 9, -4, -6, 41, -21, 8, 11, 18, -5, 5, 21, 0, 0, 5, 2, -12, 1, -22, -4, 13, 0, -8, -20, 0, 5, -14, 3, 15, 14, 27, -6, 10, -2, -12, -5, 12, -20, 13, 8, 16, 17, 1, -17, -15, -5, 13, 7, -18, 6, 9, 8, 2, -2, 10, 9, 8, 12, -9, -10, -26, -16, -9, 7, 1, 26, 12, -8, -13, -25, 4, 13, 6, 12, -2, 16, -22, 3, 23, -25, -14, -5, 7, 1, -3, 1, 19, 8, 1, 22, 15, 17, 17, -5, -13, 2, 2, 10, -2, -3, 52, -23, -17, -15, -7, 17, 12, -23, 24, -17, -3, -2, -21, 19, -9, -6, 9, -22, -8, -18, -23, -17, -20, -16, 1, -15, 30, 8, -10, -6, -17, -8, 0, 8, -2, -15, -16, 12, -18, -33, 12, -24, -18, -16, -4, 15, -11, -4, 15, 8, 21, 18, -20, -4, -23, 7, -20, -1, 20, 25, 18, -11, 2, 3, 4, -13, -16, 3, -5, -26, 2, 8, -20, 4, 15, -8, 10, -27, -18, 11, -10, -1, 1, 19, -23, 12, 4, 11, 6, 8, 23, 3, 12, 10, -3, 5, 5, -11, 29, 18, 12, -24, 8, 6, 26, -5, 2, 2, 2, -21, -5, -19, 13, 12, 10, 10, 9, -22, 16, 2, -2, 7, 14, -1, 14, 15, -15, 17, 17, 24, 28, 5, 16, -14, 7, -23, 6, 23, 23, 6, 19, 26, -5, 15, 18, -3, 3, -2, 13, 3, -13, -14, 3, -24, -20, -16, -10, -18, 15, -16, -7, -22, 2, 30, 3, 7, 16, -27, 51, 25, -4, -6, -25, -21, 21, -17, 10, -10, 28, 32, -23, 22, 3, -2, 20, -8, 17, 25, -23, -14, 2, 1, 16, 24, 15, 16, 19, 5, -7, 2, 0, 35, -3, -3, -2, -11, -6, -19, -38, -26, 3, 6, -18, -8, -4, 13, 10, -5, 8, -15, -21, 13, -5, 21, 7, 3, 12, 0, -5, 1, 14, 11, -7, -2, -17, -21, -15, 14, 18, -4, -42, 12, 21, 5, -13, 10, 4, -4, 13, -28, -1, 17, -23, 7, 6, -18, 22, 2, -10, -15, 16, 6, 14, -16, 16, -17, 25, -9, 17, 7, 21, -15, 12, 1, -20, 8, -2, 4, 2, 24, 4, -21, -2, 30, 2, -7, 17, -11, 2, 17, 13, -8, -10, -3, -8, -21, -29, 0, 20, 2, -4, 14, -3, -21, 14, -8, 11, -1, -1, 1, 16, 21, -36, 19, 7, -8, -8, -29, -1, 1, -8, 5, 10, 36, 13, 5, 14, -13, -1, 2, -15, -25, -22, -7, 11, -9, 18, 4, -37, 6, -16, -20, 20, -12, 0, -8, -15, -2, -2, -23, -7, -17, 9, 7, 18, 17, -17, -1, -10, 1, -17, 3, 12, -11, -19, 0, 8, -11, 13, 7, 11, -21, -22, 5, 5, -5, 24, -19, -11, 9, 9, -27, 6, 9, -21, -7, -2, 24, 5, -24, 11, -20, -7, 18, -20, -16, -11, 14, -2, -6, -5, 8, 20, 16, -16, 4, -21, 8, 26, 6, -18, -12, 0, 15, 6, -1, 52, -1, 21, -14, -17, -2, -24, -18, -11, -7, 6, 27, -7, 18, 7, 24, 29, 14, 38, -8, 10, 17, 12, -15, 16, -13, 28, -27, 5, 7, -14, 12, -12, -12, 10, -14, 8, 19, 2, 5, 12, -18, 4, 11, -21, -13, 1, 23, 1, -27, 4, -6, -1, -18, 4, 0, 7, -20, 16, 9, -17, -1, 23, -8, -12, -4, 25, -6, 1, 16, -20, 8, -22, 22, 25, -11, 2, 19, -2, -19, -13, -20, -10, 1, 17, 16, 6, 19, -24, 14, 35, -2, -12, 0, 8, 15, 1, -21, -8, -12, -19, 8, 14, 12, -7, -14, 0, 15, 6, -19, -10, 17, -10, 1, -1, -13, -21, -4, 1, -12, -6, 6, -24, -13, 17, -7, -5, -18, 5, -22, 8, -23, -17, 11, -5, -21, -7, 10, 4, -1, 8, -1, -5, 7, 19, -19, 23, 15, 16, -29, 3, -10, -25, 15, 27, 11, -2, 2, 17, -24, 11, -13, -2, 15, 1, 16, 6, -27, 28, -1, -37, -14, 8, -20, 10, 8, -41, 33, 12, 12, -10, 14, 15, 2, 1, -1, 22, -8, 1, -19, -15, -16, 20, -23, -2, -4, 16, 5, -11, 13, -3, -18, 3, -18, -9, 8, -18, 2, -32, 17, 46, -5, -7, -22, -11, 9, -24, -5, -5, -18, -12, 0, 15, 20, -24, -4, 15, -2, 18, -17, 5, -18, -11, -2, -22, -1, -13, -2, 3, -11, -19, -30, 10, 13, 8, 12, -17, -10, 28, -16, 25, -5, -6, -7, 5, -7, -12, 7, 3, -15, 4, -41, -18, 31, -43, 6, 17, 6, -7, 27, -16, -6, 8, 4, 8, -11, -19, -7, 3, 24, 28, -15, 23, -1, 23, 3, -12, 24, 17, 0, 17, 7, 20, 18, -18, 2, 7, -18, 1, 15, 14, -25, -21, -37, -31, -6, -49, 2, -15, -9, 24, -2, -10, -23, 
3, -11, -16, 19, -14, 41, 10, 11, 27, -21, 5, -24, 0, 19, 7, 12, 8, -21, -7, -29, 7, -8, 21, 8, 1, -18, 0, 9, 7, -4, -24, 5, 5, 12, 11, 6, -4, 0, -23, 7, 4, -24, -10, -15, -20, 9, 6, -15, -17, 7, -20, -35, -5, -13, -6, 5, 5, -3, 13, -5, 4, 6, 7, -10, -16, 15, -20, -17, 15, -9, 23, -21, 22, -11, -10, 15, 19, 7, 38, 11, -23, -6, -22, -23, 1, -10, 34, -4, -5, -11, -17, -13, -19, -9, 32, 11, 14, -20, 13, 14, -21, 1, 13, 1, 29, -36, 15, -13, -21, -5, -16, 5, 40, 12, -1, 14, -2, 20, 27, 17, -13, 22, -11, -7, -8, -3, -4, -15, 11, -18, 13, 1, 0, -4, 12, 9, -25, 8, 5, 6, 3, -2, 21, -15, -6, 33, -1, 6, 12, 16, 6, 19, 5, 6, 18, 13, 2, 12, 40, 3, 20, 16, 7, 16, 5, -5, 7, -7, 35, -21, -4, 8, -14, -18, -8, 2, 42, -13, -19, 19, -6, -15, -10, -11, 23, -5, 11, 11, 14, -11, 2, 4, 5, 9, 16, -19, -23, -5, 25, -4, 36, -35, 17, -17, 9, -16, 20, -18, -14, 2, -15, 0, 4, -11, 14, 13, 15, 7, -5, -10, 9, 18, -3, 12, -1, -12, 6, 3, 19, -15, 26, -13, 1, -18, -2, 8, -15, 9, -4, 16, 8, -1, 14, 4, 6, -3, 5, 7, 5, -32, -20, 6, 14, 13, -9, -17, -16, 26, 6, 2, 20, 10, 1, -18, -17, 19, 8, 0, 2, -1, -17, 11, 30, -17, -20, -7, 12, -4, -2, -9, 19, 12, 6, 10, -23, 18, 0, -3, 14, -20, -6, 21, -10, 7, 21, -57, 8, -18, -18, 16, -10, 1, 6, -8, 5, -18, -10, 12, 13, -8, 7, 7, -21, -24, 5, -8, -22, -1, -20, -3, 6, -10, -16, -19, -12, 5, -11, -8, 13, 12, 1, -9, 7, 7, -7, -11, -14, -2, 5, -4, -18, -5, 11, 25, -8, -27, -20, -12, -13, 18, -14, 1, -18, 5, -6, 10, 5, -11, 16, -33, -18, 6, -15, 10, -6, -17, -18, -26, -1, 0, 13, -6, -7, -16, -15, 40, 8, 8, 0, 7, -16, 7, 48, 7, 8, -17, -15, -12, -9, -6, 11, 14, -5, 3, -9, -4, 7, -8, 13, -7, -10, 0, 1, -6, 1, 10, 3, 1, 26, 6, 2, -23, 18, -5, -18, 16, 8, 15, -21, 6, -8, -10, 20, 16, -2, -13, 15, 12, 6, -7, -21, 22, -1, 12, -15, 12, 8, 7, 15, -11, -7, -37, 3, -12, 13, 16, -25, 25, -25, -14, -6, -17, 9, 13, 1, 31, -4, -8, -15, -20, -12, 18, 15, -17, 18, 9, -20, 11, -20, 14, 13, 3, -9, -10, 3, -7, -16, -23, 21, -10, 9, -17, -10, 1, -21, -1, 24, 13, 16, -23, 12, -14, -13, -20, -17, -9, -14, -15, 16, -11, 2, -20, 21, -18, 21, -7, 19, -11, -17, 0, -7, -1, 13, -10, 3, -17, 0, -6, 29, 17, -19, 6, 15, -1, 12, 7, -8, 27, -8, -12, 2, -5, -3, 3, 19, -11, 49, -10, -10, -8, 18, 1, -9, 3, 21, -10, 2, -20, -18, -7, -15, 4, 2, 14, -19, 13, 9, -18, -2, -12, 1, -3, 18, -14, 9, -20, 1, -13, 2, 7, -2, -15, 18, -22, 27, -3, -13, 22, 13, -12, -4, -17, -6, 11, -18, 15, 17, 2, 17, -5, -26, 6, 29, -28, 1, 16, 11, 14, 13, -3, 40, -6, -9, -18, 2, 3, -16, 19, 13, -14, 4, 6, 6, -18, -21, 11, -2, -23, -16, -22, -14, 0, -42, 53, 27, -12, 16, 7, -18, -16, -19, -14, -14, -16, -8, 3, -13, -2, -30, 35, 17, 14, -13, -6, 7, 15, 15, 27, -6, 22, -13, -16, -2, 11, 10, 4, 11, 1, 1, -3, -7, 0, 20, 7, -2, 4, 1, -16, -3, 4, -20, 9, -8, 26, 8, -5, -2, -14, 11, 13, -1, 11, 6, -6, 17, 3, -27, 22, 5, 19, 7, -13, 20, 8, -14, 13, 13, 11, 14, -18, -20, -21, -24, 48, 15, 10, 13, 4, -11, -13, 20, 27, 19, 0, -3, -1, 14, -1, 5, 1, -26, 21, 14, -4, 7, 8, 46, 34, 20, 34, 16, -1, -16, 19, 4, 20, 12, -14, -7, -11, 18, 6, -10, 28, 10, 20, -22, -13, -6, -10, 18, 14, 5, -7, -22, 3, 10, 6, 19, 13, 42, -23, -8, 14, 12, 2, -41, 42, -12, 9, 10, -4, -21, 2, -8, 10, 9, 3, -20, 5, 12, -13, 13, 22, 17, -20, 8, 10, 8, 18, 19, 10, -25, -8, 14, -5, 2, 19, 17, 23, 7, -14, 6, 16, 8, -22, 15, -4, -15, 20, 14, -18, 18, -22, 33, -1, 4, -2, 13, 15, 8, 9, 20, 6, -1, 36, 13, 18, 20, -22, 19, -38, 15, -19, 11, 0, -9, 1, 19, -4, -26, 31, 18, -16, -10, 3, -7, 28, 46, -74, 15, -9, -15, 3, -4, 2, 32, -16, -15, -22, 3, 9, -17, 2, 12, 8, 7, 2, -20, -9, 7, -13, -22, -6, 12, 12, -16, 
-4, 15, -15, 31, -14, -14, 0, 17, 8, -6, 15, 13, -10, -13, -4, -13, -13, 2, 22, -34, 0, -11, -13, 8, 7, -5, 33, 18, -37, -17, 0, -2, 14, 2, 12, -27, -5, -2, -9, 12, -12, 9, 15, -9, 4, 17, -20, -23, 10, 1, -18, -3, 17, -8, 8, -1, 19, 1, -24, -9, 11, 1, -11, 0, 16, 4, 8, -23, 19, -14, 16, 0, 15, 15, -14, 28, 21, -16, -12, 1, -4, -15, 5, -21, 17, -23, -14, 6, -10, 25, 7, 29, -30, 7, 1, 4, -21, 32, 43, 18, -11, 15, -7, -3, 18, 30, 7, 0, -2, 8, -4, -12, 10, 25, -14, -13, 7, -15, 4, 15, -17, -16, -14, -25, 10, 17, -13, -18, 15, 36, 3, -12, -1, -15, -4, -18, 15, -13, 1, 16, 6, 0, -19, -17, -19, -8, -14, 7, -19, 8, -4, -15, -11, 10, 5, -15, 28, -18, -6, 16, -17, -3, 2, -28, 14, -8, -20, -4, -5, -7, 14, 41, -2, -4, -9, -7, -6, 5, 3, 21, 16, -6, -7, -20, -17, 7, 27, -9, 3, 5, -9, 9, -7, 13, -16, 2, -8, -18, -3, -11, 18, 9, 13, 10, 28, -12, -4, -3, -16, -4, -2, 22, 15, 5, -10, -7, -3, 22, -4, 14, -24, -21, -8, 11, 1, -14, 27, 21, 7, -20, 4, 15, 4, 17, -5, 13, -12, -17, 10, -15, 18, 16, 11, -11, 0, -15, 5, 16, -2, 14, -3, -5, -5, 5, -2, -18, 16, -1, 0, 0, 17, 9, -5, 0, 6, -12, -1, 5, -1, 2, 12, -21, 16, 14, -11, -8, -16, -23, -15, -10, -11, 23, 5, -8, 0, -3, -1, -10, 12, -11, 29, -17, 5, 13, 5, 13, -15, 14, 16, 5, 24, -14, 16, 16, -21, 22, -10, 12, 8, 16, 18, -9, -19, -9, 21, -5, 3, -11, 7, 11, -2, -16, -32, 10, 14, -18, -22, -4, 0, 11, -11, 3, 3, 1, 3, -13, -22, -19, -8, -4, -3, -4, 17, 0, -19, -5, 17, 23, -44, -5, -22, 10, 11, 1, 18, 7, 8, 4, 2, -20, 19, 19, -16, -33, 9, 10, -7, 4, 11, -2, -12, -24, -16, 5, 4, -10, -10, 11, 47, 9, -19, -8, -22, -15, -5, 2, 15, -24, -4, 12, 7, -4, -2, -13, 23, -28, -27, 1, -16, -16, 2, -3, -14, 3, 2, -2, 13, -12, -15, -21, -1, 15, -38, -12, 8, 0, -9, 44, -17, 1, -11, 7, -8, -14, -14, -22, -23, -3, -26, -23, -14, -18, -16, 19, -8, 26, -9, -16, -6, 3, -15, -21, 18, -13, 22, -9, -1, -19, 2, -10, -11, 1, -34, 9, -21, -10, 0, 39, -26, 60, -15, 14, 1, 7, 15, -6, 11, -14, -13, -13, 1, -19, 3, -14, -8, -4, 13, -20, 13, 6, -22, 9, -12, -9, 3, 4, -2, -14, -2, -35, -29, 0, -10, 7, -11, -17, -19, 25, -14, 22, 16, 13, -16, 10, -1, 10, 23, -4, -26, -16, -21, -17, -4, -3, -5, -17, -36, 3, 16, -10, -5, -5, 21, 8, 4, 16, -3, -19, -3, 4, 14, -5, 41, -15, 16, -9, -20, -4, 13, -36, -6, -12, -8, -12, -20, 28, 4, 12, 7, -12, 13, -5, -9, 18, -17, -28, 30, 11, -14, 3, 13, 22, -20, 33, -13, 0, -9, -1, 9, 25, 6, -8, 18, -1, -4, 17, 19, 29, 8, -1, -4, 13, 17, -13, 9, 3, 13, -1, -1, -10, 10, 17, 18, 22, 5, 11, -1, 5, 1, -6, -6, 0, 8, 8, 8, -13, -1, 6, 5, -11, 3, 9, 13, 11, 9, -2, -1, -27, 22, -12, 14, -20, 9, -11, 12, -5, 1, -16, 45, 18, 1, -2, 2, -6, 25, -9, -5, -3, 11, 16, 3, 7, 0, -11, 0, 16, 6, -3, -10, -20, -12, 43, -41, -1, 13, 8, 4, 40, 11, 43, -13, 12, 8, 6, -11, 5, 10, 1, 28, -2, -1, 4, 9, 1, 3, 17, -5, 13, 3, -19, -12, 0, 16, -8, -8, -5, 9, -17, 8, 21, 21, 15, 13, -19, 7, -6, 0, 26, -5, 31, 13, -13, -12, 16, 16, 34, -38, 27, 8, -5, 9, -20, -16, -12, 3, 0, 19, -2, 8, -2, 1, -8, -7, 16, 13, 2, 17, -7, -15, 0, -43, -9, 14, 9, 7, -6, -1, -10, -17, -13, -26, -1, -23, -11, 3, -19, -19, -8, 5, 1, -4, 10, 8, -33, -6, 20, 19, -4, 14, -17, 2, 34, -29, -37, 11, -15, 18, 17, -13, -5, -21, -22, -25, 18, -18, -22, 6, 43, -2, -1, -17, -18, 10, 8, 2, 17, 12, 1, 1, -11, 1, 15, 1, 1, 26, 4, 28, 11, 10, 1, 14, -14, 13, 24, -13, -15, 7, -10, -10, -6, -5, -1, 26, -8, -14, -4, -1, -4, 10, -19, 1, 10, -12, -13, 9, 39, -25, -19, -17, 0, -10, -6, 17, -1, -11, 21, -38, -15, 9, -24, 11, 2, 1, -1, 14, 4, -22, 6, 0, 17, 22, 23, 29, 14, -2, -16, 18, 5, 17, -18, 6, 13, -3, -18, -20, -35, -35, -11, 19, 6, 9, 15, -3, 
-13, -9, -19, 2, -23, 7, -7, 9, -12, -4, -21, -19, 15, -19, -10, -7, 12, -43, -21, -11, 6, -5, -18, 13, 26, -10, -21, -8, 17, -10, 14, -8, 5, 14, -20, 23, -8, 1, -15, 1, 3, 7, -18, 8, -12, 7, 10, -18, -14, 10, 9, 6, -18, 0, 16, 18, -10, 8, -6, -1, 15, -4, 10, -13, -18, -3, -11, 30, -18, -18, 3, 14, -25, -7, -8, 22, 2, 14, 12, 5, -41, -20, 20, 27, -20, -13, -23, -19, -22, -14, -40, 6, 10, -9, 3, -17, -19, -13, -20, 4, 13, 9, 7, -15, 21, 1, 7, 5, 1, -10, 17, -9, 27, 3, -12, 23, -12, -10, -17, -8, -24, 7, -6, 2, -19, 7, 9, -19, 6, 9, -17, 5, -4, 19, -17, 5, 8, 6, 3, 13, 1, 13, -15, -23, -15, 13, -11, 0, -5, 1, -1, 17, -26, 3, -13, -14, -18, -7, 2, -6, 20, -21, -6, -23, -11, -10, 20, -8, 8, -10, -12, -19, -18, 16, 15, -10, 4, -15, -10, 9, -11, -16, 10, -6, 1, 21, -1, -26, 6, 8, -16, -18, -33, 8, 14, 3, 15, 14, -18, 13, -31, -17, 2, -7, 2, -20, -8, 7, 21, 11, 36, -21, -21, -7, -22, -13, -42, 21, -12, -8, 8, 0, -22, 5, -24, -2, 8, -3, 7, -5, -15, 18, 12, -6, 9, -11, -13, 0, 17, -15, 11, 11, -39, 2, 18, 11, -12, 11, -2, 12, 18, 1, -21, 9, -14, -1, 7, -15, -39, 26, -6, -1, 1, -19, 30, -13, 13, -16, -21, 6, -11, -13, -7, -13, 18, 14, -24, -22, -20, 15, -5, 14, -14, -20, -12, -18, 5, -2, 29, -11, -18, 20, 5, -19, -23, 9, -17, -9, -25, -32, -18, -20, 1, -11, -35, -18, 4, 4, 2, -13, -12, 8, -5, 3, 0, 11, -19, -21, 16, -7, -17, -3, -35, -12, 0, -4, 5, -14, 4, 9, -17, -5, -9, -13, 12, -16, 36, 5, 20, 17, 3, 3, -19, -17, 12, -17, -18, -8, 12, -18, -11, -13, -25, -15, -19, -24, -14, -11, -8, 7, -14, 9, -4, -2, -14, -1, 6, -14, -11, 14, 33, -27, -17, -13, 6, 2, 5, -18, -1, 13, 11, 7, 11, -12, -20, 18, -18, 2, 12, -7, -14, -16, -9, 13, -2, 25, 16, -14, 0, -16, 1, -32, -19, 15, -17, 13, 13, -23, 3, 13, -25, 6, 18, 5, -18, 13, -24, 6, -18, 18, -10, -13, -3, -6, 4, -30, 35, -24, 5, -7, 2, 9, 10, -11, 20, -21, -20, -14, -16, 7, -22, -5, 7, 9, 12, 4, 17, 5, -23, -44, 19, -9, 19, 9, -22, -15, 3, -51, 31, 1, 19, 15, 19, 11, 14, 5, -4, -10, 16, 1, -19, 19, 33, 3, 7, -4, -3, 10, -21, -17, 46, -64, -12, -34, 14, 1, -14, -2, 20, -9, 14, -28, 17, -12, -12, -5, 10, -27, 16, 1, 4, -4, -2, -19, -6, -26, 29, -10, -17, -9, -2, -10, 13, 17, 30, -6, -17, -8, 5, -18, -22, -16, 45, -31, 7, 7, 4, -10, 13, -14, 18, 5, -4, 6, 5, -16, 10, -17, 51, 12, 5, -4, -3, 12, 1, 3, 12, -8, -11, 18, -12, 1, 27, -14, -9, -13, -10, -23, -21, -9, 14, -17, 34, -27, 21, 3, -19, -9, -1, 2, 14, -14, -10, -5, 20, -17, -23, -18, 10, -1, 17, -6, -19, 8, 29, -24, -38, 8, 11, 14, -10, 9, 6, -21, 21, -22, -7, 12, 19, -8, -17, -12, 11, -20, 14, -9, 20, -15, 44, 9, -7, -16, -13, -23, -21, 5, -16, 14, -12, -14, 4, -12, -6, -2, 5, -4, 26, -11, 19, 17, -20, 6, 41, -21, -19, 3, 17, 5, 1, 19, 6, -17, -22, 12, 13, -20, 11, 0, -18, 5, -15, -21, -21, 7, 0, -17, 28, -19, 7, 0, 8, -24, 19, -18, -10, -10, -19, -14, -3, 17, 0, 21, 4, -27, 7, 25, -11, 17, 7, -20, -2, -13, -10, -26, -21, 11, -4, -17, 15, 33, -29, 9, 9, -22, -8, -6, 19, 7, 12, -3, -17, 15, 2, -10, 1, -2, -21, 11, 17, -16, -19, -9, -12, 3, -6, -10, 6, -18, 11, 2, 7, -26, 7, -21, 1, 2, 19, -20, -22, 3, -7, -18, -23, -11, 20, 13, 10, -23, 5, 4, -6, -13, -20, -19, 14, -33, 32, 26, -24, 19, -23, 16, 9, 0, -15, -17, 9, 8, -18, 0, -3, -2, -14, 5, 11, 6, -1, 9, -19, 23, 25, -19, 8, 9, -7, -8, 30, 35, -12, -16, -7, -1, -16, 12, -5, 6, -39, -17, -17, -17, -14, -1, 5, 4, 24, -8, 15, -21, -19, -15, 0, -24, -11, -2, -11, -18, -12, -9, 0, 17, 8, 4, -13, -4, -13, 18, -22, 5, -3, -20, 0, 2, -13, 5, 9, -8, -33, 10, -8, 11, 13, 1, 7, 1, 17, 27, -12, -21, -23, -16, -12, -5, 8, 14, -20, 2, -8, -22, -14, -14, -23, 15, -15, 3, 
6, 1, -23, -9, -28, 20, 4, -18, 8, -13, -7, 4, -9, -18, 4, 7, -18, -4, 7, 4, 8, -1, 10, -16, 18, -15, 6, -30, -9, 6, 11, -5, 16, 13, 10, -1, -1, 18, 1, 14, 7, -7, -12, -3, 9, 20, 7, -13, -3, -9, -13, -26, -9, 13, 0, -5, -18, -22, -18, 17, 5, 13, 4, -20, 1, -16, 7, 24, -18, -14, -22, -19, 5, -6, 12, 20, 21, 11, 6, 18, -5, -5, -28, -20, 9, -23, 2, -23, -18, -10, -12, 18, 18, 16, 7, -4, -1, 19, 9, 11, 24, 17, -9, -6, 15, 2, 29, 5, 7, -22, 5, -8, -21, 18, 1, -27, 20, -5, 8, -7, 2, 15, -24, -2, -7, -9, 13, -21, -7, 11, -4, 7, 3, -20, -14, 19, 4, -1, -23, -23, 22, 2, -7, 8, -14, 15, 15, 23, 18, 24, -10, 6, 21, -1, -8, 25, 25, -21, 14, -11, 16, -15, 9, 25, -27, 3, -3, 8, -17, -20, -8, -26, 10, -17, 17, -2, 4, 14, -14, 1, -5, 13, 13, -5, -17, 18, -7, 32, -4, -26, 1, -3, 20, -15, -11, 10, 19, -16, 8, -13, -23, -17, 10, 7, -1, -1, 6, 11, 12, 0, 12, 8, 0, -32, 5, -15, 5, -18, -33, -2, -3, 5, 14, -16, -17, 16, -3, -12, 18, 26, -15, -2, -19, -22, -22, 6, -1, -5, 3, -2, -17, -2, 23, 2, -13, 3, 17, 3, -2, -22, -15, -2, -2, 24, 3, 11, 18, 10, -1, -5, -32, 15, -12, 20, -21, -11, 0, -29, 8, -15, 5, -18, 4, 15, 6, 23, 8, 12, 15, 9, -20, -5, -12, -10, 3, -10, -17, -2, -3, 9, 17, 5, 9, 12, 4, -4, 15, -5, 34, -2, -5, -28, 7, -14, 11, 9, -40, 23, 18, 6, 10, 5, -7, -6, 4, -22, -6, 6, 21, 0, -16, -14, 17, -7, 3, 2, 10, 1, 13, -3, -41, 28, 13, 4, -2, 17, -20, -3, -23, 24, 3, 3, 10, 19, -15, 13, -20, 24, 18, -2, 0, -3, -9, 3, 3, 5, -1, -19, -14, 6, 11, 0, 5, -15, -21, -23, 18, 0, -10, -4, 15, -16, -8, -18, -8, -17, -1, -20, -14, 8, 3, -11, 6, 6, -12, -2, -13, 14, 11, 20, -3, -8, -19, 15, -19, 25, 19, 23, -21, -20, 7, -19, 17, -10, 13, -3, 10, 15, -17, -9, 19, -24, 19, -2, -9, 2, 0, -4, -25, 19, -18, 15, 17, -19, -18, -17, -26, -11, -7, -20, -12, 8, -4, -12, -7, 1, 14, -21, -6, -6, 21, 9, 2, 7, 10, -1, 6, -2, 8, 12, -33, 29, 0, -7, -13, 0, 5, -6, -19, 12, 16, -2, -12, 14, -2, 18, 12, -6, -3, -16, -8, -5, 17, -5, -7, -10, 15, -16, -8, -17, 14, -13, 5, -2, 20, -2, 16, -7, 10, 4, 13, 3, 8, -11, -22, 8, 4, 18, -1, 13, -7, 15, 11, -16, -8, 0, -9, 25, -8, -9, 10, 17, -2, 14, -26, 13, -8, 18, 14, 15, -17, -13, -25, -6, 13, 19, -13, 28, 23, -11, 9, 7, -6, 24, -8, -17, -2, -18, 6, -4, -16, -4, -1, -4, -42, 7, 17, -1, 16, 11, 15, -35, 25, -13, 20, -6, 19, 13, 4, 1, -3, 10, -22, 0, -4, 6, -14, -16, 9, -9, -3, 3, -18, 14, 7, 33, -2, -23, -27, 12, -9, 14, -18, 11, -13, -17, 8, -11, -7, -14, -1, 13, -15, 12, -13, -17, -14, -9, 10, -8, 11, -1, -22, 6, -22, 24, -13, 3, 12, -27, -3, -5, -3, 5, 13, -23, 30, -17, -21, 6, -12, 22, 7, -43, 4, -8, 8, -6, 15, 0, -3, -14, 13, -24, -5, 17, -3, 0, -39, 10, -3, -26, -1, 14, -5, -16, 18, 20, -16, -7, -16, -16, -25, 19, -7, 2, 20, 9, -5, 14, 17, 5, -10, -4, -2, -7, -3, -15, -8, -13, -33, 18, 9, 3, -19, 6, 14, 14, -22, 8, 18, -4, -20, 10, -9, 2, 11, -8, -26, -16, 16, 4, -6, -15, 13, -17, 9, -23, -10, -17, -25, -12, 2, -1, -15, -12, 7, -8, 13, -6, -3, -10, -4, -14, -6, 11, -15, -33, -11, -35, -10, 11, -22, -1, -14, 2, -18, 4, -10, 6, -7, 14, 6, 17, -11, 2, -1, -11, -19, 18, 3, 29, 10, 10, -3, -2, -21, -3, 9, 2, 7, -22, 7, 18, 13, -21, -23, 9, 15, -14, 7, -9, 14, -38, -3, -42, -20, 2, 17, -28, -13, -10, -26, -7, 2, -5, 2, 11, -21, 23, 12, 2, -29, 22, -2, 8, -20, -6, -26, 1, -16, 15, 16, 6, -14, 13, 11, -6, -13, -9, 26, 5, -15, 5, 10, 6, -5, -4, 5, -18, -15, -5, 15, -22, 7, -20, -2, 24, -11, 7, -22, -17, -14, 15, -18, -20, 10, -8, -13, 9, -27, -1, -25, -13, -20, 15, -21, -18, 19, -17, -26, -9, -9, 24, 5, 22, 42, 14, -7, 4, -21, -41, -4, 14, -13, 20, -10, -27, 6, 25, -17, 10, -5, -5, 2, 37, 7, -12, 
14, -24, -2, -8, 9, -6, 1, 0, -9, 1, -20, -6, -3, -4, 11, 7, 18, 27, 9, -11, 7, 6, 14, 22, -12, 6, -78, 23, 9, -17, 1, 7, -21, 17, 5, 2, -8, 7, -12, -3, 18, 5, 0, 7, -28, 16, -9, 19, -2, -26, -25, 14, -15, 2, -23, -13, -5, -22, 13, -5, 18, 4, 17, -16, 8, 15, 7, 17, 2, 8, 13, -7, 0, -3, 4, 23, 3, 8, -15, -6, 2, -11, 9, 15, 1, 22, 4, 14, -9, -1, 6, 33, 4, -17, -10, -2, -1, 4, 21, 3, 21, 3, 15, 3, -4, 5, -14, -14, -7, -3, 10, 15, -20, 6, 42, -15, -6, 6, -11, -7, -14, -7, 11, 17, -7, 15, 14, 4, 11, 18, -16, -2, 27, -17, -19, -14, -15, -17, -6, 7, 18, -11, -6, -7, -16, 4, 0, 9, 14, 19, 1, 4, -12, 10, 7, -16, -5, 1, 9, 20, 9, -14, -16, 5, -20, -17, -5, -13, 9, -5, 48, -9, 9, -10, -7, -48, 15, 16, 3, 6, -33, 17, -22, 7, 9, -15, -17, 5, -19, 9, -4, -15, 8, -14, -20, -7, 32, 8, -9, -18, 4, -34, 3, -16, -11, -8, -3, 11, -2, 7, -24, 14, 1, -6, 8, 28, -4, -16, -47, 15, -13, -7, -4, 55, 15, 1, -16, -15, 19, 5, -13, 5, 12, 16, -23, 13, -18, 38, -11, 0, 15, 0, -7, -5, 5, -30, -3, 5, -17, 31, 9, -3, -9, 26, -8, -7, -15, 16, 2, -26, 4, -2, -14, 12, -15, -9, -17, 40, 12, 2, -17, 18, 19, 18, -13, 5, 35, -1, -12, -24, -9, -14, 12, -6, 5, -6, 16, 1, 14, -6, 4, 13, 1, -5, -15, 30, -21, -19, -28, 24, -8, 10, -6, 53, 16, -10, 26, -32, -8, -21, 4, 24, -6, 4, -8, 18, 8, 7, 3, 17, 11, 9, 34, -26, -4, -10, 2, 11, -3, 26, 14, 6, -5, -7, 16, 0, -4, 32, -1, 1, -10, -21, 9, 8, 9, 28, -5, 12, -7, -15, 1, 7, -11, 8, 27, 14, 19, 8, 16, -18, -7, -15, -3, -3, -7, -24, -14, 20, -8, 9, -11, 3, -3, 34, -21, -11, 9, -14, 20, -8, 35, -10, 19, -4, -9, 17, 29, -15, 8, 7, -18, 11, 13, -11, 9, -23, 8, 9, -19, 19, 6, -20, -14, 12, 5, -13, 12, -6, 10, 8, 19, 9, -19, 10, 8, 9, 8, 16, 2, -2, 3, -13, 5, -8, 1, 8, -4, -46, 21, 33, 2, 5, 14, -26, -26, 14, 11, 11, -4, -1, 6, 16, 4, -6, -19, 14, -10, -7, 2, -61, 10, 4, 4, 7, 5, -10, -12, -12, -18, 17, 8, 11, 0, -5, 12, -3, 2, -53, -46, 17, -20, 20, -8, 9, 21, 8, 26, -29, 22, 10, -9, 5, -3, 33, 3, -3, 9, -13, 12, -30, 3, 2, 1, 11, 13, -6, -23, -2, 6, -11, 13, 2, -17, -14, 2, -35, -6, -2, 7, 26, -16, 14, -1, 18, -11, 10, -10, 14, -4, -27, 14, 12, 25, -17, -15, 16, 12, -28, -13, -4, -18, -10, 4, -9, -18, 4, 4, 4, 12, -14, -56, 19, 1, -22, 11, 1, -6, 3, -10, -18, 8, -4, -1, 0, -31, -1, -10, 0, -24, -8, -24, -8, 20, -27, -12, 10, -2, 26, 5, 7, 0, -19, -3, -19, -20, 27, -13, 6, 48, -50, 15, 0, 0, -14, 1, -11, 24, -9, -14, -4, -23, -23, 3, -21, -1, 28, 13, 29, -12, -30, 2, -20, 26, 7, 5, 1, -17, 10, -20, 16, -1, 27, 12, 13, -16, 4, 12, -12, 0, -14, -1, 15, -18, -13, -2, -12, 14, -34, -10, 5, -13, 37, 12, 27, -11, 6, -7, -7, -13, -7, -19, 2, 4, 21, 8, -32, 9, 7, 3, -26, 9, -26, 1, -21, -18, 15, -7, -7, -35, 1, 2, -32, -15, 40, -21, -5, -16, 6, 27, -14, -5, -4, -1, 0, -19, -26, 22, -2, -6, 7, 0, 16, -13, -30, 5, 13, -24, 24, -19, 6, 18, -34, 7, -16, 15, -9, -15, -25, 33, -4, -13, 9, -14, 12, 6, 14, 7, -16, 6, -21, -12, 19, -6, 19, 3, 15, -1, 11, 5, -19, -14, -13, -11, -20, 8, -53, 20, 3, 14, 9, 15, -15, -22, 16, 15, -2, -14, 0, -12, 18, 31, 8, 10, 8, 7, 3, -37, 30, 8, -14, -28, -46, 8, -16, -4, -20, -17, 13, 1, 15, -22, 6, 17, 4, 1, 2, -3, -4, -15, -12, 63, -27, -6, 12, 0, 12, 0, 17, 28, -28, -3, 7, -18, -13, 12, -12, -13, -17, -22, -5, -15, 22, -13, 27, 43, -23, 5, 12, -3, -9, -5, 3, 10, 12, 12, -15, 14, 11, 2, -8, -5, -7, 3, -6, -17, 11, -5, 16, 1, -8, 11, -1, -3, 11, 4, -23, -40, 8, -3, -6, -24, -2, -6, 10, 6, -20, -22, 40, -1, 2, -8, -49, -10, 37, 7, 28, 12, -3, -6, -18, -29, -3, -25, -9, 4, 33, -25, 9, 0, -3, -1, 17, -8, 1, -4, 24, 0, 18, -21, 19, -13, -18, 4, -26, -8, -2, 10, 7, 16, -29, -1, -24, 
4, -8, -16, 13, 1, -4, -16, -48, 30, -16, -15, -16, -12, 10, -21, -5, -21, 5, -5, 9, -20, -45, -2, -42, -17, 9, -15, 8, -23, 17, 1, -28, -18, -24, -14, 18, 14, -6, -18, 4, -16, -9, 11, -4, -2, -15, 12, -3, 2, 7, 45, 1, -8, -4, -15, -10, -12, 3, -1, 12, -4, 16, 11, -4, 9, -11, -14, 10, 12, -1, -24, 26, 0, 17, -7, -13, 16, 11, -8, 5, -22, -1, 2, 36, -23, -44, 9, -33, 21, 6, -13, -2, 20, -3, 13, 19, -3, -12, -4, -6, -11, 37, -19, -18, 25, -9, 0, -8, 11, 15, -3, -38, -3, 28, -5, 7, 2, 2, -22, -22, 18, -27, 19, 0, -14, -1, 6, 21, 47, -12, 2, 23, 4, 7, 8, -6, 8, 0, -18, 17, -21, -16, -17, -12, 28, 13, -14, 7, 16, -28, -10, 17, 60, -11, 14, -19, -21, 22, 0, 25, -5, -22, 17, 16, -20, -7, -1, -9, -2, 9, -2, 27, -14, 0, -19, -23, 34, 16, -6, 24, -5, 18, -16, -20, 6, -35, -4, 24, -6, 17, -22, 7, -15, 24, -21, 3, -18, -3, -9, 7, -4, -14, 23, -22, 1, -5, -11, 55, 19, -9, 12, -15, -3, 37, 4, 18, -9, -4, 3, -9, 19, -17, 9, 32, 9, 5, 30, -4, -6, 12, 5, -28, 36, -11, 15, 1, -9, -17, 0, 33, -17, 6, 27, -15, -17, -12, 14, -5, -1, -19, 39, -5, -17, 14, 4, 0, 19, 21, 28, -15, 4, 27, -1, 6, -12, 8, 22, 12, -22, -26, 6, -19, -26, -9, -16, 5, -20, 28, 15, -32, 28, 5, 35, -21, -7, 12, -19, -26, 38, -9, 30, -32, -16, -5, -4, 24, 4, -18, -10, -21, 12, -25, -19, 33, -22, -6, 35, -1, 16, 24, -18, 23, -6, -8, -1, -43, -22, 9, -27, 13, 3, -39, 0, 15, -8, -12, 14, 1, 0, -37, 18, -15, -5, -2, -9, -3, -14, 7, 0, 7, -9, 12, 2, 16, -1, 4, 19, 11, 13, -25, -6, 13, -30, -7, -31, 22, -25, -14, -8, -23, 11, -15, -33, -37, -8, 11, -7, 17, -5, 2, -2, -10, -5, 7, 10, -3, 12, 15, 6, 20, -23, -12, -13, -16, 12, -3, -3, -7, -1, 1, -5, -14, 5, -16, 37, 4, -22, 29, -19, 2, 20, 19, 18, 13, -21, -14, 9, 12, -26, -17, -21, 10, 5, 9, 0, 10, 19, -19, -10, 17, -16, -18, 17, 6, 1, -21, -4, 0, 8, 19, -1, -30, 6, -21, -43, -23, 13, 10, -23, 31, 11, -3, 36, 23, 16, 10, 15, 4, -4, 7, 9, -11, -22, -21, 10, -8, -22, 9, 9, 7, 19, -4, 17, 22, -20, 3, -20, 7, 13, -16, -15, 5, -15, -30, -12, 15, -8, 49, -1, 2, -11, -29, 24, 1, -4, -28, -1, 8, 2, -13, 5, 30, 4, 1, 14, -26, 28, 4, -3, 16, 8, -4, 12, 2, 15, 23, -2, -17, -15, -7, 2, 22, -1, -7, 14, 14, -10, 1, -10, 19, -15, 20, 2, 10, 12, -32, -7, 7, 4, 16, -33, -58, -15, 16, 6, 40, -23, 17, -10, -35, -23, -23, 6, 15, -1, -18, -11, 18, 4, -37, 8, -2, 6, -6, -8, -38, -5, -2, 9, 17, -4, -19, -32, 0, -10, 1, 6, -38, 11, 11, -37, -10, -5, 1, -7, 10, -31, -29, 8, -20, 10, -4, -6, 5, -8, 12, -5, -23, -14, -6, -21, -5, 13, -30, 5, 0, 2, -10, -11, 10, 8, -2, 22, 19, -8, -4, -17, -2, 1, 17, -3, -8, -23, 6, 15, 2, 8, 1, -16, -23, -2, -19, 2, -3, 2, -20, -22, 8, 17, -21, -13, -10, 4, -29, -27, -25, 12, -3, -22, -5, 8, -7, 11, -39, 14, -21, 6, 40, 23, -7, 8, 7, -14, -21, -14, 13, -19, 10, 11, 18, -7, 21, -1, 5, -38, 23, -9, -6, -14, -5, 17, -20, 26, 10, 22, -29, -10, 11, -8, 9, -3, 25, -24, 3, 4, -17, -13, 8, -9, 15, -7, -8, 19, 4, -21, 5, 9, 16, 18, 5, -21, -3, 5, -25, -19, 6, -8, 17, -19, -8, 0, -17, 24, -35, 6, 25, 3, 32, 0, 3, -1, -5, 3, 27, -8, -3, -12, 4, 13, 7, 29, 13, 12, -16, -4, 10, 9, 15, 19, 5, 7, -49, -22, 18, 34, 14, 1, -24, 3, -8, 1, 5, -1, -1, -7, -15, -3, 6, -9, 11, -8, 3, -6, 6, -20, -17, 0, -10, 7, 1, 10, -23, 0, -22, -4, 19, -29, -2, 7, -4, 13, -6, -17, -4, 1, -38, -20, -3, 2, 27, -14, -8, 22, -7, -27, 5, 12, -6, 3, -19, -9, -2, 6, -16, -21, -7, 9, 21, 4, 11, 10, 10, -3, -26, -18, -3, -13, 4, -3, 13, 9, -18, -28, -16, 12, 13, -32, 14, -3, 21, 17, -10, 13, 21, 21, 7, -5, -17, 0, -14, 5, -10, 17, -13, -3, 29, -18, 22, 7, 1, -25, -2, 0, -12, -21, -20, -11, 15, 1, -1, 0, 7, 0, -1, -11, -3, -3, 6, -7, 
-31, 4, -23, -24, -12, -21, -11, 13, 7, 14, 15, -17, 4, 5, 20, -10, 12, 8, 14, 15, 31, 14, 11, 20, 0, -9, -35, 32, 11, -14, -21, -28, -18, 13, -5, 11, 3, -4, -18, 10, 3, 11, 11, 0, -5, 18, -23, -8, -6, -17, 10, 17, -22, 7, 20, -9, -25, -10, -13, -11, 14, 28, -22, 8, -19, 17, 7, -13, 3, -11, -18, -5, 10, -21, -7, 6, -22, 6, -2, -15, -37, -20, 5, -7, 32, 26, 15, -13, -16, -16, -38, 6, -31, 32, 23, -12, -13, -12, -33, 23, 23, 32, 29, -15, -4, 6, -34, 36, -4, 23, 20, -2, -10, -9, 0, 1, 11, 36, -14, -10, -12, 10, 3, 17, 26, -21, 10, -7, -12, -11, -22, -3, 13, 26, -20, -5, -13, -9, 2, 34, -27, 0, 4, 14, 17, -6, 23, -17, 5, -25, 28, -5, -1, 12, 16, 4, 22, 17, 2, -4, 2, 18, -27, 14, 19, 23, 13, 20, 18, -20, -31, -6, 21, 2, 18, -6, -10, -15, -3, 19, -25, -21, 31, -16, -30, -3, -7, -9, 9, -21, 31, -17, -21, -15, 6, 17, 5, 29, -11, 5, -6, -14, -5, 22, -43, 13, 19, -16, -2, -5, -2, 13, 2, 23, 17, 7, 12, 21, 9, -14, -13, -8, -11, -13, 15, -12, 19, 30, -20, -34, -57, -17, -8, -4, 43, 11, 10, 19, 20, -17, 9, 3, 6, -27, -39, -17, 7, -18, 14, -12, 17, -20, 42, -27, -29, 8, -38, -19, -15, -11, -21, 12, -11, -2, 27, 1, -19, -37, -27, -21, 19, -20, 35, 9, -31, -7, -10, -1, -39, -19, 5, -9, -4, 38, -1, 23, -20, -5, -7, -15, -9, 1, 39, -5, -17, 23, -3, -8, 6, -22, -11, -21, 20, 7, -14, 16, -2, 42, 5, -13, 1, 6, 3, 19, -15, -10, -28, -9, -8, 7, 27, -14, 15, -20, -22, -6, 1, 2, 34, 9, -32, 10, -16, 10, 30, -4, 23, -19, -14, -13, 13, -14, 7, -1, -3, 2, -7, -19, -21, 11, -13, -15, 33, 19, -9, 15, -1, 12, -8, -14, 11, 11, 30, 8, -3, 18, -12, 10, -24, -5, -4, -6, 20, -6, 0, 9, -8, -2, -15, 6, 6, 3, -32, 17, -12, 16, -16, -14, -22, -17, -38, 19, 11, 18, -6, -19, -15, -19, -24, 16, 27, -13, 21, 8, 5, -25, 10, -5, 6, 17, -6, -22, -53, -18, -5, -16, 16, -10, 1, -6, -12, -20, -16, -4, -9, -1, 2, -1, -16, -11, 28, -5, 1, -2, -8, -8, -33, -15, 9, 8, 6, 11, -9, 0, 8, 0, -1, -3, -14, -5, 9, 18, -12, -10, 18, -6, -1, 8, 16, 7, -12, -32, -19, -20, -1, 1, 0, 16, -37, 4, 29, -18, 23, 4, -19, 8, -57, -10, 22, 17, 4, 11, 22, -30, -27, -24, -12, 13, -17, 2, 3, -11, 10, 12, 9, 18, -1, 2, 10, -33, -34, 13, -7, 8, -19, 2, 15, 13, -4, 10, 3, -12, 3, -2, -12, 10, -33, 9, -5, -15, -9, -2, 6, 0, -38, -9, -47, -10, -5, 17, -14, 10, 20, -38, -7, -15, -11, 17, 4, 6, -28, 7, 27, -6, 24, -16, 24, 4, 16, 16, -21, 19, -9, -18, 25, 0, 10, -18, -6, -16, 18, 7, -11, 12, -17, -19, -4, 16, 8, 19, -9, 3, -31, -22, 1, -12, -5, -11, -6, -5, 36, 14, -57, -22, -13, 14, -23, -20, -37, 14, -2, 11, 24, 11, 14, -1, -17, 17, -17, 0, -18, -15, -3, -7, -16, 4, -36, -19, 9, -14, 19, -14, 24, -4, 16, 7, 7, 16, -21, -10, -47, -14, -11, 6, 1, -1, 8, 11, -29, -9, 1, -5, -14, 17, 7, -2, 4, 20, -19, -7, 29, 11, 10, 11, 10, 2, 20, -10, -6, -17, -19, -4, 4, 5, 7, -15, -9, -18, 5, -9, -30, -23, -34, -5, 37, 7, -6, -39, 1, 4, 25, 7, 19, 6, -26, 3, 11, -16, 5, -7, -11, -1, 29, -1, 11, 5, 11, 4, -11, -11, -2, -24, -38, 8, -23, 18, 3, -8, 10, 17, 13, -23, 20, -1, 16, 14, 24, -23, -19, -32, 22, -10, 3, 3, -10, 10, -29, -17, -29, 11, -13, 6, -19, 10, 2, 10, -38, -12, 37, -12, 2, -12, 11, 7, 21, 16, 17, 6, -4, 17, -42, 15, -16, 10, -8, -22, -21, 10, -2, 17, -33, 10, -23, 8, 16, 13, -14, 11, -43, 10, 26, 2, -16, -9, 17, 10, -12, 6, 9, -19, -4, 26, -32, -14, -10, -25, -18, 16, 42, -49, 35, -3, -20, 15, -1, -12, 19, -2, 4, -1, -1, -14, 16, -10, -1, -31, -10, 1, -10, -22, -16, 3, 12, 19, -39, 6, 17, 6, -11, -18, -3, 7, 7, -1, -20, 0, 18, -12, -26, -37, -5, 21, 0, 19, 2, 2, -16, 8, 1, -8, -13, 4, 18, -12, 0, 10, 8, -17, 16, 1, -8, 4, 3, -25, 6, 24, -27, 5, 21, -3, 17, 17, -27, 11, 
-19, 15, 22, 6, -18, 4, 31, 4, -13, -6, 21, 3, -12, -31, 21, 23, -5, -13, 10, 11, 4, -10, 12, 14, 12, 3, 22, 4, 9, 1, -25, 15, 8, 0, 15, -6, -11, -14, -4, 17, -31, 8, 5, -8, 7, -7, -8, 13, -4, -14, 14, 13, -6, -12, 19, -28, 24, -1, -2, 2, 1, -3, 12, 3, -15, 15, 22, 3, 47, 0, 16, -5, -16, 15, -26, 19, 18, 19, 10, -6, -30, -13, 22, -17, 9, 11, -33, 1, -13, 19, -5, -12, 23, -7, 28, -11, -17, -15, 10, 3, -18, -36, 16, -9, -4, 22, 3, -6, 19, 3, 2, -13, 11, 17, -17, 15, 42, -38, 36, 14, -13, -20, -1, -1, 2, 18, -30, 3, 11, -29, 6, 24, -8, 30, 1, -2, 16, 9, 15, -11, -3, -1, -8, 6, -29, 10, 1, -17, -23, -11, -8, -19, -13, -18, 11, 8, 29, -7, 28, 7, -9, 7, 7, 4, 17, 18, -8, 41, -7, 17, 22, -13, 16, 13, 3, 12, 2, 12, -9, 18, -9, -19, -12, -17, 8, 4, 10, -37, -1, 3, -3, 39, 24, -6, 19, -1, 21, -9, -10, -28, 9, 11, 5, -17, -26, 23, 10, 20, -1, 1, 3, 19, 7, 2, -4, -1, 11, 7, 17, 6, -12, 8, 4, -1, -9, 14, 19, -20, 2, 22, -10, 23, 5, 33, 19, -7, 8, 15, -8, 41, -37, -30, 0, -19, 30, -24, -2, -6, -33, -20, 5, -2, 27, -17, 1, 9, 19, 5, -9, 0, -26, -1, -12, 2, -9, 25, 19, -10, 15, 6, -11, 3, -15, -26, 8, -10, -3, -15, -7, 29, 9, -3, -14, -1, -6, -13, 6, 15, 11, -6, 0, -29, -11, -4, 8, 31, 23, -10, 10, 15, -22, 28, 23, -12, 5, 33, -23, -29, 3, 12, 17, 4, 22, -2, 19, 11, 4, -21, -34, 17, -6, -20, 39, 31, 18, -10, 0, -5, -7, -21, -8, 13, -5, -3, -5, -7, -9, 15, -13, 10, 9, 5, 4, 2, 10, 8, -6, -14, 5, 5, -6, 12, 10, 37, 13, 2, -7, 24, -23, 2, 2, 13, 27, 18, -20, -22, 6, -11, -14, 5, -1, 9, -2, 12, 23, 24, -17, 16, -9, -4, -19, -14, 12, 17, -15, -29, -15, 25, 17, -19, 15, 9, 29, 21, -9, 18, 15, -4, -26, 20, 31, -11, -8, 26, 2, -1, 3, 22, -5, 4, -13, -1, -6, 7, 0, 21, -5, -22, 12, 20, 21, -26, -9, -22, -24, -6, 0, -22, 8, 8, 0, -10, 12, -34, 25, -22, -7, -5, 22, -14, -17, -17, -31, -53, 32, 36, 39, 2, 5, 25, -4, 6, -5, -24, 4, 15, -18, -21, 6, 10, 4, 5, 0, -34, -14, -20, 13, -29, -11, 19, -6, 15, 35, 8, 27, 15, -4, -16, -1, 12, -3, -13, 14, 3, 3, -14, -3, -18, 20, -29, 2, 22, -20, -16, 42, -5, -16, 6, -41, -8, 0, 29, -6, 3, -16, 18, -2, -10, 20, 2, -1, -15, 27, -38, 22, -28, -7, -15, -27, 11, 15, -15, -21, -8, 5, 19, 13, -17, -17, -2, -12, 15, 17, 4, 11, -24, 7, 6, 12, 16, 17, -24, 0, 15, 34, 20, 12, 23, -2, -10, 8, -20, -9, -14, 5, 13, -16, 4, -2, -1, 2, -12, 12, -18, 19, -17, 27, 15, 23, 19, 1, 4, -16, -38, 12, -11, -8, -7, -19, 2, -9, -11, 6, 1, -3, -28, 21, -13, -3, -1, 12, -11, 19, 2, 23, -13, 23, 7, 51, 41, 21, 10, -13, 9, -8, 4, -2, 15, 9, 2, -20, 33, -10, -21, 16, -23, -18, 0, -10, -8, 16, -13, -10, 1, 20, -14, -9, 1, -9, -16, 21, -26, -14, 4, -12, -28, 18, 14, 15, -29, -19, -19, 24, -4, 10, 6, -19, -12, -3, -21, 6, 14, -15, -8, 5, -9, -6, 17, 11, 6, 3, -8, -11, 15, 52, -11, 18, -5, -9, -17, -31, -24, -7, -19, 25, -25, -5, -11, -19, -31, -17, -35, 21, 7, -20, -4, -10, -23, -9, -47, -14, 14, 9, -27, 14, -14, -2, 22, 5, 4, 1, -14, -7, 32, 34, -7, -37, -41, 4, -30, -5, -18, -30, 19, -7, -4, 34, 13, -17, 17, -1, -14, 19, 12, 15, -13, 21, -1, -9, 31, 18, 16, -2, -42, 6, 16, -28, 2, -29, -21, -7, 15, -41, 7, -17, 27, -5, 19, 9, 11, -15, 5, 25, 5, -17, -12, 22, -23, 7, 3, 2, -25, -29, 10, 11, -32, 16, -21, 1, -25, -9, 9, -5, -29, -30, 2, 17, -11, 34, -3, -15, -27, 8, 13, 9, 14, 10, -34, -38, -9, -23, 25, 4, 29, 7, -27, 47, -21, 28, -11, -12, 26, 13, -7, 14, -25, -11, -19, -10, 3, 1, -11, 2, -7, -2, 46, -3, -7, -5, -12, 26, -3, -2, 4, 16, 27, 16, 29, 32, 22, 19, 9, -19, -44, 2, -2, 43, -5, -1, -2, 24, -18, -1, 25, -7, 12, 11, 10, 15, 10, 20, 33, 10, 20, 32, 0, -24, -1, -10, 26, -7, -30, -16, -10, 23, -8, -16, 10, 
28, 13, 31, -23, 11, -1, 9, -8, 2, 17, 44, 12, 19, 28, 13, 0, 0, 23, 15, -10, -12, -17, 24, -37, -15, 3, -36, -1, -15, -17, 9, 20, -3, 21, -12, -3, 6, 12, -9, 18, -23, 27, -3, 45, -42, 7, -1, -11, 6, -21, 3, 26, -9, 10, -16, 6, 9, 9, 21, -4, 22, 7, 11, 10, 18, 12, -71, 8, 32, -15, 39, -21, -19, 17, 10, -6, -23, 26, 6, 8, 0, 34, -7, 25, 8, -2, 1, -5, -27, 17, 9, -25, 13, 22, -11, 17, -10, 3, 1, -9, -14, 0, -2, -16, -6, -9, 23, 25, -5, -11, -10, 1, -17, -14, 16, 2, 11, 9, -12, -8, -30, 40, -31, -31, 26, -38, -12, -10, -22, 16, -5, 7, 9, 13, 29, 23, -10, 2, 3, 4, 18, 5, -18, -16, -25, 21, -10, 2, -5, 17, -16, -15, 4, 3, -2, 2, 37, -9, -27, 1, -6, -10, 10, -7, -27, 23, 20, -16, -9, 25, -11, 26, 0, -22, -21, -14, 1, -24, -37, 10, 25, -31, 2, -7, 0, 25, -8, 9, -15, 23, 1, -20, 16, -3, -35, -5, 30, -29, 3, -11, -7, 6, 17, -7, 14, -9, -1, 17, -20, -9, 4, -8, 24, -13, -8, 21, 13, 13, 2, 14, -17, 10, 17, -27, -4, -2, -18, -7, -2, 3, 2, 8, 4, -9, 3, 11, -24, 1, 6, 5, -8, 14, 26, -10, -42, 5, 15, 8, -2, 29, 36, -2, -7, 30, 0, -3, 0, -17, -2, -4, 0, 3, -4, 33, -15, 21, -25, 9, -6, -21, 7, -15, 5, 1, -13, 1, 50, -23, 21, -25, 15, 12, 17, -11, -11, 10, -9, -19, 3, -9, -31, -8, 3, 11, 0, -11, 19, 6, 7, 12, -28, -8, -8, -45, 10, 0, 0, 15, -9, 8, -12, 11, -5, -1, 21, -16, 0, -8, 17, 30, -5, -1, -25, 21, -18, -16, -2, 14, -10, 12, 20, -5, 20, -1, -10, -7, 3, -12, 11, 15, -7, -8, 7, -6, -26, -11, -32, 18, 10, -16, -15, 9, 5, -10, 15, -26, -5, 22, 18, -8, 11, -8, -2, -20, 5, -16, -11, -19, -18, -3, 8, 15, -10, 26, 15, -15, 2, 29, 10, 1, -1, -2, -18, -3, -18, -2, 19, -9, -2, -6, -23, -6, 15, 5, -42, -32, 19, -14, -22, 12, 13, 4, 17, 9, -3, 22, -11, -3, 5, 3, -11, -5, -12, 13, 4, -39, -5, 10, 4, 29, 8, -20, 20, 33, -7, -18, 15, -17, 2, -1, 4, 3, -5, 25, 16, -9, -30, -7, 20, -12, -14, 8, -17, 21, 1, -3, 9, -7, -5, 6, 25, 28, 3, -1, 6, 20, 3, -28, -4, 13, -4, -11, -13, -34, -6, 7, 5, -9, -8, -19, 15, 27, -32, 3, 9, 19, 4, 36, -15, -33, 12, 1, -6, 20, -29, 7, -10, 11, -9, 24, 7, -15, -29, 2, 12, -17, 21, 9, 7, 8, -15, 11, -41, 9, 5, -3, 3, 21, -21, 1, 9, -1, -34, -12, -7, 7, -8, -17, 22, -6, 9, 14, -11, -15, -5, -7, -10, 7, -14, -22, 9, 14, 36, -4, 6, -11, 9, -2, -39, 5, -3, -29, 1, 17, -13, -18, 22, 14, 10, 11, 2, -19, 17, 9, 26, -17, 23, 16, 1, -3, -2, -4, -17, -7, 4, 8, -14, -3, -28, -6, -15, -4, 13, -24, 5, 17, -12, -16, 30, -14, -14, -23, 35, -22, -7, 23, 1, 5, 5, -22, 24, 31, -43, 22, 19, 8, -14, -10, -11, 13, -18, -11, -15, -20, 6, 8, 24, -20, 36, -7, 13, 25, -12, 14, -11, 2, 31, 7, 5, 15, 4, -15, 14, -13, 2, 37, 17, 10, 24, 33, 2, -27, -19, -6, 10, 13, -30, 7, 33, -6, -4, 11, 37, -17, 18, 54, 14, 8, -31, -29, 4, 3, -5, 12, 8, -17, 13, -25, 1, 29, 25, -15, -3, -7, -6, 1, -3, 19, 15, 30, 37, 3, 16, -10, -15, 14, -38, -3, -22, -29, -11, 20, -20, -13, 0, -3, -33, 8, 39, -30, -18, -23, 17, -13, 16, -5, 13, 17, 22, 5, -10, 1, 25, -26, 30, 2, 10, -7, -17, 10, 23, -6, -18, -21, -12, 12, -23, 2, 31, -21, 15, 13, 10, 42, -2, -13, 3, 25, -43, -8, 20, 27, 7, 11, 27, 29, 11, 0, 23, -24, -5, -20, 8, 4, 27, -7, 34, -15, 15, 10, 6, -35, 24, -7, 9, 27, 19, -18, 0, 15, -40, -1, -1, 25, 30, -25, -4, 25, -6, 33, 4, 0, 17, 41, 1, 30, 25, 12, -7, 0, -12, 6, 1, -7, 16, 20, 5, -21, -9, -32, 12, 23, 0, 15, 4, 4, 3, -13, -14, 10, 13, -19, -11, -17, 33, -16, -14, -11, 4, 2, -27, -2, -29, -17, 34, -16, -15, -24, -12, 9, 23, -20, 20, 12, -14, -20, -19, 15, 31, -16, 4, 18, 14, 32, 4, 9, -16, 6, -12, 20, 23, 10, -3, -5, -48, 5, 15, -19, -4, -19, -1, -12, -27, -10, 6, -19, 15, 18, 42, -2, 5, -3, -4, 17, 0, 17, 10, 34, -5, -4, 18, 0, -21, 
-30, 11, -5, 7, 14, 20, 14, -4, 9, 15, -13, 10, 9, 6, 25, 13, -16, -16, 9, -2, 2, 3, -7, -28, -21, -5, -1, 12, 2, 20, 7, -10, -5, -15, 26, 20, -3, 5, 31, -10, 5, -17, -31, -6, 19, -2, -1, 8, 23, 16, -5, 11, 7, 2, 22, 16, 7, 13, -14, 20, -4, 10, -5, -21, 0, -25, -5, -29, -19, -18, -4, -15, 3, 0, -6, -17, -21, -36, -18, -29, -2, 6, -8, -18, 9, 30, -11, 16, 18, 11, 2, 17, 29, 10, -4, 23, 15, 17, -8, 25, 25, 7, 20, 11, -1, 6, -16, 34, 12, -12, 15, -6, -11, 1, 5, -12, 6, 4, 26, 9, 9, -24, 0, 33, -13, -18, -6, 15, -11, 1, 1, -25, 12, -3, -22, -10, -9, 15, 2, -21, 0, 17, 19, 12, 6, -26, -4, -17, -12, -14, 12, -8, 1, -23, 0, 25, 13, -8, 9, 12, -2, -28, -27, -17, 3, -11, -7, 4, 16, 2, 12, 4, -4, 7, 2, -16, 14, 13, -4, -18, -14, 0, -12, 4, 1, -20, 24, 0, -14, -6, 29, 11, -25, -10, -3, -1, 7, -5, 13, -26, -6, -15, 3, 26, -9, -18, -6, 12, 9, -8, -18, 26, -23, -19, 13, -8, -9, 21, -28, 24, 16, 10, 7, -9, -7, -6, -8, 5, 20, 4, 32, -36, 2, -12, -4, -14, 8, -34, 0, -24, 12, 2, 7, -4, 2, -4, 30, 6, -19, -3, 8, 21, 11, -9, -4, 18, 10, 6, -11, -25, -5, -5, -14, 0, 8, 33, -3, 22, 2, -22, 28, -6, -36, -1, 4, -6, -22, -27, 3, -7, 3, -5, -1, -6, -1, -1, -9, -1, 23, 10, -20, 26, -27, -42, -1, -17, -3, -37, 10, 3, -13, -37, -15, 1, -9, 14, 3, 6, 7, 2, 7, -12, -11, -10, -9, 12, -5, -4, 3, -12, -32, 11, -29, 15, 8, 30, -30, 41, -19, -18, -13, 16, 10, -24, 19, 31, -21, -30, 3, -10, 5, -13, 25, 5, 4, 3, 9, 19, 41, -54, 4, 19, -8, 11, 8, -4, -20, -4, -9, -4, -5, -17, -4, 22, 3, 11, 4, 7, -6, 9, -5, -9, -14, -3, 6, -29, 23, 20, 27, 1, 29, 6, 19, 16, -20, -7, 15, -10, -12, -32, 13, -18, 31, -12, -1, -14, -5, 18, 13, -28, -12, 7, 22, -17, -27, 24, 16, -24, 16, -6, -6, -12, 10, 9, 4, 0, -16, 14, 27, -29, 12, 7, -8, -23, 13, -24, 14, -3, 6, 9, -15, -22, -2, -7, 4, -19, 14, -27, 14, -18, -4, -1, 28, -2, 17, 5, 5, 16, -24, -27, 17, 12, 7, -12, -22, 24, -11, 16, 8, -39, 17, -14, -4, -4, -11, -1, 15, 14, -3, -22, -4, -9, -33, 27, -16, -20, 16, -2, 19, -23, -3, 2, 23, -14, 4, 12, 21, -4, 11, -4, -16, -19, 5, -4, 7, -9, -4, 10, 34, -10, -6, -41, -5, -34, 22, -7, 21, 6, -4, -11, -30, -19, -16, 21, -16, -9, 8, 0, 4, 4, 2, 0, -33, -10, -8, 8, -2, 20, 0, -15, 30, 15, -18, -35, -4, -34, -1, 9, 17, 3, -9, 0, 13, 40, 4, -8, -9, -8, 0, 18, -33, -16, 15, -6, 6, -19, 5, -56, -23, -22, 28, 16, -6, 3, 2, -20, -26, -34, -28, -18, -4, -14, -13, -6, -11, -18, -19, -26, 30, -3, -3, -19, 16, -2, 3, -2, 11, 16, 24, 6, 13, 15, 16, 19, 33, -1, -9, 17, 13, 10, 0, 18, -2, 0, -3, 1, 5, -24, 2, -19, -2, 37, 15, -32, 31, 27, -9, -10, 3, 24, 15, 27, -36, -5, 15, 14, -5, 11, 3, -16, 7, 16, 6, -9, 5, -10, -5, -16, 12, -6, 9, 9, -4, -3, -20, 12, 18, -10, -9, 0, 16, -10, 25, -34, 15, -21, -14, -14, 21, 4, -23, -46, -11, -15, -25, -17, 27, 30, -31, -37, 26, 4, -9, 2, 6, 25, -3, -4, -15, -2, -4, 25, -8, 26, 1, 6, 15, -8, 16, 11, -38, -20, 11, -4, -19, 19, 7, -12, 0, 8, 12, -9, 13, -10, 28, 13, -10, -30, 26, 26, 0, -1, -25, -2, -19, 5, -12, -1, -26, -4, -10, -29, 7, 2, -11, 4, -17, -6, -1, 9, -2, -18, -6, -24, -8, -3, -7, 3, -22, -16, -5, 23, 26, -4, 7, -25, -3, -3, -20, -2, -4, 6, -1, 15, -35, 1, 19, 9, 19, 20, -8, 8, -43, -6, 18, 8, -16, 11, 11, 38, -28, -7, 29, 32, -23, -10, 1, -12, 6, 6, -8, 24, 17, -14, -3, 17, 4, -19, 32, -3, -22, 26, -19, 0, -15, 5, -2, -11, 23, 9, -33, -37, 8, 2, -23, 16, -20, -29, -9, -9, 4, 38, -8, 13, 12, 3, 5, -2, -12, 27, 7, -17, -15, -21, 23, 13, -23, 1, 7, 20, 1, 35, -7, -1, -15, 3, -3, 9, 9, -1, -22, 14, 1, 20, -2, -5, 3, 42, -26, 47, -28, 7, 18, 32, -21, -6, 6, -24, 42, -7, -15, -30, -7, -16, -12, -12, 29, 13, -16, 12, 31, -6, 
32, 4, 11, -14, -25, -17, 13, -18, -11, 12, -35, -13, 13, 24, 7, -10, 15, -4, 21, 8, -20, 20, -6, -13, 6, 19, 17, 3, 17, -33, 19, 29, 12, 12, -6, 7, -5, -12, -28, -21, -11, -24, -19, -3, 6, -10, -4, -15, -21, 4, -20, -6, -5, -10, 18, 7, -7, -7, -13, 12, 14, 0, -19, -4, -10, 21, 34, 5, 23, -7, 3, -29, -1, -19, -12, 14, -8, -9, 13, 18, 5, -19, -32, 3, 1, -22, 3, 0, 1, 6, -30, -19, 28, -41, 10, 11, 9, -18, -12, 16, -31, 2, 5, -5, 41, 17, -16, -20, 33, -29, 7, 15, -25, -8, -4, -8, 9, -4, -11, 7, -2, 14, -11, -19, -11, -7, 10, -10, -15, -24, 0, 4, -13, -31, 19, -18, 1, 8, 27, -27, -1, 10, 11, -44, 12, -6, -31, -5, -5, 1, -2, 2, -24, 5, -3, 2, 9, -4, -11, 11, -29, 19, -1, 10, 0, 10, 19, -4, -6, -3, -12, 29, 12, 22, 9, 12, -17, -33, -10, -8, 15, -17, 1, -40, -11, 4, 1, -6, 1, 24, -27, -20, 10, -14, -33, 17, -21, -23, -16, 29, -18, -2, -37, 3, 6, -6, -5, 17, 10, 20, 14, -12, -7, 22, -19, -12, 8, -7, 7, 4, -3, 9, -16, -37, -6, -14, 7, -11, -8, 18, 5, -32, -15, -16, 10, 4, -4, -27, 2, 14, 10, 9, -12, -2, -6, 13, -4, 6, -2, -12, 12, 16, -13, 8, 10, -88, -13, 9, 28, 7, 3, 1, -18, 0, 7, 8, 4, 0, 14, 10, -8, -4, -10, 14, 4, -5, -18, 2, 10, -35, 39, -15, -31, 6, 3, -38, -19, 25, 2, -13, 3, 19, -9, -30, -17, -8, 9, -28, 23, -7, -18, 30, -20, 11, -20, 3, -5, 6, 17, -18, -16, -10, 7, -16, -3, 10, 5, -26, -36, -13, -4, -20, 2, -40, -16, 0, 2, 2, 2, 23, 14, -4, 25, -4, -18, -8, -31, 15, 2, 22, -3, 15, 7, -21, -17, -6, -25, -17, -5, 4, 2, -9, 19, 13, -8, -31, 23, 32, -1, -10, 19, -3, 7, -19, -31, -20, 8, -5, 21, 3, 17, -5, 3, 38, 0, -8, -6, 11, -27, -4, -30, -5, -16, 6, 16, 4, 9, 9, 4, -8, 18, 19, -14, 2, 7, -16, 14, 21, 18, 11, 15, -13, -21, 11, -8, -8, -8, 4, -5, 3, -7, 20, 23, -5, -20, -13, 16, -21, 0, -20, -23, 19, 5, -28, 13, 6, 3, 11, 4, -3, 8, -5, -20, 23, 2, -36, 16, 10, -10, -25, 0, -15, -28, -6, -25, -3, -5, -30, 19, -17, -1, -3, 2, -14, -3, 23, -20, -17, -2, -5, 2, 4, -3, -4, -2, -11, 2, -40, -17, -16, -28, -5, 2, 31, 12, 8, -18, -5, -19, -19, 8, -26, -24, -19, -12, -15, -19, -27, -2, 21, 25, 2, 8, 26, -20, -38, -18, 3, -15, -17, -15, -21, 3, -16, -22, -16, -5, -26, -20, 11, -13, 0, -8, -15, 1, -6, -23, -4, 14, 10, 11, -2, -2, 15, -1, 11, -19, -4, -13, -1, -1, -1, -5, -25, 8, -17, 4, 8, -28, 0, 0, 23, -13, -21, -19, -35, 8, -10, 5, 5, -16, -13, -1, -24, -1, 11, -26, -10, -22, 9, 12, 16, -15, -9, -19, 0, -22, -2, -22, -3, 17, -6, -23, 36, 7, 38, 27, -5, 8, 16, -36, -18, 0, -13, -8, 15, -23, -12, -28, 27, -8, -26, 1, 6, 8, 2, -35, 11, -12, 16, 15, -6, -2, 9, -22, 11, 11, 52, 8, -10, 15, -22, 8, -4, -23, 21, 30, 6, -2, -9, 22, 10, -18, 17, 6, -3, 36, -2, 8, -21, 6, 27, 8, -11, 31, -19, 4, -15, 12, -2, -3, 1, -23, -23, -13, -18, 18, -6, 24, -19, -8, -29, -16, 8, -1, -7, -26, -10, 1, -4, -24, -6, -18, -27, 3, -5, 3, -2, -18, -8, -16, -10, 2, 5, -21, -13, -13, -23, -26, -27, 18, 14, 15, -11, -22, -8, -6, -22, -12, 2, 15, 1, -12, -5, -25, -15, 1, 14, 58, -21, -21, 17, -39, 9, -35, 11, 25, 30, -26, 11, 0, 39, -7, -13, 17, -15, 2, -20, 11, 9, 19, 13, 27, -2, -3, 7, 20, -4, 24, -15, 10, 4, -7, 19, -26, 30, 1, 9, 14, -11, -21, 2, -2, -28, 7, 23, 6, -13, -14, 3, -12, 1, 8, -7, -10, 1, 5, -15, -11, 17, -3, 10, -14, -16, -10, -12, 3, -1, 8, -21, 11, -28, -13, 4, 15, -13, 12, 5, 3, -25, -5, 30, 29, -20, 7, 7, -19, -4, -5, 8, -2, -15, 6, 11, 7, -4, -13, 9, 5, 13, 17, -26, 23, 20, 8, 9, 26, 5, -1, 10, -10, -14, 30, 11, 6, -8, 33, 20, -4, 12, 14, 32, 21, 13, 32, 2, 5, 0, 24, 43, -24, 15, 22, 16, -15, -15, 16, -14, 9, 2, 19, -9, -8, 8, 6, 3, -1, -19, 8, -14, 4, -30, 18, 9, -11, -1, -3, 26, -12, 1, -17, 24, 27, 0, -15, 
7, 8, -25, 13, -15, 14, 12, 4, 18, -5, -7, -8, 15, 1, -35, 15, 3, 16, 19, 22, 6, 5, 15, 6, 7, 8, 6, -7, 6, -16, 14, -19, -17, -1, 0, 26, -10, 7, 30, 19, 15, 9, -6, 0, 13, -8, -2, 19, -15, -11, 9, 22, 20, -2, 10, 6, -8, -17, 7, -4, 29, 4, -14, 9, -9, -3, -11, 3, 19, -13, -14, -24, 12, -13, 5, -17, 18, -6, 1, 25, 4, -8, 20, 5, -2, 7, 13, -9, 5, -15, 4, 41, 3, 2, 10, 37, 6, 44, -11, -3, -28, -1, 8, -36, 0, -8, 5, 4, -30, 8, 20, 6, 7, 5, 9, 12, 14, 13, 6, -4, -35, -9, 4, -25, -15, 8, 4, 8, -13, 12, -19, 22, 11, -2, -14, 18, -2, 22, -7, -16, -4, -5, -4, -10, 21, 31, -6, -15, 1, 17, 27, -23, -10, 0, 8, -14, 15, 8, -9, 33, -21, 21, 5, 4, -7, 7, 4, -4, -12, 21, 15, -2, 6, 15, 12, 8, -25, -2, 8, 2, 15, -10, 12, -3, 6, 13, 10, -8, -3, -8, -11, 28, -1, 6, -10, 4, -8, -18, 10, -14, -19, 44, -17, 7, -3, 6, -3, -27, -13, -2, 15, 23, 3, -15, -5, 25, -27, -9, 6, 18, 31, -14, -6, 5, -22, -13, 5, 5, -11, 27, 0, -6, -12, 31, -8, 13, 16, 21, -24, -12, -22, -10, 8, 5, 21, 12, -16, -13, -5, -4, 3, 32, -3, -3, 32, 21, 3, 8, -8, 6, -5, -13, 16, 4, -19, 20, -17, 1, -8, 26, -1, -2, -16, -6, -3, -12, -2, 13, 18, -36, 14, 1, 7, -12, -32, 10, -4, -15, -21, 17, -25, 5, 9, 4, 15, -13, -12, -19, -29, 31, -54, 13, 14, 15, -8, 15, -2, 27, -33, 6, -14, 31, 9, -16, 9, 22, -11, -1, 11, 8, -5, -20, -35, -3, 5, -22, -19, -39, -9, 23, 8, 8, -2, -30, -1, 34, 4, 33, 5, -9, -13, 15, -3, -20, 20, 13, 0, -3, -5, -23, -26, -15, -21, 11, 20, -12, -39, -33, -20, -5, -20, 5, -19, 29, -34, -15, -19, 6, -17, 30, 1, -22, -19, -27, 0, 6, 10, -16, -23, -4, -23, 13, -10, 36, -11, 46, 7, -1, -21, -5, 12, 14, -5, 16, 16, 15, -9, 8, -26, -22, 26, 8, 8, -8, 2, -6, 2, -5, 8, -9, 7, -3, -15, -16, 0, 8, -21, 21, 5, -8, -13, -29, 5, -30, -8, 0, -4, 11, -44, 9, 13, -1, -26, 9, 9, -5, -11, -6, -48, -11, -11, 23, -20, 9, -6, -5, 10, 29, -19, -18, 12, 7, 10, 1, -2, -20, -9, 7, -4, -16, -1, -4, -21, -5, -6, 11, -17, -24, 4, 14, -25, -8, -15, -10, -26, 4, -14, -29, 10, -11, -15, 6, -31, -8, -5, 16, -24, 43, 11, 33, 0, -10, 21, -14, -13, -3, -5, -29, -30, 7, -7, 2, -31, -18, -2, 12, -4, 7, -15, 6, 41, -19, 11, 27, -10, -24, 7, -30, -3, -25, -23, -15, -5, -31, -17, 8, -4, 55, -11, -1, 2, 12, 61, 5, -25, -7, 22, 18, -10, 8, -2, 5, 52, -29, 0, -7, -31, -24, 10, -14, 16, -26, -31, -8, -30, -25, -35, -4, -7, -12, -8, 27, -4, 3, -15, 14, -3, 19, 4, -24, -32, -21, 2, 2, -5, -21, -27, -3, -6, -1, -1, 8, -1, -4, -18, -15, -13, -11, 29, -8, -17, 7, 15, 10, -1, -14, 18, -8, -32, 7, 15, -41, 7, 2, 6, 14, 5, -1, 22, 8, -9, 25, -26, 35, -1, -24, -4, -30, -21, 13, -33, -8, -22, -6, 9, 19, 20, -13, -31, 2, 12, -14, -7, 14, 8, -8, -41, 13, -7, -1, -28, -14, -2, 10, 10, -7, -3, 39, -11, -7, -4, -16, -4, 9, -9, -6, -11, -15, -15, 5, 40, -5, -12, 6, 9, -23, 4, 3, -19, -14, -7, -49, -26, 5, 8, -9, -29, 0, 3, 31, 0, 10, 10, -35, 29, 17, 25, 1, -24, 18, -20, -4, -20, -16, 0, 7, -4, -26, 17, 8, -8, -15, -15, 3, -11, 33, 9, -13, 5, 38, 34, 18, -6, -11, -4, 30, -38, 4, 17, 22, 16, 15, -19, 43, -6, -7, -39, -6, -5, -4, -3, -17, 9, 28, -7, 20, -6, -27, 12, 1, 7, 30, -30, 11, -9, 27, -6, 8, -4, -10, 15, 21, 28, 16, -9, 9, -1, 28, 7, -6, -19, -22, 5, -22, -8, 4, 28, 20, 18, 25, 6, 28, 11, 1, 22, 5, -29, 9, -13, 2, -22, 20, -10, 4, -15, 14, 0, -13, -2, 9, -30, 6, -7, 10, -13, -20, -34, 4, 4, -13, -8, -2, -11, 19, 0, -15, -27, 9, -17, -2, 4, 25, 0, 27, 1, -12, -3, 14, -9, -14, 1, 11, 24, -1, -1, -15, 9, -32, -42, -9, -19, 21, -2, -18, -27, -12, -39, 13, -19, 5, 20, 9, 2, 6, 9, 30, 17, -30, -12, -18, 16, -10, 6, -14, -1, -7, 3, 14, 23, 37, 17, 26, 2, 11, -3, -1, -10, -2, -17, 6, 20, 13, -3, 
4, -11, -25, -10, -9, 37, 18, -1, 20, 3, -35, -11, -17, 0, -8, -9, -20, -19, -15, 10, -20, 9, 23, 3, -19, -4, 8, 13, 50, 0, -9, 9, -26, -6, -31, -2, 2, -16, 26, -21, -6, -3, 10, -12, 5, 8, 0, -19, 4, -23, -16, 7, 19, -21, -6, -16, 9, 14, 15, -17, 46, 11, -18, -25, 15, -18, -15, 17, 13, -3, -27, -11, -44, -16, -6, -25, 16, 25, 34, -1, 9, 2, -14, 20, -2, 17, 1, -18, -15, -14, 37, -11, 1, 2, 6, -20, -11, -3, 14, -10, 14, -5, -7, 8, -9, 6, 6, 13, -22, -19, 15, -3, -2, -5, 11, -2, 2, 21, 28, 4, 18, 21, -12, 21, 28, 12, -21, 7, -1, 7, 6, 13, -4, 0, -1, 9, 0, -9, 4, 16, -21, 5, 4, 11, 5, -13, 11, -13, -27, 8, 12, -13, -21, -13, -1, -1, 4, -10, 20, -6, 21, -23, -17, 18, 17, -3, 4, -4, 19, 4, -29, -9, 9, -18, 16, -16, -8, -22, 0, 2, 33, -7, -16, -5, 1, -15, 13, 5, -11, 1, 34, -24, -7, -18, -13, -9, 11, 2, 12, -4, -17, 14, -17, -2, -8, -2, 17, -6, -29, -3, 12, -13, 5, 13, -25, -5, -29, -19, 9, 8, -16, -29, 21, -20, 12, 2, -43, 8, -10, -3, 60, -4, -34, 6, 12, 8, 8, -14, 11, -8, -24, -11, 4, -11, 11, 9, -3, -21, -2, -8, 20, -36, -26, -7, 55, -16, 19, 25, 6, 7, -18, -12, -32, 10, -5, 14, 9, 11, 25, 17, 29, -21, 41, -29, 17, 22, 7, -11, 34, -22, 13, 7, 1, 0, -11, -3, 17, -19, -1, -15, 2, 9, -16, -22, 13, -1, 10, -4, -24, 5, -20, -4, 10, 15, 30, -14, -14, -2, 24, 6, -17, -12, 20, -23, -9, -20, -18, 6, 43, -14, -1, 6, 18, -11, -40, -4, -4, -19, -17, 9, -7, -2, 13, -33, 15, 21, -8, -14, -17, -10, 26, -8, 20, 5, -6, -15, 22, 24, 31, -1, -12, 11, -8, -11, -18, 15, -9, 3, 19, -3, 26, -19, 2, -7, -50, -23, 0, -3, 34, -16, -13, -11, 11, 20, -10, -22, 27, -8, -2, 13, -20, -16, -1, -15, -8, -13, -18, 1, -19, -3, 31, 8, 0, -13, 8, 7, -21, -18, 14, 6, 29, 24, 14, 19, -4, 22, -13, -12, -5, -30, 14, -14, -24, 0, -3, -24, -9, -18, -8, 7, -3, 1, -10, 8, 7, 15, 18, -6, 21, -8, 21, 6, 26, -17, -13, 23, 19, 9, 22, 2, 23, 14, 8, -20, -13, -12, -20, -23, 15, 26, -10, -11, 11, 31, -27, -27, 11, -25, 6, -7, -4, -27, 10, 5, 10, -26, 19, -1, -13, 2, 1, 2, -27, -11, -9, -26, -12, -16, 28, -19, -5, -10, -21, 36, -1, 3, 26, -27, -12, 6, -5, -5, 20, 3, 11, -1, 20, -9, -12, 28, 12, 17, 13, -14, 40, -11, -23, 8, -11, -14, -7, 0, -20, -33, 0, 1, -1, 6, 11, 4, -2, 4, 1, -1, -20, -1, 12, 2, 27, -9, 4, 20, -15, -5, -22, -28, -2, 8, -21, 10, 7, -18, 10, -21, -12, -5, -17, -13, -10, 1, 29, -4, 11, -30, -3, 0, -6, -6, 12, -12, -5, 6, 7, 17, -21, -26, -12, -14, -10, -29, 17, -3, 10, -25, -2, -21, -6, 8, -11, -5, -11, -20, -12, 1, -24, -17, -16, 2, -4, -24, 14, 3, 8, 21, 8, -9, 24, -12, -5, -24, 37, -13, 21, -21, -8, 5, 11, -1, -10, -18, 14, 17, 7, -13, 6, -1, -17, 2, 11, 2, 20, -17, -21, 20, 25, -15, -14, 18, -10, 8, -2, -24, 2, 35, -9, 2, 25, -7, -2, -16, -2, -28, 6, 9, -5, 27, 9, -3, 1, -10, 23, -7, -3, -11, -3, -12, -2, 2, 26, -13, -21, 2, -2, -9, -17, -20, -18, 2, 8, 0, -12, -7, -10, -26, 0, -5, 3, 9, 12, -15, 18, -29, 3, -7, -3, -4, -1, 2, 8, -11, -20, -26, 24, 17, -5, 3, -7, 1, 6, 9, 23, 3, 5, -24, -7, 6, -19, 4, -6, -24, 19, -6, 0, 5, 10, -10, -40, -25, -14, 9, 13, 14, 16, -2, 3, -17, -37, -1, 2, 4, -17, -8, 21, -17, 11, -13, 10, 21, -13, 20, -2, 20, 15, 4, 6, -14, 7, 12, -21, 7, -16, -11, -11, -24, -4, 8, 9, 20, 14, 0, 0, -30, 7, 21, 15, 13, 1, -19, 1, -16, -16, 8, -10, -22, 12, -18, -10, -17, -2, -10, 10, -18, 12, 12, 2, -19, 16, 13, -14, -19, -27, 4, -25, 0, -15, 17, 13, -18, 0, -23, -3, 29, 23, 11, -4, 16, -11, -7, 30, -10, 10, 16, -6, -20, 23, -22, -5, 12, 3, 12, 18, 19, 15, 4, 15, -4, -20, 25, -3, 9, -15, 10, -6, 1, 0, -35, 6, 24, 2, -2, 21, -18, -2, -11, -18, -13, -27, -18, -5, -5, 9, 25, 15, -16, 17, -8, -21, 0, -11, -20, -27, 
-31, 27, -24, -22, -15, -5, -14, -15, 4, 25, -22, 31, -16, 18, 2, 7, -18, 4, 2, -26, -18, 30, 6, -2, -6, -8, -2, -11, -12, -14, 2, 11, -11, -6, 17, 2, -2, -28, 7, 24, -28, 20, -24, 14, -22, -21, -6, -23, 8, -4, -12, 25, -31, -3, -11, -17, 21, -16, -15, -19, 17, -20, 5, 21, 21, 3, -5, -18, 6, 30, 19, 5, -14, -1, -25, 8, -6, 15, 8, 21, -2, 25, -11, 3, -14, -11, 16, -18, -22, -11, 2, 18, -7, 12, 19, 5, -3, 3, -2, 15, 4, 18, 18, -1, 25, 8, 13, 36, -26, 9, 22, 10, -10, -2, -6, -1, -12, 27, -20, -24, -7, 16, -17, 19, -21, -2, -13, 4, -3, 15, -21, 9, -28, -9, 31, -6, 6, -17, 13, 4, 17, -31, -3, 17, -2, 28, 21, 4, 8, 7, 11, 16, -19, -14, -19, 4, 10, 22, 36, -4, 26, -3, 7, -17, -4, -15, 12, 39, 13, 51, -28, -3, -15, 29, 39, 10, -16, 7, -3, 1, 8, 26, 19, 15, 28, 0, 6, 2, -15, -6, 26, 6, 9, -11, -3, 17, 7, 10, 7, -28, 24, 8, -7, -5, -17, -24, -8, -9, 27, -30, 27, -9, 1, 22, 22, -38, 21, 30, -4, -18, 14, 29, -4, 26, -12, 19, -27, -12, -30, -18, -1, -14, 12, -3, -17, 19, 7, -20, -14, 11, 21, -17, -12, -1, -2, 2, -6, 17, 14, -32, -8, -3, 17, 21, 19, 7, -33, -14, -16, -20, 3, -5, 10, 3, -5, -2, -28, 6, -2, 19, -19, 2, 19, 18, -3, 17, -22, -6, -6, -10, 6, 16, 5, 4, -5, -16, 19, 17, -7, 21, -11, 29, -23, -15, -13, 5, -4, -4, -8, 5, -6, -27, -7, -6, -19, 36, -1, 15, -10, -9, 4, -22, 26, -6, -10, 33, 0, -4, -3, -22, 10, 4, 8, 11, 11, -20, 10, -6, 4, -1, -13, 19, 2, 8, 11, 23, 13, 24, -13, 8, -25, -12, 4, 11, -25, 10, 15, -10, -20, 6, 5, 1, -14, 12, -12, 31, 20, -12, 7, 31, -4, 3, -6, 9, 3, -2, -6, -7, 2, -5, -8, 7, -20, 28, -25, -9, 26, 23, 6, -2, 9, -16, -20, -14, -3, -21, -2, -12, -17, -5, 18, 4, 21, -13, 9, -7, -7, 7, -3, 17, -5, 21, -25, 3, 5, 24, 17, 0, -7, -3, 1, 26, -1, 9, 1, 32, -18, 19, -23, -8, -13, 8, 10, 6, 12, -8, 14, 0, -6, -24, 3, -10, -7, 6, -7, 14, -27, -10, 14, 12, 6, 3, -22, 8, -2, 15, 16, 5, -17, -10, -18, 1, -29, 6, 0, -20, -11, 3, -17, -22, -1, -6, -7, -46, 2, -7, -13, -3, -14, -11, -5, -10, -6, 0, 17, 4, -9, -3, 19, -17, 10, -4, -4, 13, -1, 11, -12, 4, 5, 29, 21, -23, 9, -28, 38, -13, -11, -28, 30, -1, 0, 11, 1, -18, 0, 9, -11, -7, -11, -14, 27, 22, -19, -11, 8, 10, -18, -14, -8, 8, 0, 12, -3, -24, -5, -6, 14, -3, 7, 13, -6, -32, -28, -9, -14, -25, -8, 12, -4, 1, -1, -10, 12, 0, -7, 20, -13, 29, -25, 0, -15, 37, 19, 30, -28, 11, -12, -25, -20, 21, -31, 1, 26, -6, 8, -25, -2, 8, 9, -10, 3, 21, -10, 1, 18, 10, 16, -21, 8, 14, 13, 10, 13, -2, 18, 18, -34, -5, -27, -11, -6, 4, -4, 0, 16, -8, -2, -9, -13, 3, -10, 11, 9, -9, -5, 7, 6, -2, -5, -13, -18, -14, 22, 15, 1, 3, 13, -8, 1, -4, -15, -11, 13, 9, -1, -3, 7, 15, 8, 15, -8, 0, -15, 39, -1, 6, -4, 18, -1, 11, 0, 20, -13, 13, 13, 16, -18, -2, -3, 23, 14, -18, 11, -17, 2, -18, 15, -2, -11, -4, -1, 7, 11, -23, -3, -14, -5, -7, 7, -33, 10, -11, -3, -12, 9, 11, -9, -6, 9, 5, -4, -13, 13, -3, 17, -17, 8, -20, 9, 14, -32, -8, -2, 2, -20, 1, 20, 25, -2, -12, 4, -14, 0, 10, -16, -41, 10, 19, -3, -27, 7, -11, -12, -12, 12, 10, -7, -3, -9, 16, -4, 16, -16, -26, 0, 13, 13, -8, -1, -8, -22, 4, -20, 25, -25, -22, -18, 7, -14, 7, -10, 3, -35, 8, -21, -23, 9, -22, -25, 18, 10, -8, -1, 4, 29, 22, 7, 1, 7, 28, -12, -12, -24, 20, 8, -3, 6, 21, -17, 24, 5, 0, 20, -17, -14, -6, -3, -2, -25, -5, -19, 17, -12, -23, 13, -7, -13, 3, 21, -9, 15, -2, 11, -10, 4, 15, 22, -34, -30, -18, -22, 31, -16, 19, 15, 4, -14, -28, 18, -13, -17, 4, -26, 8, 9, 11, 15, 15, -13, 16, 5, -24, 6, 15, 8, 38, -2, 10, 4, 18, -21, -13, -3, -44, 17, 26, -19, -11, 6, -19, -3, 22, -1, 29, 2, 7, -16, 8, 14, 33, -1, -18, 2, -13, 20, 10, -7, -47, 16, 7, -9, -26, -31, 20, -22, -5, 8, 16, 2, 12, 
-33, -2, 24, -21, 15, 13, -1, -1, -30, 17, 17, 8, 16, 4, 7, -18, -30, 18, 29, 13, 17, -34, 11, 16, 18, -5, -3, 5, 10, 12, 12, 19, 6, -17, -23, 28, -12, -31, -17, 10, 4, 3, -10, 6, -7, 1, -25, -16, -26, -8, -11, -26, -19, -12, 8, 2, -40, -10, -49, 18, -25, -7, -17, -2, 8, 1, -13, -13, -9, 29, -11, -10, 9, -2, -2, 25, 12, -11, -24, -7, -2, 7, 5, 27, 11, 11, -19, -5, 32, 14, 2, 4, 1, 7, 0, 6, -45, 6, 11, -13, -9, -4, 10, 20, -16, -8, 7, -37, 10, 2, -5, -7, -2, -11, 20, 16, -21, 20, -17, -31, 33, -26, 6, 7, -21, 0, -16, -11, 10, -17, -36, -21, 1, -7, 12, 1, -10, 8, 14, 1, -11, -25, 1, -9, -10, 54, 11, 3, -11, -2, 16, 22, -1, -18, 14, -8, -3, -11, 4, -11, -27, 14, -14, 4, 9, -5, 7, 4, -19, -24, -5, -17, -10, 28, -1, -14, -25, -4, 17, 14, 12, 12, -14, -12, 12, 15, -25} + +#define TENSOR_DENSE_KERNEL_0_DEC_BITS {8} + +#define TENSOR_DENSE_BIAS_0 {19, -29, -30, 28, -15, -17, 6, 6, 8, 35, 4, -17, 12, -28, -17, 12, -13, 18, -22, -3, 17, 24, -6, -11, -22, -5, -11, 17, 29, -18, 24, 48, -24, -12, -91, -28, 31, -70, 8, 34, -10, -13, 39, -2, 22, 41, -15, 38, 22, -35, 31, 7, -11, 58, 17, -14, 32, 45, -12, -20, 32, -14, -12, -12, -3, -27, 27, 0, -17, -1, -14, 35, 32, -25, -19, 15, -24, -46, 62, -18, -23, -20, 46, 19, -2, -22, -17, 40, -1, 35, 31, 34, 62, -86, -4, -14} + +#define TENSOR_DENSE_BIAS_0_DEC_BITS {11} + +#define DENSE_BIAS_LSHIFT {1} + +#define DENSE_OUTPUT_RSHIFT {9} + +#define TENSOR_DENSE_1_KERNEL_0 {-55, 67, 47, -19, 50, 47, 15, -41, 28, 35, 13, -67, -11, -4, -54, 6, 14, -13, 10, 65, -59, 10, -59, -54, -19, -35, -3, 16, 37, -71, 46, -25, 42, -62, -67, -28, 17, 26, -57, 48, -16, -28, 62, 32, 48, -15, 54, -21, -35, 14, -22, -9, 45, 15, 32, -52, 52, 22, -54, -4, -3, 15, -58, -1, 55, -54, 58, -89, -27, -39, -63, -69, -40, 31, -2, -61, 41, 52, -53, 10, -10, 57, 12, -46, 43, -15, 43, -17, -68, -34, 22, -89, -25, 18, 2, -40, -61, -53, -18, 51, -55, -7, 8, -16, -43, -66, 80, 25, 43, 24, 23, -49, 44, 1, 7, 56, -27, 60, 31, -84, -44, 21, -38, -48, -29, -40, 53, 57, 11, -85, -40, -37, -40, -65, 16, -3, 33, 34, -21, 61, 29, 56, -50, 2, 28, 29, 6, 15, 10, -23, 10, 8, -3, -44, 8, -16, 8, -22, -44, 45, 30, 47, 51, 46, 39, 22, -9, 44, -11, 37, 67, 37, -21, 11, 8, -61, -58, 25, -29, -36, 41, 61, 15, -10, -36, -68, -23, -5, -11, 51, -77, 16, -65, -51, -49, 52, 41, 37, 5, 43, 29, 52, -14, 47, 59, -4, -23, 25, 50, 9, -31, 42, -70, -65, 47, -46, -57, 14, -22, 11, -78, 16, -22, -58, 42, 54, 58, 56, 26, -73, 11, -49, -35, 24, 26, -13, 19, 7, 51, 18, -1, -43, -39, 38, 20, 34, -63, -73, -18, 44, 25, 44, 34, 11, -54, -47, 57, -15, 22, 47, 43, 42, -71, -75, -40, 28, -44, -41, -24, -26, 47, -13, 5, -7, 45, -81, 10, -46, 57, -18, -37, 50, 58, 20, -23, -11, 44, 23, -82, -44, 16, 16, -90, 24, -66, -36, 58, 43, 38, -49, -49, 54, 43, -18, 46, -14, -59, 46, -20, 43, 66, 6, -40, -31, 36, 17, 52, -20, -70, -61, -29, -60, -38, 21, 58, 44, 0, -26, -21, -45, 24, -58, -43, 25, -65, -21, -55, -65, -14, -24, 23, -49, -11, -27, 60, -86, -72, -2, 0, -19, -86, 45, 22, -32, 58, 35, 41, 13, -62, -17, 43, 17, 7, -51, -39, -44, -9, 49, -89, 53, 17, 44, 17, 42, 25, -62, 7, 45, -13, 28, -31, -49, 51, 49, 46, -21, 47, -12, 33, 60, 56, 35, -62, -38, -77, 17, -52, 18, 45, -95, 39, -35, -88, 53, 55, -61, 1, 36, 22, 15, 69, -50, 41, -92, -13, 32, -76, 14, -25, 46, -38, 13, -47, 29, -15, -38, 33, 30, 29, -43, 0, -51, 47, -30, -4, -17, 13, 60, -12, -29, 12, 10, 38, -10, 54, -10, -27, -17, -3, -40, -46, 67, -28, 2, 57, 50, -68, -32, 42, 8, 40, 50, -42, 23, -11, -63, -83, 42, 48, -37, -19, 40, 2, 12, 45, -48, 51, -14, -92, 3, 42, 26, 16, -60, -83, 
31, -11, -47, -57, 32, -72, -54, -22, 31, -10, -18, -6, -30, 36, -65, -63, 21, -24, -1, 55, 16, -10, -81, 30, 29, -27, -47, 17, 42, 2, 24, 41, 14, -16, 48, 11, 34, -45, 19, 58, 46, -39, -44, -70, -63, 71, -3, -87, 37, 55, -49, -24, 42, -86, -3, -29, -25, 31, 28, -64, 52, -44, -44, -35, -63, -72, -63, -26, 3, 40, 8, -47, 13, -46, 66, 45, 42, -57, -28, -63, 37, 41, -28, -17, -39, -45, -63, -38, 37, -81, 1, -50, -9, 55, 47, -104, 11, 4, -75, -42, 38, -52, 43, -1, 50, -61, 43, 20, 47, -46, -32, -6, -40, 64, -49, -32, 32, -17, -34, 36, -83, -57, -14, -42, -46, -44, -24, -15, -50, -17, 37, -33, -89, -72, -17, -51, 27, 17, -45, -9, -45, 49, 30, -11, -45, -57, 47, 7, -46, 75, 35, 57, 7, -48, -59, -4, 41, 37, -29, 59, -79, 51, -27, -56, -36, 37, 16, 29, -41, -4, 21, -18, 16, 11, -67, 2, -22, 28, 33, -1, 35, -6, -61, -44, 16, -21, 31, 40, 23, 33, -63, 7, -30, 9, 51, 35, 40, -36, -20, -59, -37, -76, -45, 41, 35, -29, -45, -61, 31, -69, -25, 47, -18, -43, 23, 3, 18, -14, 11, 53, -31, 23, -81, 37, -20, -50, 22, -15, -31, -44, 25, 45, -42, 34, -88, 29, 3, -5, -24, -27, -40, 0, 33, -10, -51, -17, 41, 55, -83, 37, -17, 34, -37, 52, -52, -11, 41, -33, -28, 32, -46, 19, 53, 59, -79, 12, 5, 28, 29, -66, 43, -34, 26, -3, -23, -25, -65, -14, -13, 3, 14, -46, 71, -42, 1, -47, 30, -36, 16, 21, 51, 39, 24, -16, -29, -48, -3, 25, 38, -44, -71, 7, -30, -23, 55, 54, -56, 16, 4, -33, 20, -37, 13, -44, 24, 29, 27, -1, -76, 13, -15, -25, -35, -38, 56, -4, 25, 38, 27, 34, 14, -5, 18, -19, -85, -69, 22, 24, 11, 31, 17, -48, -32, -65, 46, -11, -36, 15, -31, -37, -19, -17, 6, 46, 43, 55, -42, -58, 59, -24, 33, 51, 27, 28, -41, 36, 33, 24, -58, -8, 55, -13, 28, -2, 52, 31, -33, -10, -40, -56, 3, -30, -8, -10, 9, -70, 16, 38, -50, 34, -73, 37, -34, 39, 70, -40, -10, -22, -46, -43, 4, -53, 41, 48, -34, 14, -47, 56, 25, 12, -14, 37, -12, 5, -56, 39, 8, -10, -48, 40, 9, 39, -53, 48, 60, -48, 37, -18, 29, 20, 43, -43, -9, 3, -53, -26, 8, 51, -21, -20, 51, -5, 67, 1, 43, -36, -23, 2, -49, 53, -45, -39, 41, 47, -28, 20, 3, 16, 45, -31, -13, -71, 17, -76, -14, -31, 48, -22, -19, -5, -32, -26, 38, -53, -28} + +#define TENSOR_DENSE_1_KERNEL_0_DEC_BITS {8} + +#define TENSOR_DENSE_1_BIAS_0 {-27, 45, -79, 21, 36, -19, 9, -24, 46, -42} + +#define TENSOR_DENSE_1_BIAS_0_DEC_BITS {11} + +#define DENSE_1_BIAS_LSHIFT {0} + +#define DENSE_1_OUTPUT_RSHIFT {9} + + +/* output q format for each layer */ +#define INPUT_1_OUTPUT_DEC 7 +#define INPUT_1_OUTPUT_OFFSET 17 +#define CONV2D_OUTPUT_DEC 6 +#define CONV2D_OUTPUT_OFFSET 3 +#define RE_LU_OUTPUT_DEC 6 +#define RE_LU_OUTPUT_OFFSET 0 +#define MAX_POOLING2D_OUTPUT_DEC 6 +#define MAX_POOLING2D_OUTPUT_OFFSET 0 +#define CONV2D_1_OUTPUT_DEC 5 +#define CONV2D_1_OUTPUT_OFFSET -1 +#define RE_LU_1_OUTPUT_DEC 5 +#define RE_LU_1_OUTPUT_OFFSET 0 +#define MAX_POOLING2D_1_OUTPUT_DEC 5 +#define MAX_POOLING2D_1_OUTPUT_OFFSET 0 +#define CONV2D_2_OUTPUT_DEC 4 +#define CONV2D_2_OUTPUT_OFFSET -4 +#define RE_LU_2_OUTPUT_DEC 4 +#define RE_LU_2_OUTPUT_OFFSET 0 +#define DROPOUT_OUTPUT_DEC 4 +#define DROPOUT_OUTPUT_OFFSET 0 +#define MAX_POOLING2D_2_OUTPUT_DEC 4 +#define MAX_POOLING2D_2_OUTPUT_OFFSET 0 +#define FLATTEN_OUTPUT_DEC 4 +#define FLATTEN_OUTPUT_OFFSET 0 +#define DENSE_OUTPUT_DEC 3 +#define DENSE_OUTPUT_OFFSET -7 +#define DROPOUT_1_OUTPUT_DEC 3 +#define DROPOUT_1_OUTPUT_OFFSET 0 +#define RE_LU_3_OUTPUT_DEC 3 +#define RE_LU_3_OUTPUT_OFFSET 0 +#define DENSE_1_OUTPUT_DEC 2 +#define DENSE_1_OUTPUT_OFFSET -4 +#define SOFTMAX_OUTPUT_DEC 7 +#define SOFTMAX_OUTPUT_OFFSET 13 + +/* bias shift and output shift for 
none-weighted layer */ + +/* tensors and configurations for each layer */ +static int8_t nnom_input_data[784] = {0}; + +const nnom_shape_data_t tensor_input_1_0_dim[] = {28, 28, 1}; +const nnom_qformat_param_t tensor_input_1_0_dec[] = {7}; +const nnom_qformat_param_t tensor_input_1_0_offset[] = {0}; +const nnom_tensor_t tensor_input_1_0 = { + .p_data = (void*)nnom_input_data, + .dim = (nnom_shape_data_t*)tensor_input_1_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_input_1_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_input_1_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 3, + .bitwidth = 8 +}; + +const nnom_io_config_t input_1_config = { + .super = {.name = "input_1"}, + .tensor = (nnom_tensor_t*)&tensor_input_1_0 +}; +const int8_t tensor_conv2d_kernel_0_data[] = TENSOR_CONV2D_KERNEL_0; + +const nnom_shape_data_t tensor_conv2d_kernel_0_dim[] = {3, 3, 1, 12}; +const nnom_qformat_param_t tensor_conv2d_kernel_0_dec[] = TENSOR_CONV2D_KERNEL_0_DEC_BITS; +const nnom_qformat_param_t tensor_conv2d_kernel_0_offset[] = {0}; +const nnom_tensor_t tensor_conv2d_kernel_0 = { + .p_data = (void*)tensor_conv2d_kernel_0_data, + .dim = (nnom_shape_data_t*)tensor_conv2d_kernel_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_conv2d_kernel_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_conv2d_kernel_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 4, + .bitwidth = 8 +}; +const int8_t tensor_conv2d_bias_0_data[] = TENSOR_CONV2D_BIAS_0; + +const nnom_shape_data_t tensor_conv2d_bias_0_dim[] = {12}; +const nnom_qformat_param_t tensor_conv2d_bias_0_dec[] = TENSOR_CONV2D_BIAS_0_DEC_BITS; +const nnom_qformat_param_t tensor_conv2d_bias_0_offset[] = {0}; +const nnom_tensor_t tensor_conv2d_bias_0 = { + .p_data = (void*)tensor_conv2d_bias_0_data, + .dim = (nnom_shape_data_t*)tensor_conv2d_bias_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_conv2d_bias_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_conv2d_bias_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 1, + .bitwidth = 8 +}; + +const nnom_qformat_param_t conv2d_output_shift[] = CONV2D_OUTPUT_RSHIFT; +const nnom_qformat_param_t conv2d_bias_shift[] = CONV2D_BIAS_LSHIFT; +const nnom_conv2d_config_t conv2d_config = { + .super = {.name = "conv2d"}, + .qtype = NNOM_QTYPE_PER_TENSOR, + .weight = (nnom_tensor_t*)&tensor_conv2d_kernel_0, + .bias = (nnom_tensor_t*)&tensor_conv2d_bias_0, + .output_shift = (nnom_qformat_param_t *)&conv2d_output_shift, + .bias_shift = (nnom_qformat_param_t *)&conv2d_bias_shift, + .filter_size = 12, + .kernel_size = {3, 3}, + .stride_size = {1, 1}, + .padding_size = {0, 0}, + .dilation_size = {1, 1}, + .padding_type = PADDING_SAME +}; + +const nnom_pool_config_t max_pooling2d_config = { + .super = {.name = "max_pooling2d"}, + .padding_type = PADDING_SAME, + .output_shift = 0, + .kernel_size = {2, 2}, + .stride_size = {2, 2}, + .num_dim = 2 +}; +const int8_t tensor_conv2d_1_kernel_0_data[] = TENSOR_CONV2D_1_KERNEL_0; + +const nnom_shape_data_t tensor_conv2d_1_kernel_0_dim[] = {3, 3, 12, 24}; +const nnom_qformat_param_t tensor_conv2d_1_kernel_0_dec[] = TENSOR_CONV2D_1_KERNEL_0_DEC_BITS; +const nnom_qformat_param_t tensor_conv2d_1_kernel_0_offset[] = {0}; +const nnom_tensor_t tensor_conv2d_1_kernel_0 = { + .p_data = (void*)tensor_conv2d_1_kernel_0_data, + .dim = (nnom_shape_data_t*)tensor_conv2d_1_kernel_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_conv2d_1_kernel_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_conv2d_1_kernel_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 4, + .bitwidth = 8 
+}; +const int8_t tensor_conv2d_1_bias_0_data[] = TENSOR_CONV2D_1_BIAS_0; + +const nnom_shape_data_t tensor_conv2d_1_bias_0_dim[] = {24}; +const nnom_qformat_param_t tensor_conv2d_1_bias_0_dec[] = TENSOR_CONV2D_1_BIAS_0_DEC_BITS; +const nnom_qformat_param_t tensor_conv2d_1_bias_0_offset[] = {0}; +const nnom_tensor_t tensor_conv2d_1_bias_0 = { + .p_data = (void*)tensor_conv2d_1_bias_0_data, + .dim = (nnom_shape_data_t*)tensor_conv2d_1_bias_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_conv2d_1_bias_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_conv2d_1_bias_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 1, + .bitwidth = 8 +}; + +const nnom_qformat_param_t conv2d_1_output_shift[] = CONV2D_1_OUTPUT_RSHIFT; +const nnom_qformat_param_t conv2d_1_bias_shift[] = CONV2D_1_BIAS_LSHIFT; +const nnom_conv2d_config_t conv2d_1_config = { + .super = {.name = "conv2d_1"}, + .qtype = NNOM_QTYPE_PER_TENSOR, + .weight = (nnom_tensor_t*)&tensor_conv2d_1_kernel_0, + .bias = (nnom_tensor_t*)&tensor_conv2d_1_bias_0, + .output_shift = (nnom_qformat_param_t *)&conv2d_1_output_shift, + .bias_shift = (nnom_qformat_param_t *)&conv2d_1_bias_shift, + .filter_size = 24, + .kernel_size = {3, 3}, + .stride_size = {1, 1}, + .padding_size = {0, 0}, + .dilation_size = {1, 1}, + .padding_type = PADDING_SAME +}; + +const nnom_pool_config_t max_pooling2d_1_config = { + .super = {.name = "max_pooling2d_1"}, + .padding_type = PADDING_SAME, + .output_shift = 0, + .kernel_size = {2, 2}, + .stride_size = {2, 2}, + .num_dim = 2 +}; +const int8_t tensor_conv2d_2_kernel_0_data[] = TENSOR_CONV2D_2_KERNEL_0; + +const nnom_shape_data_t tensor_conv2d_2_kernel_0_dim[] = {3, 3, 24, 48}; +const nnom_qformat_param_t tensor_conv2d_2_kernel_0_dec[] = TENSOR_CONV2D_2_KERNEL_0_DEC_BITS; +const nnom_qformat_param_t tensor_conv2d_2_kernel_0_offset[] = {0}; +const nnom_tensor_t tensor_conv2d_2_kernel_0 = { + .p_data = (void*)tensor_conv2d_2_kernel_0_data, + .dim = (nnom_shape_data_t*)tensor_conv2d_2_kernel_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_conv2d_2_kernel_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_conv2d_2_kernel_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 4, + .bitwidth = 8 +}; +const int8_t tensor_conv2d_2_bias_0_data[] = TENSOR_CONV2D_2_BIAS_0; + +const nnom_shape_data_t tensor_conv2d_2_bias_0_dim[] = {48}; +const nnom_qformat_param_t tensor_conv2d_2_bias_0_dec[] = TENSOR_CONV2D_2_BIAS_0_DEC_BITS; +const nnom_qformat_param_t tensor_conv2d_2_bias_0_offset[] = {0}; +const nnom_tensor_t tensor_conv2d_2_bias_0 = { + .p_data = (void*)tensor_conv2d_2_bias_0_data, + .dim = (nnom_shape_data_t*)tensor_conv2d_2_bias_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_conv2d_2_bias_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_conv2d_2_bias_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 1, + .bitwidth = 8 +}; + +const nnom_qformat_param_t conv2d_2_output_shift[] = CONV2D_2_OUTPUT_RSHIFT; +const nnom_qformat_param_t conv2d_2_bias_shift[] = CONV2D_2_BIAS_LSHIFT; +const nnom_conv2d_config_t conv2d_2_config = { + .super = {.name = "conv2d_2"}, + .qtype = NNOM_QTYPE_PER_TENSOR, + .weight = (nnom_tensor_t*)&tensor_conv2d_2_kernel_0, + .bias = (nnom_tensor_t*)&tensor_conv2d_2_bias_0, + .output_shift = (nnom_qformat_param_t *)&conv2d_2_output_shift, + .bias_shift = (nnom_qformat_param_t *)&conv2d_2_bias_shift, + .filter_size = 48, + .kernel_size = {3, 3}, + .stride_size = {1, 1}, + .padding_size = {0, 0}, + .dilation_size = {1, 1}, + .padding_type = PADDING_SAME +}; + +const nnom_pool_config_t 
max_pooling2d_2_config = { + .super = {.name = "max_pooling2d_2"}, + .padding_type = PADDING_SAME, + .output_shift = 0, + .kernel_size = {2, 2}, + .stride_size = {2, 2}, + .num_dim = 2 +}; + +const nnom_flatten_config_t flatten_config = { + .super = {.name = "flatten"} +}; +const int8_t tensor_dense_kernel_0_data[] = TENSOR_DENSE_KERNEL_0; + +const nnom_shape_data_t tensor_dense_kernel_0_dim[] = {768, 96}; +const nnom_qformat_param_t tensor_dense_kernel_0_dec[] = TENSOR_DENSE_KERNEL_0_DEC_BITS; +const nnom_qformat_param_t tensor_dense_kernel_0_offset[] = {0}; +const nnom_tensor_t tensor_dense_kernel_0 = { + .p_data = (void*)tensor_dense_kernel_0_data, + .dim = (nnom_shape_data_t*)tensor_dense_kernel_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_dense_kernel_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_dense_kernel_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 2, + .bitwidth = 8 +}; +const int8_t tensor_dense_bias_0_data[] = TENSOR_DENSE_BIAS_0; + +const nnom_shape_data_t tensor_dense_bias_0_dim[] = {96}; +const nnom_qformat_param_t tensor_dense_bias_0_dec[] = TENSOR_DENSE_BIAS_0_DEC_BITS; +const nnom_qformat_param_t tensor_dense_bias_0_offset[] = {0}; +const nnom_tensor_t tensor_dense_bias_0 = { + .p_data = (void*)tensor_dense_bias_0_data, + .dim = (nnom_shape_data_t*)tensor_dense_bias_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_dense_bias_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_dense_bias_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 1, + .bitwidth = 8 +}; + +const nnom_qformat_param_t dense_output_shift[] = DENSE_OUTPUT_RSHIFT; +const nnom_qformat_param_t dense_bias_shift[] = DENSE_BIAS_LSHIFT; +const nnom_dense_config_t dense_config = { + .super = {.name = "dense"}, + .qtype = NNOM_QTYPE_PER_TENSOR, + .weight = (nnom_tensor_t*)&tensor_dense_kernel_0, + .bias = (nnom_tensor_t*)&tensor_dense_bias_0, + .output_shift = (nnom_qformat_param_t *)&dense_output_shift, + .bias_shift = (nnom_qformat_param_t *)&dense_bias_shift +}; +const int8_t tensor_dense_1_kernel_0_data[] = TENSOR_DENSE_1_KERNEL_0; + +const nnom_shape_data_t tensor_dense_1_kernel_0_dim[] = {96, 10}; +const nnom_qformat_param_t tensor_dense_1_kernel_0_dec[] = TENSOR_DENSE_1_KERNEL_0_DEC_BITS; +const nnom_qformat_param_t tensor_dense_1_kernel_0_offset[] = {0}; +const nnom_tensor_t tensor_dense_1_kernel_0 = { + .p_data = (void*)tensor_dense_1_kernel_0_data, + .dim = (nnom_shape_data_t*)tensor_dense_1_kernel_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_dense_1_kernel_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_dense_1_kernel_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 2, + .bitwidth = 8 +}; +const int8_t tensor_dense_1_bias_0_data[] = TENSOR_DENSE_1_BIAS_0; + +const nnom_shape_data_t tensor_dense_1_bias_0_dim[] = {10}; +const nnom_qformat_param_t tensor_dense_1_bias_0_dec[] = TENSOR_DENSE_1_BIAS_0_DEC_BITS; +const nnom_qformat_param_t tensor_dense_1_bias_0_offset[] = {0}; +const nnom_tensor_t tensor_dense_1_bias_0 = { + .p_data = (void*)tensor_dense_1_bias_0_data, + .dim = (nnom_shape_data_t*)tensor_dense_1_bias_0_dim, + .q_dec = (nnom_qformat_param_t*)tensor_dense_1_bias_0_dec, + .q_offset = (nnom_qformat_param_t*)tensor_dense_1_bias_0_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 1, + .bitwidth = 8 +}; + +const nnom_qformat_param_t dense_1_output_shift[] = DENSE_1_OUTPUT_RSHIFT; +const nnom_qformat_param_t dense_1_bias_shift[] = DENSE_1_BIAS_LSHIFT; +const nnom_dense_config_t dense_1_config = { + .super = {.name = "dense_1"}, + .qtype = 
NNOM_QTYPE_PER_TENSOR, + .weight = (nnom_tensor_t*)&tensor_dense_1_kernel_0, + .bias = (nnom_tensor_t*)&tensor_dense_1_bias_0, + .output_shift = (nnom_qformat_param_t *)&dense_1_output_shift, + .bias_shift = (nnom_qformat_param_t *)&dense_1_bias_shift +}; + +const nnom_softmax_config_t softmax_config = { + .super = {.name = "softmax"} +}; +static int8_t nnom_output_data[10] = {0}; + +const nnom_shape_data_t tensor_output_dim[] = {10}; +const nnom_qformat_param_t tensor_output_dec[] = {SOFTMAX_OUTPUT_DEC}; +const nnom_qformat_param_t tensor_output_offset[] = {0}; +const nnom_tensor_t tensor_output = { + .p_data = (void*)nnom_output_data, + .dim = (nnom_shape_data_t*)tensor_output_dim, + .q_dec = (nnom_qformat_param_t*)tensor_output_dec, + .q_offset = (nnom_qformat_param_t*)tensor_output_offset, + .qtype = NNOM_QTYPE_PER_TENSOR, + .num_dim = 1, + .bitwidth = 8 +}; + +const nnom_io_config_t output_config = { + .super = {.name = "output"}, + .tensor = (nnom_tensor_t*)&tensor_output +}; +/* model version */ +#define NNOM_MODEL_VERSION (10000*0 + 100*4 + 0) + +/* nnom model */ +static nnom_model_t* nnom_model_create(void) +{ + static nnom_model_t model; + nnom_layer_t* layer[16]; + + check_model_version(NNOM_MODEL_VERSION); + new_model(&model); + + layer[0] = input_s(&input_1_config); + layer[1] = model.hook(conv2d_s(&conv2d_config), layer[0]); + layer[2] = model.active(act_relu(), layer[1]); + layer[3] = model.hook(maxpool_s(&max_pooling2d_config), layer[2]); + layer[4] = model.hook(conv2d_s(&conv2d_1_config), layer[3]); + layer[5] = model.active(act_relu(), layer[4]); + layer[6] = model.hook(maxpool_s(&max_pooling2d_1_config), layer[5]); + layer[7] = model.hook(conv2d_s(&conv2d_2_config), layer[6]); + layer[8] = model.active(act_relu(), layer[7]); + layer[9] = model.hook(maxpool_s(&max_pooling2d_2_config), layer[8]); + layer[10] = model.hook(flatten_s(&flatten_config), layer[9]); + layer[11] = model.hook(dense_s(&dense_config), layer[10]); + layer[12] = model.active(act_relu(), layer[11]); + layer[13] = model.hook(dense_s(&dense_1_config), layer[12]); + layer[14] = model.hook(softmax_s(&softmax_config), layer[13]); + layer[15] = model.hook(output_s(&output_config), layer[14]); + model_compile(&model, layer[0], layer[15]); + return &model; +} diff --git a/APP_Framework/Framework/knowing/Kconfig b/APP_Framework/Framework/knowing/Kconfig index 13807b9ae..d4324bf77 100644 --- a/APP_Framework/Framework/knowing/Kconfig +++ b/APP_Framework/Framework/knowing/Kconfig @@ -10,4 +10,5 @@ if SUPPORT_KNOWING_FRAMEWORK source "$APP_DIR/Framework/knowing/image_processing/Kconfig" source "$APP_DIR/Framework/knowing/cmsis_5/Kconfig" source "$APP_DIR/Framework/knowing/kpu/Kconfig" + source "$APP_DIR/Framework/knowing/nnom/Kconfig" endif diff --git a/APP_Framework/Framework/knowing/cmsis_5/Kconfig b/APP_Framework/Framework/knowing/cmsis_5/Kconfig index 4057189f2..b512a4b9c 100644 --- a/APP_Framework/Framework/knowing/cmsis_5/Kconfig +++ b/APP_Framework/Framework/knowing/cmsis_5/Kconfig @@ -4,11 +4,11 @@ menuconfig USING_CMSIS_5 if USING_CMSIS_5 - menuconfig USING_USING_CMSIS_5_NN + menuconfig USING_CMSIS_5_NN bool "CMSIS-5 NN" default n - if USING_USING_CMSIS_5_NN + if USING_CMSIS_5_NN config USING_CMSIS_5_NN_ACTIVATION bool "CMSIS-5 NN ACTIVATION" diff --git a/APP_Framework/Framework/knowing/cmsis_5/SConscript b/APP_Framework/Framework/knowing/cmsis_5/SConscript index fd2bf8518..665035f46 100644 --- a/APP_Framework/Framework/knowing/cmsis_5/SConscript +++ b/APP_Framework/Framework/knowing/cmsis_5/SConscript @@ 
-8,7 +8,7 @@ CPPPATH = [] CPPPATH += [os.path.join(cwd, 'Core/Include')] -if GetDepend('USING_USING_CMSIS_5_NN'): +if GetDepend('USING_CMSIS_5_NN'): CPPPATH += [os.path.join(cwd, 'DSP/Include')] CPPPATH += [os.path.join(cwd, 'NN/Include')] CPPDEFINES += ['__FPU_PRESENT=1'] diff --git a/APP_Framework/Framework/knowing/kpu/k210_yolov2_detect_procedure/k210_yolov2_detect.c b/APP_Framework/Framework/knowing/kpu/k210_yolov2_detect_procedure/k210_yolov2_detect.c index 5724e08bc..028784b2d 100644 --- a/APP_Framework/Framework/knowing/kpu/k210_yolov2_detect_procedure/k210_yolov2_detect.c +++ b/APP_Framework/Framework/knowing/kpu/k210_yolov2_detect_procedure/k210_yolov2_detect.c @@ -199,6 +199,8 @@ static void *thread_detect_entry(void *parameter) /* display result */ for (int cnt = 0; cnt < detect_info.obj_number; cnt++) { + detect_info.obj[cnt].y1 += (detect_params.sensor_output_size[0] - detect_params.net_input_size[0])/2; + detect_info.obj[cnt].y2 += (detect_params.sensor_output_size[0] - detect_params.net_input_size[0])/2; draw_edge((uint32_t *)showbuffer, &detect_info, cnt, 0xF800, (uint16_t)detect_params.sensor_output_size[1], (uint16_t)detect_params.sensor_output_size[0]); printf("%d: (%d, %d, %d, %d) cls: %s conf: %f\t", cnt, detect_info.obj[cnt].x1, detect_info.obj[cnt].y1, diff --git a/APP_Framework/Framework/knowing/nnom/Kconfig b/APP_Framework/Framework/knowing/nnom/Kconfig new file mode 100644 index 000000000..1bcef879e --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/Kconfig @@ -0,0 +1,46 @@ +menuconfig USING_NNOM + bool "NNOM" + default n + +if USING_NNOM + + config NNOM_USING_STATIC_MEMORY + bool "Using static memory" + default n + help + You must set the buffer with "nnom_set_static_buf()" before creating a model. + + config NNOM_TRUNCATE + bool "Using NNOM Truncate" + default n + help + Disabled: backend ops round to the nearest integer (default). Enabled: backend ops truncate (floor). + + choice + prompt "Select NNOM Format" + default NNOM_USING_HWC + + config NNOM_USING_HWC + bool "Using HWC Format" + + config NNOM_USING_CHW + bool "Using CHW Format" + help + CHW is incompatible with CMSIS-NN and must be used with hardware accelerators such as the KPU in the K210 chip. + endchoice + + choice + prompt "Select NNOM Backend" + default NNOM_USING_LOCAL + + config NNOM_USING_LOCAL + bool "Using NNOM local backend" + + config NNOM_USING_CMSIS_NN + bool "Using CMSIS-NN backend" + select USING_CMSIS_5 + select USING_CMSIS_5_NN + endchoice + +endif + diff --git a/APP_Framework/Framework/knowing/nnom/README.md b/APP_Framework/Framework/knowing/nnom/README.md new file mode 100644 index 000000000..34082b201 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/README.md @@ -0,0 +1,14 @@ +# Neural Network on Microcontroller (NNoM) + +NNoM is a high-level neural network inference library designed for microcontrollers, released under the Apache License 2.0. + +The current version is 0.4.3. More information is available at [NNOM](https://github.com/majianjia/nnom). + +## CMSIS-NN Backend + +[CMSIS-NN/DSP](https://github.com/ARM-software/CMSIS_5/tree/develop/CMSIS/NN) is an inference acceleration library for Arm Cortex-M CPUs and can be used as the NNoM backend for higher performance. + +## Notes + +- The CHW format is incompatible with CMSIS-NN and must be used with hardware accelerators such as the KPU in the K210 chip. +- The static memory buffer must be set with "nnom_set_static_buf()" before creating a model. 
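For orientation, the sketch below shows how application code typically drives a model generated in the form above: build the model once with `nnom_model_create()`, copy a quantized 28x28 image into `nnom_input_data`, call `model_run()`, and read the class scores back from `nnom_output_data`. This is a minimal illustration only, not part of this patch; the header name `weights.h` and the helper `mnist_predict()` are assumed names for the generated file and its caller.

```
/* Minimal usage sketch (not part of this patch). It assumes the generated
 * definitions above are saved as "weights.h" and included into the same
 * translation unit, since nnom_model_create() and the I/O buffers are static. */
#include <string.h>
#include "nnom.h"
#include "weights.h" /* nnom_model_create(), nnom_input_data[784], nnom_output_data[10] */

/* Run one inference on a quantized 28x28 q7 image and return the predicted digit. */
static int mnist_predict(const int8_t image[784])
{
    static nnom_model_t *model = NULL;
    if (model == NULL)
        model = nnom_model_create();    /* builds and compiles the graph once */

    memcpy(nnom_input_data, image, sizeof(nnom_input_data));
    model_run(model);                   /* one forward pass */

    /* nnom_output_data[] holds the 10 softmax scores; take the argmax */
    int best = 0;
    for (int i = 1; i < 10; i++)
        if (nnom_output_data[i] > nnom_output_data[best])
            best = i;
    return best;
}
```

Since `SOFTMAX_OUTPUT_DEC` is 7, each output value is in q0.7 format, so dividing a score by 128 gives an approximate class probability.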
\ No newline at end of file diff --git a/APP_Framework/Framework/knowing/nnom/SConscript b/APP_Framework/Framework/knowing/nnom/SConscript new file mode 100644 index 000000000..dce24bb2c --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/SConscript @@ -0,0 +1,18 @@ +import os +from building import * + +cwd = GetCurrentDir() +src = [] +CPPDEFINES = [] +CPPPATH = [] + +src += Glob('src/core/*.c') +src += Glob('src/layers/*.c') +src += Glob('src/backends/*.c') + +CPPPATH+=['%s/inc'%(cwd), '%s/port'%(cwd)] + + +group = DefineGroup('nnom', src, depend = ['USING_NNOM'], CPPPATH = CPPPATH, LOCAL_CPPDEFINES=CPPDEFINES) + +Return('group') \ No newline at end of file diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_activation.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_activation.h new file mode 100644 index 000000000..7cda07ce3 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_activation.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_ACTIVATION_H__ +#define __NNOM_ACTIVATION_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + + +// activation layer +typedef struct _nnom_activation_layer_t +{ + nnom_layer_t super; + nnom_activation_t *act; +} nnom_activation_layer_t; + + +// activation with fixed q format (tanh and sigmoid) +typedef struct _nnom_activation_fixed_q_t +{ + nnom_activation_t super; + uint8_t dec_bit; +} nnom_activation_fixed_q_t; + +// leaky relu +typedef struct _nnom_activation_leaky_relu_t +{ + nnom_activation_t super; + q7_t alpha; // alpha is present by q0.7 format. (-128 = -1) +} nnom_activation_leaky_relu_t; + +// advance relu (full ReLU) +typedef struct _nnom_activation_adv_relu_t +{ + nnom_activation_t super; + q7_t negative_slope; // negative_slope is present by q0.7 format. (-128 = -1) + float max; // cap of the max value + float threshold; // threshold +} nnom_activation_adv_relu_t; + +// method +nnom_status_t activation_run(nnom_layer_t* layer); +nnom_status_t activation_free(nnom_layer_t *layer); + +// activation delete +void act_delete(nnom_activation_t* act); + +// a direct api on tensor +nnom_status_t act_tensor_run(nnom_activation_t* act, nnom_tensor_t* tensor); + + +// Layer API +nnom_layer_t *Activation(nnom_activation_t *act); +nnom_layer_t *ReLU(void); +nnom_layer_t *LeakyReLU(float alpha); +nnom_layer_t *AdvReLU(float alpha, float max, float threshold); +nnom_layer_t *Sigmoid(int32_t dec_bit); +nnom_layer_t *TanH(int32_t dec_bit); + +// Activation API. 
+nnom_activation_t* act_relu(void); +nnom_activation_t* act_leaky_relu(float alpha); +nnom_activation_t* act_adv_relu(float negative_slope, float max, float threshold); +nnom_activation_t* act_tanh(int32_t dec_bit); +nnom_activation_t* act_sigmoid(int32_t dec_bit); +nnom_activation_t* act_hard_tanh(int32_t dec_bit); +nnom_activation_t* act_hard_sigmoid(int32_t dec_bit); + +// utils +int32_t act_get_dec_bit(nnom_activation_type_t type, int32_t dec_bit); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_ACTIVATION_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_avgpool.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_avgpool.h new file mode 100644 index 000000000..6f8354630 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_avgpool.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_AVGPOOL_H__ +#define __NNOM_AVGPOOL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_maxpool.h" + +// Avg Pooling +typedef nnom_maxpool_layer_t nnom_avgpool_layer_t; + +// method +nnom_status_t avgpooling_build(nnom_layer_t *layer); +nnom_status_t avgpool_run(nnom_layer_t *layer); + +// API +nnom_layer_t *avgpool_s(const nnom_pool_config_t * config); +nnom_layer_t *AvgPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad_type); + + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_AVGPOOL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_baselayer.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_baselayer.h new file mode 100644 index 000000000..940bce578 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_baselayer.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_BASELAYER_H__ +#define __NNOM_BASELAYER_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_input.h" + +// method +nnom_status_t default_build(nnom_layer_t *layer); +nnom_status_t default_run(nnom_layer_t *layer); + +// API +nnom_layer_t *baselayer_s(const nnom_layer_config_t * config); +nnom_layer_t *BaseLayer(void); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_BASELAYER_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_concat.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_concat.h new file mode 100644 index 000000000..d47b26365 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_concat.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_CONCAT_H__ +#define __NNOM_CONCAT_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// concatenate layer +typedef struct _nnom_concat_layer +{ + 
nnom_layer_t super; + int8_t axis; +} nnom_concat_layer_t; + +typedef struct _nnom_concat_config_t +{ + nnom_layer_config_t super; + int8_t axis; +} nnom_concat_config_t; + +// method +nnom_status_t concat_build(nnom_layer_t *layer); +nnom_status_t concat_run(nnom_layer_t *layer); + +// API +nnom_layer_t *concat_s(const nnom_concat_config_t *config); +nnom_layer_t *Concat(int8_t axis); + + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_CONCAT_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_conv2d.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_conv2d.h new file mode 100644 index 000000000..2b6efb198 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_conv2d.h @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_CONV2D_H__ +#define __NNOM_CONV2D_H__ + +#ifdef __cplusplus +extern "C" { +#endif + + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// child layers parameters +typedef struct _nnom_conv2d_layer_t +{ + nnom_layer_t super; + nnom_3d_shape_t kernel; + nnom_3d_shape_t stride; + nnom_3d_shape_t pad; + nnom_3d_shape_t dilation; + nnom_padding_t padding_type; + uint32_t filter_mult; // filter size (for conv) or multilplier (for depthwise) + + nnom_tensor_t *weight; + nnom_tensor_t *bias; + + // test + nnom_qformat_param_t * output_rshift; + nnom_qformat_param_t * bias_lshift; +} nnom_conv2d_layer_t; + +// a machine interface for configuration +typedef struct _nnom_conv2d_config_t +{ + nnom_layer_config_t super; + nnom_qtype_t qtype; //quantisation type(per channel or per layer) + nnom_tensor_t *weight; + nnom_tensor_t *bias; + nnom_qformat_param_t *output_shift; + nnom_qformat_param_t *bias_shift; + uint32_t filter_size; + int8_t kernel_size[2]; + int8_t stride_size[2]; + int8_t padding_size[2]; + int8_t dilation_size[2]; + nnom_padding_t padding_type; +} nnom_conv2d_config_t; + +// method +nnom_status_t conv2d_run(nnom_layer_t *layer); +nnom_status_t conv2d_build(nnom_layer_t *layer); +nnom_status_t conv2d_free(nnom_layer_t *layer); + +// utils +uint32_t conv_output_length(uint32_t input_length, uint32_t filter_size, nnom_padding_t padding, uint32_t stride, uint32_t dilation); + +// API +nnom_layer_t *conv2d_s(const nnom_conv2d_config_t *config); +nnom_layer_t *Conv2D(uint32_t filters, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad_type, + const nnom_weight_t *w, const nnom_bias_t *b); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_CONV2D_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_conv2d_trans.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_conv2d_trans.h new file mode 100644 index 000000000..26249f3d9 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_conv2d_trans.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-30 Jianjia Ma The first version + */ + +#ifndef __NNOM_DECONV2D_H__ +#define __NNOM_DECONV2D_H__ + +#ifdef __cplusplus +extern "C" { +#endif + + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" +#include "layers/nnom_conv2d.h" + +// child layers 
parameters +typedef nnom_conv2d_layer_t nnom_conv2d_trans_layer_t; + +typedef nnom_conv2d_config_t nnom_conv2d_trans_config_t; + +// method +nnom_status_t conv2d_trans_run(nnom_layer_t *layer); +nnom_status_t conv2d_trans_build(nnom_layer_t *layer); + +// utils +uint32_t conv_trans_output_length(uint32_t input_length, uint32_t filter_size, nnom_padding_t padding, uint32_t stride, uint32_t dilation); + +// API +nnom_layer_t *conv2d_trans_s(const nnom_conv2d_config_t *config); +nnom_layer_t *Conv2DTrans(uint32_t filters, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad_type, + const nnom_weight_t *w, const nnom_bias_t *b); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_DECONV2D_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_cropping.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_cropping.h new file mode 100644 index 000000000..252357481 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_cropping.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_CROPPING_H__ +#define __NNOM_CROPPING_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_zero_padding.h" + +// Cropping, same as zeropadding +typedef nnom_zero_padding_layer_t nnom_cropping_layer_t; + +typedef nnom_zero_padding_config_t nnom_cropping_config_t; + +// method +nnom_status_t cropping_build(nnom_layer_t *layer); +nnom_status_t cropping_run(nnom_layer_t *layer); + +// API +nnom_layer_t * cropping_s(const nnom_cropping_config_t *config); +nnom_layer_t *Cropping(nnom_border_t pad); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_CROPPING_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_dense.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_dense.h new file mode 100644 index 000000000..a0504a317 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_dense.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_DENSE_H__ +#define __NNOM_DENSE_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +typedef struct _nnom_dense_layer_t +{ + nnom_layer_t super; + size_t output_unit; + nnom_tensor_t *weight; + nnom_tensor_t *bias; + nnom_qformat_param_t *output_rshift; + nnom_qformat_param_t *bias_lshift; +} nnom_dense_layer_t; + +// a machine interface for configuration +typedef struct _nnom_dense_config_t +{ + nnom_layer_config_t super; + nnom_qtype_t qtype; //quantisation type(per channel or per layer) + nnom_tensor_t *weight; + nnom_tensor_t *bias; + nnom_qformat_param_t *output_shift; + nnom_qformat_param_t *bias_shift; +} nnom_dense_config_t; + +// method +nnom_status_t dense_free(nnom_layer_t *layer); +nnom_status_t dense_build(nnom_layer_t *layer); +nnom_status_t dense_run(nnom_layer_t *layer); + +// API +nnom_layer_t *dense_s(const nnom_dense_config_t *config); +nnom_layer_t *Dense(size_t output_unit, const nnom_weight_t *w, const nnom_bias_t *b); + 
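For hand-written models, dense_s() consumes a nnom_dense_config_t filled in the same style as the generated dense_1_config shown earlier in this patch. A hypothetical sketch follows; the tensor names and shift values are placeholders, not part of the patch.

```c
static const nnom_qformat_param_t fc_output_shift[] = {5};   /* placeholder Q shifts */
static const nnom_qformat_param_t fc_bias_shift[]   = {2};

const nnom_dense_config_t fc_config = {
    .super        = {.name = "fc"},
    .qtype        = NNOM_QTYPE_PER_TENSOR,
    .weight       = (nnom_tensor_t *)&tensor_fc_kernel,   /* hypothetical weight tensor */
    .bias         = (nnom_tensor_t *)&tensor_fc_bias,     /* hypothetical bias tensor */
    .output_shift = (nnom_qformat_param_t *)fc_output_shift,
    .bias_shift   = (nnom_qformat_param_t *)fc_bias_shift
};

/* usage, mirroring the generated model builder: */
/* layer[n] = model.hook(dense_s(&fc_config), layer[n - 1]); */
```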
+#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_DENSE_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_dw_conv2d.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_dw_conv2d.h new file mode 100644 index 000000000..5a9b58b25 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_dw_conv2d.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_DW_CONV2D_H__ +#define __NNOM_DW_CONV2D_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_conv2d.h" + +// method +nnom_status_t dw_conv2d_build(nnom_layer_t *layer); +nnom_status_t dw_conv2d_run(nnom_layer_t *layer); + +//API +nnom_layer_t *dw_conv2d_s(const nnom_conv2d_config_t *config); +nnom_layer_t *DW_Conv2D(uint32_t multiplier, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad_type, + const nnom_weight_t *w, const nnom_bias_t *b); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_DW_CONV2D_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_flatten.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_flatten.h new file mode 100644 index 000000000..c77160fca --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_flatten.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_FLATTEN_H__ +#define __NNOM_FLATTEN_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// no special parameters but we need it. 
+typedef struct _nnom_flatten_config_t{ + nnom_layer_config_t super; +} nnom_flatten_config_t; + +// method +nnom_status_t flatten_build(nnom_layer_t *layer); +nnom_status_t flatten_run(nnom_layer_t *layer); + +// API +nnom_layer_t *flatten_s(const nnom_flatten_config_t *config); +nnom_layer_t *Flatten(void); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_FLATTEN_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_global_pool.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_global_pool.h new file mode 100644 index 000000000..febccb0e8 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_global_pool.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_GLOBAL_POOL_H__ +#define __NNOM_GLOBAL_POOL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_maxpool.h" + +typedef struct _nnom_global_pool_config_t +{ + nnom_layer_config_t super; + int16_t output_shift; +}nnom_global_pool_config_t; + +// method +nnom_status_t global_pool_build(nnom_layer_t *layer); + +// API +nnom_layer_t * global_maxpool_s(const nnom_global_pool_config_t *config); +nnom_layer_t * global_avgpool_s(const nnom_global_pool_config_t *config); +nnom_layer_t * global_sumpool_s(const nnom_global_pool_config_t *config); + +nnom_layer_t *GlobalMaxPool(void); +nnom_layer_t *GlobalAvgPool(void); +nnom_layer_t *GlobalSumPool(void); + + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_GLOBAL_POOL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_gru_cell.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_gru_cell.h new file mode 100644 index 000000000..8ba459624 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_gru_cell.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-08-27 Jianjia Ma The first version + */ + +#ifndef __NNOM_GRU_CELL_H__ +#define __NNOM_GRU_CELL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "nnom_rnn.h" +#include "nnom_activation.h" + +typedef struct _nnom_gru_cell_config_t +{ + nnom_layer_config_t super; + nnom_tensor_t *weights; + nnom_tensor_t* recurrent_weights; + nnom_tensor_t *bias; + nnom_qformat_param_t q_dec_z, q_dec_h; // z, r, h + uint16_t units; +} nnom_gru_cell_config_t; + + +typedef struct _nnom_gru_cell_t +{ + nnom_rnn_cell_t super; + + nnom_tensor_t* weights; + nnom_tensor_t* recurrent_weights; + nnom_tensor_t* bias; + + // decide later. 
+ // z, r, h + nnom_qformat_param_t q_dec_z, q_dec_h; + nnom_qformat_param_t oshift_iw, oshift_hw, bias_shift; + +} nnom_gru_cell_t; + +// gru +nnom_rnn_cell_t *gru_cell_s(const nnom_gru_cell_config_t* config); + +nnom_status_t gru_cell_free(nnom_rnn_cell_t* cell); +nnom_status_t gru_cell_build(nnom_rnn_cell_t* cell); +nnom_status_t gru_cell_run(nnom_rnn_cell_t* cell); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_GRU_CELL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_input.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_input.h new file mode 100644 index 000000000..42322a61f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_input.h @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_INPUT_H__ +#define __NNOM_INPUT_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// IO layer +typedef struct _nnom_io_layer +{ + nnom_layer_t super; + nnom_3d_shape_t shape; + nnom_qformat_param_t dec_bit; + void *buf; //input or output +} nnom_io_layer_t; + +typedef struct _nnom_io_config_t +{ + nnom_layer_config_t super; + nnom_tensor_t *tensor; +}nnom_io_config_t; + +// method +nnom_status_t input_build(nnom_layer_t *layer); +nnom_status_t input_run(nnom_layer_t *layer); + +// API +nnom_layer_t *input_s(const nnom_io_config_t* config); +nnom_layer_t *Input(nnom_3d_shape_t input_shape, void *p_buf); + + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_INPUT_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_lambda.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_lambda.h new file mode 100644 index 000000000..80c5e6915 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_lambda.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_LAMBDA_H__ +#define __NNOM_LAMBDA_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_input.h" + +// lambda layer +typedef struct _nnom_lambda_layer_t +{ + nnom_layer_t super; + void *parameters; // parameters for lambda +} nnom_lambda_layer_t; + +// lambda layer +typedef struct _nnom_lambda_config_t +{ + nnom_layer_config_t super; + nnom_status_t (*run_func_name)(nnom_layer_t *layer); // run method. required + nnom_status_t (*build_func_name)(nnom_layer_t *layer);// compute output buffer shape. 
can be left null, will call default_build() + nnom_status_t (*free_func_name)(nnom_layer_t *layer); // a callback to free private resources (comp buf not included) can be left null + void *parameters; // parameters for lambda +} nnom_lambda_config_t; + + + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_LAMBDA_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_lstm_cell.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_lstm_cell.h new file mode 100644 index 000000000..f0563fc91 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_lstm_cell.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-08-24 Jianjia Ma The first version + */ + +#ifndef __NNOM_LSTM_CELL_H__ +#define __NNOM_LSTM_CELL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "nnom_rnn.h" +#include "nnom_activation.h" + +// a machine interface for configuration +typedef struct _nnom_lstm_cell_config_t +{ + nnom_layer_config_t super; + nnom_tensor_t *weights; + nnom_tensor_t* recurrent_weights; + nnom_tensor_t *bias; + nnom_qformat_param_t q_dec_z, q_dec_h, q_dec_c; // z = iw + hw, c = cell state; h=output and memory + uint16_t units; +} nnom_lstm_cell_config_t; + + +typedef struct _nnom_lstm_cell_t +{ + nnom_rnn_cell_t super; + + nnom_tensor_t* weights; + nnom_tensor_t* recurrent_weights; + nnom_tensor_t* bias; + + // experimental, + // iw: input x weight + // hw: hidden state x recurrent weight + // h: hidden state (memor) + // c: cell state + nnom_qformat_param_t q_dec_z, q_dec_h, q_dec_c; + nnom_qformat_param_t oshift_iw, oshift_hw, oshift_zc, bias_shift; + +} nnom_lstm_cell_t; + +// LSTM +nnom_rnn_cell_t *lstm_cell_s(const nnom_lstm_cell_config_t* config); + +nnom_status_t lstm_cell_free(nnom_rnn_cell_t* cell); +nnom_status_t lstm_cell_q7_q15_build(nnom_rnn_cell_t* cell); +nnom_status_t lstm_cell_q7_q15_run(nnom_rnn_cell_t* cell); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_LSTM_CELL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_matrix.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_matrix.h new file mode 100644 index 000000000..11b775bbe --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_matrix.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_MATRIX_H__ +#define __NNOM_MATRIX_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// the maximum input layer hooked to this layer +#define MAX_INPUT_LAYER 8 + +// matrix layer +typedef struct _nnom_matrix_layer_t +{ + nnom_layer_t super; + int16_t oshift; // output right shift +} nnom_matrix_layer_t; + +typedef struct _nnom_matrix_config_t +{ + nnom_layer_config_t super; + int16_t output_shift; // output right shift +} nnom_matrix_config_t; + +// methods +nnom_layer_t* _same_shape_matrix_layer(void); +nnom_status_t add_run(nnom_layer_t *layer); +nnom_status_t sub_run(nnom_layer_t *layer); +nnom_status_t mult_run(nnom_layer_t *layer); + +// API +nnom_layer_t *add_s(const nnom_matrix_config_t * config); +nnom_layer_t *sub_s(const nnom_matrix_config_t * config); +nnom_layer_t *mult_s(const nnom_matrix_config_t * 
config); +nnom_layer_t *Add(int16_t oshift); +nnom_layer_t *Sub(int16_t oshift); +nnom_layer_t *Mult(int16_t oshift); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_MATRIX_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_maxpool.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_maxpool.h new file mode 100644 index 000000000..690a02d2f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_maxpool.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_MAXPOOL_H__ +#define __NNOM_MAXPOOL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// Max Pooling +typedef struct _nnom_maxpool_layer_t +{ + nnom_layer_t super; + nnom_3d_shape_t kernel; + nnom_3d_shape_t stride; + nnom_3d_shape_t pad; + nnom_padding_t padding_type; + int16_t output_shift; // reserve +} nnom_maxpool_layer_t; + +// a machine interface for configuration +typedef struct _nnom_pool_config_t +{ + nnom_layer_config_t super; + nnom_padding_t padding_type; + int16_t output_shift; + int8_t kernel_size[2]; + int8_t stride_size[2]; + int8_t num_dim; +} nnom_pool_config_t; + +// method +nnom_status_t maxpool_build(nnom_layer_t *layer); +nnom_status_t maxpool_run(nnom_layer_t *layer); + +// API +nnom_layer_t *maxpool_s(const nnom_pool_config_t * config); +nnom_layer_t *MaxPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad_type); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_MATRIX_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_output.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_output.h new file mode 100644 index 000000000..8e62e22f2 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_output.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_OUTPUT_H__ +#define __NNOM_OUTPUT_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_input.h" + +// method +nnom_status_t output_build(nnom_layer_t *layer); +nnom_status_t output_run(nnom_layer_t *layer); + +// API +nnom_layer_t *output_s(const nnom_io_config_t* config); +nnom_layer_t *Output(nnom_3d_shape_t output_shape, void *p_buf); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_OUTPUT_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_reshape.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_reshape.h new file mode 100644 index 000000000..fc68c45d1 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_reshape.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-12-07 Jianjia Ma The first version + */ + +#ifndef __NNOM_RESHAPE_H__ +#define __NNOM_RESHAPE_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + 
+typedef struct _nnom_reshape_layer_t +{ + nnom_layer_t super; + nnom_shape_data_t* dim; + uint8_t num_dim; + +} nnom_reshape_layer_t; + +typedef struct nnom_reshape_config_t +{ + nnom_layer_config_t super; + nnom_shape_data_t* dim; + uint8_t num_dim; +} nnom_reshape_config_t; + +// method +nnom_status_t reshape_run(nnom_layer_t *layer); +nnom_status_t reshape_build(nnom_layer_t *layer); +nnom_status_t reshape_free(nnom_layer_t *layer); + +// API +nnom_layer_t *reshape_s(const nnom_reshape_config_t *config); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_CONV2D_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_rnn.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_rnn.h new file mode 100644 index 000000000..6a9d6efb6 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_rnn.h @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_RNN_H__ +#define __NNOM_RNN_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// a machine interface for configuration +typedef struct _nnom_rnn_config_t +{ + nnom_layer_config_t super; + bool return_sequence; + bool stateful; + bool go_backwards; +} nnom_rnn_config_t; + +// RNN cell base type +typedef struct _nnom_rnn_cell_t +{ + nnom_status_t (*run)(struct _nnom_rnn_cell_t* cell); // cell runner + nnom_status_t (*build)(struct _nnom_rnn_cell_t* cell); // cell builder, calculate buffer size, output data size + nnom_status_t (*free)(struct _nnom_rnn_cell_t* cell); // + nnom_layer_t *layer; // pointer to its layer holder + nnom_layer_config_t *config; // config for the cell event it is a layer type + nnom_rnn_cell_type_t type; + + void *in_data; // input data + void *out_data; // output data + void *in_state; // input state data (or hidden state) + void *out_state; // output state data + + size_t comp_buf_size; // the size of temporary buffer. + size_t state_size; // the size of hidden state + uint16_t units; // the output units + uint16_t feature_size; // the input feature size (vector size) + + size_t macc; // stat of MAC count. +} nnom_rnn_cell_t; + +typedef struct _nnom_rnn_layer_t +{ + nnom_layer_t super; + nnom_rnn_cell_t *cell; + void *state_buf; // memory allocated to store state, size = 2 x size of state required by cell. 
+ + uint16_t timestamp_size;// size of timestamp + bool return_sequence; // whether to return the output for each unit (sequence) + bool stateful; // whether the states are kept after one inteference + bool go_backwards; // whether go backwards timestamping +} nnom_rnn_layer_t; + + +// rnn layer +nnom_layer_t *rnn_s(nnom_rnn_cell_t *cell, const nnom_rnn_config_t* config); + +nnom_status_t rnn_run(nnom_layer_t* layer); +nnom_status_t rnn_build(nnom_layer_t* layer); +nnom_status_t rnn_free(nnom_layer_t* layer); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_RNN_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_simple_cell.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_simple_cell.h new file mode 100644 index 000000000..87977ed8f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_simple_cell.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-08-20 Jianjia Ma The first version + */ + +#ifndef __NNOM_SIMPLE_CELL_H__ +#define __NNOM_SIMPLE_CELL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "nnom_rnn.h" +#include "nnom_activation.h" + + +// This Simple Cell replicate the Keras's SimpleCell as blow +/* + def call(self, inputs, states, training=None): + prev_output = states[0] if nest.is_sequence(states) else states + + h = K.dot(inputs, self.kernel) + h = K.bias_add(h, self.bias) + + output = h + K.dot(prev_output, self.recurrent_kernel) + output = self.activation(output) + + new_state = [output] if nest.is_sequence(states) else output + return output, new_state +*/ + +// a machine interface for configuration +typedef struct _nnom_simple_cell_config_t +{ + nnom_layer_config_t super; + nnom_tensor_t *weights; + nnom_tensor_t* recurrent_weights; + nnom_tensor_t *bias; + nnom_qformat_param_t q_dec_iw, q_dec_hw, q_dec_h; + nnom_activation_type_t act_type; // type of the activation + uint16_t units; +} nnom_simple_cell_config_t; + + +typedef struct _nnom_simple_cell_t +{ + nnom_rnn_cell_t super; + nnom_activation_type_t act_type; + + nnom_tensor_t* weights; + nnom_tensor_t* recurrent_weights; + nnom_tensor_t* bias; + + // experimental, + // iw: input x weight + // hw: hidden state x recurrent weight + // h: hidden state + nnom_qformat_param_t q_dec_iw, q_dec_hw, q_dec_h; + nnom_qformat_param_t oshift_iw, oshift_hw, bias_shift; + +} nnom_simple_cell_t; + + +// RNN cells +// The shape for RNN input is (batch, timestamp, feature), where batch is always 1. 
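The structured RNN API composes a cell with the rnn_s() wrapper declared above. A hypothetical sketch of that composition is shown below; the weight tensors, Q-format values and layer index are placeholders, and simple_cell_s() is the constructor declared just after this comment block.

```c
const nnom_simple_cell_config_t cell_config = {
    .super             = {.name = "simple_cell"},
    .weights           = (nnom_tensor_t *)&tensor_rnn_kernel,       /* hypothetical */
    .recurrent_weights = (nnom_tensor_t *)&tensor_rnn_recurrent,    /* hypothetical */
    .bias              = (nnom_tensor_t *)&tensor_rnn_bias,         /* hypothetical */
    .q_dec_iw = 7, .q_dec_hw = 7, .q_dec_h = 7,                     /* placeholder Q formats */
    .act_type = ACT_TANH,
    .units    = 32
};

const nnom_rnn_config_t rnn_config = {
    .super           = {.name = "rnn"},
    .return_sequence = false,
    .stateful        = false,
    .go_backwards    = false
};

/* layer[n] = model.hook(rnn_s(simple_cell_s(&cell_config), &rnn_config), layer[n - 1]); */
```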
+// +// SimpleCell +nnom_rnn_cell_t *simple_cell_s(const nnom_simple_cell_config_t* config); + +nnom_status_t simple_cell_free(nnom_rnn_cell_t* cell); +nnom_status_t simple_cell_build(nnom_rnn_cell_t* cell); +nnom_status_t simple_cell_run(nnom_rnn_cell_t* cell); + + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_SIMPLE_CELL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_softmax.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_softmax.h new file mode 100644 index 000000000..230be3277 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_softmax.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_SOFTMAX_H__ +#define __NNOM_SOFTMAX_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +typedef struct _nnom_softmax_config_t +{ + nnom_layer_config_t super; +} nnom_softmax_config_t; + + +// method +nnom_status_t softmax_run(nnom_layer_t *layer); +nnom_status_t softmax_build(nnom_layer_t *layer); + +// API +nnom_layer_t *softmax_s(const nnom_softmax_config_t * config); +nnom_layer_t *Softmax(void); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_SOFTMAX_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_sumpool.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_sumpool.h new file mode 100644 index 000000000..927615e82 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_sumpool.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_SUMPOOL_H__ +#define __NNOM_SUMPOOL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +#include "layers/nnom_maxpool.h" + +// Sum Pooling +typedef nnom_maxpool_layer_t nnom_sumpool_layer_t; + +// method +nnom_status_t sumpool_build(nnom_layer_t *layer); +nnom_status_t sumpool_run(nnom_layer_t *layer); + +// API +nnom_layer_t *sumpool_s(const nnom_pool_config_t * config); +nnom_layer_t *SumPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad_type); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_SUMPOOL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_upsample.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_upsample.h new file mode 100644 index 000000000..5db7c9708 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_upsample.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_UPSAMPLE_H__ +#define __NNOM_UPSAMPLE_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +// Up Sampling layer (UnPooling) +typedef struct _nnom_upsample_layer_t +{ + nnom_layer_t super; + nnom_3d_shape_t kernel; +} nnom_upsample_layer_t; + +typedef struct _nnom_upsample_config_t +{ + 
nnom_layer_config_t super; + nnom_shape_data_t kernel[2]; +} nnom_upsample_config_t; + +// API +nnom_layer_t *upsample_s(const nnom_upsample_config_t *config); +nnom_layer_t *UpSample(nnom_3d_shape_t kernel); + +// Methods +nnom_status_t upsample_build(nnom_layer_t *layer); +nnom_status_t upsample_run(nnom_layer_t *layer); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_UPSAMPLE_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_zero_padding.h b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_zero_padding.h new file mode 100644 index 000000000..9aefd6d03 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/layers/nnom_zero_padding.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-03 Jianjia Ma The first version + */ + +#ifndef __NNOM_ZERO_PADDING_H__ +#define __NNOM_ZERO_PADDING_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include + +#include "nnom.h" +#include "nnom_layers.h" +#include "nnom_local.h" +#include "nnom_tensor.h" + +typedef struct _nnom_zero_padding_config_t +{ + nnom_layer_config_t super; + nnom_border_t pad; +} nnom_zero_padding_config_t; + +// zero padding +typedef struct _nnom_zero_padding_layer_t +{ + nnom_layer_t super; + nnom_border_t pad; +} nnom_zero_padding_layer_t; + +// API +nnom_layer_t *zeropadding_s(const nnom_zero_padding_config_t* config); +nnom_layer_t *ZeroPadding(nnom_border_t pad); + +// method +nnom_status_t zero_padding_build(nnom_layer_t *layer); +nnom_status_t zero_padding_run(nnom_layer_t *layer); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_ZERO_PADDING_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/nnom.h b/APP_Framework/Framework/knowing/nnom/inc/nnom.h new file mode 100644 index 000000000..ba802f0e5 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/nnom.h @@ -0,0 +1,415 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + * 2019-02-10 Jianjia Ma Compiler supports dense net connection + */ + +#ifndef __NNOM_H__ +#define __NNOM_H__ + +#ifdef __cplusplus +extern "C" { +#endif + + +#include +#include +#include +#include +#include + +#include "nnom_port.h" + +#define NNOM_ALIGN (sizeof(char*)) // alignment when doing memory ops. Equal to size of pointer in byte. +#define q7_t int8_t +#define q15_t int16_t +#define q31_t int32_t +#define q63_t int64_t + +/* version */ +#define NNOM_MAJORVERSION 0 /**< major version number */ +#define NNOM_SUBVERSION 4 /**< minor version number */ +#define NNOM_REVISION 3 /**< revise version number */ +#define NNOM_VERSION ((NNOM_MAJORVERSION * 10000) + (NNOM_SUBVERSION * 100) + NNOM_REVISION) + +#ifdef ARM_NN_TRUNCATE +#define NNOM_TRUNCATE +#endif + +#ifndef NNOM_TRUNCATE + #define NNOM_ROUND(out_shift) ((0x1 << out_shift) >> 1 ) +#else + #define NNOM_ROUND(out_shift) 0 +#endif + +typedef enum +{ + NN_SUCCESS = 0, /**< No error */ + NN_ARGUMENT_ERROR = -1, /**< One or more arguments are incorrect */ + NN_LENGTH_ERROR = -2, /**< Length of data buffer is incorrect */ + NN_SIZE_MISMATCH = -3, /**< Size of matrices is not compatible with the operation. */ + NN_NANINF = -4, /**< Not-a-number (NaN) or infinity is generated */ + NN_SINGULAR = -5, /**< Generated by matrix inversion if the input matrix is singular and cannot be inverted. 
*/ + NN_TEST_FAILURE = -6, /**< Test Failed */ + NN_NO_MEMORY = -7, + NN_MORE_TODO = -8 +} nnom_status_t; + +typedef enum +{ + NNOM_INVALID = 0, + NNOM_BASE, + NNOM_INPUT, + NNOM_OUTPUT, + NNOM_CONV_2D, + NNOM_DW_CONV_2D, + NNOM_CONV2D_TRANS, + NNOM_BATCHNORM, + NNOM_DENSE, + NNOM_ZERO_PADDING, + NNOM_CROPPING, + NNOM_RNN, + NNOM_ACTIVATION, + NNOM_RELU, + NNOM_LEAKY_RELU, + NNOM_ADV_RELU, + NNOM_SIGMOID, + NNOM_TANH, + NNOM_SOFTMAX, + NNOM_MAXPOOL, + NNOM_GLOBAL_MAXPOOL, + NNOM_AVGPOOL, + NNOM_GLOBAL_AVGPOOL, + NNOM_SUMPOOL, + NNOM_GLOBAL_SUMPOOL, + NNOM_UPSAMPLE, + NNOM_FLATTEN, + NNOM_RESHAPE, + NNOM_LAMBDA, + NNOM_CONCAT, + NNOM_ADD, + NNOM_SUB, + NNOM_MULT, + NNOM_TYPE_MAX + +} nnom_layer_type_t; + +#define DEFUALT_LAYER_NAMES \ + { \ + "Unknown", \ + "Base", \ + "Input", \ + "Output", \ + "Conv2D", \ + "DW_Conv2D", \ + "Conv2DTrsp", \ + "BatchNorm", \ + "Dense", \ + "ZeroPad", \ + "Cropping", \ + "RNN", \ + "Activation", \ + "ReLU", \ + "Leaky_ReLU", \ + "Adv_ReLU", \ + "Sigmoid", \ + "Tanh", \ + "Softmax", \ + "MaxPool", \ + "GL_MaxPool", \ + "AvgPool", \ + "GL_AvgPool", \ + "SumPool", \ + "GL_SumPool", \ + "UpSample", \ + "Flatten", \ + "Reshape", \ + "Lambda", \ + "Concat", \ + "Add", \ + "Sub", \ + "Mult", \ + } +extern const char default_layer_names[][12]; + +// We dont count softmax an activation here, softmax is instanced as a layer +typedef enum +{ + ACT_UNKNOWN = 0, + ACT_RELU, + ACT_LEAKY_RELU, + ACT_ADV_RELU, + ACT_TANH, + ACT_SIGMOID, + ACT_HARD_TANH, + ACT_HARD_SIGMOID +} nnom_activation_type_t; + +#define ACTIVATION_NAMES \ + { \ + "Unknown", \ + "ReLU", \ + "LkyReLU", \ + "AdvReLU", \ + "TanH", \ + "Sigmoid", \ + "HrdTanH", \ + "HrdSigd", \ + } +extern const char default_activation_names[][8]; + +// RNN cell type +typedef enum +{ + NNOM_UNKOWN_CELL = 0, + NNOM_SIMPLE_CELL, + NNOM_GRU_CELL, + NNOM_LSTM_CELL, + NNOM_CELL_TYPE_MAX +} nnom_rnn_cell_type_t; + +#define DEFUALT_CELL_NAMES \ + { \ + "Unknown", \ + "Simple", \ + "GRU", \ + "LSTM", \ + } +extern const char default_cell_names[][8]; + + +// parameters +typedef enum +{ + PADDING_VALID = 0, + PADDING_SAME +} nnom_padding_t; + +#define NNOM_TENSOR_BUF_NULL (0) // This buffer is not in used +#define NNOM_TENSOR_BUF_TEMP (1) // The memory in IO is temporary occupided, can be reused by other layer once the computation is done. +#define NNOM_TENSOR_BUF_RESERVED (2) // the mem is reserve for this layer only (not to be reused by other layer. + +// currently used in compiling. +#define NNOM_BUF_EMPTY (0) +#define NNOM_BUF_FILLED (1) + +// basic types +#define nnom_qformat_param_t int32_t // this should match the backend, need a better way to do it. 
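The NNOM_ROUND macro defined above is what the NNOM_TRUNCATE option toggles: with truncation disabled, half an LSB is added before the right shift so requantisation rounds to the nearest integer rather than flooring. The helper below is illustrative only, not an actual NNoM function; __NNOM_SSAT is the C fallback defined in nnom_local.h later in this patch.

```c
/* illustrative requantisation of a 32-bit accumulator down to q7 */
static inline q7_t requantize_q31_to_q7(q31_t acc, uint16_t out_shift)
{
    return (q7_t)__NNOM_SSAT((acc + NNOM_ROUND(out_shift)) >> out_shift, 8);
}

/* e.g. acc = 100, out_shift = 3:
 *   rounding  : (100 + 4) >> 3 = 13
 *   truncating: 100 >> 3       = 12   (NNOM_TRUNCATE enabled)
 */
```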
+#define nnom_shape_data_t uint16_t + +typedef struct _nnom_3d_shape_t +{ + nnom_shape_data_t h, w, c; +} nnom_3d_shape_t; + +typedef struct _nnom_border_t +{ + nnom_shape_data_t top, bottom, left, right; +} nnom_border_t; + +// nnom_3d_shape_axis_t type provide the axis[] format access to nnom_3d_shape_t +typedef union { + nnom_3d_shape_t s; + nnom_shape_data_t axis[sizeof(nnom_3d_shape_t) / sizeof(nnom_shape_data_t)]; +} nnom_3d_shape_axis_t; + +// tensor quantisation types +typedef enum +{ + NNOM_QTYPE_PER_TENSOR = 0, + NNOM_QTYPE_PER_AXIS = 1 +} nnom_qtype_t; + +typedef struct _nnom_weights +{ + const void *p_value; + nnom_qformat_param_t shift; +} nnom_weight_t; + +typedef struct _nnom_bias +{ + const void *p_value; + nnom_qformat_param_t shift; +} nnom_bias_t; + +// experimental +typedef struct _nnom_tensor_t +{ + void* p_data; // value + nnom_shape_data_t *dim; // dimension of this tensor + nnom_qformat_param_t *q_dec; // number of decimal bit for Q format (scale) + nnom_qformat_param_t *q_offset; // offset for each channel + nnom_qtype_t qtype; // the quantisation type + uint8_t num_dim; // the number of dimension + uint8_t bitwidth; // the data bit width, only support 8bit now +} nnom_tensor_t; + +// nn wrappers +typedef struct _nnom_layer_t nnom_layer_t; +typedef struct _nnom_layer_io_t nnom_layer_io_t; +typedef struct _nnom_layer_hook_t nnom_layer_hook_t; +typedef struct _nnom_mem_block_t nnom_mem_block_t; + +// activation wrapper +typedef struct _nnom_activation_t nnom_activation_t; + +typedef struct _nnom_buf +{ + nnom_mem_block_t *mem; + size_t size; + uint8_t type; +} nnom_buf_t; + +// a memory block to store pre-assign memories during compiling. then assigned to each tensor after. +struct _nnom_mem_block_t +{ + void *blk; // data block location + size_t size; // the maximum size for this block + uint8_t owners; // how many layers own this block + uint8_t state; // empty? filled? for static nn, currently only used in compiling +}; + +typedef struct _nnom_stat_t +{ + size_t macc; //num. of mac operation + uint32_t time; +} nnom_layer_stat_t; + +struct _nnom_layer_hook_t +{ + nnom_layer_io_t *io; // hooked io + nnom_layer_hook_t *next; // next hook include secondary hooked layer +}; + +struct _nnom_layer_io_t +{ + nnom_layer_hook_t hook; // for example: (layer->out)--hook--(layer->in) + nnom_layer_io_t *aux; // point to auxilary I/O (multiple I/O layer) + nnom_tensor_t *tensor; // experimental + nnom_mem_block_t *mem; // memory blocks handles for compiling only. The memory are now pass by tensor. trying to remove it. + nnom_layer_t *owner; // which layer owns this io. + uint8_t type; +}; + +// structured configuration base type +typedef struct _nnom_layer_config_t +{ + char* name; // the name of the layer prequantiesd model (the model trained by user before converted to nnom) +} nnom_layer_config_t; + +// layers base +struct _nnom_layer_t +{ + nnom_layer_t *shortcut; // shortcut points to the next layer, applied on compiling + + nnom_status_t (*run)(nnom_layer_t *layer); // run method. required + nnom_status_t (*build)(nnom_layer_t *layer); // compute output buffer shape. can be left null, will call default_build() + nnom_status_t (*free)(nnom_layer_t *layer); // a callback to free private resources (comp buf not included) can be left null + nnom_buf_t *comp; // computational buf + nnom_activation_t *actail; // I have an activation, I have a tail, wooo haaaa, act-tail!!! + + nnom_layer_config_t *config; // point to the configuration of the layers. for machine api only. 
+ nnom_layer_type_t type; // layer types + nnom_layer_io_t *in; // IO buff, last*layer, states + nnom_layer_io_t *out; // IO buff, next*layer, states + nnom_layer_stat_t stat; // stats, timing, ops +}; + +// activation base +struct _nnom_activation_t +{ + nnom_status_t (*run)(struct _nnom_activation_t *act); + nnom_tensor_t *tensor; + nnom_activation_type_t type; +}; + +// local static functions when libc is not available +#ifdef NNOM_USING_STATIC_MEMORY + void nnom_set_static_buf(void* buf, size_t size); + void *nnom_malloc(size_t size); + void nnom_free(void* p); +#endif //NNOM_USING_STATIC_BUF + +typedef struct _nnom_model nnom_model_t; + +#include "nnom_tensor.h" +#include "nnom_layers.h" +#include "nnom_utils.h" + +// models, I dont want to make model class as a child of layer class yet +struct _nnom_model +{ + nnom_layer_t *head; + nnom_layer_t *tail; + + // model constructor + nnom_status_t (*add)(struct _nnom_model *m, nnom_layer_t *layer); // has too pass a raw value + nnom_layer_t *(*hook)(nnom_layer_t *curr, nnom_layer_t *last); // create hook between 2 layers' primary IO. + nnom_layer_t *(*merge)(nnom_layer_t *method, nnom_layer_t *in1, nnom_layer_t *in2); // an older interface of merge 2 inputs. + nnom_layer_t *(*mergex)(nnom_layer_t *method, int num, ...); // merge a few layers using mutiple input method (concate, add, ...) + nnom_layer_t *(*active)(nnom_activation_t *act, nnom_layer_t *target_layer); // add the activation to the existing layer's tail + + // callback + nnom_status_t (*layer_callback)(nnom_model_t *m, nnom_layer_t *layer); // layer callback will be called after each layer(after actail). + + // block memory for layers + nnom_mem_block_t blocks[NNOM_BLOCK_NUM]; + + size_t total_ops; + + bool is_inited; // is this structure initialized + bool is_allocated; // is this structure allocated by nnom (not by user) +}; + +#define NNOM_NULL_CHECK(p) \ + if ((p) == NULL) \ + { \ + NNOM_LOG("Error: NULL object.\n"); \ + return NN_ARGUMENT_ERROR; \ + } + + +// utils +size_t nnom_alignto(size_t value, uint32_t alignment); +size_t nnom_io_length(nnom_layer_io_t *io); +size_t nnom_hook_length(nnom_layer_hook_t *hook); + +// memory (malloc + memeset 0) +void *nnom_mem(size_t size); + +// get how much memory has been taken +size_t nnom_mem_stat(void); + +// Model APIs +// create or init a model +nnom_model_t *new_model(nnom_model_t *m); +// compile as sequencial model +nnom_status_t sequencial_compile(nnom_model_t *m); +// compile as functional model +nnom_status_t model_compile(nnom_model_t *m, nnom_layer_t *input, nnom_layer_t *output); +// run a prediction +nnom_status_t model_run(nnom_model_t *m); +// delete model. +void model_delete(nnom_model_t *m); +// check version +nnom_status_t check_model_version(unsigned long model_version); + +// callback, called after each layer has finished the calculation. +// this callback must return NN_SUCCESS for continually run the model. otherwise, model will be returned with the ERROR code. +// this function return NN_LENGTH_ERROR if the callback is already set to other. +nnom_status_t model_set_callback(nnom_model_t *m, nnom_status_t (*layer_callback)(nnom_model_t *m, nnom_layer_t *layer)); +// delete callback. 
+void model_delete_callback(nnom_model_t *m); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/nnom_layers.h b/APP_Framework/Framework/knowing/nnom/inc/nnom_layers.h new file mode 100644 index 000000000..cba44874f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/nnom_layers.h @@ -0,0 +1,194 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + */ + +#ifndef __NNOM_LAYERS_H__ +#define __NNOM_LAYERS_H__ + +#ifdef __cplusplus +extern "C" { +#endif + + +#include +#include +#include + +#include "nnom.h" + +// properties +nnom_3d_shape_t shape(size_t h, size_t w, size_t c); +nnom_3d_shape_t kernel(size_t h, size_t w); +nnom_3d_shape_t stride(size_t h, size_t w); +nnom_3d_shape_t dilation(size_t h, size_t w); +nnom_border_t border(size_t top, size_t bottom, size_t left, size_t right); +//nnom_qformat_t qformat(int8_t m, int8_t n); +size_t shape_size(nnom_3d_shape_t* s); + +// this function is to add a new IO to current inited IO +// input, the targeted IO that the new IO will be added to +// output , the new IO +nnom_layer_io_t* io_add_aux(nnom_layer_io_t* targeted_io); +nnom_layer_io_t *io_init(void *owner_layer, nnom_layer_io_t *io); + +#define NN_CEILIF(x,y) ((x+y-1)/y) + +#include "layers/nnom_activation.h" +#include "layers/nnom_concat.h" +#include "layers/nnom_conv2d.h" +#include "layers/nnom_cropping.h" +#include "layers/nnom_conv2d_trans.h" +#include "layers/nnom_dense.h" +#include "layers/nnom_dw_conv2d.h" +#include "layers/nnom_flatten.h" +#include "layers/nnom_reshape.h" +#include "layers/nnom_global_pool.h" +#include "layers/nnom_input.h" +#include "layers/nnom_lambda.h" +#include "layers/nnom_matrix.h" +#include "layers/nnom_maxpool.h" +#include "layers/nnom_avgpool.h" +#include "layers/nnom_output.h" +#include "layers/nnom_rnn.h" +#include "layers/nnom_softmax.h" +#include "layers/nnom_sumpool.h" +#include "layers/nnom_upsample.h" +#include "layers/nnom_zero_padding.h" +#include "layers/nnom_rnn.h" +#include "layers/nnom_simple_cell.h" +#include "layers/nnom_lstm_cell.h" +#include "layers/nnom_gru_cell.h" + +// Layer APIs ****** +// (a summary for each individual layer's files) + +// input/output +nnom_layer_t *Input(nnom_3d_shape_t input_shape, void *p_buf); +nnom_layer_t *Output(nnom_3d_shape_t output_shape, void *p_buf); + +// Pooling +nnom_layer_t *MaxPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad); +nnom_layer_t *AvgPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad); +nnom_layer_t *SumPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad); +nnom_layer_t *GlobalMaxPool(void); +nnom_layer_t *GlobalAvgPool(void); +nnom_layer_t *GlobalSumPool(void); + +// padding, cropping, upsample +nnom_layer_t *UpSample(nnom_3d_shape_t kernel); +nnom_layer_t *ZeroPadding(nnom_border_t pad); +nnom_layer_t *Cropping(nnom_border_t pad); + +// Activation +nnom_layer_t *Activation(nnom_activation_t *act); +nnom_layer_t *ReLU(void); +nnom_layer_t *LeakyReLU(float alpha); +nnom_layer_t *Softmax(void); +nnom_layer_t *Sigmoid(int32_t dec_bit); // input dec bit +nnom_layer_t *TanH(int32_t dec_bit); // input dec bit + +// Matrix +nnom_layer_t *Add(int16_t oshift); // output shift +nnom_layer_t *Sub(int16_t oshift); // output shift +nnom_layer_t *Mult(int16_t oshift); // output shift + +nnom_layer_t *Flatten(void); +nnom_layer_t *Concat(int8_t 
axis); +// -- NN Constructers -- +// conv2d +nnom_layer_t *Conv2D(uint32_t filters, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad, + const nnom_weight_t *w, const nnom_bias_t *b); + +// deconv2d +nnom_layer_t *Conv2DTrans(uint32_t filters, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad, + const nnom_weight_t *w, const nnom_bias_t *b); + +// depthwise_convolution +nnom_layer_t *DW_Conv2D(uint32_t multiplier, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad, + const nnom_weight_t *w, const nnom_bias_t *b); + +// fully connected, dense +nnom_layer_t *Dense(size_t output_unit, const nnom_weight_t *w, const nnom_bias_t *b); + + +// Lambda Layers +nnom_layer_t *Lambda(nnom_status_t (*run)(nnom_layer_t *), // run method, required + nnom_status_t (*build)(nnom_layer_t *), // optional, call default_build() if left null + nnom_status_t (*free)(nnom_layer_t *), // not required if no resources needs to be deleted, can be left null. + void *parameters); // user private parameters for run method, left null if not needed. + +// building methods +nnom_status_t default_build(nnom_layer_t* layer); +nnom_status_t input_build(nnom_layer_t* layer); + +nnom_status_t conv2d_build(nnom_layer_t* layer); +nnom_status_t dw_conv2d_build(nnom_layer_t* layer); +nnom_status_t conv2d_trans_build(nnom_layer_t* layer); +nnom_status_t dense_build(nnom_layer_t* layer); +nnom_status_t rnn_build(nnom_layer_t* layer); + +nnom_status_t upsample_build(nnom_layer_t* layer); +nnom_status_t zero_padding_build(nnom_layer_t* layer); +nnom_status_t cropping_build(nnom_layer_t* layer); + +nnom_status_t maxpool_build(nnom_layer_t* layer); +nnom_status_t avgpool_build(nnom_layer_t* layer); +nnom_status_t sumpool_build(nnom_layer_t* layer); +nnom_status_t global_pool_build(nnom_layer_t* layer); + +nnom_status_t flatten_build(nnom_layer_t* layer); +nnom_status_t reshape_build(nnom_layer_t* layer); +nnom_status_t concat_build(nnom_layer_t* layer); + +// run +nnom_status_t input_run(nnom_layer_t* layer); +nnom_status_t output_run(nnom_layer_t* layer); +nnom_status_t flatten_run(nnom_layer_t* layer); +nnom_status_t reshape_run(nnom_layer_t* layer); +nnom_status_t default_run(nnom_layer_t* layer); // simply copy data from input to output + +nnom_status_t dw_conv2d_run(nnom_layer_t* layer); +nnom_status_t conv2d_run(nnom_layer_t* layer); +nnom_status_t conv2d_trans_run(nnom_layer_t* layer); +nnom_status_t dense_run(nnom_layer_t* layer); +nnom_status_t rnn_run(nnom_layer_t* layer); + +nnom_status_t upsample_run(nnom_layer_t* layer); +nnom_status_t zero_padding_run(nnom_layer_t* layer); +nnom_status_t cropping_run(nnom_layer_t* layer); + +nnom_status_t activation_run(nnom_layer_t* layer); +nnom_status_t softmax_run(nnom_layer_t* layer); + +nnom_status_t maxpool_run(nnom_layer_t* layer); +nnom_status_t avgpool_run(nnom_layer_t* layer); +nnom_status_t sumpool_run(nnom_layer_t* layer); + +nnom_status_t concat_run(nnom_layer_t* layer); +nnom_status_t add_run(nnom_layer_t* layer); +nnom_status_t sub_run(nnom_layer_t* layer); +nnom_status_t mult_run(nnom_layer_t* layer); + +// Activation APIs +// Softmax is not considered as activation in NNoM, Softmax is in layer API. 
+nnom_activation_t* act_relu(void); +nnom_activation_t* act_leaky_relu(float alpha); +nnom_activation_t* act_sigmoid(int32_t dec_bit); +nnom_activation_t* act_tanh(int32_t dec_bit); + +// direct API +nnom_status_t act_tensor_run(nnom_activation_t* act, nnom_tensor_t* tensor); + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_LAYERS_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/nnom_local.h b/APP_Framework/Framework/knowing/nnom/inc/nnom_local.h new file mode 100644 index 000000000..35845a564 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/nnom_local.h @@ -0,0 +1,974 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Notice: + * Code in this file inlcudes derivative works from CMSIS, which is released under alternative license. + * Please check the LICENSE file for detial. + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + * 2019-03-19 Jianjia Ma Local C implementation partly from CMSIS-NN + */ + +#ifndef __NNOM_LOCAL_H__ +#define __NNOM_LOCAL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + + +#include "stdint.h" +#include "nnom_port.h" + +#ifdef ARM_NN_TRUNCATE +#define NNOM_TRUNCATE +#endif + +// SSAT implementation with C code +#ifndef __NNOM_SSAT +static inline int __NNOM_SSAT(int32_t value, int32_t bit) { + int32_t min = -(1<<(bit-1)); + int32_t max = (1<<(bit-1)) - 1; + if (value < min) + return min; + else if (value > max) + return max; + else + return value; +} +#endif + +// USAT implementation with C code +#ifndef __NNOM_USAT +static inline int __NNOM_USAT(int32_t value, int32_t bit) { + int32_t max = (1<<(bit-1)) - 1; + if (value < 0) + return 0; + else if (value > max) + return max; + else + return value; +} +#endif + +#define MAX(A, B) ((A) > (B) ? (A) : (B)) +#define MIN(A, B) ((A) < (B) ? 
(A) : (B)) + + +// Those functions/tables below are partially modifed from CMSIS-NN lib +// https://github.com/ARM-software/CMSIS_5 +// +void local_avepool_q7_HWC(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out); + +void local_avepool_q7_CHW(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out); + +// modified from CMSIS-NN test_ref +void local_maxpool_q7_HWC(const q7_t * Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t * bufferA, // a buffer for local storage, NULL by now + q7_t * Im_out); + +void local_maxpool_q7_CHW(const q7_t * Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t * bufferA, // a buffer for local storage, NULL by now + q7_t * Im_out); + +void local_sumpool_q7_HWC(const q7_t * Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t 
padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t * bufferA, // a buffer for local storage, size = 4*output_size + q7_t * Im_out); + +void local_sumpool_q7_CHW(const q7_t * Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t * bufferA, // a buffer for local storage, size = 4*output_size + q7_t * Im_out); + +// customised up sample pooling +void local_up_sampling_q7_HWC(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // NULL + q7_t *Im_out); + +void local_up_sampling_q7_CHW(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // NULL + q7_t *Im_out); + +void local_convolve_HWC_q7_nonsquare(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_convolve_CHW_q7_nonsquare(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const 
uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_conv_trans_HWC_q7_nonsquare(const int8_t * Im_in, + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const uint16_t bias_shift, const uint16_t out_shift, q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_depthwise_separable_conv_HWC_q7_nonsquare(const q7_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_depthwise_separable_conv_CHW_q7_nonsquare(const q7_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, 
i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_zero_padding_HWC_q7(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_zero_padding_CHW_q7(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_cropping_HWC_q7(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_cropping_CHW_q7(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_fully_connected_q7_opt(const q7_t * pV, // pointer to vector + const q7_t * pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t bias_shift, // amount of left-shift for bias + const uint16_t out_shift, // amount of right-shift for output + const 
q7_t * bias, q7_t * pOut, // output operand + q15_t * vec_buffer); + + +void local_fully_connected_q7(const q7_t * pV, // pointer to vector + const q7_t * pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t bias_shift, // amount of left-shift for bias + const uint16_t out_shift, // amount of right-shift for output + const q7_t * bias, q7_t * pOut, // output operand + q15_t * vec_buffer); + +// matrix dot, +// it takes reorderd weight as input, (see dense layer for detail. this is basiclly a dense opt without bias) +void local_dot_q7_opt(const q7_t *pV, // pointer to vector + const q7_t *pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t out_shift, // amount of right-shift for output + q7_t *pOut); // result buffer + +void local_dot_q7(const q7_t *pV, // pointer to vector + const q7_t *pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t out_shift, // amount of right-shift for output + q7_t *pOut); // output operand) + + + +// softmax +void local_softmax_q7(const q7_t * vec_in, const uint32_t dim_vec, q7_t * p_out); + +// sigmoid +void local_sigmoid_q7(q7_t * data, uint32_t size, int16_t int_width); + +// tanh +void local_tanh_q7(q7_t * data, uint32_t size, int16_t int_width); + +// relu +void local_relu_q7(q7_t * data, uint32_t size); + +// leaky relu +void local_leaky_relu_q7(q7_t *data, q7_t alpha, uint32_t size); + +// alpha in q7 format with dec_bit=7 +// max and threshold has the same Q format with the activation +void local_adv_relu_q7(q7_t *data, q7_t alpha, q7_t max, q7_t threshold, uint32_t size); + +// hard sigmoid, +// y=-1 if x < -2.5 +// y=1 if x > 2.5 +// otherwise y = 0.2 * x + 0.5 (y=0.20315 * x + 0.5) +void local_hard_sigmoid_q7(q7_t *data, uint32_t size, int16_t dec_bit); + +// hard tanh +// y=-1 if x < -1 +// y=1 if x > 1 +// otherwise y = x +void local_hard_tanh_q7(q7_t *data, uint32_t size, int16_t dec_bit); + +// matrix ops +void local_mult_q7(q7_t * pSrcA, q7_t * pSrcB, q7_t * pDst, const uint16_t out_shift, uint32_t blockSize); + +// add +void local_add_q7(q7_t * pSrcA, q7_t * pSrcB, q7_t * pDst, const uint16_t out_shift, uint32_t blockSize); + +// sub +void local_sub_q7(q7_t * pSrcA, q7_t * pSrcB, q7_t * pDst, const uint16_t out_shift, uint32_t blockSize); + +// take multiple blocks (>2) as input +void local_multiple_add_q7( q7_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q7_t **p_src); + +void local_multiple_mult_q7( q7_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q7_t **p_src); + +void local_multiple_sub_q7( q7_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q7_t **p_src); + + +// Below tables credit to CMSIS +// For more info. 
check CMSIS-NN lib +// https://github.com/ARM-software/CMSIS_5/blob/develop/CMSIS/NN/Source/NNSupportFunctions/arm_nntables.c +static const q7_t nnom_sigmoid_table_q7[256] = { + 0x40, 0x42, 0x44, 0x46, 0x48, 0x4a, 0x4c, 0x4e, + 0x50, 0x52, 0x53, 0x55, 0x57, 0x59, 0x5a, 0x5c, + 0x5e, 0x5f, 0x61, 0x62, 0x63, 0x65, 0x66, 0x67, + 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x70, + 0x71, 0x72, 0x72, 0x73, 0x74, 0x74, 0x75, 0x76, + 0x76, 0x77, 0x77, 0x78, 0x78, 0x79, 0x79, 0x7a, + 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, + 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, + 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, + 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x04, + 0x04, 0x04, 0x04, 0x04, 0x05, 0x05, 0x05, 0x06, + 0x06, 0x06, 0x07, 0x07, 0x08, 0x08, 0x09, 0x09, + 0x0a, 0x0a, 0x0b, 0x0c, 0x0c, 0x0d, 0x0e, 0x0e, + 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, + 0x17, 0x19, 0x1a, 0x1b, 0x1d, 0x1e, 0x1f, 0x21, + 0x22, 0x24, 0x26, 0x27, 0x29, 0x2b, 0x2d, 0x2e, + 0x30, 0x32, 0x34, 0x36, 0x38, 0x3a, 0x3c, 0x3e, +}; + + +static const q7_t nnom_tanh_table_q7[256] = { + 0x00, 0x08, 0x10, 0x18, 0x1f, 0x27, 0x2e, 0x35, + 0x3b, 0x41, 0x47, 0x4c, 0x51, 0x56, 0x5a, 0x5e, + 0x61, 0x65, 0x68, 0x6a, 0x6d, 0x6f, 0x71, 0x72, + 0x74, 0x75, 0x76, 0x78, 0x78, 0x79, 0x7a, 0x7b, + 0x7b, 0x7c, 0x7c, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, + 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, + 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, + 0x82, 0x82, 0x82, 0x82, 0x83, 0x83, 0x84, 0x84, + 0x85, 0x85, 0x86, 0x87, 0x88, 0x88, 0x8a, 0x8b, + 0x8c, 0x8e, 0x8f, 0x91, 0x93, 0x96, 0x98, 0x9b, + 0x9f, 0xa2, 0xa6, 0xaa, 0xaf, 0xb4, 0xb9, 0xbf, + 0xc5, 0xcb, 0xd2, 0xd9, 0xe1, 0xe8, 0xf0, 0xf8, +}; + + +// ------------ 16bit ops -------------------- + +void local_avepool_q15_HWC(const q15_t *Im_in, // input image + 
const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out); + +void local_avepool_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out); + +void local_maxpool_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out); + +void local_maxpool_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out); + +void local_sumpool_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t 
dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, size = 4*output_size + q15_t *Im_out); + +void local_sumpool_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, size = 4*output_size + q15_t *Im_out); + +void local_up_sampling_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out); + + void local_up_sampling_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out); + +void local_convolve_HWC_q15_nonsquare(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); +void local_convolve_CHW_q15_nonsquare(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // 
kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_conv_trans_HWC_q15_nonsquare(const int8_t * Im_in, + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const uint16_t bias_shift, const uint16_t out_shift, q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_depthwise_separable_conv_HWC_q15_nonsquare(const q15_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_depthwise_separable_conv_CHW_q15_nonsquare(const q15_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter 
kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +); + +void local_zero_padding_HWC_q15(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_zero_padding_CHW_q15(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_cropping_HWC_q15(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + +void local_cropping_CHW_q15(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y); // output image dimension y + + +void local_dot_q15(const q15_t *pV, // pointer to vector + const q15_t *pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t out_shift, // amount of right-shift for output + q15_t *pOut); // output operand) + +void local_dot_q15_opt(const q15_t * pV, + const q15_t * pM, + const uint16_t dim_vec, + const 
uint16_t num_of_rows, + const uint16_t out_shift, + q15_t * pOut); + +// original implementation +// this support none bias, the it will perform like a dot. +// set the `bias=NULL` to work +void local_fully_connected_mat_q7_vec_q15(const q15_t * pV, // pointer to vector + const q7_t * pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t bias_shift, // amount of left-shift for bias + const uint16_t out_shift, // amount of right-shift for output + const q7_t * bias, // bias + q15_t * pOut, // output + q15_t * vec_buffer); // not used but to keep the interface same as the ARM's version + +// work on recorder matrix +// this support none bias, set the bias=NULL to work +void local_fully_connected_mat_q7_vec_q15_opt(const q15_t * pV, + const q7_t * pM, + const uint16_t dim_vec, + const uint16_t num_of_rows, + const uint16_t bias_shift, + const uint16_t out_shift, + const q7_t * bias, + q15_t * pOut, + q15_t * vec_buffer); + +// matrix operation Q15 +void local_multiple_add_q15( q15_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q15_t **p_src); + +void local_multiple_mult_q15( q15_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q15_t **p_src); + +void local_multiple_sub_q15( q15_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q15_t **p_src); + +void local_mult_q15(q15_t * pSrcA, q15_t * pSrcB, q15_t * pDst, const uint16_t out_shift, uint32_t blockSize); + +// add +void local_add_q15(q15_t * pSrcA, q15_t * pSrcB, q15_t * pDst, const uint16_t out_shift, uint32_t blockSize); + +// sub +void local_sub_q15(q15_t * pSrcA, q15_t * pSrcB, q15_t * pDst, const uint16_t out_shift, uint32_t blockSize); + +// Convert Q7 to Q15 +void local_q7_to_q15_no_shift(const q7_t *src, q15_t *des, uint32_t size); +void local_q7_to_q15(const q7_t *src, q15_t *des, uint32_t size); + +// q15 shift to q7 +void local_q15_to_q7(const q15_t *src, q7_t *des, uint32_t shift, uint32_t size); + +// y = 1 - x +void local_1_minor_z_q15(q15_t *src, q15_t *des, uint16_t dec_bit, uint32_t size); + +void local_softmax_q15(const q15_t * vec_in, const uint16_t dim_vec, q15_t * p_out); +void local_hard_sigmoid_q15(q15_t *data, uint32_t size, int16_t dec_bit); +void local_hard_tanh_q15(q15_t *data, uint32_t size, int16_t dec_bit); +void local_relu_q15(q15_t *data, uint32_t size); +void local_leaky_relu_q15(q15_t *data, q7_t alpha, uint32_t size); +void local_adv_relu_q15(q15_t *data, q7_t negative_slope, q15_t max, q15_t threshold, uint32_t size); +void local_sigmoid_q15(q15_t * data, uint32_t size, uint16_t int_width); +void local_tanh_q15(q15_t * data, uint32_t size, uint16_t int_width); + + +static const q15_t nnom_sigmoid_table_q15[256] = { + 0x4000, 0x4200, 0x43ff, 0x45fc, 0x47f5, 0x49eb, 0x4bdc, 0x4dc8, + 0x4fad, 0x518a, 0x5360, 0x552c, 0x56ef, 0x58a8, 0x5a57, 0x5bfb, + 0x5d93, 0x5f20, 0x60a1, 0x6216, 0x637f, 0x64db, 0x662b, 0x676f, + 0x68a6, 0x69d2, 0x6af1, 0x6c05, 0x6d0d, 0x6e09, 0x6efb, 0x6fe2, + 0x70be, 0x7190, 0x7258, 0x7316, 0x73cc, 0x7478, 0x751b, 0x75b7, + 0x764a, 0x76d6, 0x775b, 0x77d8, 0x784f, 0x78c0, 0x792a, 0x798f, + 0x79ee, 0x7a48, 0x7a9d, 0x7aed, 0x7b39, 0x7b80, 0x7bc4, 0x7c03, + 0x7c3f, 0x7c78, 0x7cad, 0x7ce0, 0x7d0f, 0x7d3c, 0x7d66, 0x7d8d, + 0x7db3, 0x7dd6, 0x7df7, 0x7e16, 0x7e33, 0x7e4f, 0x7e69, 0x7e81, + 0x7e98, 0x7eae, 0x7ec2, 0x7ed5, 0x7ee7, 0x7ef8, 0x7f08, 0x7f17, + 0x7f25, 0x7f32, 0x7f3e, 0x7f4a, 0x7f55, 0x7f5f, 0x7f69, 
0x7f72, + 0x7f7b, 0x7f83, 0x7f8a, 0x7f91, 0x7f98, 0x7f9e, 0x7fa4, 0x7faa, + 0x7faf, 0x7fb4, 0x7fb8, 0x7fbd, 0x7fc1, 0x7fc5, 0x7fc8, 0x7fcc, + 0x7fcf, 0x7fd2, 0x7fd5, 0x7fd7, 0x7fda, 0x7fdc, 0x7fde, 0x7fe0, + 0x7fe2, 0x7fe4, 0x7fe6, 0x7fe7, 0x7fe9, 0x7fea, 0x7feb, 0x7fed, + 0x7fee, 0x7fef, 0x7ff0, 0x7ff1, 0x7ff2, 0x7ff3, 0x7ff4, 0x7ff4, + 0x000b, 0x000c, 0x000c, 0x000d, 0x000e, 0x000f, 0x0010, 0x0011, + 0x0012, 0x0013, 0x0015, 0x0016, 0x0017, 0x0019, 0x001a, 0x001c, + 0x001e, 0x0020, 0x0022, 0x0024, 0x0026, 0x0029, 0x002b, 0x002e, + 0x0031, 0x0034, 0x0038, 0x003b, 0x003f, 0x0043, 0x0048, 0x004c, + 0x0051, 0x0056, 0x005c, 0x0062, 0x0068, 0x006f, 0x0076, 0x007d, + 0x0085, 0x008e, 0x0097, 0x00a1, 0x00ab, 0x00b6, 0x00c2, 0x00ce, + 0x00db, 0x00e9, 0x00f8, 0x0108, 0x0119, 0x012b, 0x013e, 0x0152, + 0x0168, 0x017f, 0x0197, 0x01b1, 0x01cd, 0x01ea, 0x0209, 0x022a, + 0x024d, 0x0273, 0x029a, 0x02c4, 0x02f1, 0x0320, 0x0353, 0x0388, + 0x03c1, 0x03fd, 0x043c, 0x0480, 0x04c7, 0x0513, 0x0563, 0x05b8, + 0x0612, 0x0671, 0x06d6, 0x0740, 0x07b1, 0x0828, 0x08a5, 0x092a, + 0x09b6, 0x0a49, 0x0ae5, 0x0b88, 0x0c34, 0x0cea, 0x0da8, 0x0e70, + 0x0f42, 0x101e, 0x1105, 0x11f7, 0x12f3, 0x13fb, 0x150f, 0x162e, + 0x175a, 0x1891, 0x19d5, 0x1b25, 0x1c81, 0x1dea, 0x1f5f, 0x20e0, + 0x226d, 0x2405, 0x25a9, 0x2758, 0x2911, 0x2ad4, 0x2ca0, 0x2e76, + 0x3053, 0x3238, 0x3424, 0x3615, 0x380b, 0x3a04, 0x3c01, 0x3e00, +}; + + +static const q15_t nnom_tanh_table_q15[256] = { + 0x0000, 0x07fd, 0x0feb, 0x17b9, 0x1f59, 0x26bf, 0x2ddf, 0x34ae, + 0x3b27, 0x4142, 0x46fd, 0x4c56, 0x514d, 0x55e2, 0x5a1a, 0x5df6, + 0x617c, 0x64b0, 0x6797, 0x6a37, 0x6c95, 0x6eb5, 0x709e, 0x7254, + 0x73dc, 0x753a, 0x7672, 0x7788, 0x787f, 0x795b, 0x7a1e, 0x7acb, + 0x7b65, 0x7bee, 0x7c66, 0x7cd1, 0x7d30, 0x7d84, 0x7dce, 0x7e0f, + 0x7e49, 0x7e7d, 0x7eaa, 0x7ed2, 0x7ef5, 0x7f14, 0x7f30, 0x7f48, + 0x7f5e, 0x7f71, 0x7f82, 0x7f91, 0x7f9e, 0x7fa9, 0x7fb3, 0x7fbc, + 0x7fc4, 0x7fcb, 0x7fd1, 0x7fd7, 0x7fdc, 0x7fe0, 0x7fe4, 0x7fe7, + 0x7fea, 0x7fed, 0x7fef, 0x7ff1, 0x7ff3, 0x7ff4, 0x7ff6, 0x7ff7, + 0x7ff8, 0x7ff9, 0x7ffa, 0x7ffa, 0x7ffb, 0x7ffc, 0x7ffc, 0x7ffd, + 0x7ffd, 0x7ffd, 0x7ffe, 0x7ffe, 0x7ffe, 0x7ffe, 0x7fff, 0x7fff, + 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, + 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, + 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, + 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, + 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, 0x7fff, + 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, + 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, + 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, + 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, + 0x8000, 0x8000, 0x8001, 0x8001, 0x8001, 0x8001, 0x8001, 0x8001, + 0x8001, 0x8001, 0x8001, 0x8002, 0x8002, 0x8002, 0x8002, 0x8003, + 0x8003, 0x8003, 0x8004, 0x8004, 0x8005, 0x8006, 0x8006, 0x8007, + 0x8008, 0x8009, 0x800a, 0x800c, 0x800d, 0x800f, 0x8011, 0x8013, + 0x8016, 0x8019, 0x801c, 0x8020, 0x8024, 0x8029, 0x802f, 0x8035, + 0x803c, 0x8044, 0x804d, 0x8057, 0x8062, 0x806f, 0x807e, 0x808f, + 0x80a2, 0x80b8, 0x80d0, 0x80ec, 0x810b, 0x812e, 0x8156, 0x8183, + 0x81b7, 0x81f1, 0x8232, 0x827c, 0x82d0, 0x832f, 0x839a, 0x8412, + 0x849b, 0x8535, 0x85e2, 0x86a5, 0x8781, 0x8878, 0x898e, 0x8ac6, + 0x8c24, 0x8dac, 0x8f62, 0x914b, 0x936b, 0x95c9, 0x9869, 0x9b50, + 0x9e84, 0xa20a, 0xa5e6, 0xaa1e, 0xaeb3, 0xb3aa, 0xb903, 0xbebe, + 0xc4d9, 0xcb52, 0xd221, 0xd941, 0xe0a7, 0xe847, 0xf015, 
0xf803, +}; + +#ifdef __cplusplus +} +#endif + +#endif /* __NNOM_LOCAL_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/nnom_tensor.h b/APP_Framework/Framework/knowing/nnom/inc/nnom_tensor.h new file mode 100644 index 000000000..6853da868 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/nnom_tensor.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + * 2019-02-10 Jianjia Ma Compiler supports dense net connection + */ + +#ifndef __NNOM_TENSOR_H__ +#define __NNOM_TENSOR_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "nnom.h" + + +void delete_tensor(nnom_tensor_t* t); +nnom_tensor_t* new_tensor(nnom_qtype_t type, uint32_t num_dim, uint32_t num_channel); +// set tensor by value +// for tensor with quantized type NNOM_QTYPE_PER_TENSOR +nnom_tensor_t* tensor_set_attr_v(nnom_tensor_t* t, + nnom_qformat_param_t dec_bit, nnom_qformat_param_t offset, nnom_shape_data_t* dim, uint32_t num_dim, uint8_t bitwidth); +nnom_tensor_t* tensor_set_attr(nnom_tensor_t* t, + nnom_qformat_param_t*dec_bit, nnom_qformat_param_t *offset, nnom_shape_data_t* dim, uint32_t num_dim, uint8_t bitwidth); +nnom_tensor_t* tensor_cpy_attr(nnom_tensor_t* des, nnom_tensor_t* src); +size_t tensor_get_num_channel(nnom_tensor_t* t); +size_t tensor_size(nnom_tensor_t* t); +size_t tensor_size_byte(nnom_tensor_t* t); + +// only support 3d tensor +// change format from CHW to HWC +// the shape of the data, input data, output data +void tensor_hwc2chw_q7(nnom_tensor_t* des, nnom_tensor_t* src); + +// change format from CHW to HWC +// the shape of the data, input data, output data +void tensor_chw2hwc_q7(nnom_tensor_t* des, nnom_tensor_t* src); + +// deprecated. +void hwc2chw_q7(nnom_3d_shape_t shape, q7_t* p_in, q7_t* p_out); +void chw2hwc_q7(nnom_3d_shape_t shape, q7_t* p_in, q7_t* p_out); + +#ifdef __cplusplus +} +#endif + +#endif /*__NNOM_TENSOR_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/inc/nnom_utils.h b/APP_Framework/Framework/knowing/nnom/inc/nnom_utils.h new file mode 100644 index 000000000..88c5067d3 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/inc/nnom_utils.h @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + */ + +#ifndef __NNOM_UTILS_H__ +#define __NNOM_UTILS_H__ + +#ifdef __cplusplus +extern "C" { +#endif + + +#include +#include +#include + +#include "nnom.h" + +typedef struct _nnom_predict_t +{ + uint16_t *confusion_mat; // confusiong matrix + uint32_t *top_k; // which stored the num of prediction in rank_k, example: Top-2 = top_k[0]+top_k[1] + nnom_model_t *model; // the model to run + int8_t *buf_prediction; // the pointer to the output of softmax layer(normally the end of classifier). + + // setting + uint32_t label_num; // number of types in classification + uint32_t top_k_size; // number of k that wants to know. 
+ + // running + uint32_t predict_count; // how many prediction is done + + //timing + uint32_t t_run_total; // total running time + uint32_t t_predict_start; // when it is initial + uint32_t t_predict_total; // total time of the whole test +} nnom_predict_t; + +// create a prediction +// input model, the buf pointer to the softwmax output (Temporary, this can be extract from model) +// the size of softmax output (the num of lable) +// the top k that wants to record. +nnom_predict_t *prediction_create(nnom_model_t *m, int8_t *buf_prediction, size_t label_num, size_t top_k_size); // currently int8_t + +// after a new data is set in input +// feed data to prediction +// input the current label, (range from 0 to total number of label -1) +// (the current input data should be set by user manully to the input buffer of the model.) +// return NN_ARGUMENT_ERROR if parameter error +nnom_status_t prediction_run(nnom_predict_t *pre, uint32_t true_label, uint32_t* predict_label, float* prob); + +// to mark prediction finished +void prediction_end(nnom_predict_t *pre); + +// free all resources +void prediction_delete(nnom_predict_t *pre); + +// print matrix +void prediction_matrix(nnom_predict_t *pre); + +// print top-k +void prediction_top_k(nnom_predict_t *pre); + +// this function is to print sumarry +void prediction_summary(nnom_predict_t *pre); + +// ------------------------------- + +// stand alone prediction API +// this api test one set of data, return the prediction +// return the predicted label +// return NN_ARGUMENT_ERROR if parameter error +nnom_status_t nnom_predict(nnom_model_t *m, uint32_t *label, float *prob); + +void model_stat(nnom_model_t *m); + +void model_io_format(nnom_model_t *m); + +#ifdef __cplusplus +} +#endif + +#endif /*__NNOM_UTILS_H__ */ diff --git a/APP_Framework/Framework/knowing/nnom/port/nnom_port.h b/APP_Framework/Framework/knowing/nnom/port/nnom_port.h new file mode 100644 index 000000000..c9105431f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/port/nnom_port.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + */ + +#ifndef __NNOM_PORT_H__ +#define __NNOM_PORT_H__ + +#include +#include +#include + +/* use static memory */ +// must set buf using "nnom_set_static_buf()" before creating a model. + +/* dynamic memory interfaces */ +/* when libc is not available, you shall implement the below memory interfaces (libc equivalents). */ +#ifndef NNOM_USING_STATIC_MEMORY + #define nnom_malloc(n) malloc(n) + #define nnom_free(p) free(p) +#endif + +/* memory interface */ +/* when libc is not available, you shall implement your equivalent functions here */ +#define nnom_memset(p,v,s) memset(p,v,s) +#define nnom_memcpy(dst,src,len) memcpy(dst,src,len) + +/* runtime & debug */ +#define nnom_us_get() 0 // return a microsecond timestamp +#define nnom_ms_get() 0 // return a millisecond timestamp +#define NNOM_LOG(...) printf(__VA_ARGS__) + +/* NNoM configuration */ +#define NNOM_BLOCK_NUM (8) // maximum number of memory blocks, increase it when log request. +#define DENSE_WEIGHT_OPT (1) // if used fully connected layer optimized weights. 
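The prediction helpers declared in nnom_utils.h above describe a complete evaluation workflow: create a prediction object, copy each labelled test sample into the model input buffer, feed it with prediction_run, then close out and print the statistics. The loop below is a rough sketch of that workflow, not part of this patch; it assumes a model that has already been created and compiled, with its input and softmax-output buffers exposed as nnom_input_data / nnom_output_data (as the NNoM example code typically does), and a caller-provided test set:

```c
#include "nnom.h"

#define NUM_CLASSES 10
#define TOP_K       2

void example_evaluate(nnom_model_t *model,
                      int8_t *nnom_input_data, int8_t *nnom_output_data,
                      const int8_t test_images[][28 * 28],
                      const uint8_t test_labels[], uint32_t test_count)
{
    uint32_t predicted;
    float probability;

    nnom_predict_t *pre = prediction_create(model, nnom_output_data,
                                            NUM_CLASSES, TOP_K);

    for (uint32_t i = 0; i < test_count; i++)
    {
        /* the caller must place the sample in the model input buffer manually */
        nnom_memcpy(nnom_input_data, test_images[i], 28 * 28);
        /* runs the model once, updates the confusion matrix and top-k counters */
        prediction_run(pre, test_labels[i], &predicted, &probability);
    }

    prediction_end(pre);       /* mark the run finished (stops timing) */
    prediction_matrix(pre);    /* print the confusion matrix */
    prediction_top_k(pre);     /* print top-k accuracy */
    prediction_summary(pre);   /* print the overall summary */
    prediction_delete(pre);    /* free all prediction resources */
}
```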
+ +#endif + + + diff --git a/APP_Framework/Framework/knowing/nnom/scripts/README.MD b/APP_Framework/Framework/knowing/nnom/scripts/README.MD new file mode 100644 index 000000000..54a62afa7 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/scripts/README.MD @@ -0,0 +1,4 @@ +fully_connected_opt_weight_generation.py - is from https://github.com/ARM-software/CMSIS_5/tree/develop/CMSIS/NN/Scripts/NNFunctions witch is not a part of NNoM + +Please refer to NNoM documents for its usages. + diff --git a/APP_Framework/Framework/knowing/nnom/scripts/__init__.py b/APP_Framework/Framework/knowing/nnom/scripts/__init__.py new file mode 100644 index 000000000..5bb534f79 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/scripts/__init__.py @@ -0,0 +1 @@ +# package diff --git a/APP_Framework/Framework/knowing/nnom/scripts/fully_connected_opt_weight_generation.py b/APP_Framework/Framework/knowing/nnom/scripts/fully_connected_opt_weight_generation.py new file mode 100644 index 000000000..f68382b1f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/scripts/fully_connected_opt_weight_generation.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python + +''' + This file is apart of CMSIS-NN release + https://github.com/ARM-software/CMSIS_5/tree/develop/CMSIS/NN/Scripts/NNFunctions +''' + +import numpy as np + +def convert_to_x4_q7_weights(weights): + [r, h, w, c] = weights.shape + weights = np.reshape(weights, (r, h*w*c)) + num_of_rows = r + num_of_cols = h*w*c + new_weights = np.copy(weights) + new_weights = np.reshape(new_weights, (r*h*w*c)) + counter = 0 + for i in range(int(num_of_rows/4)): + # we only need to do the re-ordering for every 4 rows + row_base = 4*i + for j in range(int(num_of_cols/4)): + # for each 4 entries + column_base = 4*j + new_weights[counter] = weights[row_base ][column_base ] + new_weights[counter+1] = weights[row_base+1][column_base ] + new_weights[counter+2] = weights[row_base ][column_base+2] + new_weights[counter+3] = weights[row_base+1][column_base+2] + new_weights[counter+4] = weights[row_base+2][column_base ] + new_weights[counter+5] = weights[row_base+3][column_base ] + new_weights[counter+6] = weights[row_base+2][column_base+2] + new_weights[counter+7] = weights[row_base+3][column_base+2] + + new_weights[counter+8] = weights[row_base ][column_base+1] + new_weights[counter+9] = weights[row_base+1][column_base+1] + new_weights[counter+10] = weights[row_base ][column_base+3] + new_weights[counter+11] = weights[row_base+1][column_base+3] + new_weights[counter+12] = weights[row_base+2][column_base+1] + new_weights[counter+13] = weights[row_base+3][column_base+1] + new_weights[counter+14] = weights[row_base+2][column_base+3] + new_weights[counter+15] = weights[row_base+3][column_base+3] + counter = counter + 16 + # the remaining ones are in order + for j in range((int)(num_of_cols-num_of_cols%4), int(num_of_cols)): + new_weights[counter] = weights[row_base][j] + new_weights[counter+1] = weights[row_base+1][j] + new_weights[counter+2] = weights[row_base+2][j] + new_weights[counter+3] = weights[row_base+3][j] + counter = counter + 4 + return new_weights + +def convert_to_x4_q15_weights(weights): + [r, h, w, c] = weights.shape + weights = np.reshape(weights, (r, h*w*c)) + num_of_rows = r + num_of_cols = h*w*c + new_weights = np.copy(weights) + new_weights = np.reshape(new_weights, (r*h*w*c)) + counter = 0 + for i in range(int(num_of_rows/4)): + # we only need to do the re-ordering for every 4 rows + row_base = 4*i + for j in range(int(num_of_cols/2)): + # for each 2 entries + 
column_base = 2*j + new_weights[counter] = weights[row_base ][column_base ] + new_weights[counter+1] = weights[row_base ][column_base+1] + new_weights[counter+2] = weights[row_base+1][column_base ] + new_weights[counter+3] = weights[row_base+1][column_base+1] + new_weights[counter+4] = weights[row_base+2][column_base ] + new_weights[counter+5] = weights[row_base+2][column_base+1] + new_weights[counter+6] = weights[row_base+3][column_base ] + new_weights[counter+7] = weights[row_base+3][column_base+1] + + counter = counter + 8 + # the remaining ones are in order + for j in range((int)(num_of_cols-num_of_cols%2), int(num_of_cols)): + new_weights[counter] = weights[row_base][j] + new_weights[counter+1] = weights[row_base+1][j] + new_weights[counter+2] = weights[row_base+2][j] + new_weights[counter+3] = weights[row_base+3][j] + counter = counter + 4 + return new_weights + +def convert_q7_q15_weights(weights): + [r, h, w, c] = weights.shape + weights = np.reshape(weights, (r, h*w*c)) + num_of_rows = r + num_of_cols = h*w*c + new_weights = np.copy(weights) + new_weights = np.reshape(new_weights, (r*h*w*c)) + counter = 0 + for i in range(int(num_of_rows/4)): + # we only need to do the re-ordering for every 4 rows + row_base = 4*i + for j in range(int(num_of_cols/2)): + # for each 2 entries + column_base = 2*j + new_weights[counter] = weights[row_base ][column_base ] + new_weights[counter+1] = weights[row_base+1][column_base ] + new_weights[counter+2] = weights[row_base ][column_base+1] + new_weights[counter+3] = weights[row_base+1][column_base+1] + new_weights[counter+4] = weights[row_base+2][column_base ] + new_weights[counter+5] = weights[row_base+3][column_base ] + new_weights[counter+6] = weights[row_base+2][column_base+1] + new_weights[counter+7] = weights[row_base+3][column_base+1] + + counter = counter + 8 + # the remaining ones are in order + for j in range((int)(num_of_cols-num_of_cols%2), int(num_of_cols)): + new_weights[counter] = weights[row_base][j] + new_weights[counter+1] = weights[row_base+1][j] + new_weights[counter+2] = weights[row_base+2][j] + new_weights[counter+3] = weights[row_base+3][j] + counter = counter + 4 + return new_weights + + +if __name__ == "__main__": + # input dimensions + vec_dim = 127 + row_dim = 127 + + weight = np.zeros((row_dim,vec_dim), dtype=int) + + # generate random inputs + for i in range(row_dim): + for j in range(vec_dim): + weight[i][j] = np.random.randint(256)-128 + + weight = np.reshape(weight, (row_dim, vec_dim, 1, 1)) + + outfile = open("../Ref_Implementations/fully_connected_testing_weights.h", "w") + outfile.write("#define IP2_WEIGHT {") + weight.tofile(outfile,sep=",",format="%d") + outfile.write("}\n\n") + + new_weight = convert_to_x4_q7_weights(weight) + outfile.write("#define IP4_WEIGHT {") + new_weight.tofile(outfile,sep=",",format="%d") + outfile.write("}\n\n") + + new_weight = convert_q7_q15_weights(weight) + outfile.write("#define IP4_q7_q15_WEIGHT {") + new_weight.tofile(outfile,sep=",",format="%d") + outfile.write("}\n\n") + + new_weight = convert_to_x4_q15_weights(weight) + outfile.write("#define IP4_WEIGHT_Q15 {") + new_weight.tofile(outfile,sep=",",format="%d") + outfile.write("}\n\n") + + + outfile.close() diff --git a/APP_Framework/Framework/knowing/nnom/scripts/gen_config.py b/APP_Framework/Framework/knowing/nnom/scripts/gen_config.py new file mode 100644 index 000000000..d1b787abd --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/scripts/gen_config.py @@ -0,0 +1,561 @@ +''' + Copyright (c) 2018-2020 + Jianjia Ma + 
majianjia@live.com + SPDX-License-Identifier: Apache-2.0 + Change Logs: + Date Author Notes + 2020-05-22 Jianjia Ma The first version +''' +from tensorflow.keras.layers import * +import numpy as np + +def convert_tensor_name(t): + return 'tensor_'+t.name.replace('/', '_').replace(':', '_') + +def to_cstyle(data, integer=True): + #Convert an array to C style basket, not to be used for very large array. size > options['threshold'] will lead to ... + if(integer): + data = np.array(data, dtype=np.int).flatten() + else: + data = np.array(data).flatten() + s = np.array2string(data, separator=',') + s = s.replace("\n","").replace("\r","").replace(' ','') + s = s.replace(',', ', ') + s = s.replace('(', '[').replace(')', ']') + return s.replace('[', '{').replace(']', '}') + +def tensor_shape(tensor, is_io_tensor=False): + # inconsistance of TF1 and TF2 + # get tensor shape without None or ? + try: + shape = tensor.shape.as_list() # tf1 + except: + shape = tensor.get_shape().as_list() # tf2 + if(shape[0] == None or is_io_tensor): + shape = shape[1:] + else: + shape = shape + # for rnn input with timestamp = None, need a better implementation + for i in range(len(shape)): + shape[i] = shape[i] if shape[i] is not None else 1 + return shape + +def gen_base_config(layer): + config = '{.name = "%s"}' % (layer.name) + return config + +def gen_values(var_name, var, size='', dtype='const int8_t'): + s = ' [] = ;\n' + s = s.replace('', var_name).replace('', var).replace('', size).replace('', dtype) + return s + +# generate tensor by the tensor config +def gen_tensor(tensor, dec_bits, tensor_value='NULL', per_axis=False, is_io_tensor=False): + config = ''' +const nnom_shape_data_t _dim[] = ; +const nnom_qformat_param_t _dec[] = ; +const nnom_qformat_param_t _offset[] = ; +const nnom_tensor_t = { + .p_data = (void*), + .dim = (nnom_shape_data_t*)_dim, + .q_dec = (nnom_qformat_param_t*)_dec, + .q_offset = (nnom_qformat_param_t*)_offset, + .qtype = , + .num_dim = , + .bitwidth = +}; +''' + # inconsistance of TF1 and TF2 + shape = tensor_shape(tensor, is_io_tensor) + config = config.replace('', convert_tensor_name(tensor))#.name.replace('/','_').split(':')[0]) #conv2d/kernel:0 + config = config.replace('', '8') + config = config.replace('', tensor_value) + config = config.replace('', to_cstyle(shape)) + config = config.replace('', str(len(shape))) + if(type(dec_bits) == str): + config = config.replace('', dec_bits) + config = config.replace('', to_cstyle([0])) + else: + config = config.replace('', to_cstyle(dec_bits)) + config = config.replace('', to_cstyle([0])) + if(per_axis): + config = config.replace('', 'NNOM_QTYPE_PER_AXIS') + else: + config = config.replace('', 'NNOM_QTYPE_PER_TENSOR') + return config + +# create tensor by directly setting up the value +def gen_create_tensor(tensor_name, shape, dec_bits, tensor_value='NULL', per_axis=False): + config = ''' +const nnom_shape_data_t _dim[] = ; +const nnom_qformat_param_t _dec[] = ; +const nnom_qformat_param_t _offset[] = ; +const nnom_tensor_t = { + .p_data = (void*), + .dim = (nnom_shape_data_t*)_dim, + .q_dec = (nnom_qformat_param_t*)_dec, + .q_offset = (nnom_qformat_param_t*)_offset, + .qtype = , + .num_dim = , + .bitwidth = +}; +''' + config = config.replace('', tensor_name) + config = config.replace('', '8') + config = config.replace('', tensor_value) + config = config.replace('', to_cstyle(shape)) + config = config.replace('', str(len(shape))) + if(type(dec_bits) == str): + config = config.replace('', dec_bits) + config = config.replace('', 
to_cstyle([0])) + else: + config = config.replace('', to_cstyle(dec_bits)) + config = config.replace('', to_cstyle([0])) + if(per_axis): + config = config.replace('', 'NNOM_QTYPE_PER_AXIS') + else: + config = config.replace('', 'NNOM_QTYPE_PER_TENSOR') + return config + +def gen_conv2d_config(layer, output_shifts, bias_shifts): + c = ''' +const nnom_qformat_param_t _output_shift[] = ; +const nnom_qformat_param_t _bias_shift[] = ; +const nnom_conv2d_config_t _config = { + .super = , + .qtype = , + .weight = (nnom_tensor_t*)&, + .bias = (nnom_tensor_t*)&, + .output_shift = (nnom_qformat_param_t *)&_output_shift, + .bias_shift = (nnom_qformat_param_t *)&_bias_shift, + .filter_size = , + .kernel_size = , + .stride_size = , + .padding_size = , + .dilation_size = , + .padding_type = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', "NNOM_QTYPE_PER_TENSOR") + c = c.replace('',convert_tensor_name(layer.weights[0])) + c = c.replace('',convert_tensor_name(layer.weights[1])) + c = c.replace('', output_shifts) + c = c.replace('', bias_shifts) + c = c.replace('', str(layer.filters) if layer.filters is not None else str(layer.depth_multiplier)) # output channel + c = c.replace('', to_cstyle(layer.kernel_size)) + c = c.replace('', to_cstyle(layer.strides)) + c = c.replace('', '{0, 0}') # not using it with keras, defined by padding type instead + c = c.replace('', to_cstyle(layer.dilation_rate)) + c = c.replace('', 'PADDING_'+layer.padding.upper()) + return c + +def gen_conv2d_trans_config(layer, output_shifts, bias_shifts): + c = ''' +const nnom_qformat_param_t _output_shift[] = ; +const nnom_qformat_param_t _bias_shift[] = ; +const nnom_conv2d_trans_config_t _config = { + .super = , + .qtype = , + .weight = (nnom_tensor_t*)&, + .bias = (nnom_tensor_t*)&, + .output_shift = (nnom_qformat_param_t *)&_output_shift, + .bias_shift = (nnom_qformat_param_t *)&_bias_shift, + .filter_size = , + .kernel_size = , + .stride_size = , + .padding_size = , + .dilation_size = , + .padding_type = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', "NNOM_QTYPE_PER_TENSOR") + c = c.replace('',convert_tensor_name(layer.weights[0])) + c = c.replace('',convert_tensor_name(layer.weights[1])) + c = c.replace('', output_shifts) + c = c.replace('', bias_shifts) + c = c.replace('', str(layer.filters)) # output channel + c = c.replace('', to_cstyle(layer.kernel_size)) + c = c.replace('', to_cstyle(layer.strides)) + c = c.replace('', '{0, 0}') # not using it with keras, defined by padding type instead + c = c.replace('', to_cstyle(layer.dilation_rate)) + c = c.replace('', 'PADDING_'+layer.padding.upper()) + return c + +def gen_dense_config(layer, output_shifts, bias_shift): + c = ''' +const nnom_qformat_param_t _output_shift[] = ; +const nnom_qformat_param_t _bias_shift[] = ; +const nnom_dense_config_t _config = { + .super = , + .qtype = , + .weight = (nnom_tensor_t*)&, + .bias = (nnom_tensor_t*)&, + .output_shift = (nnom_qformat_param_t *)&_output_shift, + .bias_shift = (nnom_qformat_param_t *)&_bias_shift +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', "NNOM_QTYPE_PER_TENSOR") + c = c.replace('', convert_tensor_name(layer.weights[0])) + c = c.replace('', convert_tensor_name(layer.weights[1])) + c = c.replace('', output_shifts) + c = c.replace('', bias_shift) + return c + +def gen_io_config(layer, tensor_name): + c = ''' +const nnom_io_config_t _config = { + .super = , + 
.tensor = (nnom_tensor_t*)& +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', tensor_name) + return c + +def gen_output_config(previous_layer, dec_bits, output_num, value_name='nnom_output_data'): #cheat at the moments + c = ''' +const nnom_shape_data_t _dim[] = ; +const nnom_qformat_param_t _dec[] = ; +const nnom_qformat_param_t _offset[] = ; +const nnom_tensor_t = { + .p_data = (void*), + .dim = (nnom_shape_data_t*)_dim, + .q_dec = (nnom_qformat_param_t*)_dec, + .q_offset = (nnom_qformat_param_t*)_offset, + .qtype = , + .num_dim = , + .bitwidth = 8 +}; + +const nnom_io_config_t _config = { + .super = , + .tensor = (nnom_tensor_t*)& +}; +''' + shape = tensor_shape(previous_layer.output, is_io_tensor=True) + + c = c.replace('', 'tensor_output'+str(output_num)) + c = c.replace('', 'output'+str(output_num)) + c = c.replace('', '{.name = "output'+str(output_num)+'"}') # cheating at the moment. + c = c.replace('', value_name) + c = c.replace('', 'NNOM_QTYPE_PER_TENSOR') + c = c.replace('', str(len(shape))) + c = c.replace('', to_cstyle(shape)) + c = c.replace('', '{'+dec_bits+'}') + c = c.replace('', to_cstyle([0])) + return c + + +def gen_pooling_config(layer, output_shifts='0'): + c = ''' +const nnom_pool_config_t _config = { + .super = , + .padding_type = , + .output_shift = , + .kernel_size = , + .stride_size = , + .num_dim = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', 'PADDING_'+layer.padding.upper()) + c = c.replace('', to_cstyle(layer.pool_size)) + c = c.replace('', to_cstyle(layer.strides)) + c = c.replace('', str(len(layer.pool_size))) + c = c.replace('', output_shifts) # not used at the moment + return c + +def gen_gl_pooling_config(layer, output_shifts='0'): + c = ''' +const nnom_global_pool_config_t _config = { + .super = , + .output_shift = , +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', output_shifts) + return c + + + +def gen_matrix_config(layer, output_shift_name='0'): + c = ''' +const nnom_matrix_config_t _config = { + .super = , + .output_shift = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', output_shift_name) # not used at the moment + return c + +def gen_zero_padding_config(layer): + c = ''' +const nnom_zero_padding_config_t _config = { + .super = , + .pad = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + try: + c = c.replace('', to_cstyle(sum(layer.padding, ()))) + except: + pad = ((0, 0), layer.padding) + c = c.replace('', to_cstyle(sum(pad, ()))) + return c + +def gen_cropping_config(layer): + c = ''' +const nnom_cropping_config_t _config = { + .super = , + .pad = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + try: + c = c.replace('', to_cstyle(sum(layer.cropping, ()))) #((top_crop, bottom_crop), (left_crop, right_crop)) + except: + pad = ((0, 0), layer.cropping) + c = c.replace('', to_cstyle(sum(pad, ()))) + return c + +def gen_upsampling_config(layer): + c = ''' +const nnom_upsample_config_t _config = { + .super = , + .kernel = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', to_cstyle(layer.size)) + return c + +def gen_softmax_config(layer): + c = ''' +const nnom_softmax_config_t _config = { + .super = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + return c + +def 
gen_flatten_config(layer): + c = ''' +const nnom_flatten_config_t _config = { + .super = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + return c + +def gen_reshape_config(layer): + c = ''' +const nnom_shape_data_t _targeted_shape[] = ; +const nnom_reshape_config_t _config = { + .super = , + .dim = (nnom_shape_data_t*)_targeted_shape, + .num_dim = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', to_cstyle(layer.output_shape[1:])) + c = c.replace('', str(len(layer.output_shape[1:]))) + return c + +def gen_concat_config(layer): + c = ''' +const nnom_concat_config_t _config = { + .super = , + .axis = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', str(layer.axis)) + return c + +def gen_lambda_config(layer, run_func_name='NULL', build_func_name='NULL', free_func_name='NULL', parameters_name='NULL'): + c = ''' +const nnom_lambda_config_t _config = { + .super = , + .run_func_name = , + .build_func_name = , + .free_func_name = , + .parameters = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', run_func_name) + c = c.replace('', build_func_name) + c = c.replace('', free_func_name) + c = c.replace('', parameters_name) + return c + +def gen_rnn_config(layer): + c = ''' +const nnom_rnn_config_t _config = { + .super = , + .return_sequence = , + .stateful = , + .go_backwards = +}; +''' + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', 'true' if layer.stateful else 'false') + c = c.replace('', 'true' if layer.go_backwards else 'false') + c = c.replace('', 'true' if layer.return_sequences else 'false') + return c + +def gen_simple_cell_config(layer, q_list): + c = ''' +const nnom_simple_cell_config_t _simple_cell_config = { + .super = , + .weights = (nnom_tensor_t*)&, + .recurrent_weights = (nnom_tensor_t*)&, + .bias = (nnom_tensor_t*)&, + .q_dec_iw = , + .q_dec_hw = , + .q_dec_h = , + .act_type = , + .units = +}; +''' + try: + cell_cfg = layer.get_config()['cell']['config'] + except: + cell_cfg = layer.get_config() + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', convert_tensor_name(layer.weights[0])) + c = c.replace('', convert_tensor_name(layer.weights[1])) + c = c.replace('', convert_tensor_name(layer.weights[2])) + c = c.replace('', str(q_list[1])) # the qfmt of input x weight + c = c.replace('', str(q_list[2])) # q of hidden x recurrent weight + c = c.replace('', str(q_list[0])) # output, if act != relu, should be 7 (consider delete it.) + c = c.replace('', 'ACT_' + cell_cfg['activation'].upper()) + c = c.replace('', str(cell_cfg['units'])) + return c + +def gen_lstm_cell_config(layer, q_list): + c = ''' +const nnom_lstm_cell_config_t _lstm_cell_config = { + .super = , + .weights = (nnom_tensor_t*)&, + .recurrent_weights = (nnom_tensor_t*)&, + .bias = (nnom_tensor_t*)&, + .q_dec_z = , + .q_dec_h = , + .q_dec_c = , + .units = +}; +''' + try: + cell_cfg = layer.get_config()['cell']['config'] + except: + cell_cfg = layer.get_config() + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', convert_tensor_name(layer.weights[0])) + c = c.replace('', convert_tensor_name(layer.weights[1])) + c = c.replace('', convert_tensor_name(layer.weights[2])) + c = c.replace('', str(q_list[0])) # output and memory state, (should be q0.7. 
consider delete it) + c = c.replace('', str(q_list[1])) # cell state + c = c.replace('', str(q_list[2])) # input*weight + hidden*weight + bias + c = c.replace('', str(cell_cfg['units'])) + return c + + + +def gen_gru_cell_config(layer, q_list): + c = ''' +const nnom_gru_cell_config_t _gru_cell_config = { + .super = , + .weights = (nnom_tensor_t*)&, + .recurrent_weights = (nnom_tensor_t*)&, + .bias = (nnom_tensor_t*)&, + .q_dec_z = , + .q_dec_h = , + .units = +}; +''' + try: + cell_cfg = layer.get_config()['cell']['config'] + except: + cell_cfg = layer.get_config() + c = c.replace('', layer.name) + c = c.replace('', gen_base_config(layer)) + c = c.replace('', convert_tensor_name(layer.weights[0])) + c = c.replace('', convert_tensor_name(layer.weights[1])) + c = c.replace('', convert_tensor_name(layer.weights[2])) + c = c.replace('', str(q_list[0])) # + c = c.replace('', str(q_list[1])) # + c = c.replace('', str(cell_cfg['units'])) + return c + + +if __name__ == "__main__": + # test only + from tensorflow.keras.models import load_model + model = load_model("../model.h5") + print(gen_tensor(model.layers[1].weights[0], dec_bits=(1, 2, 3, 4, 5))) + print(gen_tensor(model.layers[1].weights[1], dec_bits=(1, 2, 3, 4, 5))) + print(gen_conv2d_config(model.layers[1], (1,2,3), 3)) + + with open("test.h", 'w') as fp: + # fp.write(gen_tensor(model.layers[1].weights[0], dec_bits=(1, 2, 3, 4, 5))) + # fp.write(gen_tensor(model.layers[1].weights[1], dec_bits=(1, 2, 3, 4, 5))) + # fp.write(gen_conv2d_config(model.layers[1], (1,2,3,))) + + fp.write('#include "nnom.h"\n') + + # test all + for layer in model.layers: + if(type(layer) in [Conv2D, Conv1D]): + for w in layer.weights: + fp.write(gen_tensor(w, [3])) + fp.write(gen_conv2d_config(layer, {0}, 2)) + elif(type(layer) in [Dense]): + for w in layer.weights: + fp.write(gen_tensor(w, [3])) + fp.write(gen_dense_config(layer, 2, 2)) + elif(type(layer) in [Input]): + fp.write(gen_io_config(layer, [9,1,1])) + elif(type(layer) in [MaxPooling2D, GlobalMaxPooling2D, AveragePooling2D, GlobalAveragePooling2D]): + fp.write(gen_pooling_config(layer)) + elif(type(layer) in [Multiply, Add, Subtract]): + fp.write(gen_matrix_config(layer)) + elif(type(layer) in [ZeroPadding2D, ZeroPadding1D]): + fp.write(gen_zero_padding_config(layer)) + elif(type(layer) in [Cropping2D, Cropping1D]): + fp.write(gen_cropping_config(layer)) + elif(type(layer) in [Softmax]): + fp.write(gen_softmax_config(layer)) + elif(type(layer) in [Flatten]): + fp.write(gen_flatten_config(layer)) + elif(type(layer) in [Concatenate]): + fp.write(gen_concat_config(layer)) + elif(type(layer) in [Lambda]): + fp.write(gen_lambda_config(layer)) + elif(type(layer) in [UpSampling2D, UpSampling1D]): + fp.write(gen_upsampling_config(layer)) + + diff --git a/APP_Framework/Framework/knowing/nnom/scripts/nnom.py b/APP_Framework/Framework/knowing/nnom/scripts/nnom.py new file mode 100644 index 000000000..45e6b30a7 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/scripts/nnom.py @@ -0,0 +1,1198 @@ +''' + Copyright (c) 2018-2020 + Jianjia Ma + majianjia@live.com + + SPDX-License-Identifier: Apache-2.0 + + Change Logs: + Date Author Notes + 2019-02-05 Jianjia Ma The first version +''' + +import sklearn.metrics as skmetrics +import matplotlib.pyplot as plt +import tensorflow as tf +import tensorflow.keras.backend as K +from tensorflow.keras import * +from tensorflow.keras.layers import * +from fully_connected_opt_weight_generation import * +from gen_config import * +import scipy.stats +import time +import warnings + 
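+# The wildcard imports above pull convert_to_x4_q7_weights()/convert_q7_q15_weights()
+# from fully_connected_opt_weight_generation.py and the gen_*_config()/gen_tensor()
+# writers from gen_config.py, so this script needs the scripts/ directory on sys.path.
+# Typical use from a training script is roughly (a sketch, not a fixed API):
+#   from nnom import generate_model, evaluate_model
+#   generate_model(trained_model, x_test[:100], name='weights.h',
+#                  format='hwc', quantize_method='max_min')   # calibrates on the data passed in
+#   evaluate_model(trained_model, x_test, y_test)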
+model_major_version = 0 +model_sub_version = 4 +model_reversion = 3 + +#define NNOM_MAJORVERSION 0L /**< major version number */ +#define NNOM_SUBVERSION 4L /**< minor version number */ +#define NNOM_REVISION 3L /**< revise version number */ +#define NNOM_VERSION (NNOM_MAJORVERSION * 10000) + (NNOM_SUBVERSION * 100) + NNOM_REVISION) + +def fuse_bn_to_conv(layer): + # try to fuse BN layer to convolutional + if ('conv' in layer.name) and \ + ('batch_normalization' in layer.outbound_nodes[0].outbound_layer.name): + print("fusing batch normalization to", layer.name) + bn_layer = layer._outbound_nodes[0].outbound_layer + c_w = layer.get_weights()[0] + c_b = layer.get_weights()[1] + print('original weight max', c_w.max(), 'min', c_w.min()) + print('original bias max', c_b.max(), 'min', c_b.min()) + bn_gamma = bn_layer.get_weights()[0] + bn_beta = bn_layer.get_weights()[1] + bn_mean = bn_layer.get_weights()[2] + bn_variance = bn_layer.get_weights()[3] + epsilon = 1e-3 # default epsilon for tf.slim.batch_norm + if ('conv2d' in layer.name): + if "depthwise" in layer.name: # depthwise batchnorm params are ordered differently + for l in range(c_w.shape[3]): + for k in range(c_w.shape[2]): + for j in range(c_w.shape[1]): + for i in range(c_w.shape[0]): + c_w[i][j][k][l] *= bn_gamma[k*c_w.shape[3]+l] / np.sqrt(bn_variance[k*c_w.shape[3]+l] + epsilon) + depth_dim = c_w.shape[2] * c_w.shape[3] # test needed + # normal conv + else: + for l in range(c_w.shape[3]): + for k in range(c_w.shape[2]): + for j in range(c_w.shape[1]): + for i in range(c_w.shape[0]): + c_w[i][j][k][l] *= bn_gamma[l] / np.sqrt(bn_variance[l] + epsilon) + depth_dim = c_w.shape[3] + for l in range(depth_dim): + c_b[l] = (bn_gamma[l] * (c_b[l] - bn_mean[l]) / np.sqrt(bn_variance[l] + epsilon)) + bn_beta[l] + # conv1d + else: + epsilon = 1e-3 # default epsilon for tf.slim.batch_norm + for k in range(c_w.shape[2]): + for j in range(c_w.shape[1]): + for i in range(c_w.shape[0]): + if "depthwise" in layer.name: # depthwise batchnorm params are ordered differently + c_w[i][j][k] *= bn_gamma[j] / np.sqrt(bn_variance[j] + epsilon) + else: + c_w[i][j][k] *= bn_gamma[k] / np.sqrt(bn_variance[k] + epsilon) + + if "depthwise" in layer.name: + depth_dim = c_w.shape[1]*c_w.shape[2] # need to be tested + else: + depth_dim = c_w.shape[2] + for l in range(depth_dim): + c_b[l] = (bn_gamma[l] * (c_b[l] - bn_mean[l]) / np.sqrt(bn_variance[l] + epsilon)) + bn_beta[l] + + print('fused weight max', c_w.max(), 'min', c_w.min()) + print('fused bias max', c_b.max(), 'min', c_b.min()) + # write the weights back to the layer + # after that, the model will be destroyed.. need a better way to pass the new weight + layer.set_weights([c_w, c_b]) + +def generate_test_bin(x, y, name='test_data_with_label.bin'): + ''' + this method generate the + :param x: input x data size + :param y: input label (one hot label) + :return: + ''' + # quantize input x + dec_bits = find_dec_bits_max_min(x, bit_width=8) + x = np.round(x*2**dec_bits).clip(-128, 127).astype(np.int8) + # get label + if(len(y.shape) >1): + test_label = np.argwhere(y == 1).astype(np.int8) # test data + test_label = test_label[:, 1] + else: + test_label = y + + # get data + dat = x.astype(dtype="byte") # test data + batch_size = dat.shape[0] # total pices of data + dat = dat.flatten() # flatten to get the total size. + block_size = int(dat.size / batch_size) # this must be integer but... 
just to confirm + + # write (label x 128) (data_block x 128) + label_batch = 128 # the Y-modem example uses 128 batch + with open(name, 'wb') as f: + start = 0 + while start <= (test_label.size - label_batch): + test_label[start: start + label_batch].tofile(f) + dat[block_size * start: block_size * (start + label_batch)].tofile(f) + start += label_batch + + # the rest data + if (start < test_label.size): + rest_len = test_label.size - start + new_labls = test_label[start:] + new_labls = np.pad(new_labls, (0, label_batch - rest_len), mode='constant') + new_labls.tofile(f) + dat[block_size * start:].tofile(f) + + print("binary test file generated:", name) + print("test data length:", test_label.size) + return + +def is_shift_layer(layer): + ''' layer which can change the output encoding''' + #FIXME: add more which will change the output shift + if('input' in layer.name or + 'conv2d' in layer.name or + 'conv1d' in layer.name or + 'dense' in layer.name or + 'softmax' in layer.name or + 'sigmoid' in layer.name or + 'tanh' in layer.name or + ('add' in layer.name and 'zero' not in layer.name) or # the name, zero_padding contains 'add' + 'subtract' in layer.name or + 'multiply' in layer.name or + ('activation' in layer.name and layer.get_config()['activation'] == 'softmax')or + ('activation' in layer.name and layer.get_config()['activation'] == 'hard_sigmoid') or + ('activation' in layer.name and layer.get_config()['activation'] == 'tanh') or + ('activation' in layer.name and layer.get_config()['activation'] == 'hard_tanh') or + is_rnn_layer(layer) + ): + return True + return False + +def is_shift_fixed(layer): + ''' layer which shift to a fixed value''' + #FIXME: add more which will change the output shift + if('softmax' in layer.name or + 'sigmoid' in layer.name or + 'tanh' in layer.name or + ('activation' in layer.name and layer.get_config()['activation'] == 'softmax') or + ('activation' in layer.name and layer.get_config()['activation'] == 'sigmoid') or + ('activation' in layer.name and layer.get_config()['activation'] == 'hard_sigmoid') or + ('activation' in layer.name and layer.get_config()['activation'] == 'tanh') or + ('activation' in layer.name and layer.get_config()['activation'] == 'hard_tanh') or + is_rnn_layer(layer) + ): + return True + return False + +def is_lstm_layer(layer): + if type(layer) is LSTM or 'lstm' in layer.name: + return True + if(type(layer) is RNN or 'rnn' in layer.name): + if(type(layer.cell) is LSTMCell or 'lstm' in layer.cell.name): + return True + return False + +def is_gru_layer(layer): + if type(layer) is GRU or 'gru' in layer.name: + return True + if(type(layer) is RNN or 'rnn' in layer.name): + if(type(layer.cell) is GRUCell or 'gru' in layer.cell.name): + return True + return False + +def is_rnn_layer(layer): + if( 'rnn' in layer.name or + is_lstm_layer(layer) or + is_gru_layer(layer) + ): + return True + return False + +def find_offset(data): + """ + Offset of the original data before quantisation + :param data: + :return: offset of the data block + """ + return np.average(data) + + +def find_dec_bits_max_min(data, bit_width=8, maximum_bit=32): + """ + A ragular non-saturated shift-based quantisation mathod. Using max/min values + :param data: + :param bit_width: + :param maximum_bit: maximum decimal bit. Incase sometime bias is too small lead to very large size dec bit + :return: + """ + max_val = abs(data.max()) - abs(data.max()/pow(2, bit_width)) # allow very small saturation. 
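+    # min_val/int_bits/dec_bits below implement a plain Qm.n split: int_bits is
+    # ceil(log2(largest magnitude)) and dec_bits = (bit_width-1) - int_bits.
+    # e.g. for data spanning roughly +/-2.5 with bit_width=8: int_bits = 2,
+    # dec_bits = 5, i.e. Q2.5 with a resolution of 1/32.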
+ min_val = abs(data.min()) - abs(data.min()/pow(2, bit_width)) + int_bits = int(np.ceil(np.log2(max(max_val, min_val)))) + dec_bits = (bit_width-1) - int_bits + return min(dec_bits, maximum_bit) + +def find_dec_bits_max_min_axis(data, axis=-1,bit_width=8, maximum_bit=32): + """ + A ragular non-saturated shift-based quantisation mathod. Using max/min values + :param data: + :param axis: + :param bit_width: + :return: + """ + dec_bits = [] + # if(len(data.shape) < np.abs(axis)): # for depthwise with axis = -2 while len(shape) =1 + # size = data.shape[0] + # axis = 0 # + # else: + # size = data.shape[axis] + for i in np.arange(0, data.shape[axis]): + d = np.take(data, indices=i, axis=axis) + max_val = abs(d.max()) - abs(d.max() / pow(2, bit_width)) # allow very small saturation. + min_val = abs(d.min()) - abs(d.min() / pow(2, bit_width)) + int_bit = int(np.ceil(np.log2(max(abs(max_val), abs(min_val))))) + dec_bit = (bit_width-1) - int_bit + dec_bits.append(min(dec_bit, maximum_bit)) + return dec_bits + +def find_dec_bits_kld(data, bit_width=8, scan_times=4, maximum_bit=16): + """ + # saturation shift, using KLD method (Kullback-Leibler divergence) + # Ref: http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf + :param data: The data for looking for quantisation + :param bit_width: the bitwidth of the data + :param scan_times: the times to try the best kld (normally the second is the best.) + :return: dec bit width for this data + """ + # do a regular non-saturated quantisation + max_val = data.max() + min_val = data.min() + abs_max = max(abs(max_val), abs(min_val)) + int_bits = int(np.ceil(np.log2(max(abs(max_val), abs(min_val))))) + dec_bits = (bit_width-1) - int_bits + + # now looking for the best quantisation using KLD method + small_var = 1e-5 + bins = np.arange(-abs_max, abs_max, abs_max / 2048 * 2) + q_bins = np.arange(-abs_max, abs_max, abs_max / 256 * 2) + flat_hist = np.histogram(data.flatten(), bins=bins)[0] + kl_loss = [] + kl_shifts = [] + for shift in range(scan_times): + t = 2 ** (dec_bits + shift) # 2-based threshold + act = np.round(data.flatten() * t) + act = act / t + act = np.clip(act, -128 / t, 127 / t) + act = np.histogram(act, bins=q_bins)[0] + act_hist = np.zeros(2047) + chunk = int(2048 / 256) + for i in range(int(255)): + none_zero = np.count_nonzero(flat_hist[i * chunk:(i + 1) * chunk]) + if none_zero == 0: + continue + for j in range(chunk): + act_hist[i * chunk + j] = act[i] / none_zero if flat_hist[i * chunk + j] != 0 else 0 + flat_hist[flat_hist == 0] = small_var + act_hist[act_hist == 0] = small_var + kl = scipy.stats.entropy(flat_hist, act_hist) + kl_loss.append(kl) + kl_shifts.append(dec_bits + shift) + + # now get the least loss from the scaned kld shift + dec_bits = kl_shifts[np.argmin(kl_loss)] # set the dec_bit to the KLD results + return min(dec_bits, maximum_bit) + +# convert to [-128,128) or int8 +def quantize_data(data, dec_bits, axis=-1, per_axis=False, bitwith=8): + if (per_axis): + out = [] + for i in np.arange(0, data.shape[axis]): + d = np.take(data, indices=i, axis=axis) + d = np.round(d * 2 ** dec_bits[i]) + d = np.clip(d, -2**(bitwith-1), 2**(bitwith-1)-1) + d = np.expand_dims(d, axis=axis) + out.append(d) + out = np.concatenate(out, axis=axis) + return out + else: + return np.clip(np.round(data * 2 ** dec_bits), -2**(bitwith-1), 2**(bitwith-1) -1) + +def quantize_rnn_intermediate_output(layer, features): + def nnom_sigmoid(data): + return 1 / (1 + np.exp(-data)) + def nnom_tanh(data): + return 
np.tanh(data) + def split_array(d, num): + l = len(d) + if(num==4): + return d[:int(l/4)], d[int(l/4): int(l/2)], d[int(l/2):-int(l/4)], d[-int(l/4):] + elif(num==3): + return d[:int(l/3)], d[int(l/3): -int(l/3)], d[-int(l/3):] + lcfg = layer.get_config() + if(lcfg['go_backwards']): + features = features[:,::-1,:] # reverse timestamp + + if(type(layer.cell) is SimpleRNNCell): + cfg = layer.cell.get_config() + state = np.zeros(cfg['units']) + kernel = layer.get_weights()[0] + recurrent_kernel = layer.get_weights()[1] + bias = layer.get_weights()[2] + # replicate keras's implementation + def simple_cell_step(inputs, state, kernel, recurrent_kernel, bias, activation): + h = np.dot(inputs, kernel) + h = np.add(h, bias) + h2 = np.dot(state, recurrent_kernel) + output = h + h2 + output = activation(output) + return output, h, h2 + output_arrary = [] + h_array = [] + h2_array = [] + activation = nnom_tanh if cfg['activation'] is 'tanh' else nnom_sigmoid + state = np.zeros(cfg['units']) + for feature in features: + if(not layer.stateful): + state = np.zeros(cfg['units']) + for fe in feature: + output, h, h2 = simple_cell_step(fe, state, kernel, recurrent_kernel, bias, activation) + state = output + output_arrary.append(output) + h_array.append(h) + h2_array.append(h2) + output_arrary = np.array(output_arrary) + h_array = np.array(h_array) + h2_array = np.array(h2_array) + # qout = find_dec_bits_kld(output_arrary) + # qh = find_dec_bits_kld(h_array) + # qh2 = find_dec_bits_kld(h2_array) + qout = find_dec_bits_max_min(output_arrary) + qh = find_dec_bits_max_min(h_array) + qh2 = find_dec_bits_max_min(h2_array) + return [qout, qh, qh2] + + elif (type(layer.cell) is LSTMCell or 'lstm' in layer.cell.name): + cfg = layer.cell.get_config() + state = np.zeros(cfg['units']*2) + kernel = layer.get_weights()[0] + recurrent_kernel = layer.get_weights()[1] + bias = layer.get_weights()[2] + def lstm_cell_step(cell_inputs, cell_states, kernel, recurrent_kernel, bias): + h_tm1 = cell_states[0] # previous memory state + c_tm1 = cell_states[1] # previous carry state + z1 = np.dot(cell_inputs, kernel) + z1 = np.add(z1, bias) + z2 = np.dot(h_tm1, recurrent_kernel) + z = z1+z2 # -----> q_z + z0, z1, z2, z3 = split_array(z, 4) + i = nnom_sigmoid(z0) # q0.7 + f = nnom_sigmoid(z1) # q0.7 + c1 = f*c_tm1 + c2 = i*nnom_tanh(z2) # q0.7 + c = c1 + c2 # -----> q_c + o = nnom_sigmoid(z3) # q0.7 + tc = nnom_tanh(c) + h = o * tc # q0.7 + return h, [h, c], z ,z0, z1, z2, z3 + h_array = [] + c_array = [] + z_array = [] + z0_array = [] + z1_array = [] + z2_array = [] + z3_array = [] + state = [np.zeros(cfg['units']), np.zeros(cfg['units'])] + for feature in features: + if(not layer.stateful): + state = [np.zeros(cfg['units']), np.zeros(cfg['units']) ] + for fe in feature: + output, state, z, z0, z1, z2, z3 = lstm_cell_step(fe, state, kernel, recurrent_kernel, bias) + h_array.append(output) + c_array.append(state[1]) + z_array.append(z) + z0_array.append(z0) + z1_array.append(z1) + z2_array.append(z2) + z3_array.append(z3) + h_array = np.array(h_array) + c_array = np.array(c_array) + z_array = np.array(z_array) + z0_array = np.array(z0_array) + z1_array = np.array(z1_array) + z2_array = np.array(z2_array) + z3_array = np.array(z3_array) + # q_h = find_dec_bits_kld(h_array) + # q_c = find_dec_bits_kld(c_array) + # q_z = find_dec_bits_kld(z_array) + # q_z0 = find_dec_bits_kld(z0_array) + # q_z1 = find_dec_bits_kld(z1_array) + # q_z2 = find_dec_bits_kld(z2_array) + # q_z3 = find_dec_bits_kld(z3_array) + q_h = 
find_dec_bits_max_min(h_array) + q_c = find_dec_bits_max_min(c_array) + q_z = find_dec_bits_max_min(z_array) + q_z0 = find_dec_bits_max_min(z0_array) # not needed. + q_z1 = find_dec_bits_max_min(z1_array) + q_z2 = find_dec_bits_max_min(z2_array) + q_z3 = find_dec_bits_max_min(z3_array) + return [q_h, q_c, q_z] + + elif (type(layer.cell) is GRUCell or 'gru' in layer.cell.name): + cfg = layer.cell.get_config() + state = np.zeros(cfg['units']) + k = layer.get_weights()[0] + rk = layer.get_weights()[1] + bias = layer.get_weights()[2] + + def gru_cell_step(cell_inputs, cell_states, kernel, recurrent_kernel, input_bias, recurrent_bias): + h_tm1 = cell_states[0] + # inputs projected by all gate matrices at once + matrix_x = np.dot(cell_inputs, kernel) + input_bias + x_z, x_r, x_h = split_array(matrix_x, 3) + # hidden state projected by all gate matrices at once + matrix_inner = np.dot(h_tm1, recurrent_kernel) + recurrent_bias + recurrent_z, recurrent_r, recurrent_h = split_array(matrix_inner, 3) + z = nnom_sigmoid(x_z + recurrent_z) + r = nnom_sigmoid(x_r + recurrent_r) + hh = nnom_tanh(x_h + r * recurrent_h) + # previous and candidate state mixed by update gate + # h = z * h_tm1 + (1 - z) * hh + h1 = z*h_tm1 + h2 = 1-z + h3 = h2 * hh + h = h1 + h3 + return h, [h], matrix_x, matrix_inner + h_array = [] + z_array = [] + i_array=[] + state = [np.zeros(cfg['units'])] + for feature in features: + if (not layer.stateful): + state = [np.zeros(cfg['units'])] + for fe in feature: + output, state, z, i = gru_cell_step(fe, state, k, rk, bias[0], bias[1]) + h_array.append(output) + z_array.append(z) + i_array.append(i) + h_array = np.array(h_array) + i_array = np.array(i_array) + z_array = np.array(z_array) + # q_h = find_dec_bits_kld(h_array) + # q_i = find_dec_bits_kld(i_array) + # q_z = find_dec_bits_kld(z_array) + q_h = find_dec_bits_max_min(h_array) + q_i = find_dec_bits_max_min(i_array) + q_z = find_dec_bits_max_min(z_array) + q_z = min(q_i, q_z) + return [q_h, q_z] + return [] + +def quantize_output(model, x_test, quantize_method='max_min', layer_offset=False, calibrate_size=None): + # limit the test data size + if(calibrate_size is not None): + if (x_test.shape[0] > calibrate_size): + x_test = x_test[:calibrate_size] + # test, show the output ranges + layer_q_list = {} + # FIXME: only support one input + if (type(model.layers[0]) != InputLayer): + L = [model.input] + model.layers + else: + L = model.layers + + for layer in L: # layer loop + if ("input" in layer.name): + features = x_test + else: + # rnn need a further step to determine the intermediate q format + if (is_rnn_layer(layer)): + in_layer = layer.inbound_nodes[0].inbound_layers + layer_model = Model(inputs=model.input, outputs=in_layer.output) + bs = model.input.shape[0] + features = layer_model.predict(x_test, batch_size=bs) + intermediate_dec = quantize_rnn_intermediate_output(layer, features) + print(layer.name, 'dec bit', intermediate_dec) + layer_q_list['intermediate_' + layer.name] = intermediate_dec + + # batch_normalization will need to be handled differently, since we are fusing the weight to its previosu conv. 
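+            # For layers that can change the output Q format (and for BN, whose
+            # statistics are attributed to the conv it will be fused into) a
+            # truncated Model up to this layer is evaluated on x_test to sample the
+            # real activation range; all other layers keep the previous features so
+            # they inherit the same dec bits as their input.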
+ # sigmoid and tanh are different, their shift is fixed to 7 + if (is_shift_layer(layer) or + ('batch_normalization' in layer.name)): + layer_model = Model(inputs=model.input, outputs=layer.output) + bs = model.input.shape[0] + features = layer_model.predict(x_test, batch_size=bs) + else: + # leave the features not changed, so this layer shift will be the same as its inputs + pass + + # we currently only support one offset for a layer output. + if(layer_offset): + offset = find_offset(features) + features = features - offset + else: + offset = 0 + # saturated shift using KLD method OR non saturated shift using max-min + if ("kld" in quantize_method + and not is_shift_fixed(layer) + and "input" not in layer.name + and "dense" not in layer.name): # test, also do not use kld in input layer + dec_bits = find_dec_bits_kld(features, bit_width=8, scan_times=4) + print(layer.name,"Quantized method:", "KLD", "Values max:", np.max(features), "min:", np.min(features), "dec bit", dec_bits) + else: + dec_bits = find_dec_bits_max_min(features, bit_width=8) + print(layer.name,"Quantized method:","max-min"," Values max:", np.max(features), "min:", np.min(features), "dec bit", dec_bits) + # quantise offset + offset = int(np.round(offset * 2 ** dec_bits)) + # record the shift + if (type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + layer_q_list[layer.name.split(':')[0]] = [dec_bits, offset] + else: + layer_q_list[layer.name] = [dec_bits, offset] + if ('batch_normalization' in layer.name): + layer_q_list[layer.inbound_nodes[0].inbound_layers.name] = [dec_bits, offset] # use the bn layer shift to update the last layer. + + # scan the layers backward, try to unify the dec bit in multiple input layers, (add, mult... concat...etc.) + LM = {} + for layer in model.layers: + LM[layer.name] = layer + L = [l for l in model.layers[1:]] + L.reverse() + def update_previous_layer_shift(layer, dec_bit): + if(type(layer.input) == list): + for inp in layer.input: + iname = inp.name.split('/')[0] + if('input' in iname): + continue + layer_q_list[iname][0] = dec_min + if(not is_shift_layer(LM[iname])): + update_previous_layer_shift(LM[iname], dec_bit) + else: + iname = layer.input.name.split('/')[0] + if('input' in iname): + return + layer_q_list[iname][0] = dec_min + if(not is_shift_layer(LM[iname])): + update_previous_layer_shift(LM[iname], dec_bit) + for layer in L: + if(type(layer.input) == list): + iname = layer.input[0].name.split('/')[0].split(':')[0] + dec_min = layer_q_list[iname][0] + # find min dec bit in these input + for inp in layer.input: + iname = inp.name.split('/')[0].split(':')[0] + if(layer_q_list[iname][0] < dec_min): + dec_min = layer_q_list[iname][0] + if(layer_q_list[iname][0] != dec_min): + bFlag = True + for inp in layer.input: + iname = inp.name.split('/')[0].split(':')[0] + layer_q_list[iname][0] = dec_min + if(not is_shift_layer(LM[iname])): + update_previous_layer_shift(LM[iname], dec_min) + print('set dec bit', dec_min, 'for the input of', layer.name, ':', [inp.name.split('/')[0] for inp in layer.input]) + if(not is_shift_layer(layer) or dec_min < layer_q_list[layer.name][0]): # update current layer's shift only when we cannot change the shift + layer_q_list[layer.name][0] = dec_min + # quantise offset + print("quantisation list", layer_q_list) + return layer_q_list + + +def layer_name_from_tensor(t): + return t.name.replace(':','/').split('/')[0] + + +def quantize_weights(model, name='weights.h', format='hwc', per_channel_quant=True, layer_q_list=None): + # Quantize 
weights to 8-bits using (min,max) and write to file + f = open(name, 'w') + f.write('#include "nnom.h"\n\n') + f.write('/* Weights, bias and Q format */\n') + f.close() + for curr_idx, layer in enumerate(model.layers): + if (not layer.weights): + continue + # before merging bn layer, check if the bn is "legally" after Conv + if('batch_normalization' in layer.name) and \ + ('conv' not in layer.inbound_nodes[0].inbound_layers.name): + raise Exception('Only support batch_normalization placed after conv', layer.name, + layer.inbound_nodes[0].inbound_layers.name) + # try to fuse BN layer to convolutional + if ('conv' in layer.name) and \ + ('batch_normalization' in layer.outbound_nodes[0].outbound_layer.name): + fuse_bn_to_conv(layer) + # generate weights and bias now + weight_dec_shift = 0 + print('quantizing weights for layer', layer.name) + layer_weights = layer.get_weights() + for idx, var in enumerate(layer_weights): + var_name = convert_tensor_name(layer.weights[idx]) + var_values = var + if("kernel" not in var_name and 'bias' not in var_name): # ignore batchnormalisation's parameters + continue + + if (per_channel_quant and type(layer) in [Conv2D, Conv1D, DepthwiseConv2D, Conv2DTranspose]): + if(type(layer) in [DepthwiseConv2D] and "kernel" in var_name): #depthwise kernel quantised by + shape = var_values.shape[:2] + (-1,) # need to combine the mult and channel first + var = var_values.reshape(shape) + dec_bits = find_dec_bits_max_min_axis(var, axis=-1, bit_width=8) + elif(type(layer) in [Conv2DTranspose]): + dec_bits = find_dec_bits_max_min_axis(var_values, axis=-2, bit_width=8) + else: + dec_bits = find_dec_bits_max_min_axis(var_values, bit_width=8) + else: + dec_bits = find_dec_bits_max_min(var_values, bit_width=8) + print(' ', var_name, "dec bit", dec_bits) + + # kernel dec, bias dec, bias shift, output shift + if(is_shift_layer(layer) and not is_rnn_layer(layer)): + inp = layer.input.name.replace(':','/').split('/')[0] + layer_input_dec = layer_q_list[inp][0] + layer_output_dec = layer_q_list[layer.name][0] + if ("kernel" in var_name): + weight_dec_shift = dec_bits + else: + # channel wise + if hasattr(dec_bits, '__len__'): + bias_shift = np.full(len(dec_bits), layer_input_dec)+weight_dec_shift-dec_bits + layer_output_shift = np.full(len(weight_dec_shift), layer_input_dec) + weight_dec_shift \ + - np.full(len(weight_dec_shift), layer_output_dec) + if (np.min(bias_shift) < 0): + for i, w_dec in enumerate(weight_dec_shift): + if (bias_shift[i] < 0): + dec_bits[i] = w_dec + bias_shift[i] = 0 + # layer wise + else: + bias_shift = layer_input_dec + weight_dec_shift - dec_bits + layer_output_shift = layer_input_dec + weight_dec_shift - layer_output_dec + if (bias_shift < 0): + dec_bits = weight_dec_shift + bias_shift = 0 + # RNN layer's kernel dec, bias dec, bias shift, output shift + if(is_rnn_layer(layer)): + inp = layer.input.name.replace(':','/').split('/')[0] + layer_input_dec = layer_q_list[inp][0] + layer_output_dec = layer_q_list[layer.name][0] + #if (type(layer.cell) is SimpleRNNCell): + if ("kernel" in var_name and 'recurrent' not in var_name): + weight_dec_shift = dec_bits + elif ('bias' in var_name): + bias_shift = layer_input_dec + weight_dec_shift - dec_bits + layer_output_shift = layer_input_dec + weight_dec_shift - layer_output_dec # this is not valid + if (bias_shift < 0): + dec_bits = weight_dec_shift + bias_shift = 0 + + # now quantise them + if(type(layer) in [Conv2D, Conv1D, DepthwiseConv2D, Conv2DTranspose]): + if(type(layer) in [DepthwiseConv2D] and "kernel" in 
var_name): + old_shape = var_values.shape + var_values = quantize_data(var_values.reshape(var_values.shape[:2] + (-1,)), + dec_bits, axis=-1, per_axis=per_channel_quant) # convert to [h, w, out x mult] + var_values = var_values.reshape(old_shape) # convert the shape back to [h, w, out, mult] + elif(type(layer) in [Conv2DTranspose] and "kernel" in var_name): + var_values = quantize_data(var_values, dec_bits, axis=-2, per_axis=per_channel_quant) # [h, w, out, in] + else: + var_values = quantize_data(var_values, dec_bits, per_axis=per_channel_quant) # [h, w, in, out] + else: + var_values = quantize_data(var_values, dec_bits, per_axis=False) + + # CHW format + if ('chw' in format): + if (is_lstm_layer(layer) or is_gru_layer(layer)): # currently we use 16 bit intermediate, use reorder optimation + transposed_wts = np.transpose(var_values) + if('kernel' in var_name): + transposed_wts = convert_q7_q15_weights(np.reshape(transposed_wts ,(transposed_wts.shape[0], transposed_wts.shape[1], 1, 1))) + # dense and rnn still working under HWC format + elif ("dense" in var_name or is_rnn_layer(layer)) and "kernel" in var_name: + transposed_wts = np.transpose(var_values) + transposed_wts = convert_to_x4_q7_weights(np.reshape(transposed_wts, (transposed_wts.shape[0], transposed_wts.shape[1], 1, 1))) + # all other kernels, bias stay the same + else: + transposed_wts = var_values + # HWC format (NNOM/CMSIS-NN use [out_ch, h, w, in_ch], in C order) + else: + if (len(var_values.shape) == 3): # 1D convolution layer weights + transposed_wts = np.transpose(var_values, (2, 0, 1)) + elif (len(var_values.shape) == 4): # 2D convolution layer weights + if(type(layer) == Conv2DTranspose): # test + transposed_wts = np.transpose(var_values, (2, 0, 1, 3)) + elif type(layer) == DepthwiseConv2D: + transposed_wts = var_values#np.transpose(var_values, (0, 1, 3, 2)) # [h, w, out, mult] test for multiplier + else: + transposed_wts = np.transpose(var_values, (3, 0, 1, 2)) + elif(is_lstm_layer(layer) or is_gru_layer(layer)): # currently we use 16 bit intermediate, use reorder optimation + if('kernel' in var_name): + transposed_wts = np.transpose(var_values) + transposed_wts = convert_q7_q15_weights(np.reshape(transposed_wts ,(transposed_wts.shape[0], transposed_wts.shape[1], 1, 1))) + else: # bias will not need to be transposed (for GRU which has 2d bias) + transposed_wts = var_values + else: # fully connected layer weights or biases of any layer + # test, use opt weight reorder + transposed_wts = np.transpose(var_values) + if ("dense" in var_name or is_rnn_layer(layer)) and "kernel" in var_name: # and other RNN layers + transposed_wts = convert_to_x4_q7_weights(np.reshape(transposed_wts ,(transposed_wts.shape[0], transposed_wts.shape[1], 1, 1))) + + with open(name, 'a') as f: + def write_weights(f, name, value): + f.write('#define ' + name + ' {') + value.tofile(f, sep=", ", format="%d") + f.write('}\n\n') + # weights or bias + write_weights(f, var_name.upper(), transposed_wts) + # dec bits + write_weights(f, var_name.upper()+'_DEC_BITS' , np.array(dec_bits)) + # for test + if( "bias" in var_name): + f.write('#define ' + layer.name.upper() + '_BIAS_LSHIFT '+to_cstyle(bias_shift) +'\n\n') + #f.write('#define ' + layer.name.upper() + '_OUTPUT_DEC '+ to_cstyle(layer_output_dec)+'\n\n') # not here + f.write('#define ' + layer.name.upper() + '_OUTPUT_RSHIFT ' + to_cstyle(layer_output_shift)+'\n\n') + + +def generate_model(model, x_test, per_channel_quant=False, name='weights.h', format='hwc', quantize_method='max_min'): + """ + :param 
model: + :param x_test: + :param name: + :param format: + :param quantize_method: "max_min" or "kld" + :return: + """ + # get the quantize output range/format + layer_q_list = quantize_output(model, x_test, layer_offset=False, quantize_method=quantize_method) + # quantize weights and output shift + quantize_weights(model, per_channel_quant=per_channel_quant, name=name, format=format, layer_q_list=layer_q_list) + # now generate the model + if (type(model.layers[0]) != InputLayer): + L = [model.input] + model.layers + else: + L = model.layers + with open(name, 'a') as fp: + # generate the list of output + fp.write('\n/* output q format for each layer */\n') + for layer in L: + if (type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + iname = layer.name.split(':')[0] + else: + iname = layer.name + fp.write('#define %s_OUTPUT_DEC %s\n' % (iname.upper(), layer_q_list[iname][0])) + fp.write('#define %s_OUTPUT_OFFSET %s\n' % (iname.upper(), layer_q_list[iname][1])) + fp.write('\n/* bias shift and output shift for none-weighted layer */\n') + + # generate output shift for the layers without weights (weighted layers were generated in quantize_weights) + for layer in model.layers: + if (is_shift_layer(layer)): + iname = layer.name.upper() + # add, sub + if ('add' in layer.name or 'subtract' in layer.name): + # only consider the first, they have been set to same in out_put_range() + inp = layer.input[0].name.replace(':', '/').split('/')[0].upper() + fp.write('#define {0}_OUTPUT_RSHIFT ({1}_OUTPUT_DEC-{0}_OUTPUT_DEC)\n'.format( + iname, inp)) + fp.write( + '#if {0}_OUTPUT_RSHIFT < 0\n#error {0}_OUTPUT_RSHIFT must be bigger than 0\n#endif\n'.format( + iname)) + # mult is different, Q3.4 * Q3.4 = Q6.8. if mult out is Q4.3, then shift (Q.4+q.4)-Q.3=5. Am I right? + elif ('multiply' in layer.name): + inp = layer.input[0].name.replace(':', '/').split('/')[0].upper() + fp.write('#define {0}_OUTPUT_RSHIFT ({1}_OUTPUT_DEC*2-{0}_OUTPUT_DEC)\n'.format( + iname, inp)) + fp.write( + '#if {0}_OUTPUT_RSHIFT < 0\n#error {0}_OUTPUT_RSHIFT must be bigger than 0\n#endif\n'.format( + iname)) + + fp.write('\n/* tensors and configurations for each layer */\n') + LI = {} + ID = 0 + + def is_skipable_layer(layer): + # FIXME: add more that could be skiped + if ('lambda' in layer.name or + 'dropout' in layer.name or + 'gaussian_noise' in layer.name or + 'batch_normalization' in layer.name + #or ('flatten' in layer.name and 'chw' not in format) + ): # flatten layer can be skipped in HWC but needed in CHW + return True + return False + + output_num = 0 + for id, layer in enumerate(L): + if (is_skipable_layer(layer)): + inp = layer.input.name.replace(':', '/').split('/')[0] + LI[layer.name] = (LI[inp][0], layer) + else: + if (type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + LI[layer.name.split(':')[0]] = (ID, layer) + else: + LI[layer.name] = (ID, layer) + ID += 1 + + def gen_weight_tensor(w, per_axis): + var_cname = convert_tensor_name(w) + '_data' + dec_bits_name = convert_tensor_name(w).upper() + '_DEC_BITS' + fp.write(gen_values(var_cname, convert_tensor_name(w).upper())) + fp.write(gen_tensor(w, dec_bits=dec_bits_name, tensor_value=var_cname, per_axis=per_axis)) + + # output the config of all layer + if (type(layer) in [InputLayer] or 'input' in layer.name): + if(type(layer) == tf.Tensor): + raise Exception('Not yet support tensor as input/or Sequential model. 
' + 'please use Input layer as your first layer in the model', layer.name, layer) + size = 1 + for s in layer.input.shape[1:]: + size *= s if s is not None else 1 + fp.write(gen_values('nnom_input_data', '{0}', size=str(size), dtype='static int8_t')) + fp.write(gen_tensor(layer.input, layer_q_list[layer.name][0], tensor_value='nnom_input_data', is_io_tensor=True)) + fp.write(gen_io_config(layer, tensor_name=convert_tensor_name(layer.input))) + elif (type(layer) in [Conv2D, Conv1D, DepthwiseConv2D]): + for w in layer.weights: + gen_weight_tensor(w, per_axis=per_channel_quant) + fp.write(gen_conv2d_config(layer, layer.name.upper() +'_OUTPUT_RSHIFT', layer.name.upper() +'_BIAS_LSHIFT')) + elif (type(layer) in [Conv2DTranspose]): + for w in layer.weights: + gen_weight_tensor(w, per_axis=per_channel_quant) + fp.write(gen_conv2d_trans_config(layer, layer.name.upper() +'_OUTPUT_RSHIFT', layer.name.upper() +'_BIAS_LSHIFT')) + elif (type(layer) in [Dense]): + for w in layer.weights: + gen_weight_tensor(w, per_axis=False) + fp.write(gen_dense_config(layer, layer.name.upper() +'_OUTPUT_RSHIFT', layer.name.upper() +'_BIAS_LSHIFT')) + elif (type(layer) in [MaxPooling2D, AveragePooling2D, MaxPooling1D, AveragePooling1D]): + fp.write(gen_pooling_config(layer)) + elif (type(layer) in [GlobalMaxPooling2D, GlobalAveragePooling2D, GlobalMaxPooling1D, GlobalAveragePooling1D]): + fp.write(gen_gl_pooling_config(layer)) + elif (type(layer) in [Multiply, Add, Subtract]): + fp.write(gen_matrix_config(layer, output_shift_name=layer.name.upper()+'_OUTPUT_RSHIFT')) + elif (type(layer) in [ZeroPadding2D, ZeroPadding1D]): + fp.write(gen_zero_padding_config(layer)) + elif (type(layer) in [Cropping2D, Cropping1D]): + fp.write(gen_cropping_config(layer)) + elif (type(layer) in [Softmax]): + fp.write(gen_softmax_config(layer)) + elif (type(layer) in [Flatten]): + fp.write(gen_flatten_config(layer)) + elif (type(layer) in [Reshape]): + fp.write(gen_reshape_config(layer)) + elif (type(layer) in [Concatenate]): + fp.write(gen_concat_config(layer)) + elif (type(layer) in [Lambda]): + fp.write(gen_lambda_config(layer)) + elif (type(layer) in [UpSampling2D, UpSampling1D]): + fp.write(gen_upsampling_config(layer)) + elif(is_rnn_layer(layer)): + if(type(layer.cell) is SimpleRNNCell): + for w in layer.weights: + gen_weight_tensor(w, per_axis=False) + fp.write(gen_simple_cell_config(layer, layer_q_list['intermediate_'+layer.name])) + elif(type(layer.cell) is GRUCell or 'gru' in layer.cell.name): + for w in layer.weights: + gen_weight_tensor(w, per_axis=False) + fp.write(gen_gru_cell_config(layer, layer_q_list['intermediate_'+layer.name])) + elif(type(layer.cell) is LSTMCell or 'lstm' in layer.cell.name): + for w in layer.weights: + gen_weight_tensor(w, per_axis=False) + fp.write(gen_lstm_cell_config(layer, layer_q_list['intermediate_'+layer.name])) + fp.write(gen_rnn_config(layer)) + + # test, multiple output layer + if(len(layer.outbound_nodes) == 0): + size=1 + for s in layer.output.shape[1:]: + size *= s if s is not None else 1 + if(output_num == 0): # the first output or the only output + fp.write(gen_values('nnom_output_data', '{0}', size=str(size), dtype='static int8_t')) + fp.write(gen_output_config(layer, dec_bits=layer.name.upper() + '_OUTPUT_DEC', output_num=output_num, value_name='nnom_output_data')) + output_num += 1 + else: + output_value_names = 'nnom_output_data'+str(output_num) + fp.write(gen_values(output_value_names, '{0}', size=str(size), dtype='static int8_t')) + fp.write(gen_output_config(layer, 
dec_bits=layer.name.upper() + '_OUTPUT_DEC', output_num=output_num, value_name=output_value_names)) + output_num += 1 + + # # last layer, attach the additional nnom output layer + # if(id == len(L)-1): + # size=1 + # for s in layer.output.shape[1:]: + # size *= s if s is not None else 1 + # fp.write(gen_values('nnom_output_data', '{0}', size=str(size), dtype='static int8_t')) + # fp.write(gen_output_config(layer, dec_bits=layer.name.upper()+'_OUTPUT_DEC', value_name='nnom_output_data')) + + # write version + fp.write('/* model version */\n') + fp.write('#define NNOM_MODEL_VERSION (10000*{0} + 100*{1} + {2})\n'.format(model_major_version, model_sub_version, model_reversion )) + + # model + fp.write('\n/* nnom model */\n') + fp.write('static nnom_model_t* nnom_model_create(void)\n{\n') + fp.write('\tstatic nnom_model_t model;\n') + if (ID > 32): + fp.write('\tnnom_layer_t **layer = (nnom_layer_t**)malloc(sizeof(nnom_layer_t *)*%d);\n' % (ID + 1)) + fp.write('\tif(NULL == layer) return NULL;\n') + else: + fp.write('\tnnom_layer_t* layer[%d];\n' % (ID + 1)) + fp.write('\n\tcheck_model_version(NNOM_MODEL_VERSION);') + fp.write('\n\tnew_model(&model);\n\n') + + # inverted order of output, very strange + output_num = (len(model.output) -1) if type(model.output) is list else 0 + for layer in L: + if (is_skipable_layer(layer)): + continue + # FIXME: need a better solution to seperate the input 'tensor' from other layers + if (type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + id, _ = LI[layer.name.split(':')[0]] + else: + id, _ = LI[layer.name] + + if ('input' in layer.name): + fp.write('\tlayer[%d] = input_s(&%s_config);\n' % (id, layer.name)) + + # convlutional + elif ('conv1d' in layer.name + or 'conv2d' in layer.name): + inp = layer_name_from_tensor(layer.input) + if('transpose' in layer.name): + fp.write('\tlayer[{0}] = model.hook(conv2d_trans_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif('depthwise' in layer.name): + fp.write('\tlayer[{0}] = model.hook(dw_conv2d_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + else: + fp.write('\tlayer[{0}] = model.hook(conv2d_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif ('activation' in layer.name): + inp = layer_name_from_tensor(layer.input) + cfg = layer.get_config() + if (cfg['activation'] == 'relu'): + fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n' % (id, LI[inp][0])) + elif (cfg['activation'] == 'tanh'): + fp.write('\tlayer[%s] = model.active(act_hard_tanh(%s_OUTPUT_DEC), layer[%s]);\n' % ( + id, inp.upper(), LI[inp][0])) + elif (cfg['activation'] == 'sigmoid'): + fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_DEC), layer[%s]);\n' % ( + id, inp.upper(), LI[inp][0])) + elif (cfg['activation'] == 'hard_sigmoid'): + fp.write('\tlayer[%s] = model.active(act_hard_sigmoid(%s_OUTPUT_DEC), layer[%s]);\n' % ( + id, inp.upper(), LI[inp][0])) + elif (cfg['activation'] == 'softmax'): + fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n' % (id, LI[inp][0])) + elif ('leaky_re_lu' in layer.name): + inp = layer_name_from_tensor(layer.input) + cfg = layer.get_config() + fp.write('\tlayer[%s] = model.active(act_leaky_relu(%ff), layer[%s]);\n' % (id, cfg["alpha"],LI[inp][0])) + elif ('re_lu' in layer.name): + inp = layer_name_from_tensor(layer.input) + cfg = layer.get_config() + if(cfg['max_value'] is None and cfg['negative_slope'] == 0 and cfg['threshold'] == 0): + fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n' % 
(id, LI[inp][0])) + else: + if(cfg['max_value'] is None): + max_v = 'INFINITY ' + else: + max_v = str(cfg['max_value']) + fp.write('\tlayer[%s] = model.active(act_adv_relu(%f,%s,%f), layer[%s]);\n' + % (id, cfg['negative_slope'], max_v, cfg['threshold'], LI[inp][0])) + # pooling + elif ('max_pooling' in layer.name): + inp = layer_name_from_tensor(layer.input) + if ('global' in layer.name): + fp.write('\tlayer[{0}] = model.hook(global_maxpool_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + else: + fp.write('\tlayer[{0}] = model.hook(maxpool_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif ('average_pooling' in layer.name): + inp = layer_name_from_tensor(layer.input) + if ('global' in layer.name): + fp.write('\tlayer[{0}] = model.hook(global_avgpool_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + else: + fp.write('\tlayer[{0}] = model.hook(avgpool_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif ('up_sampling' in layer.name): + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = model.hook(upsample_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + # zero padding + elif ('zero_padding' in layer.name): + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = model.hook(zeropadding_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + # Cropping + elif ('cropping' in layer.name): + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = model.hook(cropping_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + + # others + elif ('flatten' in layer.name): # flatten is needed in CHW backend but not needed in HWC + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = model.hook(flatten_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif ('reshape' in layer.name): # flatten is needed in CHW backend but not needed in HWC + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = model.hook(reshape_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif ('concatenate' in layer.name): + inps = [layer_name_from_tensor(input) for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]' % (LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(concat_s(&%s_config), %s%s);\n' % ( + id, layer.name, len(inps), inX)) + elif ('add' in layer.name): + inps = [layer_name_from_tensor(input) for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]' % (LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(add_s(&%s_config), %s%s);\n' % ( + id, layer.name, len(inps), inX)) + elif ('subtract' in layer.name): + inps = [layer_name_from_tensor(input) for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]' % (LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(sub_s(&%s_config), %s%s);\n' % ( + id, layer.name, len(inps), inX)) + elif ('multiply' in layer.name): + inps = [layer_name_from_tensor(input) for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]' % (LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(mult_s(&%s_config), %s%s);\n' % ( + id, layer.name, len(inps), inX)) + elif ('dense' in layer.name): + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = model.hook(dense_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + elif ('softmax' in layer.name): + inp = layer_name_from_tensor(layer.input) + fp.write('\tlayer[{0}] = 
model.hook(softmax_s(&{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0])) + + elif (is_rnn_layer(layer)): + inp = layer_name_from_tensor(layer.input) + line = '\tlayer[{0}] = model.hook(rnn_s(, &{1}_config), layer[{2}]);\n'.format(id, layer.name, LI[inp][0]) + if (type(layer.cell) is SimpleRNNCell): + line = line.replace('', 'simple_cell_s(&%s_simple_cell_config)' %(layer.name)) + elif (type(layer.cell) is GRUCell or 'gru' in layer.cell.name): + line = line.replace('', 'gru_cell_s(&%s_gru_cell_config)' % (layer.name)) + elif (type(layer.cell) is LSTMCell or 'lstm' in layer.cell.name): + line = line.replace('', 'lstm_cell_s(&%s_lstm_cell_config)' % (layer.name)) + fp.write(line) + else: + raise Exception('unsupported layer', layer.name, layer) + + # test, multiple output layer (not yet working with multiple outputs) + if(len(layer.outbound_nodes) == 0): + fp.write('\tlayer[{0}] = model.hook(output_s(&{1}_config), layer[{2}]);\n'.format(id + 1, 'output'+str(output_num), LI[inp][0] + 1)) + output_num -=1 # the num is inverted in keras, not a good solution yet. + + """ + # temporary fixed for activations attached into layers in construction + def is_activation_attached(layer): + if(("Softmax" in layer.output.name and "softmax" not in layer.name)or + ("Relu" in layer.output.name and "re_lu" not in layer.name) or + ("Sigmoid" in layer.output.name and "sigmoid" not in layer.name) or + ("Tanh" in layer.output.name and "tanh" not in layer.name)): + return True + return False + if "input" not in layer.name and is_activation_attached(layer): + inp = layer.output.name.replace(':', '/').split('/')[0] + cfg = layer.get_config() + if(cfg['activation'] == 'relu'): + fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n'%(id, LI[inp][0])) + if(cfg['activation'] == 'tanh'): + fp.write('\tlayer[%s] = model.active(act_tanh(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0])) + if(cfg['activation'] == 'sigmoid'): + fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0])) + elif(cfg['activation'] == 'softmax'): + fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0])) + """ + # generate final output layer + #fp.write('\tlayer[{0}] = model.hook(output_s(&{1}_config), layer[{2}]);\n'.format(id+1, 'output', LI[inp][0]+1)) + fp.write('\tmodel_compile(&model, layer[0], layer[%s]);\n' % (id + 1)) + if (ID > 32): + fp.write('\tfree(layer);\n') + fp.write('\treturn &model;\n}\n') + with open('.layer_q_list', 'w') as fp: + fp.write(str(layer_q_list)) + +def evaluate_model(model, x_test, y_test, running_time=False, to_file='evaluation.txt'): + # Score trained model. 
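+ # Usage sketch (illustrative): after training, a call such as
+ #   evaluate_model(model, x_test, y_test, running_time=True, to_file='evaluation.txt')
+ # prints the test loss and top-1 accuracy, a confusion matrix when y_test is one-hot,
+ # and an estimated per-sample inference time, and writes the same summary to the file.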
+ scores = model.evaluate(x_test, y_test, verbose=2) + print('Test loss:', scores[0]) + print('Top 1:', scores[1]) + + if(len(y_test.shape)>1): + bs = model.input.shape[0] + predictions = model.predict(x_test, batch_size=bs) + matrix = skmetrics.confusion_matrix(y_test.argmax(axis=1), predictions.argmax(axis=1)) + print(matrix) + + run_time = 0 + if running_time: + # try to calculate the time + T = time.time() + bs = model.input.shape[0] + for i in range(10): + model.predict(x_test, batch_size=bs) + T = time.time() - T + run_time = round((T / 10 / x_test.shape[0] * 1000 * 1000), 2) + print("Runing time:",run_time , "us" ) + # + with open(to_file, 'w') as f: + f.write("Runing time: "+ str(run_time) + "us" + "\n") + f.write('Test loss:'+ str(scores[0]) + "\n") + f.write('Top 1:'+ str(scores[1])+ "\n") + if (len(y_test.shape) > 1): + for row in matrix: + row.tofile(f, sep=',') + f.write("\n") + return scores + +def f2q(d, Q): + '''To convert a number from floating point to Qm.n format: + 1. Multiply the floating point number by 2n + 2. Round to the nearest integer + ''' + return np.round(d*2**Q) + + +def q2f(d, Q): + '''To convert a number from Qm.n format to floating point: + 1. Convert the number to floating point as if it were an integer, in other words remove the binary point + 2. Multiply by 2-n + ''' + return d*2**-Q + +def show_weights(w, name): + sz = 1 + for s in w.shape: + sz = sz*s + aL = w.reshape(sz,) + MIN,MAX=min(aL),max(aL) + Q = int(np.ceil(np.log2(max(abs(MIN),abs(MAX))))) + Q = 7-Q + qL = f2q(aL,Q) + qL = q2f(qL,Q) + plt.figure(figsize=(18, 3)) + plt.subplot(131) + plt.title(name) + plt.plot(aL) + plt.grid() + aL.sort() + plt.plot(aL,'r') + plt.grid() + plt.subplot(132) + plt.title('Q%s'%(Q)) + qL.sort() + plt.plot(aL,'r') + plt.plot(qL,'g') + plt.grid() + plt.subplot(133) + plt.hist(aL,100) + plt.title('hist') + plt.grid() + plt.show() + +def compare(a,b,name): + sz = 1 + for s in a.shape: + sz = sz*s + aL = a.reshape(sz,) + bL = b.reshape(sz,) + assert(len(aL) == len(bL)) + Z = list(zip(aL,bL)) + Z.sort(key=lambda x: x[0]) + aL1,bL1=zip(*Z) + plt.figure(figsize=(18, 3)) + plt.subplot(131) + plt.plot(aL) + plt.plot(aL1,'r') + plt.grid() + plt.title('tf-%s'%(name)) + plt.subplot(133) + plt.plot(bL1,'g') + plt.plot(aL1,'r') + plt.grid() + plt.title('compare') + plt.subplot(132) + bL1=list(bL1) + bL1.sort() + plt.plot(bL) + plt.plot(bL1,'g') + plt.grid() + plt.title('nn-%s'%(name)) + plt.show() + diff --git a/APP_Framework/Framework/knowing/nnom/scripts/nnom_utils.py b/APP_Framework/Framework/knowing/nnom/scripts/nnom_utils.py new file mode 100644 index 000000000..32868ac81 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/scripts/nnom_utils.py @@ -0,0 +1,845 @@ +''' + Copyright (c) 2018-2020 + Jianjia Ma + majianjia@live.com + + SPDX-License-Identifier: Apache-2.0 + + Change Logs: + Date Author Notes + 2019-02-05 Jianjia Ma The first version + + + This file provides: + -> fake_quantisation layers which simulate the output quantisation on fixed-point NN models. + -> weights/bias quantisation of Convolution and Dense Layer. "weight.h" file generations + -> export "testing set" binary data file. + -> print output ranges of each layers. + + Currently, this script does not support RNN (type) layers. 
+''' + +import matplotlib.pyplot as plt +import tensorflow as tf +from tensorflow.keras.layers import InputLayer +from tensorflow.keras.models import Model + +from sklearn import metrics +from .fully_connected_opt_weight_generation import * +import time +import warnings + +""" +this is the generate the test set data to a bin file +bin file can be used to validate the implementation in MCU + +""" +def generate_test_bin(x, y, name='test_data_with_label.bin'): + ''' + this method generate the + :param x: input x data size + :param y: input label (one hot label) + :return: + ''' + # quantize input x + min_value = np.min(x) + max_value = np.max(x) + + int_bits = int(np.ceil(np.log2(max(abs(min_value), abs(max_value))))) + dec_bits = 7 - int_bits + x = np.round(x*2**dec_bits).astype(np.int8) + # get label + if(len(y.shape) >1): + test_label = np.argwhere(y == 1).astype(np.int8) # test data + test_label = test_label[:, 1] + else: + test_label = y + + # get data + dat = x.astype(dtype="byte") # test data + batch_size = dat.shape[0] # total pices of data + dat = dat.flatten() # flatten to get the total size. + block_size = int(dat.size / batch_size) # this must be integer but... just to confirm + + # write (label x 128) (data_block x 128) + label_batch = 128 # the Y-modem example uses 128 batch + with open(name, 'wb') as f: + start = 0 + while start <= (test_label.size - label_batch): + test_label[start: start + label_batch].tofile(f) + dat[block_size * start: block_size * (start + label_batch)].tofile(f) + start += label_batch + + # the rest data + if (start < test_label.size): + rest_len = test_label.size - start + new_labls = test_label[start:] + new_labls = np.pad(new_labls, (0, label_batch - rest_len), mode='constant') + new_labls.tofile(f) + dat[block_size * start:].tofile(f) + + print("binary test file generated:", name) + print("test data length:", test_label.size) + return + +def is_shift_layer(layer): + ''' layer which can change the output encoding''' + #FIXME: add more which will change the output shift + if('input' in layer.name or + 'conv2d' in layer.name or + 'conv1d' in layer.name or + 'dense' in layer.name or + 'softmax' in layer.name or + 'sigmoid' in layer.name or + 'tanh' in layer.name or + ('add' in layer.name and 'zero' not in layer.name) or # the name, zero_padding contains 'add' + 'subtract' in layer.name or + 'multiply' in layer.name or + ('activation' in layer.name and layer.get_config()['activation'] == 'softmax')or + ('activation' in layer.name and layer.get_config()['activation'] == 'sigmoid') or + ('activation' in layer.name and layer.get_config()['activation'] == 'tanh') + ): + return True + return False + +def is_shift_fixed(layer): + ''' layer which shift to a fixed value''' + #FIXME: add more which will change the output shift + if('softmax' in layer.name or + 'sigmoid' in layer.name or + 'tanh' in layer.name or + ('activation' in layer.name and layer.get_config()['activation'] == 'softmax') or + ('activation' in layer.name and layer.get_config()['activation'] == 'sigmoid') or + ('activation' in layer.name and layer.get_config()['activation'] == 'tanh') + ): + return True + return False + +def fuse_bn_to_conv(layer): + # try to fuse BN layer to convolutional + if ('conv' in layer.name) and \ + ('batch_normalization' in layer._outbound_nodes[0].outbound_layer.name): + + print("fusing batch normalization to", layer.name) + bn_layer = layer._outbound_nodes[0].outbound_layer + c_w = layer.get_weights()[0] + c_b = layer.get_weights()[1] + print('original weight max', 
c_w.max(), 'min', c_w.min()) + print('original bias max', c_b.max(), 'min', c_b.min()) + bn_gamma = bn_layer.get_weights()[0] + bn_beta = bn_layer.get_weights()[1] + bn_mean = bn_layer.get_weights()[2] + bn_variance = bn_layer.get_weights()[3] + + if ('conv2d' in layer.name): + epsilon = 1e-3 # default epsilon for tf.slim.batch_norm + for l in range(c_w.shape[3]): + for k in range(c_w.shape[2]): + for j in range(c_w.shape[1]): + for i in range(c_w.shape[0]): + if "depthwise" in layer.name: # depthwise batchnorm params are ordered differently + c_w[i][j][k][l] *= bn_gamma[k] / np.sqrt(bn_variance[k] + epsilon) + else: + c_w[i][j][k][l] *= bn_gamma[l] / np.sqrt(bn_variance[l] + epsilon) + + if "depthwise" in layer.name: + depth_dim = c_w.shape[2] + else: + depth_dim = c_w.shape[3] + for l in range(depth_dim): + c_b[l] = (bn_gamma[l] * (c_b[l] - bn_mean[l]) / np.sqrt(bn_variance[l] + epsilon)) + bn_beta[l] + # conv1d + else: + epsilon = 1e-3 # default epsilon for tf.slim.batch_norm + for k in range(c_w.shape[2]): + for j in range(c_w.shape[1]): + for i in range(c_w.shape[0]): + if "depthwise" in layer.name: # depthwise batchnorm params are ordered differently + c_w[i][j][k] *= bn_gamma[j] / np.sqrt(bn_variance[j] + epsilon) + else: + c_w[i][j][k] *= bn_gamma[k] / np.sqrt(bn_variance[k] + epsilon) + + if "depthwise" in layer.name: + depth_dim = c_w.shape[1] + else: + depth_dim = c_w.shape[2] + for l in range(depth_dim): + c_b[l] = (bn_gamma[l] * (c_b[l] - bn_mean[l]) / np.sqrt(bn_variance[l] + epsilon)) + bn_beta[l] + + print('fused weight max', c_w.max(), 'min', c_w.min()) + print('fused bias max', c_b.max(), 'min', c_b.min()) + # write the weights back to the layer + # after that, the model will be destroyed.. need a better way to pass the new weight + layer.set_weights([c_w, c_b]) + +def generate_weights(model, name='weights.h', format='hwc', shift_list=None): + # Quantize weights to 8-bits using (min,max) and write to file + f = open(name, 'w') + f.write('#include "nnom.h"\n\n') + f.close() + + for curr_idx, layer in enumerate(model.layers): + if (not layer.weights): + continue + + # before merging bn layer, check if the bn is "legally" after Conv + if('batch_normalization' in layer.name) and \ + ('conv' not in layer.inbound_nodes[0].inbound_layers.name): + raise Exception('Currently only support batch_normalization after conv', layer.name, + layer._inbound_nodes[0].inbound_layers[0].name) + + # try to fuse BN layer to convolutional + if ('conv' in layer.name) and \ + ('batch_normalization' in layer.outbound_nodes[0].outbound_layer.name): + fuse_bn_to_conv(layer) + + # generate weights and bias now + weight_dec_shift = 0 + print('weights for layer', layer.name) + for var in layer.weights: + var_name = str(var.name) + if("kernel" in var_name ): + var_values = layer.get_weights()[0] # weight + print(" weight:", var_name) + elif("bias" in var_name): + var_values = layer.get_weights()[1] # bias + print(" bias: ",var_name) + else: + continue + + print(" original shape: ", var_values.shape) + min_value = np.min(var_values) + max_value = np.max(var_values) + + int_bits = int(np.ceil(np.log2(max(abs(min_value), abs(max_value))))) + dec_bits = 7 - int_bits + print(" dec bit", dec_bits) + bSameAsKernel = False + if(is_shift_layer(layer)): + bSameAsKernel = False + inp = layer.input.name.replace(':','/').split('/')[0] + input_encoding = shift_list[inp] + if ("kernel" in var_name): + weight_dec_shift = dec_bits + else: + shift = input_encoding+weight_dec_shift-dec_bits + if(shift < 0): + bSameAsKernel 
= True + if(shift_list is None or bSameAsKernel): + # check if bias shift > weight shift, then reduce bias shift to weight shift + if ("kernel" in var_name): + weight_dec_shift = dec_bits + else: + if(dec_bits > weight_dec_shift): + dec_bits = weight_dec_shift + print(" new dec bit", dec_bits) + + # convert to [-128,128) or int8 + var_values = np.round(var_values * 2 ** dec_bits) + var_name = var_name.replace('/', '_') + var_name = var_name.replace(':', '_') + with open(name, 'a') as f: + f.write('#define ' + var_name.upper() + ' {') + # CHW format + if ('chw' in format): + if "dense" in var_name and "kernel" in var_name: + transposed_wts = np.transpose(var_values) + transposed_wts = convert_to_x4_q7_weights( + np.reshape(transposed_wts, (transposed_wts.shape[0], transposed_wts.shape[1], 1, 1))) + # all other kernels, bias stay the same + else: + transposed_wts = var_values + # HWC format + else: + if (len(var_values.shape) == 3): # 1D convolution layer weights + transposed_wts = np.transpose(var_values, (2, 0, 1)) + elif (len(var_values.shape) == 4): # 2D convolution layer weights + transposed_wts = np.transpose(var_values, (3, 0, 1, 2)) + else: # fully connected layer weights or biases of any layer + # test, use opt weight reorder + if "dense" in var_name and "kernel" in var_name: + transposed_wts = np.transpose(var_values) + transposed_wts = convert_to_x4_q7_weights(np.reshape(transposed_wts ,(transposed_wts.shape[0], transposed_wts.shape[1], 1, 1))) + else: + transposed_wts = np.transpose(var_values) + + print(" reshape to:",transposed_wts.shape) + + with open(name, 'a') as f: + transposed_wts.tofile(f, sep=", ", format="%d") + f.write('}\n\n') + if ("bias" in var_name): + f.write('#define ' + var_name.upper() + '_SHIFT ' + '(' + str(dec_bits) + ')\n\n\n') + if ("kernel" in var_name ): + f.write('#define ' + var_name.upper() + '_SHIFT ' + '(' + str(dec_bits) + ')\n\n') + """ + # for checking the quantised and dequantised range. + with K.tf.Session() as session: + # convert back original range but quantized to 8-bits or 256 levels + var_values = var_values / (2 ** dec_bits) + var_values = session.run(K.tf.assign(var, var_values)) + print(' '+var_name + ' number of wts/bias: ' + str(var_values.shape) + \ + ' dec bits: ' + str(dec_bits) + \ + ' max: (' + str(np.max(var_values)) + ',' + str(max_value) + ')' + \ + ' min: (' + str(np.min(var_values)) + ',' + str(min_value) + ')') + """ + +def layers_output_ranges(model, x_test, quantize_method='max_min', calibrate_size=1000): + # limit the test data size + np.random.shuffle(x_test) + if(x_test.shape[0] > calibrate_size): + x_test = x_test[:1000] + # test, show the output ranges + shift_list = {} + # FIXME: only support one input + if(type(model.layers[0]) != InputLayer): + L = [model.input] + model.layers + else: + L = model.layers + last_layer = None + + for layer in L: # layer loop + if("input" in layer.name): + features = x_test + else: + # batch_normalization will need to be handled differently, since we are fusing the weight to its predecessor. 
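+ # (illustration) the folding done in fuse_bn_to_conv() above is, per output channel c:
+ #   w'[c] = w[c] * gamma[c] / sqrt(var[c] + eps)
+ #   b'[c] = gamma[c] * (b[c] - mean[c]) / sqrt(var[c] + eps) + beta[c]
+ # so the fused convolution reproduces conv + batch-norm, and the BN layer itself
+ # does not appear in the generated C model.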
+ # sigmoid and tanh are different, their shift is fixed to 7 + if(is_shift_layer(layer) or + ('batch_normalization' in layer.name)): + layer_model = Model(inputs=model.input, outputs=layer.output) + features = layer_model.predict(x_test) + else: + # leave the features not changed, so this layer shift will be the same + # as its inputs + pass + # calculate no saturation shift + max_val = features.max() + min_val = features.min() + int_bits = int(np.ceil(np.log2(max(abs(max_val), abs(min_val))))) + dec_bits = 7 - int_bits + + # saturation shift, using KLD method + # Ref: http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf + if('kld' in quantize_method and not is_shift_fixed(layer) and "input" not in layer.name and "dense" not in layer.name): # test, also do not use kld in input layer + import scipy.stats + abs_max = max(abs(max_val), abs(min_val)) + small_var = 1e-5 + bins = np.arange(-abs_max, abs_max, abs_max/2048*2) + q_bins = np.arange(-abs_max, abs_max, abs_max/256*2) + flat_hist = np.histogram(features.flatten(), bins=bins)[0] + kl_loss = [] + kl_shifts = [] + for shift in range(4): + t = 2 ** (dec_bits + shift) # 2-based threshold + act = np.round(features.flatten() * t) + act = act / t + act = np.clip(act, -128/t, 127/t) + act = np.histogram(act, bins=q_bins)[0] + act_hist = np.zeros(2047) + chunk = int(2048/256) + for i in range(int(255)): + none_zero = np.count_nonzero(flat_hist[i*chunk:(i+1)*chunk]) + if none_zero == 0: + continue + for j in range(chunk): + act_hist[i*chunk+j] = act[i]/none_zero if flat_hist[i*chunk+j] != 0 else 0 + flat_hist[flat_hist==0] = small_var + act_hist[act_hist==0] = small_var + kl = scipy.stats.entropy(flat_hist, act_hist) + kl_loss.append(kl) + kl_shifts.append(dec_bits + shift) + """ + ax = plt.subplot(8, 1, shift+1) + ax.plot(flat_hist) + ax.plot(act_hist) + """ + new_dec = kl_shifts[np.argmin(kl_loss)] # set the dec_bit to the KLD results + #plt.show() + print("KLD loss", kl_loss) + print("KLD shift", kl_shifts) + if(new_dec != dec_bits): + print(layer.name,"is using KLD method, original shift",dec_bits, "KLD results", new_dec) + dec_bits = new_dec + + print( layer.name, "max value:", max_val, "min value:", min_val,"dec bit", dec_bits) + # record the shift + if(type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + shift_list[layer.name.split(':')[0]] = dec_bits + else: + shift_list[layer.name] = dec_bits + if ('batch_normalization' in layer.name): + shift_list[last_layer.name] = dec_bits # use the bn layer shift to update the last layer. 
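+ # Worked example of the max/min quantisation above: if a layer's activations span [-5.2, 4.8],
+ # then int_bits = ceil(log2(5.2)) = 3 and dec_bits = 7 - 3 = 4, i.e. the output is kept as
+ # Q3.4 int8 (sign bit + 3 integer bits + 4 fractional bits). With quantize_method='kld' the
+ # shift may instead be moved to one of dec_bits .. dec_bits+3, picking the candidate with the
+ # smallest KL divergence between the float and the quantised histograms.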
+ last_layer = layer + + LM = {} + for layer in model.layers: + LM[layer.name] = layer + L = [l for l in model.layers[1:]] + L.reverse() + + def update_previous_layer_shift(layer, Q): + if(type(layer.input) == list): + for inp in layer.input: + iname = inp.name.split('/')[0] + if('input' in iname): + continue + shift_list[iname] = Qmin + if(not is_shift_layer(LM[iname])): + update_previous_layer_shift(LM[iname], Q) + else: + iname = layer.input.name.split('/')[0] + if('input' in iname): + return + shift_list[iname] = Qmin + if(not is_shift_layer(LM[iname])): + update_previous_layer_shift(LM[iname], Q) + for layer in L: + if(type(layer.input) == list): + iname = layer.input[0].name.split('/')[0] + Qmin = shift_list[iname] + for inp in layer.input: + iname = inp.name.split('/')[0] + if(shift_list[iname] < Qmin): + Qmin = shift_list[iname] + if(shift_list[iname] != Qmin): + bFlag = True + for inp in layer.input: + iname = inp.name.split('/')[0] + shift_list[iname] = Qmin + if(not is_shift_layer(LM[iname])): + update_previous_layer_shift(LM[iname], Qmin) + print('set shift', Qmin, 'for the input of', layer.name, ':', [inp.name.split('/')[0] for inp in layer.input]) + if(not is_shift_layer(layer) or Qmin < shift_list[layer.name]): # update current layer's shift only when we cannot change the shift + shift_list[layer.name] = Qmin + print("shift list", shift_list) + return shift_list + +def generate_model(model, x_test, name='weights.h', format='hwc', quantize_method='max_min'): + shift_list = layers_output_ranges(model, x_test, quantize_method=quantize_method) + generate_weights(model, name=name, format=format, shift_list=shift_list) + if(type(model.layers[0]) != InputLayer): + L = [model.input] + model.layers + else: + L = model.layers + with open(name,'a') as fp: + fp.write('\n/* output enconding for each layer */\n') + for layer in L: + if(type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + iname = layer.name.split(':')[0] + else: + iname = layer.name + fp.write('#define %s_OUTPUT_SHIFT %s\n'%(iname.upper(), shift_list[iname])) + fp.write('\n/* bias shift and output shift for each layer */\n') + for layer in model.layers: + if(is_shift_layer(layer)): + iname = layer.name.upper() + if(len(layer.weights) == 2 and + 'kernel' in layer.weights[0].name and + 'bias' in layer.weights[1].name): + kname = layer.weights[0].name.upper().replace('/', '_').replace(':', '_') + bname = layer.weights[1].name.upper().replace('/', '_').replace(':', '_') + inp = layer.input.name.replace(':','/').split('/')[0].upper() + fp.write('#define {0}_OUTPUT_RSHIFT ({1}_OUTPUT_SHIFT+{2}_SHIFT-{0}_OUTPUT_SHIFT)\n'.format( + iname, inp, kname)) + fp.write('#define {0}_BIAS_LSHIFT ({1}_OUTPUT_SHIFT+{2}_SHIFT-{3}_SHIFT)\n'.format( + iname, inp, kname, bname)) + fp.write('#if {0}_OUTPUT_RSHIFT < 0\n#error {0}_OUTPUT_RSHIFT must be bigger than 0\n#endif\n'.format(iname)) + fp.write('#if {0}_BIAS_LSHIFT < 0\n#error {0}_BIAS_RSHIFT must be bigger than 0\n#endif\n'.format(iname)) + # add, sub + elif ('add' in layer.name or + 'subtract' in layer.name): + # only consider the first, they have been set to same in out_put_range() + inp = layer.input[0].name.replace(':','/').split('/')[0].upper() + fp.write('#define {0}_OUTPUT_RSHIFT ({1}_OUTPUT_SHIFT-{0}_OUTPUT_SHIFT)\n'.format( + iname, inp)) + fp.write('#if {0}_OUTPUT_RSHIFT < 0\n#error {0}_OUTPUT_RSHIFT must be bigger than 0\n#endif\n'.format(iname)) + # mult is different, Q3.4 * Q3.4 = Q6.8. if mult out is Q4.3, then shift (Q.4+q.4)-Q.3=5. Am I right? 
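+ # Worked check of the comment above: a Q3.4 x Q3.4 product carries 4 + 4 = 8 fractional bits
+ # (Q6.8 in the 32-bit accumulator); to store it as Q4.3 the right shift is (4 + 4) - 3 = 5,
+ # which is what the emitted macro (input_OUTPUT_SHIFT*2 - layer_OUTPUT_SHIFT) evaluates to
+ # in the 'multiply' branch below (both inputs share one shift at this point).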
+ elif ('multiply' in layer.name ): + inp = layer.input[0].name.replace(':','/').split('/')[0].upper() + fp.write('#define {0}_OUTPUT_RSHIFT ({1}_OUTPUT_SHIFT*2-{0}_OUTPUT_SHIFT)\n'.format( + iname, inp)) + fp.write('#if {0}_OUTPUT_RSHIFT < 0\n#error {0}_OUTPUT_RSHIFT must be bigger than 0\n#endif\n'.format(iname)) + + fp.write('\n/* weights for each layer */\n') + LI = {} + ID = 0 + def is_skipable_layer(layer): + # FIXME: add more that could be skiped + if('lambda' in layer.name or + 'dropout' in layer.name or + 'batch_normalization' in layer.name or + ('flatten' in layer.name and 'chw' not in format)): # flatten layer can be skipped in HWC but have to present in CHW + return True + return False + for id,layer in enumerate(L): + if(is_skipable_layer(layer)): + inp = layer.input.name.replace(':','/').split('/')[0] + LI[layer.name] = (LI[inp][0], layer) + else: + if(type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + LI[layer.name.split(':')[0]] = (ID, layer) + else: + LI[layer.name] = (ID, layer) + ID += 1 + + if ('input' in layer.name or not layer.weights): + continue + for var in layer.weights: + var_name = str(var.name).replace('/', '_').replace(':', '_') + if("kernel" in var_name): + fp.write('static const int8_t %s_weights[] = %s;\n'%(layer.name, var_name.upper())) + fp.write('static const nnom_weight_t %s_w = { (const void*)%s_weights, %s_OUTPUT_RSHIFT};\n'%(layer.name,layer.name, layer.name.upper())) + elif("bias" in var_name): + fp.write('static const int8_t %s_bias[] = %s;\n'%(layer.name, var_name.upper())) + fp.write('static const nnom_bias_t %s_b = { (const void*)%s_bias, %s_BIAS_LSHIFT};\n'%(layer.name,layer.name, layer.name.upper())) + fp.write('\n/* nnom model */\n') + # FIXME: now only support one input and one output + sz = 1 + for d in model.input.shape[1:]: + sz = sz*d + fp.write('static int8_t nnom_input_data[%d];\n'%(sz)) + sz = 1 + for d in model.output.shape[1:]: + sz = sz*d + fp.write('static int8_t nnom_output_data[%d];\n'%(sz)) + fp.write('static nnom_model_t* nnom_model_create(void)\n{\n') + fp.write('\tstatic nnom_model_t model;\n') + if(ID>32): + fp.write('\tnnom_layer_t ** layer = malloc(sizeof(nnom_layer_t *)*%d);\n'%(ID+1)) + fp.write('\tif(NULL == layer) return NULL;\n') + else: + fp.write('\tnnom_layer_t* layer[%d];\n'%(ID+1)) + fp.write('\n\tnew_model(&model);\n\n') + for layer in L: + if(is_skipable_layer(layer)): + continue + #FIXME: need a better solution to seperate the input 'tensor' from other layers + if (type(model.input) == tf.Tensor and type(model.layers[0]) != InputLayer): + id,_ = LI[layer.name.split(':')[0]] + else: + id,_ = LI[layer.name] + + if('input' in layer.name): + try: + inshape = layer.input_shape[0][1:] # new changes in tf2? 
+ except: + inshape = layer.shape[1:] + if (len(inshape) == 1): # 1-D input + fp.write('\tlayer[%d] = Input(shape(%d,1,1), nnom_input_data);\n' % (id, inshape[0])) + elif (len(inshape) == 2): # 1-D input + fp.write('\tlayer[%d] = Input(shape(1,%d,%d), nnom_input_data);\n' % (id, inshape[0], inshape[1])) + else: + fp.write('\tlayer[%d] = Input(shape%s, nnom_input_data);\n' % (id, inshape)) + + # convlutional + elif('conv1d' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if('depthwise' in layer.name): + fp.write('\tlayer[{0}] = model.hook(DW_Conv2D({1}, kernel(1,{2}), stride(1,{3}), dilation(1,{4}), PADDING_{5}, &{6}_w, &{6}_b), layer[{7}]);\n'.format( + id, 1, cfg['kernel_size'][0], cfg['strides'][0], cfg['dilation_rate'][0], cfg['padding'].upper(), + layer.name, LI[inp][0])) + else: + fp.write('\tlayer[{0}] = model.hook(Conv2D({1}, kernel(1,{2}), stride(1,{3}), dilation(1,{4}), PADDING_{5}, &{6}_w, &{6}_b), layer[{7}]);\n'.format( + id, cfg['filters'], cfg['kernel_size'][0], cfg['strides'][0], cfg['dilation_rate'][0], cfg['padding'].upper(), + layer.name, LI[inp][0])) + elif('conv2d' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if ('depthwise' in layer.name): + fp.write('\tlayer[{0}] = model.hook(DW_Conv2D({1}, kernel{2}, stride{3}, dilation{4}, PADDING_{5}, &{6}_w, &{6}_b), layer[{7}]);\n'.format( + id, 1, cfg['kernel_size'], cfg['strides'], cfg['dilation_rate'], cfg['padding'].upper(), + layer.name, LI[inp][0])) + else: + fp.write('\tlayer[{0}] = model.hook(Conv2D({1}, kernel{2}, stride{3}, dilation{4}, PADDING_{5}, &{6}_w, &{6}_b), layer[{7}]);\n'.format( + id, cfg['filters'], cfg['kernel_size'], cfg['strides'], cfg['dilation_rate'], cfg['padding'].upper(), + layer.name, LI[inp][0])) + # activations + elif('activation' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if(cfg['activation'] == 'relu'): + fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n'%(id, LI[inp][0])) + if(cfg['activation'] == 'tanh'): + fp.write('\tlayer[%s] = model.active(act_tanh(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0])) + if(cfg['activation'] == 'sigmoid'): + fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0])) + elif(cfg['activation'] == 'softmax'): + fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0])) + elif('re_lu' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n'%(id, LI[inp][0])) + # pooling + elif('max_pooling' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if ('global' in layer.name): + fp.write('\tlayer[%s] = model.hook(GlobalMaxPool(), layer[%s]);\n' % (id, LI[inp][0])) + elif('2d' in layer.name): + fp.write('\tlayer[%s] = model.hook(MaxPool(kernel%s, stride%s, PADDING_%s), layer[%d]);\n'%( + id, cfg['pool_size'], cfg['strides'], cfg['padding'].upper(), LI[inp][0])) + elif('1d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(MaxPool(kernel(1,{1}), stride(1,{2}), PADDING_{3}), layer[{4}]);\n'.format( + id, cfg['pool_size'][0], cfg['strides'][0], cfg['padding'].upper(), LI[inp][0])) + elif('average_pooling' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if ('global' in layer.name): + # a global avg pool before softmax can be 
replace by sumpool in MCU (recommend) + if(layer == model.layers[-2] and 'Softmax' in model.layers[-1].output.name): + print(layer.name, 'has been replaced by GlobalSumPool()') + fp.write('\tlayer[%s] = model.hook(GlobalSumPool(), layer[%s]);\n' % (id, LI[inp][0])) + else: + fp.write('\tlayer[%s] = model.hook(GlobalAvgPool(), layer[%s]);\n' % (id, LI[inp][0])) + elif('2d' in layer.name): + fp.write('\tlayer[%s] = model.hook(AvgPool(kernel%s, stride%s, PADDING_%s), layer[%d]);\n'%( + id, cfg['pool_size'], cfg['strides'], cfg['padding'].upper(), LI[inp][0])) + elif('1d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(AvgPool(kernel(1,{1}), stride(1,{2}), PADDING_{3}), layer[{4}]);\n'.format( + id, cfg['pool_size'][0], cfg['strides'][0], cfg['padding'].upper(), LI[inp][0])) + elif ('up_sampling' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if('2d' in layer.name): + fp.write('\tlayer[%s] = model.hook(UpSample(kernel%s), layer[%d]);\n'%(id, cfg['size'], LI[inp][0])) + elif('1d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(UpSample(kernel(1,{1})), layer[{2}]);\n'.format( + id, cfg['size'][0], LI[inp][0])) + # zero padding + elif ('zero_padding' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if('2d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(ZeroPadding(border({1},{2},{3},{4})), layer[{5}]);\n'.format( + id, cfg['padding'][0][0], cfg['padding'][0][1], cfg['padding'][1][0],cfg['padding'][1][1], LI[inp][0])) + elif('1d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(ZeroPadding(border(0,0,{1},{2})), layer[{3}]);\n'.format( + id, cfg['padding'][0], cfg['padding'][1], LI[inp][0])) + # Cropping + elif ('cropping' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + if('2d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(Cropping(border({1},{2},{3},{4})), layer[{5}]);\n'.format( + id, cfg['cropping'][0][0], cfg['cropping'][0][1], cfg['cropping'][1][0],cfg['cropping'][1][1], LI[inp][0])) + elif('1d' in layer.name): + fp.write('\tlayer[{0}] = model.hook(Cropping(border(0,0,{1},{2})), layer[{3}]);\n'.format( + id, cfg['cropping'][0], cfg['cropping'][1], LI[inp][0])) + + # others + elif('flatten' in layer.name): # flatten is needed in CHW backend but not needed in HWC + inp = layer.input.name.replace(':', '/').split('/')[0] + fp.write('\tlayer[%s] = model.hook(Flatten(), layer[%s]);\n'%(id, LI[inp][0])) + elif('concatenate' in layer.name): + inps = [input.name.replace(':','/').split('/')[0] for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]'%(LI[inp][0]) + cfg = layer.get_config() + fp.write('\tlayer[%s] = model.mergex(Concat(%s), %s%s);\n'%( + id, cfg['axis'], len(inps), inX)) + elif('add' in layer.name): + inps = [input.name.replace(':','/').split('/')[0] for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]'%(LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(Add(%s_OUTPUT_RSHIFT), %s%s);\n'%( + id, layer.name.upper(), len(inps), inX)) + elif('subtract' in layer.name): + inps = [input.name.replace(':','/').split('/')[0] for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]'%(LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(Sub(%s_OUTPUT_RSHIFT), %s%s);\n'%( + id, layer.name.upper(), len(inps), inX)) + elif('multiply' in layer.name): + warnings.warn("Warning mutiply is under testing") + inps = [input.name.replace(':','/').split('/')[0] 
for input in layer.input] + inX = '' + for inp in inps: + inX += ' ,layer[%d]'%(LI[inp][0]) + fp.write('\tlayer[%s] = model.mergex(Mult(%s_OUTPUT_RSHIFT), %s%s);\n'%( + id, layer.name.upper(), len(inps), inX)) + elif('dense' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + cfg = layer.get_config() + fp.write('\tlayer[{0}] = model.hook(Dense({1}, &{2}_w, &{2}_b), layer[{3}]);\n'.format( + id, cfg['units'], layer.name, LI[inp][0])) + elif('softmax' in layer.name): + inp = layer.input.name.replace(':','/').split('/')[0] + fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0])) + else: + raise Exception('unsupported layer', layer.name, layer) + + """ + # temporary fixed for activations attached into layers in construction + def is_activation_attached(layer): + if(("Softmax" in layer.output.name and "softmax" not in layer.name)or + ("Relu" in layer.output.name and "re_lu" not in layer.name) or + ("Sigmoid" in layer.output.name and "sigmoid" not in layer.name) or + ("Tanh" in layer.output.name and "tanh" not in layer.name)): + return True + return False + if "input" not in layer.name and is_activation_attached(layer): + inp = layer.output.name.replace(':', '/').split('/')[0] + cfg = layer.get_config() + if(cfg['activation'] == 'relu'): + fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n'%(id, LI[inp][0])) + if(cfg['activation'] == 'tanh'): + fp.write('\tlayer[%s] = model.active(act_tanh(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0])) + if(cfg['activation'] == 'sigmoid'): + fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0])) + elif(cfg['activation'] == 'softmax'): + fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0])) + """ + + # FIXME, test later. + if('softmax' in layer.name + or ('activation' in layer.name and layer.get_config()['activation'] == 'softmax')): + fp.write('\tlayer[%s] = model.hook(Output(shape(%s,1,1), nnom_output_data), layer[%s]);\n'%(id+1, layer.output.shape[1], id)) + elif len(layer.output.shape) == 4: + fp.write('\tlayer[%s] = model.hook(Output(shape%s, nnom_output_data), layer[%s]);\n'%(id+1, layer.output.shape[1:], id)) + elif len(layer.output.shape) == 3: + fp.write('\tlayer[%s] = model.hook(Output(shape(1,%s,%s), nnom_output_data), layer[%s]);\n'%(id+1, layer.output.shape[1], layer.output.shape[2], id)) + elif len(layer.output.shape) == 2: + fp.write('\tlayer[%s] = model.hook(Output(shape(%s,1,1), nnom_output_data), layer[%s]);\n'%(id+1, layer.output.shape[1], id)) + else: + raise Exception('unsupported output shape of the last layer', layer.name, layer) + fp.write('\tmodel_compile(&model, layer[0], layer[%s]);\n'%(id+1)) + if(ID>32): + fp.write('\tfree(layer);\n') + fp.write('\treturn &model;\n}\n') + with open('.shift_list','w') as fp: + fp.write(str(shift_list)) + +def evaluate_model(model, x_test, y_test, running_time=False, to_file='evaluation.txt'): + # Score trained model. 
+ scores = model.evaluate(x_test, y_test, verbose=2) + print('Test loss:', scores[0]) + print('Top 1:', scores[1]) + + if(len(y_test.shape)>1): + # predictions = model.predict(x_test) + # output = tf.keras.metrics.top_k_categorical_accuracy(y_test, predictions, k=2) + # # with tf.Session() as sess: + # # result = sess.run(output) + # result = + # print("Top 2:",result) + + predictions = model.predict(x_test) + matrix = metrics.confusion_matrix(y_test.argmax(axis=1), predictions.argmax(axis=1)) + print(matrix) + + run_time = 0 + if running_time: + # try to calculate the time + T = time.time() + for i in range(10): + model.predict(x_test) + T = time.time() - T + run_time = round((T / 10 / x_test.shape[0] * 1000 * 1000), 2) + print("Runing time:",run_time , "us" ) + # + with open(to_file, 'w') as f: + f.write("Runing time: "+ str(run_time) + "us" + "\n") + f.write('Test loss:'+ str(scores[0]) + "\n") + f.write('Top 1:'+ str(scores[1])+ "\n") + if (len(y_test.shape) > 1): + #f.write("Top 2:"+ str(result)+ "\n") + #f.write(str(matrix)) + for row in matrix: + row.tofile(f, sep=',') + f.write("\n") + + # try to check the weight and bias dec ranges + for layer in model.layers: + if (not layer.weights): + continue + for var in layer.weights: + var_name = str(var.name) + if ("kernel" in var_name): + var_values = layer.get_weights()[0] # weight + else: + var_values = layer.get_weights()[1] # bias + min_value = np.min(var_values) + max_value = np.max(var_values) + intt = int(np.ceil(np.log2(max(abs(min_value), abs(max_value))))) + dec = 7 - intt + print(var_name, "Dec num:", dec) + return scores + +def f2q(d, Q): + '''To convert a number from floating point to Qm.n format: + 1. Multiply the floating point number by 2n + 2. Round to the nearest integer + ''' + return np.round(d*2**Q) + + +def q2f(d, Q): + '''To convert a number from Qm.n format to floating point: + 1. Convert the number to floating point as if it were an integer, in other words remove the binary point + 2. 
Multiply by 2-n + ''' + return d*2**-Q + +def show_weights(w, name): + sz = 1 + for s in w.shape: + sz = sz*s + aL = w.reshape(sz,) + MIN,MAX=min(aL),max(aL) + Q = int(np.ceil(np.log2(max(abs(MIN),abs(MAX))))) + Q = 7-Q + qL = f2q(aL,Q) + qL = q2f(qL,Q) + plt.figure(figsize=(18, 3)) + plt.subplot(131) + plt.title(name) + plt.plot(aL) + plt.grid() + aL.sort() + plt.plot(aL,'r') + plt.grid() + plt.subplot(132) + plt.title('Q%s'%(Q)) + qL.sort() + plt.plot(aL,'r') + plt.plot(qL,'g') + plt.grid() + plt.subplot(133) + plt.hist(aL,100) + plt.title('hist') + plt.grid() + plt.show() + +def compare(a,b,name): + sz = 1 + for s in a.shape: + sz = sz*s + aL = a.reshape(sz,) + bL = b.reshape(sz,) + assert(len(aL) == len(bL)) + Z = list(zip(aL,bL)) + Z.sort(key=lambda x: x[0]) + aL1,bL1=zip(*Z) + plt.figure(figsize=(18, 3)) + plt.subplot(131) + plt.plot(aL) + plt.plot(aL1,'r') + plt.grid() + plt.title('tf-%s'%(name)) + plt.subplot(133) + plt.plot(bL1,'g') + plt.plot(aL1,'r') + plt.grid() + plt.title('compare') + plt.subplot(132) + bL1=list(bL1) + bL1.sort() + plt.plot(bL) + plt.plot(bL1,'g') + plt.grid() + plt.title('nn-%s'%(name)) + plt.show() + diff --git a/APP_Framework/Framework/knowing/nnom/src/backends/nnom_local.c b/APP_Framework/Framework/knowing/nnom/src/backends/nnom_local.c new file mode 100644 index 000000000..5c514b21b --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/backends/nnom_local.c @@ -0,0 +1,1689 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Notice: + * Code in this file inlcudes derivative works from CMSIS + * Please check the LICENSE file for detial. + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + * 2019-03-19 Jianjia Ma Local C implementation partly from CMSIS-NN + * 2019-06-19 Jianjia Ma Implement CHW functions + */ + +#include "nnom.h" +#include "nnom_local.h" + +// modified from CMSIS-NN test_ref +void local_avepool_q7_HWC(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + int count = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; + count++; + } + } + } + Im_out[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = sum / (count>>output_shift); + } + } + } +} + +void local_avepool_q7_CHW(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input 
image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t ch_offset; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + ch_offset = i_ch_in*dim_im_in_x*dim_im_in_y; + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + int count = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[ch_offset + (k_x + k_y * dim_im_in_x)]; + count++; + } + } + } + Im_out[i_ch_in*dim_im_out_x*dim_im_out_y + (i_x + i_y * dim_im_out_x)] = sum / (count>>output_shift); + } + } + } +} + +// modified from CMSIS-NN test_ref +void local_maxpool_q7_HWC(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int max = -129; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + if (Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)] > max) + { + max = Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; + } + } + } + } + Im_out[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = max; + } + } + } +} + +void local_maxpool_q7_CHW(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // 
output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t ch_offset; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + ch_offset = i_ch_in * dim_im_out_x * dim_im_out_y; + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int max = -129; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + if (Im_in[i_ch_in * dim_im_in_x * dim_im_in_y + (k_x + k_y * dim_im_in_x)] > max) + { + max = Im_in[i_ch_in * dim_im_in_x * dim_im_in_y + (k_x + k_y * dim_im_in_x)]; + } + } + } + } + Im_out[ch_offset+(i_x + i_y * dim_im_out_x)] = max; + } + } + } +} + +// temporary for the thesis +// shift according to the maximum +void local_sumpool_q7_HWC(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, size = 4*output_size + q7_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t *buf = (int32_t *)bufferA; + // stage2 + // int32_t max_abs = 0; + // int32_t output_shift; + // size_t output_size = dim_im_out_x * dim_im_out_x * ch_im_in; + + // save in 32bit + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; + } + } + } + // 32bit + buf[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = sum; + } + } + } + + // // find max amount results + // for (int i = 0; i < output_size; i++) + // { + // int32_t val = buf[i]; + // if (val < 0) + // val = -val; + // if (val > max_abs) + // max_abs = val; + // } + // // find best shift to cover the max + // for (output_shift = 0;; output_shift++) + // { + // if (127 * (1 + output_shift) >= max_abs) + // break; + // } + + // // shift the results + // for (int i = 0; i < output_size; i++) + // { + // Im_out[i] = buf[i] >> output_shift; + // } + //return output_shift; +} + +// temporary for the thesis +// shift according to the maximum +void local_sumpool_q7_CHW(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const 
uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, size = 4*output_size + q7_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t *buf = (int32_t *)bufferA; + int32_t i_ch_offset, o_ch_offset; + // stage2 + // int32_t max_abs = 0; + // int32_t output_shift; + // size_t output_size = dim_im_out_x * dim_im_out_x * ch_im_in; + + // save in 32bit + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + i_ch_offset = i_ch_in*dim_im_in_x*dim_im_in_y; + o_ch_offset = i_ch_in*dim_im_out_x*dim_im_out_y; + + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[i_ch_offset + (k_x + k_y * dim_im_in_x)]; + } + } + } + // 32bit + buf[o_ch_offset + (i_x + i_y * dim_im_out_x)] = sum; + } + } + } + + // // find max amount results + // for (int i = 0; i < output_size; i++) + // { + // int32_t val = buf[i]; + // if (val < 0) + // val = -val; + // if (val > max_abs) + // max_abs = val; + // } + // // find best shift to cover the max + // for (output_shift = 0;; output_shift++) + // { + // if (127 * (1 + output_shift) >= max_abs) + // break; + // } + + // // shift the results + // for (int i = 0; i < output_size; i++) + // { + // Im_out[i] = buf[i] >> output_shift; + // } + //return output_shift; +} + +// customised up sample pooling +void local_up_sampling_q7_HWC(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q7_t *Im_out) +{ + int16_t i_x, i_y; + + // for loop for each pixel in input image. + for (i_y = 0; i_y < dim_im_in_y; i_y++) + { + for (i_x = 0; i_x < dim_im_in_x; i_x++) + { + // copy all the channels together. 
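+ // (illustration) this is nearest-neighbour up-sampling: each input pixel, with all of its
+ // channels, is replicated dim_kernel_x times along x and dim_kernel_y times along y, so the
+ // output feature map is (dim_im_in_x*dim_kernel_x) by (dim_im_in_y*dim_kernel_y).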
+ const q7_t *p_in = Im_in + (i_y * dim_im_in_x + i_x ) * ch_im_in; + q7_t *pout = Im_out + (i_y * dim_im_in_x * dim_kernel_x * dim_kernel_y + i_x * dim_kernel_y) * ch_im_in; + + // copy along x axis + for(int i = 0; i> out_shift[shift_idx]), 8); + } + } + } +} + +void local_convolve_CHW_q7_nonsquare(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +{ + int i, j, k, l, m, n; + long conv_out; + int in_row, in_col; + int shift_idx, shift_steps; + if(q_type == NNOM_QTYPE_PER_AXIS) + shift_steps = 1; + else + shift_steps = 0; + + for(i = 0, shift_idx = 0; i < ch_im_out; i++, shift_idx += shift_steps) + { + for (j = 0; j < dim_im_out_y; j++) + { + for (k = 0; k < dim_im_out_x; k++) + { + if(bias) + conv_out = ((q31_t)(bias[i]) << bias_shift[shift_idx]) + NNOM_ROUND(out_shift[shift_idx]); + else + conv_out = (q31_t) NNOM_ROUND(out_shift[shift_idx]); + + for (m = 0; m < dim_kernel_y; m++) + { + for (n = 0; n < dim_kernel_x; n++) + { + // if-for implementation + in_row = stride_y * j + m * dilation_y - padding_y; + in_col = stride_x * k + n * dilation_x - padding_x; + if (in_row >= 0 && in_col >= 0 && in_row < dim_im_in_y && in_col < dim_im_in_x) + { + for (l = 0; l < ch_im_in; l++) + { + conv_out += Im_in[(in_row * dim_im_in_x + in_col) + l * dim_im_in_x * dim_im_in_y] * + wt[(m * dim_kernel_x + n) * ch_im_in * ch_im_out + l * ch_im_out + i]; + } + } + } + } + Im_out[i * dim_im_out_x * dim_im_out_y + (j * dim_im_out_x + k)] = (q7_t)__NNOM_SSAT((conv_out >> out_shift[shift_idx]), 8); + } + } + } +} + +#define FALSE 0 +#define TRUE 1 + +static int alg_deconv2d_calculate_position( + int pos, + int stride, + int padding, + int dim_kernel, + int dim_in, + int* in_start, + int* kernel_start, + int* kernel_end) +{ + int is_zero = FALSE; + int of, adj; + is_zero = FALSE; + *in_start = pos/stride; + of = pos%stride; + *kernel_start = padding - of; + if(*kernel_start >= 0) { + adj = MIN(*in_start, *kernel_start/stride); + *kernel_start -= adj*stride; + *in_start -= adj; + } else { + adj = -*kernel_start + dim_kernel; + if(adj<=stride) { + is_zero = TRUE; + } else { + adj = MIN(dim_in-1-*in_start, adj/stride); + *kernel_start += adj*stride; + *in_start += adj; + } + } + of = dim_kernel - 1 - *kernel_start; + adj = MIN(dim_in-1-*in_start, of/stride); + *kernel_end = *kernel_start + adj*stride; + + return is_zero; +} + +void local_conv_trans_HWC_q7_nonsquare(const int8_t * Im_in, + const uint16_t dim_im_in_x, // input image dimention x + const 
uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const uint16_t bias_shift, const uint16_t out_shift, q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +// { +// int ox, oy, oc, ky, kx, kc, ix, iy; +// int conv_out; +// int in_pix_loc, wt_loc; + +// (void)dilation_y; +// (void)dilation_x; + +// // padding and stride are applied to output +// for (oc = 0; oc < ch_im_out; oc++) +// { +// for (oy = 0; oy < dim_im_out_y; oy++) +// { +// for (ox = 0; ox < dim_im_out_x; ox++) +// { +// conv_out = ((q31_t)(bias[oc]) << bias_shift) + NNOM_ROUND(out_shift); + +// for (ky = 0; ky < dim_kernel_y; ky++) +// { +// for (kx = 0; kx < dim_kernel_x; kx++) +// { +// // input y, input x location +// iy = oy / stride_y + ky - padding_y; +// ix = ox / stride_x + kx - padding_x; + +// if(ix >= 0 && iy >= 0 && ix < dim_im_in_y && iy< dim_im_in_y) +// { +// in_pix_loc = (iy * dim_im_in_x + ix) * ch_im_in; +// wt_loc = oc * ch_im_in * dim_kernel_y * dim_kernel_x + (ky * dim_kernel_x + kx) * ch_im_in; + +// for (kc = 0; kc < ch_im_in; kc++) +// { +// conv_out += Im_in[in_pix_loc + kc] * wt[wt_loc + kc]; +// } +// } +// } +// } + +// Im_out[oc + (oy * dim_im_out_x + ox) * ch_im_out] = (q7_t) __NNOM_SSAT((conv_out >> out_shift), 8); +// } +// } +// } +// } + +{ + int i, j, k, l, m, n; + int conv_out; + int in_row, in_col; + int kernel_start_x,kernel_end_x; + int kernel_start_y,kernel_end_y; + int in_row_start, in_col_start; + int is_zero; + + for (i = 0; i < ch_im_out; i++) { + for (j = 0; j < dim_im_out_y; j++) { + is_zero = alg_deconv2d_calculate_position(j, stride_y, padding_y, dim_kernel_y, + dim_im_in_y, &in_row_start, &kernel_start_y, &kernel_end_y); + + if(is_zero) { + conv_out = ((q31_t)(bias[i]) << bias_shift) + NNOM_ROUND(out_shift); + conv_out = (q7_t) __NNOM_SSAT((conv_out >> out_shift), 8); + for (k = 0; k < dim_im_out_x; k++) { + Im_out[i + (j * dim_im_out_x + k) * ch_im_out] = (q7_t) conv_out; + } + continue; + } + + for (k = 0; k < dim_im_out_x; k++) { + conv_out = ((q31_t)(bias[i]) << bias_shift) + NNOM_ROUND(out_shift); + + is_zero = alg_deconv2d_calculate_position(k, stride_x, padding_x, dim_kernel_x, + dim_im_in_x, &in_col_start, &kernel_start_x, &kernel_end_x); + + if(is_zero) { + Im_out[i + (j * dim_im_out_x + k) * ch_im_out] = conv_out; + continue; + } + + for (m = kernel_start_y, in_row = in_row_start; m <= kernel_end_y; m+=stride_y, in_row++) { + for (n = kernel_start_x, in_col = in_col_start; n <= kernel_end_x; n+=stride_x, in_col++) { + if ((in_row >= 0) && (in_col >= 0) && + (in_row < dim_im_in_y) && (in_col < dim_im_in_x)) { + for (l = 0; l < ch_im_in; l++) { + conv_out += Im_in[(in_row * dim_im_in_x + in_col) * ch_im_in + l] * + wt[i * ch_im_in * dim_kernel_y * dim_kernel_x + (m * dim_kernel_x + n) * ch_im_in + l]; + } + } + } + } + + Im_out[i 
+ (j * dim_im_out_x + k) * ch_im_out] = (q7_t) __NNOM_SSAT((conv_out >> out_shift), 8); + } + } + } +} + +void local_depthwise_separable_conv_HWC_q7_nonsquare(const q7_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +{ + int i_out_y, i_out_x, i_ch_out, i_ch_in, i_ch_mult; + int i_ker_y, i_ker_x; + int i_out = 0; + int shift_idx, shift_steps; + int ch_mult = ch_im_out / ch_im_in; + q31_t conv_out; + + for (i_out_y = 0; i_out_y < dim_im_out_y; i_out_y++) + { + const int32_t base_idx_y = stride_y * i_out_y - padding_y; + for (i_out_x = 0; i_out_x < dim_im_out_x; i_out_x++) + { + const int32_t base_idx_x = stride_x * i_out_x - padding_x; + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for(i_ch_mult = 0; i_ch_mult < ch_mult; i_ch_mult++) + { + i_ch_out = i_ch_mult + i_ch_in * ch_mult; + int32_t ker_y_start = MAX(0, -(base_idx_y-(dilation_y-1))/dilation_y); + int32_t ker_x_start = MAX(0, -(base_idx_x-(dilation_x-1))/dilation_x); + int32_t ker_y_end = MIN(dim_kernel_y, (dim_im_in_y - base_idx_y + (dilation_y-1))/dilation_y); + int32_t ker_x_end = MIN(dim_kernel_x, (dim_im_in_x - base_idx_x + (dilation_x-1))/dilation_x); + + shift_idx = q_type == NNOM_QTYPE_PER_AXIS ? 
i_ch_out : 0; + if (bias) + conv_out = ((q31_t)(bias[i_ch_out]) << bias_shift[shift_idx]) + NNOM_ROUND(out_shift[shift_idx]); + else + conv_out = (q31_t)NNOM_ROUND(out_shift[shift_idx]); + + for (i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + i_ker_y * dilation_y; + for (i_ker_x = ker_x_start; i_ker_x < ker_x_end; i_ker_x++) + { + const int32_t idx_x = base_idx_x + i_ker_x * dilation_x; + int32_t in_pix_loc = (idx_y * dim_im_in_x + idx_x) * ch_im_in + i_ch_in; + int32_t wt_loc = (i_ker_y * dim_kernel_x + i_ker_x) * (ch_im_in * ch_mult) + i_ch_out; + conv_out += Im_in[in_pix_loc] * wt[wt_loc]; + } + } + Im_out[i_out++] = (q7_t)__NNOM_SSAT((conv_out >> out_shift[shift_idx]), 8); + } + } + } + } +} + +void local_depthwise_separable_conv_CHW_q7_nonsquare(const q7_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +{ + int i_out_y, i_out_x, i_ch_out, i_ch_in, i_ch_mult; + int i_ker_y, i_ker_x; + int i_out = 0; + int shift_idx, shift_steps; + int ch_mult = ch_im_out / ch_im_in; + q31_t conv_out; + + for (i_out_y = 0; i_out_y < dim_im_out_y; i_out_y++) + { + const int32_t base_idx_y = stride_y * i_out_y - padding_y; + for (i_out_x = 0; i_out_x < dim_im_out_x; i_out_x++) + { + const int32_t base_idx_x = stride_x * i_out_x - padding_x; + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_ch_mult = 0; i_ch_mult < ch_mult; i_ch_mult++) + { + i_ch_out = i_ch_mult + i_ch_in * ch_mult; + int32_t ker_y_start = MAX(0, -(base_idx_y-(dilation_y-1))/dilation_y); + int32_t ker_x_start = MAX(0, -(base_idx_x-(dilation_x-1))/dilation_x); + int32_t ker_y_end = MIN(dim_kernel_y, (dim_im_in_y - base_idx_y + (dilation_y-1))/dilation_y); + int32_t ker_x_end = MIN(dim_kernel_x, (dim_im_in_x - base_idx_x + (dilation_x-1))/dilation_x); + + shift_idx = q_type == NNOM_QTYPE_PER_AXIS ? 
i_ch_out : 0; + if (bias) + conv_out = ((q31_t)(bias[i_ch_out]) << bias_shift[shift_idx]) + NNOM_ROUND(out_shift[shift_idx]); + else + conv_out = (q31_t)NNOM_ROUND(out_shift[shift_idx]); + + for (i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + i_ker_y * dilation_y; + for (i_ker_x = ker_x_start; i_ker_x < ker_x_end; i_ker_x++) + { + const int32_t idx_x = base_idx_x + i_ker_x * dilation_x; + int32_t in_pix_loc = (idx_y * dim_im_in_x + idx_x) + i_ch_in * dim_im_in_x * dim_im_in_y; + int32_t wt_loc = (i_ker_y * dim_kernel_x + i_ker_x) * ch_im_out + i_ch_out; + conv_out += Im_in[in_pix_loc] * wt[wt_loc]; + } + } + Im_out[i_ch_out * dim_im_out_x * dim_im_out_y + (i_out_y * dim_im_out_x + i_out_x)] = (q7_t)__NNOM_SSAT((conv_out >> out_shift[shift_idx]), 8); + } + } + } + } + +} + + +void local_zero_padding_HWC_q7(const q7_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q7_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y) // output image dimension y +{ + int i, size; + q7_t * p_out = Im_out; + + // top rows + size = dim_im_out_x*ch_im_in*padding_top; + nnom_memset(p_out, 0, size); + p_out += size; + + // middle + for(i=0; i> 2; + const q7_t *pB = pM; + const q7_t *pA; + q7_t *pO = pOut; + + while (rowCnt) + { + pA = pV; + q31_t sum = (q31_t) NNOM_ROUND(out_shift); + q31_t sum2 = (q31_t) NNOM_ROUND(out_shift); + q31_t sum3 = (q31_t) NNOM_ROUND(out_shift); + q31_t sum4 = (q31_t) NNOM_ROUND(out_shift); + + uint16_t colCnt = dim_vec >> 2; + + while (colCnt) + { + q7_t inA1 = *pA++; + q7_t inA3 = *pA++; + q7_t inA2 = *pA++; + q7_t inA4 = *pA++; + + q7_t inB1 = *pB++; + q7_t inB3 = *pB++; + q7_t inB2 = *pB++; + q7_t inB4 = *pB++; + + sum += inA1 * inB1 + inA2 * inB2; + sum2 += inA1 * inB3 + inA2 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum3 += inA1 * inB1 + inA2 * inB2; + sum4 += inA1 * inB3 + inA2 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum += inA3 * inB1 + inA4 * inB2; + sum2 += inA3 * inB3 + inA4 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum3 += inA3 * inB1 + inA4 * inB2; + sum4 += inA3 * inB3 + inA4 * inB4; + + colCnt--; + } + colCnt = dim_vec & 0x3; + while (colCnt) + { + q7_t inA = *pA++; + q7_t inB = *pB++; + sum += inA * inB; + inB = *pB++; + sum2 += inA * inB; + inB = *pB++; + sum3 += inA * inB; + inB = *pB++; + sum4 += inA * inB; + + colCnt--; + } + *pO++ = (q7_t)__NNOM_SSAT((sum >> out_shift), 8); + *pO++ = (q7_t)__NNOM_SSAT((sum2 >> out_shift), 8); + *pO++ = (q7_t)__NNOM_SSAT((sum3 >> out_shift), 8); + *pO++ = (q7_t)__NNOM_SSAT((sum4 >> out_shift), 8); + + rowCnt--; + } + + rowCnt = num_of_rows & 0x3; + + while (rowCnt) + { + int ip_out = (q31_t) NNOM_ROUND (out_shift); + pA = pV; + for (int j = 0; j < dim_vec; j++) + { + q7_t inA = *pA++; + q7_t inB = *pB++; + ip_out += inA * inB; + } + *pO++ = (q7_t)__NNOM_SSAT((ip_out >> out_shift), 8); + + rowCnt--; + } +} + +void local_dot_q7(const q7_t *pV, // pointer to vector + const q7_t *pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t 
num_of_rows, // numCol of A + const uint16_t out_shift, // amount of right-shift for output + q7_t *pOut) // output operand) +{ + for (int i = 0; i < num_of_rows; i++) + { + int ip_out = (q31_t) NNOM_ROUND(out_shift); + for (int j = 0; j < dim_vec; j++) + { + ip_out += pV[j] * pM[i * dim_vec + j]; + } + pOut[i] = (q7_t)__NNOM_SSAT((ip_out >> out_shift), 8); + } +} + +void local_fully_connected_q7_opt(const q7_t *pV, // pointer to vector + const q7_t *pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t bias_shift, // amount of left-shift for bias + const uint16_t out_shift, // amount of right-shift for output + const q7_t *bias, q7_t *pOut, // output operand + q15_t *vec_buffer) +{ + uint16_t rowCnt = num_of_rows >> 2; + const q7_t *pB = pM; + const q7_t *pA; + q7_t *pO = pOut; + const q7_t *pBias = bias; + + while (rowCnt) + { + pA = pV; + q31_t sum; + q31_t sum2; + q31_t sum3; + q31_t sum4; + uint16_t colCnt = dim_vec >> 2; + + if(bias) + { + sum = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + sum2 = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + sum3 = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + sum4 = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + } + else + { + sum = (q31_t) NNOM_ROUND(out_shift); + sum2 = (q31_t) NNOM_ROUND(out_shift); + sum3 = (q31_t) NNOM_ROUND(out_shift); + sum4 = (q31_t) NNOM_ROUND(out_shift); + } + + while (colCnt) + { + q7_t inA1 = *pA++; + q7_t inA3 = *pA++; + q7_t inA2 = *pA++; + q7_t inA4 = *pA++; + + q7_t inB1 = *pB++; + q7_t inB3 = *pB++; + q7_t inB2 = *pB++; + q7_t inB4 = *pB++; + + sum += inA1 * inB1 + inA2 * inB2; + sum2 += inA1 * inB3 + inA2 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum3 += inA1 * inB1 + inA2 * inB2; + sum4 += inA1 * inB3 + inA2 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum += inA3 * inB1 + inA4 * inB2; + sum2 += inA3 * inB3 + inA4 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum3 += inA3 * inB1 + inA4 * inB2; + sum4 += inA3 * inB3 + inA4 * inB4; + + colCnt--; + } + colCnt = dim_vec & 0x3; + while (colCnt) + { + q7_t inA = *pA++; + q7_t inB = *pB++; + sum += inA * inB; + inB = *pB++; + sum2 += inA * inB; + inB = *pB++; + sum3 += inA * inB; + inB = *pB++; + sum4 += inA * inB; + + colCnt--; + } + *pO++ = (q7_t)__NNOM_SSAT((sum >> out_shift), 8); + *pO++ = (q7_t)__NNOM_SSAT((sum2 >> out_shift), 8); + *pO++ = (q7_t)__NNOM_SSAT((sum3 >> out_shift), 8); + *pO++ = (q7_t)__NNOM_SSAT((sum4 >> out_shift), 8); + + rowCnt--; + } + + rowCnt = num_of_rows & 0x3; + + while (rowCnt) + { + int ip_out; + if(bias) + ip_out=((q31_t)(*bias++) << bias_shift) + NNOM_ROUND(out_shift); + else + ip_out=(q31_t)NNOM_ROUND(out_shift); + + pA = pV; + for (int j = 0; j < dim_vec; j++) + { + q7_t inA = *pA++; + q7_t inB = *pB++; + ip_out += inA * inB; + } + *pO++ = (q7_t)__NNOM_SSAT((ip_out >> out_shift), 8); + + rowCnt--; + } +} + +void local_fully_connected_q7(const q7_t *pV, // pointer to vector + const q7_t *pM, // pointer to matrix + const uint16_t dim_vec, // length of the vector + const uint16_t num_of_rows, // numCol of A + const uint16_t bias_shift, // amount of left-shift for bias + const uint16_t out_shift, // amount of right-shift for output + const q7_t *bias, q7_t *pOut, // output operand + q15_t *vec_buffer) +{ + if(bias) + { + for (int i = 0; i < num_of_rows; i++) + { + int ip_out = ((q31_t)(*bias++) << 
bias_shift) + NNOM_ROUND(out_shift); + for (int j = 0; j < dim_vec; j++) + { + ip_out += pV[j] * pM[i * dim_vec + j]; + } + pOut[i] = (q7_t)__NNOM_SSAT((ip_out >> out_shift), 8); + } + } + else + { + for (int i = 0; i < num_of_rows; i++) + { + int ip_out = (q31_t)NNOM_ROUND(out_shift); + for (int j = 0; j < dim_vec; j++) + { + ip_out += pV[j] * pM[i * dim_vec + j]; + } + pOut[i] = (q7_t)__NNOM_SSAT((ip_out >> out_shift), 8); + } + } +} + + +void local_softmax_q7(const q7_t *vec_in, const uint32_t dim_vec, q7_t *p_out) +{ + q31_t sum; + int32_t i; + uint8_t shift; + q15_t base; + base = -257; + + /* We first search for the maximum */ + for (i = 0; i < dim_vec; i++) + { + if (vec_in[i] > base) + { + base = vec_in[i]; + } + } + + /* + * So the base is set to max-8, meaning + * that we ignore really small values. + * anyway, they will be 0 after shrinking to q7_t. + */ + base = base - 8; + + sum = 0; + + for (i = 0; i < dim_vec; i++) + { + if (vec_in[i] > base) + { + shift = (uint8_t)__NNOM_USAT(vec_in[i] - base, 5); + sum += 0x1 << shift; + } + } + + /* This is effectively (0x1 << 20) / sum */ + int output_base = 0x100000 / sum; + + /* + * Final confidence will be output_base >> ( 13 - (vec_in[i] - base) ) + * so 128 (0x1<<7) -> 100% confidence when sum = 0x1 << 8, output_base = 0x1 << 12 + * and vec_in[i]-base = 8 + */ + for (i = 0; i < dim_vec; i++) + { + if (vec_in[i] > base) + { + /* Here minimum value of 13+base-vec_in[i] will be 5 */ + shift = (uint8_t)__NNOM_USAT(13 + base - vec_in[i], 5); + p_out[i] = (q7_t)__NNOM_SSAT((output_base >> shift), 8); + } + else + { + p_out[i] = 0; + } + } +} + + +// hard sigmoid, +// y=-1 if x < -2.5 +// y=1 if x > 2.5 +// otherwise y = 0.2 * x + 0.5 (y=0.20315 * x + 0.5) +void local_hard_sigmoid_q7(q7_t *data, uint32_t size, int16_t dec_bit) +{ + int16_t limit = 2.5f * (1 << dec_bit)-1; + int16_t offset = 64; // 0.5 * 128 + int16_t mult = 26; // 0.2 * 128 + + // int bit >= 0 + for(int i=0; i= limit) + data[i] = 127; + else + { + data[i] = ((int16_t)(data[i] * mult) >> dec_bit) + offset; + } + } + } + +// hard tanh +// y=-1 if x < -1 +// y=1 if x > 1 +// otherwise y = x +void local_hard_tanh_q7(q7_t *data, uint32_t size, int16_t dec_bit) +{ + int16_t int_bit = 7 - dec_bit; + int16_t limit = 1 << dec_bit; + + if(dec_bit == 7) + return; + + // int bit < 0 + if(int_bit < 0) + for(int i=0; i= limit) + data[i] = 127; + else + { + data[i] = data[i] >> (-int_bit); + } + } + else + // int bit >= 0 + for(int i=0; i= limit) + data[i] = 127; + else + { + data[i] = data[i] << int_bit; + } + } +} + +void local_sigmoid_q7(q7_t *data, uint32_t size, int16_t int_width) +{ + uint32_t i = size; + q7_t *pIn = data; + q7_t *pOut = data; + q7_t in; + q7_t out; + uint16_t shift_size = 3 - int_width; + // saturation if int bit too large + if(int_width > 3) + { + while (i) + { + if(*pIn++ > 0) + *pOut++ = 127; + else + *pOut++ = 0; + i--; + } + } + // otherwise search table + else + { + while (i) + { + in = *pIn++; + out = nnom_sigmoid_table_q7[(uint8_t)(in >> shift_size)]; + *pOut++ = out; + i--; + } + } +} + +void local_tanh_q7(q7_t *data, uint32_t size, int16_t int_width) +{ + uint32_t i = size; + q7_t *pIn = data; + q7_t *pOut = data; + q7_t in; + q7_t out; + uint16_t shift_size = 3 - int_width; + + // saturation if int bit too large + if(int_width > 3) + { + while (i) + { + in = *pIn++; + if(in > 0) + *pOut++ = 127; + else if ( in == 0) + *pOut++ = 0; + else + *pOut++ = -128; + i--; + } + } + // otherwise search table + else + { + while (i) + { + in = *pIn++; + out = 
nnom_tanh_table_q7[(uint8_t)(in >> shift_size)]; + *pOut++ = out; + i--; + } + } +} + +void local_relu_q7(q7_t *data, uint32_t size) +{ + uint32_t i; + + for (i = 0; i < size; i++) + { + if (data[i] < 0) + data[i] = 0; + } +} + +// alpha in q7 format with dec_bit=7 +void local_leaky_relu_q7(q7_t *data, q7_t alpha, uint32_t size) +{ + uint32_t i; + + for (i = 0; i < size; i++) + { + if (data[i] < 0) + { + data[i] = data[i] * alpha / 128; + } + } +} + +// alpha in q7 format with dec_bit=7 +// max and threshold has the same Q format with the activation +void local_adv_relu_q7(q7_t *data, q7_t negative_slope, q7_t max, q7_t threshold, uint32_t size) +{ + uint32_t i; + for (i = 0; i < size; i++) + { + // `f(x) = max_value` for `x >= max_value`, + // `f(x) = x` for `threshold <= x < max_value`, + // `f(x) = alpha * (x - threshold)` otherwise. + + if(data[i] > max) + data[i] = max; + if (data[i] < threshold) + data[i] = (data[i] - threshold) * negative_slope / 128; + } +} + +// matrix ops +void local_mult_q7(q7_t *pSrcA, + q7_t *pSrcB, + q7_t *pDst, + const uint16_t out_shift, + uint32_t blockSize) +{ + uint32_t i; + + for (i = 0; i < blockSize; i++) + { + q31_t product = pSrcA[i] * pSrcB[i]; + pDst[i] = (q7_t) __NNOM_SSAT(((product + NNOM_ROUND(out_shift)) >> out_shift), 8); + } +} + +void local_add_q7(q7_t *pSrcA, + q7_t *pSrcB, + q7_t *pDst, + const uint16_t out_shift, + uint32_t blockSize) +{ + uint32_t i; + + for (i = 0; i < blockSize; i++) + { + q31_t sum = pSrcA[i] + pSrcB[i]; + pDst[i] = (q7_t) __NNOM_SSAT(((sum + NNOM_ROUND(out_shift)) >> out_shift), 8); + } +} + +void local_sub_q7(q7_t *pSrcA, + q7_t *pSrcB, + q7_t *pDst, + const uint16_t out_shift, + uint32_t blockSize) +{ + uint32_t i; + + for (i = 0; i < blockSize; i++) + { + q31_t sub = pSrcA[i] - pSrcB[i]; + pDst[i] = (q7_t) __NNOM_SSAT(((sub + NNOM_ROUND(out_shift)) >> out_shift), 8); + } +} + + + +void local_multiple_add_q7( q7_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q7_t **p_src) +{ + uint32_t i, blk; + q31_t sum; + + for (i = 0; i < block_size; i++) + { + sum = 0; + for(blk=0; blk < num_block; blk++) + sum += p_src[blk][i]; + p_dst[i] = (q7_t) __NNOM_SSAT(((sum + NNOM_ROUND(out_shift)) >> out_shift), 8); + } +} + +void local_multiple_mult_q7( q7_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q7_t **p_src) +{ + uint32_t i, blk; + q31_t product; + + for (i = 0; i < block_size; i++) + { + product = 1; + for(blk=0; blk < num_block; blk++) + product *= p_src[blk][i]; + p_dst[i] = (q7_t) __NNOM_SSAT(((product + NNOM_ROUND(out_shift)) >> out_shift), 8); + } +} + +void local_multiple_sub_q7( q7_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q7_t **p_src) +{ + uint32_t i, blk; + q31_t sub; + + for (i = 0; i < block_size; i++) + { + sub = p_src[0][i]; + for(blk=1; blk < num_block; blk++) + sub -= p_src[blk][i]; + p_dst[i] = (q7_t) __NNOM_SSAT(((sub + NNOM_ROUND(out_shift)) >> out_shift), 8); + } +} + + +void local_q7_to_q15_no_shift(const q7_t *src, q15_t *des, uint32_t size) +{ + // simple unloop + uint32_t count = size/8; + while (count-- > 0) + { + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + *des++ = (q15_t)*src++; + } + count = size%8; + while(count-- > 0) + *des++ = (q15_t)*src++; +} + +void local_q7_to_q15(const q7_t *src, q15_t *des, uint32_t size) +{ + // simple unloop + 
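`local_q7_to_q15` shifts each sample left by 8, which keeps the represented real value the same while widening the container from q7 to q15, provided the count of fractional bits grows by 8. A minimal sketch of that relationship (the `dec_bits` parameter is illustrative only, not an NNoM API):

```
/* Illustrative only: relate q7/q15 integers to the real values they encode.
 * A fixed-point value with dec_bits fractional bits represents v / (1 << dec_bits). */
#include <stdint.h>
#include <stdio.h>

static float q7_to_float(int8_t v, int dec_bits)   { return (float)v / (float)(1 << dec_bits); }
static float q15_to_float(int16_t v, int dec_bits) { return (float)v / (float)(1 << dec_bits); }

int main(void)
{
    int8_t  x_q7  = 45;                    /* 45 / 128 = 0.3516 with dec_bits = 7      */
    int16_t x_q15 = (int16_t)x_q7 << 8;    /* same real value, dec_bits becomes 7 + 8  */
    printf("%f %f\n", q7_to_float(x_q7, 7), q15_to_float(x_q15, 15));
    return 0;
}
```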
uint32_t count = size/8; + while (count-- > 0) + { + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + *des++ = (q15_t)*src++<<8; + } + count = size%8; + while(count-- > 0) + *des++ = (q15_t)*src++<<8; +} + +// right shift q15 to q7 +void local_q15_to_q7(const q15_t *src, q7_t *des, uint32_t shift, uint32_t size) +{ + while(size-- >0) + { + *des = *src >> shift; + des++; + src++; + } +} + diff --git a/APP_Framework/Framework/knowing/nnom/src/backends/nnom_local_q15.c b/APP_Framework/Framework/knowing/nnom/src/backends/nnom_local_q15.c new file mode 100644 index 000000000..d78c3efc0 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/backends/nnom_local_q15.c @@ -0,0 +1,1602 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Notice: + * Code in this file inlcudes derivative works from CMSIS + * Please check the LICENSE file for detial. + * + * Change Logs: + * Date Author Notes + * 2020-10-05 Jianjia Ma The first version + */ + +#include "nnom.h" +#include "nnom_local.h" + +// modified from CMSIS-NN test_ref +void local_avepool_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + int count = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; + count++; + } + } + } + Im_out[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = sum / (count>>output_shift); + } + } + } +} + +void local_avepool_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + 
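The q15 average-pooling kernels divide the window sum by `count >> output_shift` rather than by `count`, so a non-zero `output_shift` scales the stored average up by roughly `1 << output_shift`, presumably to move the result into the layer's output Q format. A rough floating-point view of what one window computes (sketch only, not part of the library API):

```
/* Rough floating-point equivalent of one q15 average-pooling window:
 * the integer kernel computes sum / (count >> output_shift), i.e. the
 * window mean scaled up by about 2^output_shift. */
static float avepool_window(const float *window, int count, int output_shift)
{
    float sum = 0.0f;
    for (int i = 0; i < count; i++)
        sum += window[i];
    return (sum / (float)count) * (float)(1 << output_shift);
}
```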
int16_t k_x, k_y; + int32_t ch_offset; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + ch_offset = i_ch_in*dim_im_in_x*dim_im_in_y; + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + int count = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[ch_offset + (k_x + k_y * dim_im_in_x)]; + count++; + } + } + } + Im_out[i_ch_in*dim_im_out_x*dim_im_out_y + (i_x + i_y * dim_im_out_x)] = sum / (count>>output_shift); + } + } + } +} + +// modified from CMSIS-NN test_ref +void local_maxpool_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int max = -32768; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + if (Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)] > max) + { + max = Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; + } + } + } + } + Im_out[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = max; + } + } + } +} + +void local_maxpool_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t ch_offset; + + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + ch_offset = i_ch_in * dim_im_out_x * dim_im_out_y; + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int max = -32768; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + if (Im_in[i_ch_in * 
dim_im_in_x * dim_im_in_y + (k_x + k_y * dim_im_in_x)] > max) + { + max = Im_in[i_ch_in * dim_im_in_x * dim_im_in_y + (k_x + k_y * dim_im_in_x)]; + } + } + } + } + Im_out[ch_offset+(i_x + i_y * dim_im_out_x)] = max; + } + } + } +} + +// shift according to the maximum +void local_sumpool_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, size = 4*output_size + q15_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t *buf = (int32_t *)bufferA; + // stage2 + // int32_t max_abs = 0; + // int32_t output_shift; + // size_t output_size = dim_im_out_x * dim_im_out_x * ch_im_in; + + // save in 32bit + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; + } + } + } + // 32bit + buf[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = (q15_t)__NNOM_SSAT((sum >> output_shift), 16); + } + } + } + + // // find max amount results + // for (int i = 0; i < output_size; i++) + // { + // int32_t val = buf[i]; + // if (val < 0) + // val = -val; + // if (val > max_abs) + // max_abs = val; + // } + // // find best shift to cover the max + // for (output_shift = 0;; output_shift++) + // { + // if (127 * (1 + output_shift) >= max_abs) + // break; + // } + + // // shift the results + // for (int i = 0; i < output_size; i++) + // { + // Im_out[i] = buf[i] >> output_shift; + // } + //return output_shift; +} + +// temporary for the thesis +// shift according to the maximum +void local_sumpool_q15_CHW(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t padding_x, // padding sizes + const uint16_t padding_y, // padding sizes + const uint16_t stride_x, // stride + const uint16_t stride_y, // stride + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + const uint16_t output_shift, // output right shift + q7_t *bufferA, // a buffer for local storage, size = 4*output_size + q15_t *Im_out) +{ + int16_t i_ch_in, i_x, i_y; + int16_t k_x, k_y; + int32_t *buf = (int32_t *)bufferA; + int32_t i_ch_offset, o_ch_offset; + // // stage2 + // int32_t max_abs = 0; + // int32_t output_shift; + // size_t output_size = dim_im_out_x * dim_im_out_x * 
ch_im_in; + + // save in 32bit + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + i_ch_offset = i_ch_in*dim_im_in_x*dim_im_in_y; + o_ch_offset = i_ch_in*dim_im_out_x*dim_im_out_y; + + for (i_y = 0; i_y < dim_im_out_y; i_y++) + { + for (i_x = 0; i_x < dim_im_out_x; i_x++) + { + int sum = 0; + for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) + { + for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) + { + if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) + { + sum += Im_in[i_ch_offset + (k_x + k_y * dim_im_in_x)]; + } + } + } + // 32bit + buf[o_ch_offset + (i_x + i_y * dim_im_out_x)] = (q15_t)__NNOM_SSAT((sum >> output_shift), 16); + } + } + } + + // // find max amount results + // for (int i = 0; i < output_size; i++) + // { + // int32_t val = buf[i]; + // if (val < 0) + // val = -val; + // if (val > max_abs) + // max_abs = val; + // } + // // find best shift to cover the max + // for (output_shift = 0;; output_shift++) + // { + // if (127 * (1 + output_shift) >= max_abs) + // break; + // } + + // // shift the results + // for (int i = 0; i < output_size; i++) + // { + // Im_out[i] = buf[i] >> output_shift; + // } + //return output_shift; +} + +// customised up sample pooling +void local_up_sampling_q15_HWC(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimension x or W + const uint16_t dim_im_in_y, // input image dimension y or H + const uint16_t ch_im_in, // number of input image channels + const uint16_t dim_kernel_x, // window kernel size + const uint16_t dim_kernel_y, // window kernel size + const uint16_t dim_im_out_x, // output image dimension x or W + const uint16_t dim_im_out_y, // output image dimension y or H + q7_t *bufferA, // a buffer for local storage, NULL by now + q15_t *Im_out) +{ + int16_t i_x, i_y; + + // for loop for each pixel in input image. + for (i_y = 0; i_y < dim_im_in_y; i_y++) + { + for (i_x = 0; i_x < dim_im_in_x; i_x++) + { + // copy all the channels together. 
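`local_up_sampling_q15_HWC`, being defined here, copies every input pixel into a `dim_kernel_x * dim_kernel_y` block of the output, i.e. nearest-neighbour up-sampling. A plain single-channel sketch of the same index mapping (hypothetical helper, for illustration only):

```
/* Sketch only: nearest-neighbour upsampling of a single-channel image
 * by integer factors kx (width) and ky (height). */
#include <stdint.h>

static void upsample_nn(const int16_t *in, int w, int h,
                        int kx, int ky, int16_t *out)
{
    for (int y = 0; y < h; y++)
        for (int x = 0; x < w; x++)
            for (int dy = 0; dy < ky; dy++)
                for (int dx = 0; dx < kx; dx++)
                    /* every output pixel in the kx*ky block takes the input value */
                    out[(y * ky + dy) * (w * kx) + (x * kx + dx)] = in[y * w + x];
}
```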
+ const q15_t *p_in = Im_in + (i_y * dim_im_in_x + i_x ) * ch_im_in; + q15_t *pout = Im_out + (i_y * dim_im_in_x * dim_kernel_x * dim_kernel_y + i_x * dim_kernel_y) * ch_im_in; + + // copy along x axis + for(int i = 0; i> out_shift[shift_idx]), 16); + } + } + } +} + +void local_convolve_CHW_q15_nonsquare(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +{ + int i, j, k, l, m, n; + int64_t conv_out; + int in_row, in_col; + int shift_idx, shift_steps; + if(q_type == NNOM_QTYPE_PER_AXIS) + shift_steps = 1; + else + shift_steps = 0; + + for(i = 0, shift_idx = 0; i < ch_im_out; i++, shift_idx += shift_steps) + { + for (j = 0; j < dim_im_out_y; j++) + { + for (k = 0; k < dim_im_out_x; k++) + { + if(bias) + conv_out = ((q31_t)(bias[i]) << bias_shift[shift_idx]) + NNOM_ROUND(out_shift[shift_idx]); + else + conv_out = (q31_t)NNOM_ROUND(out_shift[shift_idx]); + for (m = 0; m < dim_kernel_y; m++) + { + for (n = 0; n < dim_kernel_x; n++) + { + // if-for implementation + in_row = stride_y * j + m * dilation_y - padding_y; + in_col = stride_x * k + n * dilation_x - padding_x; + if (in_row >= 0 && in_col >= 0 && in_row < dim_im_in_y && in_col < dim_im_in_x) + { + for (l = 0; l < ch_im_in; l++) + { + conv_out += Im_in[(in_row * dim_im_in_x + in_col) + l * dim_im_in_x * dim_im_in_y] * + wt[(m * dim_kernel_x + n) * ch_im_in * ch_im_out + l * ch_im_out + i]; + } + } + } + } + Im_out[i * dim_im_out_x * dim_im_out_y + (j * dim_im_out_x + k)] = (q15_t)__NNOM_SSAT((conv_out >> out_shift[shift_idx]), 16); + } + } + } +} + +#define FALSE 0 +#define TRUE 1 + +static int alg_deconv2d_calculate_position( + int pos, + int stride, + int padding, + int dim_kernel, + int dim_in, + int* in_start, + int* kernel_start, + int* kernel_end) +{ + int is_zero = FALSE; + int of, adj; + is_zero = FALSE; + *in_start = pos/stride; + of = pos%stride; + *kernel_start = padding - of; + if(*kernel_start >= 0) { + adj = MIN(*in_start, *kernel_start/stride); + *kernel_start -= adj*stride; + *in_start -= adj; + } else { + adj = -*kernel_start + dim_kernel; + if(adj<=stride) { + is_zero = TRUE; + } else { + adj = MIN(dim_in-1-*in_start, adj/stride); + *kernel_start += adj*stride; + *in_start += adj; + } + } + of = dim_kernel - 1 - *kernel_start; + adj = MIN(dim_in-1-*in_start, of/stride); + *kernel_end = *kernel_start + adj*stride; + + return is_zero; +} + +void local_conv_trans_HWC_q15_nonsquare(const int8_t * Im_in, + const uint16_t dim_im_in_x, // input image dimention x + 
const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const uint16_t bias_shift, const uint16_t out_shift, q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +// { +// int ox, oy, oc, ky, kx, kc, ix, iy; +// int conv_out; +// int in_pix_loc, wt_loc; + +// (void)dilation_y; +// (void)dilation_x; + +// // padding and stride are applied to output +// for (oc = 0; oc < ch_im_out; oc++) +// { +// for (oy = 0; oy < dim_im_out_y; oy++) +// { +// for (ox = 0; ox < dim_im_out_x; ox++) +// { +// conv_out = ((q31_t)(bias[oc]) << bias_shift) + NNOM_ROUND(out_shift); + +// for (ky = 0; ky < dim_kernel_y; ky++) +// { +// for (kx = 0; kx < dim_kernel_x; kx++) +// { +// // input y, input x location +// iy = oy / stride_y + ky - padding_y; +// ix = ox / stride_x + kx - padding_x; + +// if(ix >= 0 && iy >= 0 && ix < dim_im_in_y && iy< dim_im_in_y) +// { +// in_pix_loc = (iy * dim_im_in_x + ix) * ch_im_in; +// wt_loc = oc * ch_im_in * dim_kernel_y * dim_kernel_x + (ky * dim_kernel_x + kx) * ch_im_in; + +// for (kc = 0; kc < ch_im_in; kc++) +// { +// conv_out += Im_in[in_pix_loc + kc] * wt[wt_loc + kc]; +// } +// } +// } +// } + +// Im_out[oc + (oy * dim_im_out_x + ox) * ch_im_out] = (q7_t) __NNOM_SSAT((conv_out >> out_shift), 8); +// } +// } +// } +// } + +{ + int i, j, k, l, m, n; + int64_t conv_out; + int in_row, in_col; + int kernel_start_x,kernel_end_x; + int kernel_start_y,kernel_end_y; + int in_row_start, in_col_start; + int is_zero; + + for (i = 0; i < ch_im_out; i++) { + for (j = 0; j < dim_im_out_y; j++) { + is_zero = alg_deconv2d_calculate_position(j, stride_y, padding_y, dim_kernel_y, + dim_im_in_y, &in_row_start, &kernel_start_y, &kernel_end_y); + + if(is_zero) { + conv_out = ((q31_t)(bias[i]) << bias_shift) + NNOM_ROUND(out_shift); + conv_out = (q15_t) __NNOM_SSAT((conv_out >> out_shift), 16); + for (k = 0; k < dim_im_out_x; k++) { + Im_out[i + (j * dim_im_out_x + k) * ch_im_out] = (q15_t) conv_out; + } + continue; + } + + for (k = 0; k < dim_im_out_x; k++) { + conv_out = ((q31_t)(bias[i]) << bias_shift) + NNOM_ROUND(out_shift); + + is_zero = alg_deconv2d_calculate_position(k, stride_x, padding_x, dim_kernel_x, + dim_im_in_x, &in_col_start, &kernel_start_x, &kernel_end_x); + + if(is_zero) { + Im_out[i + (j * dim_im_out_x + k) * ch_im_out] = conv_out; + continue; + } + + for (m = kernel_start_y, in_row = in_row_start; m <= kernel_end_y; m+=stride_y, in_row++) { + for (n = kernel_start_x, in_col = in_col_start; n <= kernel_end_x; n+=stride_x, in_col++) { + if ((in_row >= 0) && (in_col >= 0) && + (in_row < dim_im_in_y) && (in_col < dim_im_in_x)) { + for (l = 0; l < ch_im_in; l++) { + conv_out += Im_in[(in_row * dim_im_in_x + in_col) * ch_im_in + l] * + wt[i * ch_im_in * dim_kernel_y * dim_kernel_x + (m * dim_kernel_x + n) * ch_im_in + l]; + } + } + } + 
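The transposed-convolution kernels in these files iterate over output positions and use `alg_deconv2d_calculate_position` to decide which input pixels and kernel taps contribute to each one. As background, for the common convention with symmetric padding and no extra output padding, the spatial sizes of a transposed convolution relate as in this small sketch (a general formula, not taken from this code):

```
/* Sketch only: usual output size of a transposed convolution with
 * stride s, kernel k, symmetric padding p and no output padding. */
static int deconv_out_dim(int dim_in, int k, int s, int p)
{
    return (dim_in - 1) * s + k - 2 * p;
}
/* e.g. dim_in = 7, k = 3, s = 2, p = 1  ->  (7 - 1) * 2 + 3 - 2 = 13 */
```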
} + + Im_out[i + (j * dim_im_out_x + k) * ch_im_out] = (q15_t) __NNOM_SSAT((conv_out >> out_shift), 16); + } + } + } +} + + + + +void local_depthwise_separable_conv_HWC_q15_nonsquare(const q15_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +{ + int i_out_y, i_out_x, i_ch_out, i_ch_in, i_ch_mult; + int i_ker_y, i_ker_x; + int i_out = 0; + int shift_idx; + int ch_mult = ch_im_out / ch_im_in; + int64_t conv_out; + + for (i_out_y = 0; i_out_y < dim_im_out_y; i_out_y++) + { + const int32_t base_idx_y = stride_y * i_out_y - padding_y; + for (i_out_x = 0; i_out_x < dim_im_out_x; i_out_x++) + { + const int32_t base_idx_x = stride_x * i_out_x - padding_x; + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for(i_ch_mult = 0; i_ch_mult < ch_mult; i_ch_mult++) + { + i_ch_out = i_ch_mult + i_ch_in * ch_mult; + int32_t ker_y_start = MAX(0, -base_idx_y); + int32_t ker_x_start = MAX(0, -base_idx_x); + int32_t ker_y_end = MIN(dim_kernel_y, dim_im_in_y - base_idx_y); + int32_t ker_x_end = MIN(dim_kernel_x, dim_im_in_x - base_idx_x); + + shift_idx = q_type == NNOM_QTYPE_PER_AXIS ? 
i_ch_out : 0; + if (bias) + conv_out = ((q31_t)(bias[i_ch_out]) << bias_shift[shift_idx]) + NNOM_ROUND(out_shift[shift_idx]); + else + conv_out = (q31_t)NNOM_ROUND(out_shift[shift_idx]); + + for (i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + i_ker_y * dilation_y; + for (i_ker_x = ker_x_start; i_ker_x < ker_x_end; i_ker_x++) + { + const int32_t idx_x = base_idx_x + i_ker_x * dilation_x; + int32_t in_pix_loc = (idx_y * dim_im_in_x + idx_x) * ch_im_in + i_ch_in; + int32_t wt_loc = (i_ker_y * dim_kernel_x + i_ker_x) * (ch_im_in * ch_mult) + i_ch_out; + conv_out += Im_in[in_pix_loc] * wt[wt_loc]; + } + } + Im_out[i_out++] = (q15_t)__NNOM_SSAT((conv_out >> out_shift[shift_idx]), 16); + } + } + } + } +} + +void local_depthwise_separable_conv_CHW_q15_nonsquare(const q15_t *Im_in,// input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const q7_t *wt, // kernel weights + const uint16_t ch_im_out, // number of filters, i.e., output image channels + const uint16_t dim_kernel_x, // filter kernel size x + const uint16_t dim_kernel_y, // filter kernel size y + const uint16_t padding_x, // padding sizes x + const uint16_t padding_y, // padding sizes y + const uint16_t stride_x, // stride x + const uint16_t stride_y, // stride y + const uint16_t dilation_x, // dilation x + const uint16_t dilation_y, // dilation y + const q7_t *bias, // bias + const nnom_qformat_param_t *bias_shift, // bias shifts + const nnom_qformat_param_t *out_shift, // output shift + const nnom_qtype_t q_type, // per channel or per tensor + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y, // output image dimension y + q15_t *bufferA, //buffer space for input + q7_t *bufferB //buffer space for output +) +{ + int i_out_y, i_out_x, i_ch_out, i_ch_in, i_ch_mult; + int i_ker_y, i_ker_x; + int shift_idx; + int ch_mult = ch_im_out / ch_im_in; + int64_t conv_out; + + for (i_out_y = 0; i_out_y < dim_im_out_y; i_out_y++) + { + const int32_t base_idx_y = stride_y * i_out_y - padding_y; + for (i_out_x = 0; i_out_x < dim_im_out_x; i_out_x++) + { + const int32_t base_idx_x = stride_x * i_out_x - padding_x; + for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) + { + for (i_ch_mult = 0; i_ch_mult < ch_mult; i_ch_mult++) + { + i_ch_out = i_ch_mult + i_ch_in * ch_mult; + int32_t ker_y_start = MAX(0, -base_idx_y); + int32_t ker_x_start = MAX(0, -base_idx_x); + int32_t ker_y_end = MIN(dim_kernel_y, dim_im_in_y - base_idx_y); + int32_t ker_x_end = MIN(dim_kernel_x, dim_im_in_x - base_idx_x); + + shift_idx = q_type == NNOM_QTYPE_PER_AXIS ? 
i_ch_out : 0; + if (bias) + conv_out = ((q31_t)(bias[i_ch_out]) << bias_shift[shift_idx]) + NNOM_ROUND(out_shift[shift_idx]); + else + conv_out = (q31_t)NNOM_ROUND(out_shift[shift_idx]); + + for (i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + i_ker_y * dilation_y; + for (i_ker_x = ker_x_start; i_ker_x < ker_x_end; i_ker_x++) + { + const int32_t idx_x = base_idx_x + i_ker_x * dilation_x; + int32_t in_pix_loc = (idx_y * dim_im_in_x + idx_x) + i_ch_in * dim_im_in_x * dim_im_in_y; + int32_t wt_loc = (i_ker_y * dim_kernel_x + i_ker_x) * ch_im_out + i_ch_out; + conv_out += Im_in[in_pix_loc] * wt[wt_loc]; + } + } + Im_out[i_ch_out * dim_im_out_x * dim_im_out_y + (i_out_y * dim_im_out_x + i_out_x)] = + (q15_t)__NNOM_SSAT((conv_out >> out_shift[shift_idx]), 16); + } + } + } + } + +} + +void local_zero_padding_HWC_q15(const q15_t *Im_in, // input image + const uint16_t dim_im_in_x, // input image dimention x + const uint16_t dim_im_in_y, // input image dimention y + const uint16_t ch_im_in, // number of input image channels + const uint16_t padding_top, // padding sizes y + const uint16_t padding_bottom, // padding sizes y + const uint16_t padding_left, // padding sizes x + const uint16_t padding_right, // padding sizes x + q15_t *Im_out, // output image + const uint16_t dim_im_out_x, // output image dimension x + const uint16_t dim_im_out_y) // output image dimension y +{ + int i, size; + q15_t * p_out = Im_out; + + // top rows + size = dim_im_out_x*ch_im_in*padding_top; + nnom_memset(p_out, 0, size*sizeof(q15_t)); + p_out += size; + + // middle + for(i=0; i> out_shift), 16); + } +} + +void local_dot_q15_opt(const q15_t * pV, + const q15_t * pM, + const uint16_t dim_vec, + const uint16_t num_of_rows, + const uint16_t out_shift, + q15_t * pOut) +{ + /* Run the following code as reference implementation for Cortex-M0 and Cortex-M3 */ + uint16_t rowCnt = num_of_rows >> 2; + const q15_t *pB = pM; + const q15_t *pA; + q15_t *pO = pOut; + + while (rowCnt) + { + int64_t sum = (q31_t) NNOM_ROUND(out_shift); + int64_t sum2 = (q31_t) NNOM_ROUND(out_shift); + int64_t sum3 = (q31_t) NNOM_ROUND(out_shift); + int64_t sum4 = (q31_t) NNOM_ROUND(out_shift); + uint16_t colCnt = dim_vec >> 1; + pA = pV; + while (colCnt) + { + q15_t inA1 = *pA++; + q15_t inA2 = *pA++; + q15_t inB1 = *pB++; + q15_t inB2 = *pB++; + sum += inA1 * inB1 + inA2 * inB2; + + inB1 = *pB++; + inB2 = *pB++; + sum2 += inA1 * inB1 + inA2 * inB2; + + inB1 = *pB++; + inB2 = *pB++; + sum3 += inA1 * inB1 + inA2 * inB2; + + inB1 = *pB++; + inB2 = *pB++; + sum4 += inA1 * inB1 + inA2 * inB2; + + colCnt--; + } + colCnt = dim_vec & 0x1; + while (colCnt) + { + q15_t inA = *pA++; + q15_t inB = *pB++; + sum += inA * inB; + inB = *pB++; + sum2 += inA * inB; + inB = *pB++; + sum3 += inA * inB; + inB = *pB++; + sum4 += inA * inB; + colCnt--; + } + *pO++ = (q15_t) __NNOM_SSAT((sum >> out_shift), 16); + *pO++ = (q15_t) __NNOM_SSAT((sum2 >> out_shift), 16); + *pO++ = (q15_t) __NNOM_SSAT((sum3 >> out_shift), 16); + *pO++ = (q15_t) __NNOM_SSAT((sum4 >> out_shift), 16); + + rowCnt--; + } + rowCnt = num_of_rows & 0x3; + + while (rowCnt) + { + int64_t ip_out = (q31_t) + NNOM_ROUND(out_shift); + int j; + + pA = pV; + for (j = 0; j < dim_vec; j++) + { + q15_t inA = *pA++; + q15_t inB = *pB++; + ip_out += inA * inB; + } + *pO++ = (q15_t) __NNOM_SSAT((ip_out >> out_shift), 16); + + rowCnt--; + } +} + +void local_fully_connected_mat_q7_vec_q15_opt(const q15_t * pV, + const q7_t * pM, + const uint16_t dim_vec, + const uint16_t 
num_of_rows, + const uint16_t bias_shift, + const uint16_t out_shift, + const q7_t * bias, + q15_t * pOut, + q15_t * vec_buffer) +{ + + (void)vec_buffer; + + /* Run the following code as reference implementation for Cortex-M0 and Cortex-M3 */ + uint16_t rowCnt = num_of_rows >> 2; + const q7_t *pB = pM; + const q15_t *pA; + q15_t *pO = pOut; + const q7_t *pBias = bias; + + while (rowCnt) + { + int64_t sum; + int64_t sum2; + int64_t sum3; + int64_t sum4; + uint16_t colCnt = dim_vec >> 1; + + // quick and dirty to support none bias fully connected + if(bias) + { + sum = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + sum2 = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + sum3 = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + sum4 = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + } + else + { + sum = (q31_t) NNOM_ROUND(out_shift); + sum2 = (q31_t) NNOM_ROUND(out_shift); + sum3 = (q31_t) NNOM_ROUND(out_shift); + sum4 = (q31_t) NNOM_ROUND(out_shift); + } + + pA = pV; + while (colCnt) + { + q15_t inA1 = *pA++; + q15_t inA2 = *pA++; + + q7_t inB1 = *pB++; + q7_t inB3 = *pB++; + q7_t inB2 = *pB++; + q7_t inB4 = *pB++; + + sum += inA1 * inB1 + inA2 * inB2; + sum2 += inA1 * inB3 + inA2 * inB4; + + inB1 = *pB++; + inB3 = *pB++; + inB2 = *pB++; + inB4 = *pB++; + + sum3 += inA1 * inB1 + inA2 * inB2; + sum4 += inA1 * inB3 + inA2 * inB4; + + colCnt--; + } + + colCnt = dim_vec & 0x1; + while (colCnt) + { + q15_t inA = *pA++; + q7_t inB = *pB++; + sum += inA * inB; + inB = *pB++; + sum2 += inA * inB; + inB = *pB++; + sum3 += inA * inB; + inB = *pB++; + sum4 += inA * inB; + + colCnt--; + } + *pO++ = (q15_t) __NNOM_SSAT((sum >> out_shift), 16); + *pO++ = (q15_t) __NNOM_SSAT((sum2 >> out_shift), 16); + *pO++ = (q15_t) __NNOM_SSAT((sum3 >> out_shift), 16); + *pO++ = (q15_t) __NNOM_SSAT((sum4 >> out_shift), 16); + + rowCnt--; + } + + rowCnt = num_of_rows & 0x3; + + while (rowCnt) + { + int64_t ip_out; + int j; + + // quick and dirty to support none bias fully connected + if(bias) + ip_out = ((q31_t)(*pBias++) << bias_shift) + NNOM_ROUND(out_shift); + else + ip_out = (q31_t)NNOM_ROUND(out_shift); + + pA = pV; + for (j = 0; j < dim_vec; j++) + { + q15_t inA = *pA++; + q7_t inB = *pB++; + ip_out += inA * inB; + } + *pO++ = (q15_t) __NNOM_SSAT((ip_out >> out_shift), 16); + + rowCnt--; + } +} + +void local_fully_connected_mat_q7_vec_q15(const q15_t * pV, + const q7_t * pM, + const uint16_t dim_vec, + const uint16_t num_of_rows, + const uint16_t bias_shift, + const uint16_t out_shift, + const q7_t * bias, + q15_t * pOut, + q15_t * vec_buffer) +{ + int i, j; + + // a quick solution for none-bias dot. + if(bias == NULL) + { + for (i = 0; i < num_of_rows; i++) + { + int64_t ip_out = (q31_t) NNOM_ROUND(out_shift); + for (j = 0; j < dim_vec; j++) + { + ip_out += pV[j] * pM[i * dim_vec + j]; + } + pOut[i] = (q15_t) __NNOM_SSAT((ip_out >> out_shift), 16); + } + } + else + { + for (i = 0; i < num_of_rows; i++) + { + int64_t ip_out = ((q31_t)(bias[i]) << bias_shift) + NNOM_ROUND(out_shift); + for (j = 0; j < dim_vec; j++) + { + ip_out += pV[j] * pM[i * dim_vec + j]; + } + pOut[i] = (q15_t) __NNOM_SSAT((ip_out >> out_shift), 16); + } + } +} + +// This softmax is a copy from ARM CMSIS implimentation as it was efficient and written in pure-C. 
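The q15 softmax that follows replaces `exp()` with powers of two built from shifts, which is why its comments reason in terms of `0x1 << shift` and an integer `output_base`. For reference, it approximates the ordinary floating-point softmax, roughly:

```
/* Reference only: floating-point softmax that the shift-based
 * fixed-point routine approximates. */
#include <math.h>

static void softmax_f32(const float *in, int n, float *out)
{
    float max = in[0], sum = 0.0f;
    for (int i = 1; i < n; i++)        /* subtract the max to keep expf() in range */
        if (in[i] > max) max = in[i];
    for (int i = 0; i < n; i++) {
        out[i] = expf(in[i] - max);
        sum += out[i];
    }
    for (int i = 0; i < n; i++)
        out[i] /= sum;                 /* normalise so the outputs sum to 1 */
}
```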
+// original implementation: https://github.com/ARM-software/CMSIS_5/blob/develop/CMSIS/NN/Source/SoftmaxFunctions/arm_softmax_q15.c +void local_softmax_q15(const q15_t * vec_in, const uint16_t dim_vec, q15_t * p_out) +{ + q31_t sum; + int16_t i; + uint8_t shift; + q31_t base; + base = -1 * 0x100000; + for (i = 0; i < dim_vec; i++) + { + if (vec_in[i] > base) + { + base = vec_in[i]; + } + } + + /* we ignore really small values + * anyway, they will be 0 after shrinking + * to q15_t + */ + base = base - 16; + + sum = 0; + + for (i = 0; i < dim_vec; i++) + { + if (vec_in[i] > base) + { + shift = (uint8_t)__NNOM_USAT(vec_in[i] - base, 5); + sum += 0x1 << shift; + } + } + + /* This is effectively (0x1 << 32) / sum */ + int64_t div_base = 0x100000000LL; + int output_base = (int32_t)(div_base / sum); + + /* Final confidence will be output_base >> ( 17 - (vec_in[i] - base) ) + * so 32768 (0x1<<15) -> 100% confidence when sum = 0x1 << 16, output_base = 0x1 << 16 + * and vec_in[i]-base = 16 + */ + for (i = 0; i < dim_vec; i++) + { + if (vec_in[i] > base) + { + /* Here minimum value of 17+base-vec[i] will be 1 */ + shift = (uint8_t)__NNOM_USAT(17+base-vec_in[i], 5); + p_out[i] = (q15_t) __NNOM_SSAT((output_base >> shift), 16); + } else + { + p_out[i] = 0; + } + } + +} + + +// hard sigmoid, +// y=-1 if x < -2.5 +// y=1 if x > 2.5 +// otherwise y = 0.2 * x + 0.5 (y=0.20315 * x + 0.5) +void local_hard_sigmoid_q15(q15_t *data, uint32_t size, int16_t dec_bit) +{ + int16_t limit = 2.5f * (1 << dec_bit)-1; + int16_t offset = 16384; // 0.5 * 32768 + int16_t mult = 6554; // 0.2 * 32768 + + // int bit >= 0 + for(int i=0; i= limit) + data[i] = 32767; + else + { + data[i] = ((int32_t)(data[i] * mult) >> dec_bit) + offset; + } + } + } + +// hard tanh +// y=-1 if x < -1 +// y=1 if x > 1 +// otherwise y = x +void local_hard_tanh_q15(q15_t *data, uint32_t size, int16_t dec_bit) +{ + int16_t int_bit = 15 - dec_bit; + int16_t limit = 1 << dec_bit; + + if(dec_bit == 15) + return; + + // int bit < 0 + if(int_bit < 0) + for(int i=0; i= limit) + data[i] = 32767; + else + { + data[i] = data[i] >> (-int_bit); + } + } + else + // int bit >= 0 + for(int i=0; i= limit) + data[i] = 32767; + else + { + data[i] = data[i] << int_bit; + } + } +} + +void local_relu_q15(q15_t *data, uint32_t size) +{ + uint32_t i; + + for (i = 0; i < size; i++) + { + if (data[i] < 0) + data[i] = 0; + } +} + +// alpha in q7 format with dec_bit=7 +void local_leaky_relu_q15(q15_t *data, q7_t alpha, uint32_t size) +{ + uint32_t i; + + for (i = 0; i < size; i++) + { + if (data[i] < 0) + { + data[i] = data[i] * alpha / 128; + } + } +} + +// alpha in q7 format with dec_bit=7 +// max and threshold has the same Q format with the activation +void local_adv_relu_q15(q15_t *data, q7_t negative_slope, q15_t max, q15_t threshold, uint32_t size) +{ + uint32_t i; + for (i = 0; i < size; i++) + { + // `f(x) = max_value` for `x >= max_value`, + // `f(x) = x` for `threshold <= x < max_value`, + // `f(x) = alpha * (x - threshold)` otherwise. + + if(data[i] > max) + data[i] = max; + if (data[i] < threshold) + data[i] = (data[i] - threshold) * negative_slope / 128; + } +} + +// ARM's CMSIS implementation. 
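`local_activation_q15`, defined next, evaluates sigmoid and tanh by indexing a precomputed table with the high bits of the input and linearly interpolating between adjacent entries using the remaining fractional bits. The interpolation step, in generic form (sketch only):

```
/* Sketch only: linear interpolation between two neighbouring table
 * entries y0 and y1, where frac/full_frac is the position in between. */
#include <stdint.h>

static int32_t lerp_q(int32_t y0, int32_t y1, int32_t frac, int32_t full_frac)
{
    return (y0 * (full_frac - frac) + y1 * frac) / full_frac;
}
/* frac = 0 returns y0, frac = full_frac returns y1. */
```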
+static void local_activation_q15(q15_t * data, uint32_t size, uint16_t int_width, const q15_t*lookup_table) +{ + uint32_t i = size; + q15_t *pIn = data; + q15_t *pOut = data; + uint16_t shift_size = 8 + 3 - int_width; + uint32_t bit_mask = 0x7FF >> int_width; + uint32_t full_frac = bit_mask + 1; + while (i) + { + q15_t out; + q15_t in = *pIn++; + q15_t frac = (uint32_t) in & bit_mask; + q15_t value = lookup_table[(uint8_t)(in >> shift_size)]; + if ((in >> shift_size) != 0x7f) + { + q15_t value2 = lookup_table[(uint8_t)(1 + ((uint8_t)(in >> shift_size)))]; + /* doing the interpolation here for better accuracy */ + out = ((q31_t) (full_frac - frac) * value + (q31_t) value2 * frac) >> shift_size; + } else + { + /* the largest positive value does not have a right side for linear interpolation */ + out = value; + } + *pOut++ = out; + i--; + } +} + +void local_sigmoid_q15(q15_t * data, uint32_t size, uint16_t int_width) +{ + local_activation_q15(data, size, int_width, nnom_sigmoid_table_q15); +} + +void local_tanh_q15(q15_t * data, uint32_t size, uint16_t int_width) +{ + local_activation_q15(data, size, int_width, nnom_tanh_table_q15); +} + +// matrix ops q15 +void local_mult_q15(q15_t *pSrcA, + q15_t *pSrcB, + q15_t *pDst, + const uint16_t out_shift, + uint32_t blockSize) +{ + uint32_t i; + + for (i = 0; i < blockSize; i++) + { + q31_t product = pSrcA[i] * pSrcB[i]; + pDst[i] = (q15_t) __NNOM_SSAT(((product + NNOM_ROUND(out_shift)) >> out_shift), 16); + } +} + +void local_add_q15(q15_t *pSrcA, + q15_t *pSrcB, + q15_t *pDst, + const uint16_t out_shift, + uint32_t blockSize) +{ + uint32_t i; + + for (i = 0; i < blockSize; i++) + { + q31_t sum = pSrcA[i] + pSrcB[i]; + pDst[i] = (q15_t) __NNOM_SSAT(((sum + NNOM_ROUND(out_shift)) >> out_shift), 16); + } +} + +void local_sub_q15(q15_t *pSrcA, + q15_t *pSrcB, + q15_t *pDst, + const uint16_t out_shift, + uint32_t blockSize) +{ + uint32_t i; + + for (i = 0; i < blockSize; i++) + { + q31_t sub = pSrcA[i] - pSrcB[i]; + pDst[i] = (q15_t) __NNOM_SSAT(((sub + NNOM_ROUND(out_shift)) >> out_shift), 16); + } +} + + +void local_multiple_add_q15( q15_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q15_t **p_src) +{ + uint32_t i, blk; + q31_t sum; + + for (i = 0; i < block_size; i++) + { + sum = 0; + for(blk=0; blk < num_block; blk++) + sum += p_src[blk][i]; + p_dst[i] = (q15_t) __NNOM_SSAT(((sum + NNOM_ROUND(out_shift)) >> out_shift), 16); + } +} + +void local_multiple_mult_q15( q15_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q15_t **p_src) +{ + uint32_t i, blk; + q63_t product; + + for (i = 0; i < block_size; i++) + { + product = 1; + for(blk=0; blk < num_block; blk++) + product *= p_src[blk][i]; + p_dst[i] = (q15_t) __NNOM_SSAT(((product + NNOM_ROUND(out_shift)) >> out_shift), 16); + } +} + +void local_multiple_sub_q15( q15_t *p_dst, + const int16_t out_shift, + uint32_t block_size, + uint32_t num_block, + q15_t **p_src) +{ + uint32_t i, blk; + q31_t sub; + + for (i = 0; i < block_size; i++) + { + sub = p_src[0][i]; + for(blk=1; blk < num_block; blk++) + sub -= p_src[blk][i]; + p_dst[i] = (q15_t) __NNOM_SSAT(((sub + NNOM_ROUND(out_shift)) >> out_shift), 16); + } +} + +// y = 1 - x +void local_1_minor_z_q15(q15_t* src, q15_t*des, uint16_t dec_bit, uint32_t size) +{ + int32_t one = (1 << dec_bit)-1; + for(int i=0; i +#include +#include +#include +#include "nnom.h" + +const char default_layer_names[][12] = DEFUALT_LAYER_NAMES; +const char default_activation_names[][8] = ACTIVATION_NAMES; 
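When `NNOM_USING_STATIC_MEMORY` is enabled, the allocator defined below hands out aligned slices of a caller-supplied buffer, so that buffer has to be registered before any model is built. A minimal usage sketch, assuming the usual NNoM flow where the converter script generates `nnom_model_create()` into `weights.h`; the buffer size here is an arbitrary example:

```
/* Sketch only: registering a static pool before creating the model.
 * "weights.h" is the file normally generated by the NNoM scripts and
 * provides nnom_model_create(); the 32 KiB size is application-specific. */
#include "nnom.h"
#include "weights.h"

static uint8_t nnom_pool[32 * 1024];
static nnom_model_t *model;

void app_nnom_init(void)
{
    nnom_set_static_buf(nnom_pool, sizeof(nnom_pool)); /* must precede model creation */
    model = nnom_model_create();                       /* layers are carved out of nnom_pool */
}
```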
+const char default_cell_names[][8] = DEFUALT_CELL_NAMES; +size_t nnom_memory_taken = 0; + +// local static functions (when libc/dynamic memory interfaces are not avaiable. ) +#ifdef NNOM_USING_STATIC_MEMORY +static uint8_t *nnom_static_buf = NULL; //pointer to static buffer +static size_t nnom_static_buf_size = 0; //static buf size +static size_t nnom_static_buf_curr = 0; +void nnom_set_static_buf(void* buf, size_t size) +{ + nnom_static_buf = buf; + nnom_static_buf_size = size; + nnom_static_buf_curr = 0; +} +void* nnom_malloc(size_t size) +{ + size = nnom_alignto(size, NNOM_ALIGN); + if(size + nnom_static_buf_curr < nnom_static_buf_size) + { + uint8_t* new_block = nnom_static_buf_curr + nnom_static_buf; + nnom_static_buf_curr += size; + return new_block; + } + else + { + if(nnom_static_buf_size == 0) + NNOM_LOG("Please set static memory using 'nnom_set_static_buf()' before calling model created."); + else + NNOM_LOG("No memory! Static buffer size(%d) not big enough, please increase buffer size!", + (uint32_t)nnom_static_buf_size); + return NULL; + } +} +void nnom_free(void* p){;} +#endif // NNOM_USING_STATIC_MEMORY + +void *nnom_mem(size_t size) +{ + size = nnom_alignto(size, NNOM_ALIGN); + void *p = nnom_malloc(size); + if (p) + { + nnom_memory_taken += size; //test + nnom_memset(p, 0, size); + } + return p; +} + +size_t nnom_mem_stat(void) +{ + return nnom_memory_taken; +} + +// get the size of an IO module +static size_t io_mem_size(nnom_layer_io_t *io) +{ + size_t size = 0; + if (io != NULL) + { + while (io) + { + size += tensor_size(io->tensor); + io = io->aux; + } + } + return size; +} + +size_t nnom_alignto(size_t value, uint32_t alignment) +{ + if (value % alignment == 0) + return value; + value += alignment - value % alignment; + return value; +} + +static nnom_layer_t *find_last(nnom_layer_t *layer) +{ + if (layer == NULL) + return NULL; + // iterate every layer until the last one on the list, then return the last instance + while (layer->out->hook.io != NULL) + layer = layer->out->hook.io->owner; + return layer; +} +// input start layer, return layer num +static uint32_t find_index(nnom_layer_t *start, nnom_layer_t *layer) +{ + uint32_t i = 1; + if (start == NULL) + return 0; + // iterate every layer until the last one on the list, then return the index number + while (start->out->hook.io != NULL) + { + i++; + if (layer == start) + return i; + start = start->out->hook.io->owner; + } + return 0; +} + +static nnom_status_t model_add(nnom_model_t *model, nnom_layer_t *layer) +{ + nnom_layer_t *last = NULL; + nnom_layer_t *curr = NULL; + + if (layer == NULL) + { + NNOM_LOG("Error: added a NULL layer, could be no memory while creating layer.\n"); + return NN_NO_MEMORY; + } + + last = find_last(model->head); + curr = layer; + + // when the layer list is empty, the find_last() return model->head. + if (last == NULL) + { + model->head = curr; + } + else + { + // hook the current layer with the last layer. + last->out->hook.io = curr->in; // hook IO + curr->in->hook.io = last->out; + } + return NN_SUCCESS; +} + +// find an available hook on the io module, normally used by output io module. +// input, the output io module that wants to hook on +// output, the new hook that added to the end of the hook list on the io +static nnom_layer_hook_t *allocate_hook(nnom_layer_io_t *io) +{ + nnom_layer_hook_t *hook; + if (io == NULL) + return NULL; + hook = &io->hook; + + // if the primary hook is empty, reture it directly. 
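+	// (Each io module carries one embedded hook as the head of the list; any further hooks
+	// form a singly linked list allocated on demand, so the common one-to-one connection
+	// needs no extra allocation at all.)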
+ if (hook->io == NULL) + { + return hook; + } + else + { + // find the empty place and allocate new hook for us + while (hook->next != NULL) + { + hook = hook->next; + } + hook->next = nnom_mem(sizeof(nnom_layer_hook_t)); + if (hook->next == NULL) + return NULL; + return hook->next; + } +} + +// to check if an io is hooked to other layer +// input the primary io of a layer's input or output +// return, the new io that added to the io list. +static nnom_layer_io_t *allocate_io(nnom_layer_io_t *io) +{ + if (io == NULL) + return NULL; + + // if the io is free to used + if (io->hook.io == NULL) + { + return io; + } + else + { + // find the empty place and allocate new hook for us + while (io->aux != NULL) + { + io = io->aux; + } + io->aux = nnom_mem(sizeof(nnom_layer_io_t)); + if (io->aux == NULL) + return NULL; + // the owner for new io is inherited + io->aux->owner = io->owner; + return io->aux; + } +} + +// hook the current layer to the input layer +// this function only to connect (single output layer) to (single input layer). +static nnom_layer_t *model_hook(nnom_layer_t *curr, nnom_layer_t *last) +{ + nnom_layer_io_t *curr_in_io; + nnom_layer_hook_t *last_io_hook; + + if (last == NULL || curr == NULL) + return NULL; + + // add a new hook to the output io of the last layer + last_io_hook = allocate_hook(last->out); + // add a new input io to the current layer's input list. + curr_in_io = allocate_io(curr->in); + + // manually hook them togeter. + last_io_hook->io = curr_in_io; + curr_in_io->hook.io = last->out; + + return curr; +} + +// merge a few layers using specified method +// num = the number of layer that will be merged +// method = functional layer such as (concat(), mult(), add(), sub()) +static nnom_layer_t *model_mergex(nnom_layer_t *method, int num, ...) +{ + nnom_layer_t *layer_in; + va_list valist; + + if (method == NULL) + return NULL; + + va_start(valist, num); + for (int i = 0; i < num; i++) + { + // get the input layer + layer_in = va_arg(valist, nnom_layer_t *); + model_hook(method, layer_in); + } + va_end(valist); + return method; +} + +// merge 2 input +// this is an older interface +// method = functional layer such as (concat(), mult(), add(), sub()) +static nnom_layer_t *model_merge(nnom_layer_t *method, nnom_layer_t *in1, nnom_layer_t *in2) +{ + return model_mergex(method, 2, in1, in2); +} + +// This api will merge activation to layer's actail to avoid the extra layer for activation +static nnom_layer_t *model_active(nnom_activation_t *act, nnom_layer_t *target) +{ + // simple and easy + target->actail = act; + return target; +} + +// when model=NULL, it create a new sequential model +nnom_model_t *new_model(nnom_model_t *model) +{ + nnom_model_t *m = model; + if (m == NULL) + { + m = nnom_mem(sizeof(nnom_model_t)); + m->is_allocated = true; + } + else + { + nnom_memset(m, 0, sizeof(nnom_model_t)); + m->is_allocated = false; + } + + // set methods + m->add = model_add; + m->hook = model_hook; + m->merge = model_merge; + m->mergex = model_mergex; + m->active = model_active; + + return m; +} + +static void io_tensor_delete(nnom_layer_io_t* io) +{ + while (io) + { + nnom_free(io->tensor); + io = io->aux; + } +} + +// delete all the aux hooks +// delete aux io only, keep the primary io. 
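+// (note: the primary io and its embedded hook are allocated together with the layer instance,
+// so they are released later by layer_delete(); only the heap-allocated aux io/hook nodes are freed here.)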
+static void io_list_delete(nnom_layer_io_t *io) +{ + nnom_layer_hook_t *hook, *next_hook; + nnom_layer_io_t *next_io; + while (io) + { + // store the next io + next_io = io->aux; + + // release hooks list first + hook = io->hook.next; + while (hook) + { + next_hook = hook->next; + nnom_free(hook); + hook = next_hook; + } + + // now we can release the aux io itself + // but if this io is the primary input/out of the layer, it will be freed with they layer's instance since they are allocated together. + if (io != io->owner->in && io != io->owner->out) + nnom_free(io); + + // next aux io + io = next_io; + } +} + +// there are 2 type of memory in a layer +// *primary memory* is allocated when a layer instance is created, they are created by layer API (Conv2D()...). +// it includes the layer instance, primary input, primary output, and an optional computational memory buffer instance +// each io module also has one primary hook. +// *secondary memory* are axiliary io modules, axiliary hooks and activations which created by model.xx() APIs (model.hook(), model.active()...) +// it includes the list of aux io modules, the list of aux hooks. +// +// Additionaly, layer's private free method must be called to free layer's private resources +// Such as activation instance passed to Activation() layer, and private memory allcated within Lambda layer. +// +// A layer is consist of a few io modules. primary io are allocated with layers instance. +// each of the io has a few hooks. primary hooks are included in the io module. +// so only "aux" hooks and ios need to be freed separately. +static void layer_delete(nnom_layer_t *layer) +{ + if (layer == NULL) + return; + + // call private free of the layer + if (layer->free) + layer->free(layer); + + // delete the tensors first. only input layer should delete input + if (layer->type == NNOM_INPUT) + io_tensor_delete(layer->in); + io_tensor_delete(layer->out); + + // release secondary memory on the layers. + // they are io lists and hooks list + io_list_delete(layer->in); + io_list_delete(layer->out); + + // release activations (it takes null too) + nnom_free(layer->actail); + + // release primary memory + nnom_free(layer); + return; +} + +void model_delete(nnom_model_t *m) +{ + nnom_layer_t *layer; + nnom_layer_t *next; + if (m == NULL) + return; + + // uses shortcut list to iterate the model, + // start from head + layer = m->head; + while (layer) + { + // get the next before releasing current + next = layer->shortcut; + // your term + layer_delete(layer); + // who's next! + layer = next; + } + + // free the memory blocks for the network's buffer + nnom_free(m->blocks->blk); + + // free model instance itself + if (m->is_allocated) + nnom_free(m); + else + nnom_memset(m, 0, sizeof(nnom_model_t)); + + nnom_memory_taken = 0; + return; +} + +// find an available memory block. +static nnom_mem_block_t *allocate_block(nnom_mem_block_t *list) +{ + nnom_mem_block_t *free = NULL; + uint32_t idx; + + for (idx = 0; idx < NNOM_BLOCK_NUM; idx++) + { + if (list[idx].owners == 0) + break; + } + if(idx == NNOM_BLOCK_NUM) + { + NNOM_LOG("\nERROR! 
No enough memory block for parallel buffers, please increase the 'NNOM_BLOCK_NUM' in 'nnom_port.h'\n"); + return NULL; + } + + free = &list[idx]; + return free; +} + +static void release_block(nnom_mem_block_t *block) +{ + if (block->owners > 0) + block->owners -= 1; + if (block->owners == 0) + block->state = NNOM_BUF_EMPTY; +} + +static void release_input_mem(nnom_layer_t *layer) +{ + nnom_layer_io_t *in; + // release all input of buf + in = layer->in; + while (in != NULL) + { + release_block(in->mem); + in = in->aux; + } +} +static void release_comp_mem(nnom_layer_t *layer) +{ + // release computational buf if exist + if (layer->comp != NULL) + { + release_block(layer->comp->mem); + } +} + +// return the length of the io lists +size_t nnom_io_length(nnom_layer_io_t *io) +{ + size_t num = 0; + if (io == NULL) + return 0; + while (io != NULL) + { + num++; + io = io->aux; + } + return num; +} + +// return the length of the hook lists +size_t nnom_hook_length(nnom_layer_hook_t *hook) +{ + size_t num = 0; + if (hook == NULL) + return 0; + while (hook != NULL) + { + num++; + hook = hook->next; + } + return num; +} + +// The shortcut version of find_last() method. +// must be used after compiling. +static nnom_layer_t *layer_shortcut_find_last(nnom_layer_t *start) +{ + nnom_layer_t *layer = start; + if (start == NULL) + return NULL; + while (layer->shortcut != NULL) + layer = layer->shortcut; + return layer; +} + +// call while compiling. +// the shorcut is for fast running and fast iliterating. +// simply link every layer as a list. ordered by its runing order +static nnom_status_t layer_shortcut_add(nnom_layer_t *start, nnom_layer_t *curr) +{ + nnom_layer_t *layer = start; + // first one, return + if (start == curr) + { + return NN_SUCCESS; + } + // find the end of the list, and add curr layer to the end of it. + while (layer->shortcut != NULL) + { + // if the layer is already in shortcut list, tell upper. 
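+		// (this guard keeps the same layer from being linked into the shortcut list twice
+		// when compile_layers() reaches it again through another output branch.)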
+ if (curr == layer) + return NN_ARGUMENT_ERROR; + layer = layer->shortcut; + } + layer->shortcut = curr; + + return NN_SUCCESS; +} + +// input the layer number, +static void print_layer_info(nnom_layer_t *layer, uint32_t layer_count) +{ + size_t in_size = io_mem_size(layer->in); + size_t out_size = io_mem_size(layer->out); + size_t compsize; + size_t mac = layer->stat.macc; + if (layer->comp != NULL) + compsize = layer->comp->size; + else + compsize = 0; + // names + if(layer->type != NNOM_RNN) + NNOM_LOG("#%-3d %-10s - ", layer_count, default_layer_names[layer->type]); + else + { + NNOM_LOG("#%-3d %-3s/", layer_count, default_layer_names[layer->type]); + NNOM_LOG("%-6s - ", default_cell_names[((nnom_rnn_layer_t*)layer)->cell->type]); + } + + // activations + if (layer->actail != NULL) + NNOM_LOG("%-8s - ", default_activation_names[layer->actail->type]); + else + NNOM_LOG(" - "); + + NNOM_LOG("("); + for (int i = 0; i < 3; i++) + { + if (layer->out->tensor->num_dim > i) + NNOM_LOG("%4d,", layer->out->tensor->dim[i]); + else + NNOM_LOG(" "); + } + NNOM_LOG(") "); + + // MAC operation + if(mac == 0) + NNOM_LOG(" "); + else if (mac < 10000) + NNOM_LOG("%7d ", (uint32_t)mac); + else if (mac < 1000*1000) + NNOM_LOG("%6dk ", (uint32_t)(mac/1000)); + else if (mac < 1000*1000*1000) + NNOM_LOG("%3d.%02dM ", (uint32_t)(mac/(1000*1000)), (uint32_t)(mac%(1000*1000)/(10*1000))); // xxx.xx M + else + NNOM_LOG("%3d.%02dG ", (uint32_t)(mac/(1000*1000*1000)), (uint32_t)(mac%(1000*1000*1000)/(10*1000*1000))); // xxx.xx G + + // memory + NNOM_LOG("(%6d,%6d,%6d)", (uint32_t)in_size, (uint32_t)out_size,(uint32_t) compsize); +} + +static void print_memory_block_info(nnom_mem_block_t *block_pool) +{ + // show the memory blocks's lifetime (number of owners) + NNOM_LOG(" "); + for (int i = 0; i < NNOM_BLOCK_NUM; i++) + { + if (i % 4 == 0) + NNOM_LOG(" "); + if (block_pool[i].owners) + NNOM_LOG("%d ", block_pool[i].owners); + else + NNOM_LOG("- "); + } + NNOM_LOG("\n"); +} + +// This is a nested called functions. +// to analyse the topology of the model, calculate the output_shape of each layer and create shortcut lists. +// Nest will happend when a layer have multiple output module or mutiple output hooks. +// This function will return when +// 1) if the layer has multiple input but not all of them are filled by last layers. returns NN_MORE_TODO +// 2) if all the output hooked are nested called. return NN_SUCCESS +// 3) if the layer is output layer. return NN_SUCCESS +nnom_status_t compile_layers(nnom_layer_t* first, nnom_layer_t *curr, nnom_mem_block_t *block_pool, uint32_t *layer_count) +{ + size_t mem_size = 0; + nnom_layer_t *layer = curr; + nnom_layer_io_t *in; + nnom_layer_io_t *out; + nnom_layer_hook_t *hook; + + nnom_mem_block_t *in_blk; + nnom_mem_block_t *out_blk; + + uint32_t local_layer_count = 1; + + if(layer_count == NULL) + layer_count = &local_layer_count; + + in = layer->in; + out = layer->out; + + while (layer) + { + // check input + in = layer->in; + + // check if this layer is the input layer + // the first layer has no input hooked, and the io is not initialized + if (in->hook.io == NULL) + { + // if the input is not initalized + if (in->mem == NULL) + { + in_blk = allocate_block(block_pool); + in_blk->owners += 1; // add 1 + mem_size = nnom_alignto(tensor_size(in->tensor), NNOM_ALIGN); + in_blk->size = mem_size > in_blk->size ? 
mem_size : in_blk->size; + // set the blk to the layer IO + in->mem = in_blk; + in->mem->state = NNOM_BUF_FILLED; //mark input buff filled + } + } + else + { + // get the mem for every input from its hooked output. + while (in != NULL) + { + in->mem = in->hook.io->mem; + in = in->aux; + } + } + + // if there are mutiple inputs, wait utill all blocks filled + in = layer->in; + if (in != NULL && in->aux != NULL) + { + while (in != NULL) + { + // if the mem (of its hooked output) is not allocated or is not filled. + // It not the time to run the layer yet, return and waits for next nested called. + if (in->mem == NULL || in->mem->state != NNOM_BUF_FILLED) + return NN_MORE_TODO; + in = in->aux; + } + } + + // if run to this point, then it is the right time to compile(run) this layer. + // compiling are seperated into the steps below. + // 1. to calculate the output shape. + // 2. to put the current layer to the end of shortcut list. + // 3. allocate computational buffer. + // 4. allocate output buffer for each output module. + // 5.1 if there is only one layer hooked to the output. we dont use nested call, but continue in this big while loop. + // 5.2 nested call the hooked output layers (if there are > 1 hooked to the output of this layer) + + // 1. calculate output shape while all inputs are filled + layer->build(layer); + + // 2. add to shortcut list. + layer_shortcut_add(first, layer); + + // 3. assign for computational buf + if (layer->comp != NULL) + { + layer->comp->mem = allocate_block(block_pool); + layer->comp->mem->owners += 1; // add us to buffer users + layer->comp->mem->state = NNOM_BUF_FILLED; + // record maximum mem size in this block + mem_size = nnom_alignto(layer->comp->size, NNOM_ALIGN); + layer->comp->mem->size = + mem_size > layer->comp->mem->size ? mem_size : layer->comp->mem->size; + } + + // print current layer's info. + // show names, activations, mem block size + print_layer_info(layer, (*layer_count)++); + + // 4. allocate output buffer for each output module. + // check output + if (layer->out == NULL) + return NN_SUCCESS; + + // 5.1 if there is only one layer hooked to the output. we dont use nested call, but continue in this big while loop. + // if the layer is Single Output, continue the loop directly. To reduce nested level + if (layer->out->aux == NULL && layer->out->hook.next == NULL) + { + // single buf layer. + if (layer->in->type == NNOM_TENSOR_BUF_NULL || layer->out->type == NNOM_TENSOR_BUF_NULL) + { + // pass to next layer directly, like we never touch the buffer(dont change life-time) + layer->out->mem = layer->in->mem; + + // print memory before release + print_memory_block_info(block_pool); + // computational buf + release_comp_mem(layer); + } + // not a single buf layer + else + { + // allocate mem block for the output + out_blk = allocate_block(block_pool); + if (out_blk == NULL) + return NN_NO_MEMORY; + // set the life time, only one hooked layer, so the life time is 1 + out_blk->owners = 1; + out_blk->state = NNOM_BUF_FILLED; // marked filled + // record maximum mem size in this block + mem_size = nnom_alignto(tensor_size(layer->out->tensor), NNOM_ALIGN); + out_blk->size = mem_size > out_blk->size ? mem_size : out_blk->size; + // set the blk to the layer IO + layer->out->mem = out_blk; + + // once we allocate for output, we can now release input and comput. 
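+				// ("release" here only decrements each block's owner count via release_block();
+				// the block becomes reusable by later layers once its owner count drops to zero.)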
+ // print memory before release + print_memory_block_info(block_pool); + // release input mem and comp mem + release_input_mem(layer); + release_comp_mem(layer); + } + } + // Multiple output and/or mutiple hooks + else + { + // single buf layer will use the input buf for the first output + if (layer->in->type == NNOM_TENSOR_BUF_NULL || layer->out->type == NNOM_TENSOR_BUF_NULL) + { + // we dont allocate new buf, but use the input + // the ownership will be set to next layer later + layer->out->mem = layer->in->mem; + layer->out->mem->owners += nnom_hook_length(&layer->out->hook); // set the mem lifetime.// test + layer->out->mem->state = NNOM_BUF_FILLED; + + // print memory before release + print_memory_block_info(block_pool); + // release computational buff and input buffer + release_input_mem(layer); + release_comp_mem(layer); + } + // mutiple buf layer. (I/O use different memory blocks) + else + { + // allocate for every output + out = layer->out; + while (out != NULL && out->hook.io != NULL) // the output layer have no output IO + { + // assign new block + out->mem = allocate_block(block_pool); + if (out->mem == NULL) + return NN_NO_MEMORY; + // record maximum mem size in this block + mem_size = nnom_alignto(tensor_size(out->tensor), NNOM_ALIGN); + out->mem->size = mem_size > out->mem->size ? mem_size : out->mem->size; + // keep the block untill the last hooked layer is called. + out->mem->owners = nnom_hook_length(&out->hook); // set lifetime of the buffer = the num of hooked layers + out->mem->state = NNOM_BUF_FILLED; + + out = out->aux; + } + // once we allocate for output, we can now release input and comput (or reduce the lifetime). + // print memory before release + print_memory_block_info(block_pool); + // release input mem and comp mem + release_input_mem(layer); + release_comp_mem(layer); + } + + // 5.12 nested call the hooked output layers (if there are > 1 hooked to the output of this layer) + // while all the out module(s) receive a memory block, it is ready to be sent to other layers. + // iterate all hooked layers in each out module. + out = layer->out; + while (out != NULL) + { + // nested call hooked layer one by one. + hook = &out->hook; + while (hook != NULL && hook->io != NULL) + { + compile_layers(first, hook->io->owner, block_pool, layer_count); + hook = hook->next; + } + + // next io + out = out->aux; + } + + // when all the out is called. this should stop here. + // once enter mutiple output iterating, the function will always return. + // because at least one of the nested called by this function will run till the end of the model. + return NN_SUCCESS; + } + // Multiple output ended. + + // return if this is output layer. + // the output layer's output io is hooked to nothing. + if (layer->out->hook.io == NULL) + return NN_SUCCESS; + + // single output layer, this function continue to analyse next layer. + // switch to next layer. 
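+		// (staying inside this while loop for single-output chains, instead of recursing,
+		// keeps the nesting depth tied to the number of branch points rather than the model depth.)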
+ layer = layer->out->hook.io->owner; + } + + // seems to be redundants + return NN_SUCCESS; +} + +size_t mem_analysis_result(nnom_model_t *m) +{ + uint32_t index; + uint32_t total_mem = 0; + NNOM_LOG("Memory cost by each block:\n "); + // print size of memory blocks + for (index = 0; index < NNOM_BLOCK_NUM; index++) + { + total_mem += m->blocks[index].size; + NNOM_LOG("blk_%d:%d ", index, (uint32_t)(m->blocks[index].size)); + } + // size of total memory cost by networks buffer + NNOM_LOG("\n Memory cost by network buffers: %d bytes\n", total_mem); + return total_mem; +} + +// allocate memory, and set them to each block according to the mem analysis results. +nnom_status_t block_mem_set(nnom_model_t *m, void *buf) +{ + uint32_t index; + uint32_t mem_offset = 0; + + for (index = 0; index < NNOM_BLOCK_NUM; index++) + { + if (m->blocks[index].size == 0) + break; + m->blocks[index].blk = (void *)((uint8_t*)buf + mem_offset); + mem_offset += m->blocks[index].size; + } + return NN_SUCCESS; +} + +// experimental: this function is temporary use to +// assign memory blk which has assigned to input and output to the corresponding tensor +nnom_status_t tensor_mem_set(nnom_model_t *m) +{ + nnom_layer_t *layer = m->head; + nnom_layer_io_t *io; + while (layer) + { + io = layer->in; + while (io) + { + io->tensor->p_data = io->mem->blk; + io = io->aux; + } + + io = layer->out; + while (io) + { + io->tensor->p_data = io->mem->blk; + io = io->aux; + } + + layer = layer->shortcut; + } + + return NN_SUCCESS; +} + +// this function has to be used after memory is assigned to the layers. +// it means it has to be call after compile_model() as well. +// it simply get the output buffer and set the buffer to tailed activation of each layer.. +nnom_status_t set_tailed_activation(nnom_model_t *m) +{ + NNOM_NULL_CHECK(m); + NNOM_NULL_CHECK(m->head); + nnom_layer_t *layer = m->head; + + // if tailed activation is exist, set it to the output. + while (layer) + { + if (layer->actail != NULL) + { + layer->actail->tensor = layer->out->tensor; + } + if (layer->shortcut == NULL) + break; + layer = layer->shortcut; + } + + return NN_SUCCESS; +} + +// get total ops +static uint64_t model_set_ops(nnom_model_t *m) +{ + nnom_layer_t *layer; + uint64_t total_ops = 0; + layer = m->head; + while (layer) + { + total_ops += layer->stat.macc; + if (layer->shortcut == NULL) + break; + layer = layer->shortcut; + } + m->total_ops = total_ops; + return total_ops; +} + +// a compiler can be use for both sequencial / functional model. +// the output layer is optional only when the model is single output model +// in this case, if output = NULL, the compile can find it by its own. 
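+// A minimal usage sketch (illustrative only; 'input_layer'/'output_layer' stand for layers built
+// with the NNoM layer constructors defined elsewhere in this patch):
+//   nnom_model_t *model = new_model(NULL);
+//   /* ...create layers, connect them with model->add()/model->hook()/model->merge()... */
+//   model_compile(model, input_layer, output_layer);   // functional model
+//   model_run(model);                                   // sequential models may use sequencial_compile(model) instead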
+nnom_status_t model_compile(nnom_model_t *m, nnom_layer_t *input, nnom_layer_t *output) +{ + size_t buf_size; + uint8_t *buf; + uint32_t layer_num = 1; + uint32_t time = nnom_ms_get(); + + NNOM_NULL_CHECK(m); + NNOM_NULL_CHECK(input); + + m->head = input; + m->tail = output; + if (output == NULL) + m->tail = find_last(input); + + NNOM_LOG("NNoM version %d.%d.%d\n", NNOM_MAJORVERSION, NNOM_SUBVERSION, NNOM_REVISION); + NNOM_LOG("To disable logs, please void the marco 'NNOM_LOG(...)' in 'nnom_port.h'.\n"); + #ifdef NNOM_USING_CHW + NNOM_LOG("Data format: Channel first (CHW)\n"); + #else + NNOM_LOG("Data format: Channel last (HWC)\n"); + #endif + #ifdef NNOM_USING_CMSIS_NN + NNOM_LOG("Backend optimization: CMSIS-NN\n"); + #endif + #ifdef NNOM_USING_STATIC_MEMORY + NNOM_LOG("Static memory size set to: %d\n", (uint32_t)nnom_static_buf_size); + #endif + NNOM_LOG("Start compiling model...\n"); + NNOM_LOG("Layer(#) Activation output shape ops(MAC) mem(in, out, buf) mem blk lifetime\n"); + NNOM_LOG("-------------------------------------------------------------------------------------------------\n"); + + // compile layers, started from list head, nested run till the end of models + compile_layers(m->head, m->head, m->blocks, &layer_num); + + NNOM_LOG("-------------------------------------------------------------------------------------------------\n"); + + // if model's tail is not the last layer which built by user. + if (output->type != NNOM_OUTPUT) + NNOM_LOG("WARNING: the last layer '%s' is not the Output Layer, please check carefully.\n", + default_layer_names[output->type]); + + // get the total (aligned) memory requirement + buf_size = mem_analysis_result(m); + + // allocate one big memory block + buf = nnom_mem(buf_size); + if (buf == NULL) + { + NNOM_LOG("ERROR: No enough memory for network buffer, required %d bytes\n", (uint32_t)buf_size); + return NN_NO_MEMORY; + } + // all memory cost + NNOM_LOG(" Total memory occupied: %d bytes\n", (uint32_t)nnom_memory_taken); + + // split the memory for every memory block + block_mem_set(m, buf); + + // experimental: set memory from io to the io tensor + tensor_mem_set(m); + + // finally set the output buff to tailed activation on each layer + set_tailed_activation(m); + + // calculate the total operations and set it to the model + model_set_ops(m); + + // print the time. + if(nnom_ms_get()) + NNOM_LOG("Compling done in %d ms\n", nnom_ms_get() - time); + + return NN_SUCCESS; +} + +// This is a simplified API for compile models with sequencial model only +// this does not require specified Input / Output layers +nnom_status_t sequencial_compile(nnom_model_t *m) +{ + nnom_layer_t *input, *output; + input = m->head; + output = find_last(input); + return model_compile(m, input, output); +} + +// run that layer +nnom_status_t layer_run(nnom_layer_t *layer) +{ + nnom_status_t result; + uint32_t start; + NNOM_NULL_CHECK(layer); + + // start + start = nnom_us_get(); + // run main layer first + result = layer->run(layer); + // run tailed-activation if it is presented + if (layer->actail != NULL) + { + layer->actail->run(layer->actail); + } + // done + layer->stat.time = nnom_us_get() - start; + return result; +} + +// run the model, until the end_layer. If end_layer == NULL, run all layers. 
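+// e.g. model_run_to(m, some_layer) stops right after 'some_layer', which is handy for inspecting an
+// intermediate layer's output buffer while debugging; model_run() below simply passes NULL.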
+nnom_status_t model_run_to(nnom_model_t *m, nnom_layer_t *end_layer) +{ + uint32_t layer_num = 1; + nnom_status_t result; + nnom_layer_t *layer; + NNOM_NULL_CHECK(m); + NNOM_NULL_CHECK(m->head); + + layer = m->head; + + // using shortcut run + while (layer) + { + // run layer + result = layer_run(layer); + if (result != NN_SUCCESS) + { + NNOM_LOG("Error: #%d %s layer return error code:%d\n", layer_num, default_layer_names[layer->type], result); + return result; + } + // run callback + if(m->layer_callback != NULL) + { + result = m->layer_callback(m, layer); + if (result != NN_SUCCESS) + { + NNOM_LOG("Error: Callback return error code %d at #%d %s layer\n", result, layer_num, default_layer_names[layer->type]); + return result; + } + } + // check if finished + if (layer == end_layer || layer->shortcut == NULL) + break; + layer = layer->shortcut; + layer_num++; + } + + return NN_SUCCESS; +} + +// run all layers. +nnom_status_t model_run(nnom_model_t *m) +{ + return model_run_to(m, NULL); +} + +// callback, called after each layer has finished the calculation. +nnom_status_t model_set_callback(nnom_model_t *m, nnom_status_t (*layer_callback)(nnom_model_t *m, nnom_layer_t *layer)) +{ + if(m->layer_callback != NULL && m->layer_callback != layer_callback) + return NN_LENGTH_ERROR; + + m->layer_callback = layer_callback; + return NN_SUCCESS; +} + +// delete callback. +void model_delete_callback(nnom_model_t *m) +{ + m->layer_callback = NULL; +} + +nnom_status_t check_model_version(unsigned long model_version) +{ + nnom_status_t result = NN_SUCCESS; + int32_t major, sub, rev; + major = model_version/10000; + sub = (model_version/100)%100; + rev = model_version % 100; + if(model_version != NNOM_VERSION) + { + NNOM_LOG("WARNING: model version %d.%d.%d dosen't match nnom version!\n", major, sub, rev); + result = -NN_ARGUMENT_ERROR; + } + else + { + NNOM_LOG("Model version: %d.%d.%d\n", major, sub, rev); + } + return result; +} + + diff --git a/APP_Framework/Framework/knowing/nnom/src/core/nnom_layers.c b/APP_Framework/Framework/knowing/nnom/src/core/nnom_layers.c new file mode 100644 index 000000000..dc059074a --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/core/nnom_layers.c @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" + +size_t shape_size(nnom_3d_shape_t *s) +{ + if (s == NULL) + return 0; + return s->h * s->w * s->c; +} + +nnom_3d_shape_t shape(size_t h, size_t w, size_t c) +{ + nnom_3d_shape_t s; + s.h = h; + s.w = w; + s.c = c; + return s; +} +nnom_3d_shape_t kernel(size_t h, size_t w) +{ + return shape(h, w, 1); +} +nnom_3d_shape_t stride(size_t h, size_t w) +{ + return shape(h, w, 1); +} +nnom_3d_shape_t dilation(size_t h, size_t w) +{ + return shape(h, w, 1); +} + +nnom_border_t border(size_t top, size_t bottom, size_t left, size_t right) +{ + nnom_border_t b; + b.top = top; + b.bottom = bottom; + b.left = left; + b.right = right; + return b; +} + +// this function has to be used while assign a io for a layer. +// because the io needs to know who is its owner. 
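+// e.g. layer->super.in = io_init(layer, in); as done by the layer constructors in this patch.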
+nnom_layer_io_t *io_init(void *owner_layer, nnom_layer_io_t *io) +{ + io->owner = (nnom_layer_t *)owner_layer; + return io; +} + +// this function is to add a new IO to current inited IO +// input, the targeted IO that the new IO will be added to +// output , the new IO +nnom_layer_io_t *io_add_aux(nnom_layer_io_t *targeted_io) +{ + nnom_layer_io_t *new_io; + // check if the targeted io is inited, and its aux = NULL + if (targeted_io == NULL || targeted_io->owner == NULL || targeted_io->aux != NULL) + return NULL; + // create new io, init it + new_io = nnom_mem(sizeof(nnom_layer_io_t)); + if (new_io == NULL) + return NULL; + // add to aux + targeted_io->aux = new_io; + return io_init(targeted_io->owner, new_io); +} diff --git a/APP_Framework/Framework/knowing/nnom/src/core/nnom_tensor.c b/APP_Framework/Framework/knowing/nnom/src/core/nnom_tensor.c new file mode 100644 index 000000000..55b3984ca --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/core/nnom_tensor.c @@ -0,0 +1,245 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + * 2019-02-14 Jianjia Ma Add layer.free() method. + */ + +#include +#include +#include +#include +#include "nnom.h" +#include "nnom_tensor.h" + + // tensor size +size_t tensor_size(nnom_tensor_t* t) +{ + size_t size = 0; + if (t != NULL) + { + size = t->dim[0]; + for (int i = 1; i < t->num_dim; i++) + size *= t->dim[i]; + } + return size; +} +size_t tensor_size_byte(nnom_tensor_t* t) +{ + return tensor_size(t)*t->bitwidth/8; +} + + +size_t tensor_get_num_channel(nnom_tensor_t* t) +{ + // this will need to be changed to support batch. +#ifdef NNOM_USING_CHW + // channel first + //return t->dim[0]; + return t->dim[t->num_dim -1]; // we are always using hwc to describe even our data is in CHW +#else + // channel last + return t->dim[t->num_dim -1]; +#endif +} + +// initialise/create new tensor +nnom_tensor_t* new_tensor(nnom_qtype_t type, uint32_t num_dim, uint32_t num_channel) +{ + nnom_tensor_t* t = NULL; + uint32_t q_len; + if(type == NNOM_QTYPE_PER_AXIS) + { + q_len = num_channel; + } + else if (type == NNOM_QTYPE_PER_TENSOR) + { + q_len = 1; + } + else + { + NNOM_LOG("ERROR: tensor type not specified\n"); + return NULL; + } + + t = nnom_mem(nnom_alignto(sizeof(nnom_tensor_t), NNOM_ALIGN) + + nnom_alignto(num_dim*sizeof(nnom_shape_data_t),sizeof(nnom_qformat_param_t)) + + q_len*sizeof(nnom_qformat_param_t)*2); + if(t == NULL) + return t; + t->dim = (nnom_shape_data_t*)((uint8_t*)t + sizeof(nnom_tensor_t)); // should add alignment + t->q_dec = (nnom_qformat_param_t*)((uint8_t*)t->dim + nnom_alignto(num_dim*sizeof(nnom_shape_data_t),sizeof(nnom_qformat_param_t))); + t->q_offset = (nnom_qformat_param_t*)((uint8_t*)t->q_dec + q_len*sizeof(nnom_qformat_param_t)); + t->num_dim = num_dim; + t->qtype = type; + + return t; +} + +void delete_tensor(nnom_tensor_t* t) +{ + if (t) + nnom_free(t); +} + +// set tensor by value +// for tensor with quantized type NNOM_QTYPE_PER_TENSOR +nnom_tensor_t* tensor_set_attr_v(nnom_tensor_t* t, + nnom_qformat_param_t dec_bit, nnom_qformat_param_t offset, nnom_shape_data_t* dim, uint32_t num_dim, uint8_t bitwidth) +{ + // copy dim + t->num_dim = num_dim; + nnom_memcpy(t->dim, dim, sizeof(nnom_shape_data_t) * num_dim); + + // bitwidth + t->bitwidth = bitwidth; + // copy the offset and q format + *(t->q_dec) = dec_bit; + *(t->q_offset) = offset; + return t; +} + + +// set tensor by 
pointer +// for tensor with quantized type NNOM_QTYPE_PER_AXIS +nnom_tensor_t* tensor_set_attr(nnom_tensor_t* t, + nnom_qformat_param_t*dec_bit, nnom_qformat_param_t *offset, nnom_shape_data_t* dim, uint32_t num_dim, uint8_t bitwidth) +{ + size_t size; + + // copy dim + t->num_dim = num_dim; + nnom_memcpy(t->dim, dim, sizeof(nnom_shape_data_t) * num_dim); + + // get the q format data size + if(t->qtype == NNOM_QTYPE_PER_AXIS) + size = sizeof(nnom_qformat_param_t) * tensor_get_num_channel(t); + else + size = sizeof(nnom_qformat_param_t); + + // bitwidth + t->bitwidth = bitwidth; + // copy the offset and q format + nnom_memcpy(t->q_dec, dec_bit, size); + nnom_memcpy(t->q_offset, offset, size); + return t; +} + +// this method copy the attributes of a tensor to a new tensor +// before that, src and des tensor must already have QTYPE and NUM_OF_DIM set. +// Note, the tensors must have the same lenght. this method wont cpy the memory pointer data (we will assign memory later after building) +nnom_tensor_t* tensor_cpy_attr(nnom_tensor_t* des, nnom_tensor_t* src) +{ + size_t size; + if(src->qtype != des->qtype || src->num_dim != des->num_dim) + return NULL; + + if(src->qtype == NNOM_QTYPE_PER_AXIS) + size = sizeof(nnom_qformat_param_t) * tensor_get_num_channel(src); + else + size = sizeof(nnom_qformat_param_t); + + // bit + des->bitwidth = src->bitwidth; + // copy quantisation parameters + nnom_memcpy(des->q_dec, src->q_dec, size); + nnom_memcpy(des->q_offset, src->q_offset, size); + + // copy number of dimension + des->num_dim = src->num_dim; + nnom_memcpy(des->dim, src->dim, src->num_dim * sizeof(nnom_shape_data_t)); + return des; +} + +// change format from CHW to HWC +// the shape of the data, input data, output data +void tensor_hwc2chw_q7(nnom_tensor_t* des, nnom_tensor_t* src) +{ + q7_t* p_out = des->p_data; + q7_t* p_in = src->p_data; + + for (int c = 0; c < src->dim[2]; c++) + { + for (int h = 0; h < src->dim[0]; h++) + { + for (int w = 0; w < src->dim[1]; w++) + { + *p_out = p_in[(h * src->dim[1] + w) * src->dim[2] + c]; + p_out++; + } + } + } +} + + +// only support 3d tensor +// change format from CHW to HWC +void tensor_chw2hwc_q7(nnom_tensor_t* des, nnom_tensor_t* src) +{ + q7_t* p_out = des->p_data; + q7_t* p_in = src->p_data; + int im_size; + int h_step; + + im_size = src->dim[0] * src->dim[1]; // H*W + + for (int h = 0; h < src->dim[0]; h++) + { + h_step = src->dim[1] * h; + for (int w = 0; w < src->dim[1]; w++) + { + for (int c = 0; c < src->dim[2]; c++) + { + *p_out = p_in[im_size * c + h_step + w]; + p_out++; + } + } + } + +} + +// (deprecated by tensor_hwc2chw version) +// change format from CHW to HWC +// the shape of the data, input data, output data +void hwc2chw_q7(nnom_3d_shape_t shape, q7_t* p_in, q7_t* p_out) +{ + for (int c = 0; c < shape.c; c++) + { + for (int h = 0; h < shape.h; h++) + { + for (int w = 0; w < shape.w; w++) + { + *p_out = p_in[(h * shape.w + w) * shape.c + c]; + p_out++; + } + } + } +} + +// (deprecated) +// change format from CHW to HWC +// the shape of the data, input data, output data +void chw2hwc_q7(nnom_3d_shape_t shape, q7_t* p_in, q7_t* p_out) +{ + int im_size = shape.w * shape.h; + int h_step; + + for (int h = 0; h < shape.h; h++) + { + h_step = shape.w * h; + for (int w = 0; w < shape.w; w++) + { + for (int c = 0; c < shape.c; c++) + { + *p_out = p_in[im_size * c + h_step + w]; + p_out++; + } + } + } +} diff --git a/APP_Framework/Framework/knowing/nnom/src/core/nnom_utils.c b/APP_Framework/Framework/knowing/nnom/src/core/nnom_utils.c new 
file mode 100644 index 000000000..3b13c3551 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/core/nnom_utils.c @@ -0,0 +1,417 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-02-05 Jianjia Ma The first version + */ + +#include +#include +#include +#include +#include "nnom.h" +#include "nnom_utils.h" + +static nnom_predict_t *_predict_create_instance(nnom_model_t *m, size_t label_num, size_t top_k_size) +{ + nnom_predict_t *pre; + // allocate memory + pre = (nnom_predict_t *)nnom_malloc(sizeof(nnom_predict_t)); + if(pre == NULL) + return NULL; + pre->top_k = (uint32_t *)nnom_malloc(top_k_size * sizeof(uint32_t)); + pre->confusion_mat = (uint16_t *)nnom_malloc(label_num * label_num * sizeof(uint16_t)); + if(pre->top_k == NULL || pre->confusion_mat == NULL) + { + nnom_free(pre->top_k); nnom_free(pre->confusion_mat); nnom_free(pre); + return NULL; + } + nnom_memset(pre->top_k, 0, top_k_size * sizeof(uint32_t)); + nnom_memset(pre->confusion_mat, 0, label_num * label_num * sizeof(uint16_t)); + + // config + pre->label_num = label_num; + pre->top_k_size = top_k_size; + pre->predict_count = 0; + + // run + pre->model = m; + pre->t_run_total = 0; // model running time in total + pre->t_predict_start = 0; // when it is initial + pre->t_predict_total = 0; // total time of the whole test + + return pre; +} + +static void _predict_delete_instance(nnom_predict_t *pre) +{ + if(pre == NULL) + return; + nnom_free(pre->top_k); + nnom_free(pre->confusion_mat); + nnom_free(pre); +} + +// create a prediction +// input model, the buf pointer to the softwmax output (Temporary, this can be extract from model) +// the size of softmax output (the num of lable) +// the top k that wants to record. +nnom_predict_t *prediction_create(nnom_model_t *m, int8_t *buf_prediction, size_t label_num, size_t top_k_size) +{ + nnom_predict_t *pre = _predict_create_instance(m, label_num, top_k_size); + if (!pre) + return NULL; + if (!m) + { + _predict_delete_instance(pre); + return NULL; + } + + // set the output buffer of model to the prediction instance + pre->buf_prediction = buf_prediction; + + // mark start time. + pre->t_predict_start = nnom_ms_get(); + + return pre; +} + +// after a new data is set in input +// feed data to prediction +// input the current label, (range from 0 to total number of label -1) +// (the current input data should be set by user manully to the input buffer of the model.) +nnom_status_t prediction_run(nnom_predict_t *pre, uint32_t true_label, uint32_t*predict_label, float* prob) +{ + int max_val; + int max_index; + uint32_t true_ranking = 0; + uint32_t start; + uint32_t sum = 0; + + if (!pre) + return NN_ARGUMENT_ERROR; + + // now run model + start = nnom_ms_get(); + model_run(pre->model); + pre->t_run_total += nnom_ms_get() - start; + + // only draw matrix and top k when number of label > 1 + if (pre->label_num > 1) + { + // find how many prediction is bigger than the ground true. + // Raning rules, same as tensorflow. however, predictions in MCU is more frequencly to have equal probability since it is using fixed-point. + // if ranking is 1, 2, =2(true), 4, 5, 6. the result will be top 3. + // if ranking is 1, 2(true), =2, 4, 5, 6. the result will be top 2. + // find the ranking of the prediced label. 
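+	// (i.e. count how many other labels scored higher than the true label, breaking score ties
+	// in favour of the smaller index; the result is the true label's 0-based rank.)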
+ for (uint32_t j = 0; j < pre->label_num; j++) + { + if (j == true_label) + continue; + if (pre->buf_prediction[true_label] < pre->buf_prediction[j]) + true_ranking++; + // while value[label] = value[j]. only when label > j, label is the second of j + else if (pre->buf_prediction[true_label] == pre->buf_prediction[j] && j < true_label) + true_ranking++; + } + + if (true_ranking < pre->top_k_size) + pre->top_k[true_ranking]++; + + // Find top 1 and return the current prediction. + // If there are several maximum prediction, return the first one. + max_val = pre->buf_prediction[0]; + max_index = 0; + for (uint32_t j = 1; j < pre->label_num; j++) + { + if (pre->buf_prediction[j] > max_val) + { + max_val = pre->buf_prediction[j]; + max_index = j; + } + sum += pre->buf_prediction[j]; + } + // result + if (max_val != 0) + *prob = (float)max_val / 127.f; + else + *prob = 0; + *predict_label = max_index; + + // fill confusion matrix + pre->confusion_mat[true_label * pre->label_num + max_index] += 1; + } + // only one neural as output. + else + { + *prob = (float)pre->buf_prediction[0] / 127.f; + if (*prob >= 0.5f) + *predict_label = 1; + else + *predict_label = 0; + } + + // prediction count + pre->predict_count++; + + // return the prediction + return NN_SUCCESS; +} + +void prediction_end(nnom_predict_t *pre) +{ + if (!pre) + return; + pre->t_predict_total = nnom_ms_get() - pre->t_predict_start; +} + +void prediction_delete(nnom_predict_t *pre) +{ + _predict_delete_instance(pre); +} + +void prediction_matrix(nnom_predict_t *pre) +{ + if (!pre) + return; + // print titles + NNOM_LOG("\nConfusion matrix:\n"); + NNOM_LOG("predict"); + for (int i = 0; i < pre->label_num; i++) + { + NNOM_LOG("%6d", i); + } + NNOM_LOG("\n"); + NNOM_LOG("actual\n"); + // print the matrix + for (int i = 0; i < pre->label_num; i++) + { + uint32_t row_total = 0; + + NNOM_LOG(" %3d | ", i); + for (int j = 0; j < pre->label_num; j++) + { + row_total += pre->confusion_mat[i * pre->label_num + j]; + NNOM_LOG("%6d", pre->confusion_mat[i * pre->label_num + j]); + } + NNOM_LOG(" |%4d%%\n", pre->confusion_mat[i * pre->label_num + i] * 100 / row_total); + row_total = 0; + } + NNOM_LOG("\n"); +} + +// top-k +void prediction_top_k(nnom_predict_t *pre) +{ + uint32_t top = 0; + if (!pre) + return; + + for (int i = 0; i < pre->top_k_size; i++) + { + top += pre->top_k[i]; + if (top != pre->predict_count) + NNOM_LOG("Top %d Accuracy: %d.%02d%% \n", i + 1, (top * 100) / pre->predict_count, + ((top * 100 * 100) / pre->predict_count)%100); + else + NNOM_LOG("Top %d Accuracy: 100%% \n", i + 1); + } +} + +// this function is to print sumarry +void prediction_summary(nnom_predict_t *pre) +{ + if (!pre) + return; + // sumamry + NNOM_LOG("\nPrediction summary:\n"); + NNOM_LOG("Test frames: %d\n", pre->predict_count); + NNOM_LOG("Test running time: %d sec\n", pre->t_predict_total / 1000); + NNOM_LOG("Model running time: %d ms\n", pre->t_run_total); + if(pre->predict_count !=0) + NNOM_LOG("Average prediction time: %d us\n", (pre->t_run_total * 1000) / pre->predict_count); + if(pre->t_run_total != 0) + NNOM_LOG("Average effeciency: %d.%02d ops/us\n", (int)(((uint64_t)pre->model->total_ops * pre->predict_count) / (pre->t_run_total * 1000)), + (int)(((uint64_t)pre->model->total_ops * pre->predict_count)*100 / (pre->t_run_total * 1000))%100); + if(pre->t_run_total !=0 && pre->predict_count !=0) + NNOM_LOG("Average frame rate: %d.%d Hz\n", 1000 / (pre->t_run_total / pre->predict_count), + (1000*10 / (pre->t_run_total / pre->predict_count))%10); + + // 
only valid for multiple labels + if(pre->label_num > 1) + { + // print top-k + prediction_top_k(pre); + + // print confusion matrix + prediction_matrix(pre); + } +} + +// stand alone prediction API +// this api test one set of data, return the prediction +nnom_status_t nnom_predict(nnom_model_t *m, uint32_t *label, float *prob) +{ + int32_t max_val, max_index, sum; + int8_t *output; + + if (!m) + return NN_ARGUMENT_ERROR; + + model_run(m); + + // get the output memory + output = m->tail->out->tensor->p_data; + + // multiple neural output + if (tensor_size(m->tail->out->tensor) > 1) + { + // Top 1 + max_val = output[0]; + max_index = 0; + sum = max_val; + for (uint32_t i = 1; i < tensor_size(m->tail->out->tensor); i++) + { + if (output[i] > max_val) + { + max_val = output[i]; + max_index = i; + } + sum += output[i]; + } + // send results + *label = max_index; + if(max_val !=0) + *prob = (float)max_val/127.f; + else + *prob = 0; + } + // single neural output + else + { + *prob = (float)output[0] / 127.f; + if (*prob >= 0.5f) + *label = 1; + else + *label = 0; + } + + return NN_SUCCESS; +} + +static void layer_stat(nnom_layer_t *layer) +{ + // layer stat + if(layer->type != NNOM_RNN) + NNOM_LOG("%-10s - ", default_layer_names[layer->type]); + else + { + NNOM_LOG("%-3s/", default_layer_names[layer->type]); + NNOM_LOG("%-6s - ", default_cell_names[((nnom_rnn_layer_t*)layer)->cell->type]); + } + NNOM_LOG(" %8d ", layer->stat.time); + + // MAC operation + if(layer->stat.macc == 0) + NNOM_LOG(" "); + else if (layer->stat.macc < 10000) + NNOM_LOG("%7d ", (uint32_t)layer->stat.macc); + else if (layer->stat.macc < 1000*1000) + NNOM_LOG("%6dk ", (uint32_t)(layer->stat.macc/1000)); + else if (layer->stat.macc < 1000*1000*1000) + NNOM_LOG("%3d.%02dM ", (uint32_t)(layer->stat.macc/(1000*1000)), (uint32_t)(layer->stat.macc%(1000*1000)/(10*1000))); // xxx.xx M + else + NNOM_LOG("%3d.%02dG ", (uint32_t)(layer->stat.macc/(1000*1000*1000)), (uint32_t)(layer->stat.macc%(1000*1000*1000)/(10*1000*1000))); // xxx.xx G + + // layer efficiency + if (layer->stat.macc != 0 && layer->stat.time != 0) + NNOM_LOG("%d.%02d\n", (uint32_t)(layer->stat.macc / layer->stat.time), (uint32_t)((layer->stat.macc * 100) / (layer->stat.time) % 100)); + else + NNOM_LOG("\n"); +} + +void model_stat(nnom_model_t *m) +{ + size_t total_ops = 0; + size_t total_time = 0; + nnom_layer_t *layer; + uint32_t run_num = 0; + + if (!m) + return; + + layer = m->head; + + NNOM_LOG("\nPrint running stat..\n"); + NNOM_LOG("Layer(#) - Time(us) ops(MACs) ops/us \n"); + NNOM_LOG("--------------------------------------------------------\n"); + while (layer) + { + run_num++; + NNOM_LOG("#%-3d", run_num); + total_ops += layer->stat.macc; + total_time += layer->stat.time; + layer_stat(layer); + if (layer->shortcut == NULL) + break; + layer = layer->shortcut; + } + NNOM_LOG("\nSummary:\n"); + NNOM_LOG("Total ops (MAC): %d", (uint32_t)(total_ops)); + NNOM_LOG("(%d.%02dM)\n", (uint32_t) (total_ops/(1000*1000)), (uint32_t)(total_ops%(1000*1000)/(10000))); + NNOM_LOG("Prediction time :%dus\n", (uint32_t)total_time); + if(total_time != 0) + NNOM_LOG("Efficiency %d.%02d ops/us\n", + (uint32_t)(total_ops / total_time), + (uint32_t)((total_ops * 100) / (total_time) % 100)); + + NNOM_LOG("Total memory:%d\n", (uint32_t)nnom_mem_stat()); +} + +void model_io_format(nnom_model_t *m) +{ + nnom_layer_t *layer; + uint32_t run_num = 0; + + if (!m) + return; + + layer = m->head; + + NNOM_LOG("\nPrint layer input/output..\n"); + NNOM_LOG("Layer(#) - Input(Qnm) Output(Qnm) 
Oshape \n"); + NNOM_LOG("----------------------------------------------------------\n"); + while (layer) + { + run_num++; + NNOM_LOG("#%-3d", run_num); + if(layer->type != NNOM_RNN) + NNOM_LOG("%-10s - ", default_layer_names[layer->type]); + else + { + NNOM_LOG("%-3s/", default_layer_names[layer->type]); + NNOM_LOG("%-6s - ", default_cell_names[((nnom_rnn_layer_t*)layer)->cell->type]); + } + NNOM_LOG(" %2d.%2d", 7-layer->in->tensor->q_dec[0], layer->in->tensor->q_dec[0]); + NNOM_LOG(" %2d.%2d", 7-layer->out->tensor->q_dec[0], layer->out->tensor->q_dec[0]); + NNOM_LOG(" ("); + for (int i = 0; i < 3; i++) + { + if (layer->out->tensor->num_dim > i) + NNOM_LOG("%4d,", layer->out->tensor->dim[i]); + else + NNOM_LOG(" "); + } + NNOM_LOG(")\n"); + + if (layer->shortcut == NULL) + break; + layer = layer->shortcut; + } + +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_activation.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_activation.c new file mode 100644 index 000000000..c90171c77 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_activation.c @@ -0,0 +1,369 @@ + + +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_activation.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_layer_t *Activation(nnom_activation_t *act) +{ + nnom_activation_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_activation_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_activation_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_ACTIVATION; + layer->super.run = activation_run; + layer->super.build = default_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_NULL; // when a layer's io is set to NULL, both will point to same mem. + // put in & out on the layer. 
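+	// (io_init() from nnom_layers.c records this layer as the owner of each io module.)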
+ layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + + // set activation to layer + layer->act = act; + + // set free method + layer->super.free = activation_free; + + return (nnom_layer_t *)layer; +} + +nnom_layer_t *ReLU(void) +{ + nnom_layer_t *layer = Activation(act_relu()); + if (layer == NULL) + return NULL; + + // set type in layer parent + layer->type = NNOM_RELU; + return layer; +} + +nnom_layer_t *LeakyReLU(float alpha) +{ + nnom_layer_t *layer = Activation(act_leaky_relu(alpha)); + if (layer == NULL) + return NULL; + + // set type in layer parent + layer->type = NNOM_LEAKY_RELU; + return layer; +} + +nnom_layer_t *AdvReLU(float alpha, float max, float threshold) +{ + nnom_layer_t *layer = Activation(act_adv_relu(alpha, max, threshold)); + if (layer == NULL) + return NULL; + + // set type in layer parent + layer->type = NNOM_ADV_RELU; + return layer; +} + +nnom_layer_t *Sigmoid(int32_t dec_bit) +{ + nnom_layer_t *layer = Activation(act_sigmoid(dec_bit)); + if (layer == NULL) + return NULL; + + // set type in layer parent + layer->type = NNOM_SIGMOID; + return layer; +} + +nnom_layer_t *TanH(int32_t dec_bit) +{ + nnom_layer_t *layer = Activation(act_tanh(dec_bit)); + if (layer == NULL) + return NULL; + // set type in layer parent + layer->type = NNOM_TANH; + return layer; +} + +void act_delete(nnom_activation_t* act){ + nnom_free(act); +} + +// activation takes act instance which is created. therefore, it must be free when activation is deleted. +// this is the callback in layer->free +nnom_status_t activation_free(nnom_layer_t *layer) +{ + if(layer) + act_delete(((nnom_activation_layer_t *)layer)->act); + return NN_SUCCESS; +} + +nnom_status_t activation_run(nnom_layer_t *layer) +{ + nnom_activation_layer_t *cl = (nnom_activation_layer_t *)layer; + return act_tensor_run(cl->act, layer->in->tensor); +} + +// porting +static nnom_status_t relu_run(nnom_activation_t* act) +{ + if(act->tensor->bitwidth == 16) + { + #ifdef NNOM_USING_CMSIS_NN + arm_relu_q15(act->tensor->p_data, tensor_size(act->tensor)); + #else + local_relu_q15(act->tensor->p_data, tensor_size(act->tensor)); + #endif + } + else + { + #ifdef NNOM_USING_CMSIS_NN + arm_relu_q7(act->tensor->p_data, tensor_size(act->tensor)); + #else + local_relu_q7(act->tensor->p_data, tensor_size(act->tensor)); + #endif + } + return NN_SUCCESS; +} + +// leaky relu +static nnom_status_t leaky_relu_run(nnom_activation_t* act) +{ + nnom_activation_leaky_relu_t* a = (nnom_activation_leaky_relu_t*) act; + if(act->tensor->bitwidth == 16) + local_leaky_relu_q15(act->tensor->p_data, a->alpha, tensor_size(act->tensor)); + else + local_leaky_relu_q7(act->tensor->p_data, a->alpha, tensor_size(act->tensor)); + return NN_SUCCESS; +} + +// advance relu +static nnom_status_t adv_relu_run(nnom_activation_t* act) +{ + nnom_activation_adv_relu_t* a = (nnom_activation_adv_relu_t*) act; + + // we need to convert float to fixpoint in runtime where we can know the tensor's q format + if(act->tensor->bitwidth == 16) + { + q15_t max = 32767; + q15_t threshold = MIN(a->threshold * (1 << (15 - act->tensor->q_dec[0])), 32767); + q7_t max_scale = (1 << (15 - act->tensor->q_dec[0])); + if(a->max != INFINITY && a->max != 0x7fc00000) + if(a->max * max_scale < max) + max = a->max * max_scale; + local_adv_relu_q15(act->tensor->p_data, a->negative_slope, max, threshold, tensor_size(act->tensor)); + } + // 8bit + else + { + q7_t max = 127; + q7_t threshold = MIN(a->threshold * (1 << (7 - act->tensor->q_dec[0])), 127); + q7_t max_scale = (1 
<< (7 - act->tensor->q_dec[0])); + if(a->max != INFINITY && a->max != 0x7fc00000) // QNAN 0x7fc00000 also represent infinity in script 0.4.1 + if(a->max * max_scale < max) + max = a->max * max_scale; + local_adv_relu_q7(act->tensor->p_data, a->negative_slope, max, threshold, tensor_size(act->tensor)); + } + + return NN_SUCCESS; +} + +static nnom_status_t tanh_run(nnom_activation_t* act) +{ + nnom_activation_fixed_q_t * a = (nnom_activation_fixed_q_t*)act; + // 16 bit + if(act->tensor->bitwidth == 16) + { + uint8_t int_bit = 15 - a->dec_bit; + #ifdef NNOM_USING_CMSIS_NN + arm_nn_activations_direct_q15(act->tensor->p_data, tensor_size(act->tensor), int_bit, ARM_TANH); + #else + local_tanh_q15(act->tensor->p_data, tensor_size(act->tensor), int_bit); + #endif + } + else // 8bit + { + uint8_t int_bit = 7 - a->dec_bit; + // arm version cannot handle int_bit > 3 + #ifdef NNOM_USING_CMSIS_NN + if(act->tensor->q_dec[0] <= 3) + arm_nn_activations_direct_q7(act->tensor->p_data, tensor_size(act->tensor), int_bit, ARM_TANH); + else + #endif + local_tanh_q7(act->tensor->p_data, tensor_size(act->tensor), int_bit); + } + return NN_SUCCESS; +} + +static nnom_status_t sigmoid_run( nnom_activation_t* act) +{ + nnom_activation_fixed_q_t * a = (nnom_activation_fixed_q_t*)act; + // 16 bit + if(act->tensor->bitwidth == 16) + { + uint8_t int_bit = 15 - a->dec_bit; + #ifdef NNOM_USING_CMSIS_NN + arm_nn_activations_direct_q15(act->tensor->p_data, tensor_size(act->tensor), int_bit, ARM_SIGMOID); + #else + local_sigmoid_q15(act->tensor->p_data, tensor_size(act->tensor), int_bit); + #endif + } + else // 8bit + { + uint8_t int_bit = 7 - a->dec_bit; + // arm version cannot handle int_bit > 3 + #ifdef NNOM_USING_CMSIS_NN + if(act->tensor->q_dec[0] <= 3) + arm_nn_activations_direct_q7(act->tensor->p_data, tensor_size(act->tensor), int_bit, ARM_TANH); + else + #endif + local_sigmoid_q7(act->tensor->p_data, tensor_size(act->tensor), int_bit); + } + + return NN_SUCCESS; +} + +static nnom_status_t hard_tanh_run( nnom_activation_t* act) +{ + nnom_activation_fixed_q_t * a = (nnom_activation_fixed_q_t*)act; + if(act->tensor->bitwidth == 16) + local_hard_tanh_q15(act->tensor->p_data, tensor_size(act->tensor), a->dec_bit + 8); // a->dec is based on 8 bit. + else + local_hard_tanh_q7(act->tensor->p_data, tensor_size(act->tensor), a->dec_bit); + return NN_SUCCESS; +} + +static nnom_status_t hard_sigmoid_run( nnom_activation_t* act) +{ + nnom_activation_fixed_q_t * a = (nnom_activation_fixed_q_t*)act; + if(act->tensor->bitwidth == 16) + local_hard_sigmoid_q15(act->tensor->p_data, tensor_size(act->tensor), a->dec_bit + 8); // a->dec is based on 8 bit. 
+ else + local_hard_sigmoid_q7(act->tensor->p_data, tensor_size(act->tensor), a->dec_bit); + return NN_SUCCESS; +} + +// +nnom_activation_t* act_relu(void) +{ + nnom_activation_t* act = nnom_mem(sizeof(nnom_activation_t)); + act->run = relu_run; + act->type = ACT_RELU; + return act; +} + +nnom_activation_t* act_leaky_relu(float alpha) +{ + nnom_activation_leaky_relu_t* act = nnom_mem(sizeof(nnom_activation_leaky_relu_t)); + act->super.run = leaky_relu_run; + act->super.type = ACT_LEAKY_RELU; + act->alpha = (q7_t)(alpha*128); + return (nnom_activation_t* )act; +} + +nnom_activation_t* act_adv_relu(float negative_slope, float max, float threshold) +{ + nnom_activation_adv_relu_t* act = nnom_mem(sizeof(nnom_activation_adv_relu_t)); + act->super.run = adv_relu_run; + act->super.type = ACT_ADV_RELU; + act->negative_slope = (q7_t)(negative_slope*128); + act->max = max; + act->threshold = threshold; + return (nnom_activation_t* )act; +} + +nnom_activation_t* act_tanh(int32_t dec_bit) +{ + nnom_activation_fixed_q_t* act = nnom_mem(sizeof(nnom_activation_fixed_q_t)); + act->super.run = tanh_run; + act->super.type = ACT_TANH; + act->dec_bit = dec_bit; + return (nnom_activation_t*)act; +} + +nnom_activation_t* act_sigmoid(int32_t dec_bit) +{ + nnom_activation_fixed_q_t* act = nnom_mem(sizeof(nnom_activation_fixed_q_t)); + + act->super.run = sigmoid_run; + act->super.type = ACT_SIGMOID; + act->dec_bit = dec_bit; + return (nnom_activation_t*)act; +} + +nnom_activation_t* act_hard_tanh(int32_t dec_bit) +{ + nnom_activation_fixed_q_t* act = nnom_mem(sizeof(nnom_activation_fixed_q_t)); + + act->super.run = hard_tanh_run; + act->super.type = ACT_HARD_TANH; + act->dec_bit = dec_bit; + return (nnom_activation_t*)act; +} + +nnom_activation_t* act_hard_sigmoid(int32_t dec_bit) +{ + nnom_activation_fixed_q_t* act = nnom_mem(sizeof(nnom_activation_fixed_q_t)); + + act->super.run = hard_sigmoid_run; + act->super.type = ACT_HARD_SIGMOID; + act->dec_bit = dec_bit; + return (nnom_activation_t*)act; +} + +// return the decimal bit if the activation will change the q format of the layer. 
+int32_t act_get_dec_bit(nnom_activation_type_t type, int32_t dec_bit) +{ + switch(type) + { + case ACT_RELU: + case ACT_LEAKY_RELU: + case ACT_ADV_RELU: + break; + case ACT_TANH: + case ACT_HARD_TANH: + case ACT_SIGMOID: + case ACT_HARD_SIGMOID: + dec_bit = 7; + default:break; + } + return dec_bit; +} + +// a direct api to run activate a tensor +nnom_status_t act_tensor_run(nnom_activation_t* act, nnom_tensor_t* tensor) +{ + act->tensor = tensor; + return act->run(act); +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_avgpool.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_avgpool.c new file mode 100644 index 000000000..8ee220f4c --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_avgpool.c @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_avgpool.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_layer_t *avgpool_s(const nnom_pool_config_t * config) +{ + nnom_avgpool_layer_t *cl; + + if(config->num_dim == 1) + { + cl = (nnom_avgpool_layer_t *)AvgPool(kernel(1, config->kernel_size[0]), + stride(1, config->stride_size[0]), + config->padding_type); + } + else + { + cl = (nnom_avgpool_layer_t *)AvgPool(kernel(config->kernel_size[0], config->kernel_size[1]), + stride(config->stride_size[0], config->stride_size[1]), + config->padding_type); + } + + if(cl) + { + cl->super.config = (void*) config; + cl->output_shift = config->output_shift; // no idea if we need it + } + return (nnom_layer_t *)cl; +} + +nnom_layer_t *AvgPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad_type) +{ + nnom_layer_t *layer = MaxPool(k, s, pad_type); + + if (layer != NULL) + { + layer->type = NNOM_AVGPOOL; + layer->run = avgpool_run; + layer->build = avgpool_build; + } + return (nnom_layer_t *)layer; +} + +nnom_status_t avgpool_build(nnom_layer_t *layer) +{ + uint32_t size; + // avg pooling share the same output shape, stride, padding setting. + maxpool_build(layer); + + #ifdef NNOM_USING_CMSIS_NN + // however, avg pooling require a computational buffer. + // bufferA size: 2*dim_im_out*ch_im_in + size = layer->out->tensor->dim[1] > layer->out->tensor->dim[0] ? + layer->out->tensor->dim[1] : layer->out->tensor->dim[0]; + layer->comp->size = 2 * size * layer->in->tensor->dim[2]; + #endif + + return NN_SUCCESS; +} + +nnom_status_t avgpool_run(nnom_layer_t *layer) +{ + nnom_avgpool_layer_t *cl = (nnom_avgpool_layer_t *)(layer); + uint16_t out_x, out_y; + // if global pooling + if(layer->out->tensor->num_dim == 1) + { + out_x = 1; out_y = 1; + } + else // normal pooling. 
+ { + out_x = layer->out->tensor->dim[1]; //W + out_y = layer->out->tensor->dim[0]; //h + } + + // 16 bit + if(layer->in->tensor->bitwidth == 16) + { +#ifdef NNOM_USING_CHW + local_avepool_q15_CHW(layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + cl->output_shift, + NULL, + layer->out->tensor->p_data); +#else + local_avepool_q15_HWC(layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + cl->output_shift, + NULL, + layer->out->tensor->p_data); +#endif + } + // 8bit + else{ +#ifdef NNOM_USING_CHW + local_avepool_q7_CHW(layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + cl->output_shift, + NULL, + layer->out->tensor->p_data); +#else //end of CHW + #ifdef NNOM_USING_CMSIS_NN + // 2D, square + if (layer->in->tensor->dim[1] == layer->in->tensor->dim[0] && + layer->out->tensor->dim[1] == layer->out->tensor->dim[0] && + cl->output_shift == 0) + { + arm_avepool_q7_HWC( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[2], + cl->kernel.w, cl->pad.w, cl->stride.w, + layer->out->tensor->dim[1], + layer->comp->mem->blk, + layer->out->tensor->p_data); + } + // none square 2D, or 1D + else + #endif + { + // CMSIS-NN does not support none-square pooling, we have to use local implementation + local_avepool_q7_HWC(layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + cl->output_shift, + NULL, + layer->out->tensor->p_data); + } +#endif + } + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_baselayer.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_baselayer.c new file mode 100644 index 000000000..0442fb2b0 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_baselayer.c @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_baselayer.h" + +// this layer copys the input to the output + +nnom_layer_t *baselayer_s(const nnom_layer_config_t * config) +{ + nnom_layer_t *layer = BaseLayer(); + if(layer) + layer->config = (void*) config; + return layer; +} + +nnom_layer_t *BaseLayer() +{ + nnom_io_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_io_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. 
+ in = (void *)((uint8_t*)layer + sizeof(nnom_io_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_BASE; + layer->super.run = default_run; + layer->super.build = default_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_NULL; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + + return (nnom_layer_t *)layer; +} + +// this is call while output shape is not defined. +// this will set the output shape same as input shape, and it set only the primary IO +// this cannot be used as first layer, of course... +nnom_status_t default_build(nnom_layer_t *layer) +{ + // get the last layer's output as input shape + layer->in->tensor = layer->in->hook.io->tensor; + // output tensor + // 1. allocate a new tensor for output + // 2. set the same dim, qfmt to the new tensor. + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR,layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // now this build has passed the input tensors (shapes, formats) to the new tensors. + return NN_SUCCESS; +} + +// simply copy input to output +nnom_status_t default_run(nnom_layer_t *layer) +{ + if(layer->out->type != NNOM_TENSOR_BUF_NULL) + { + nnom_memcpy(layer->out->tensor->p_data, layer->in->tensor->p_data, tensor_size_byte(layer->in->tensor)); + } + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_concat.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_concat.c new file mode 100644 index 000000000..0e1efa7a2 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_concat.c @@ -0,0 +1,223 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_concat.h" + +nnom_layer_t *concat_s(const nnom_concat_config_t *config) +{ + nnom_layer_t* layer = Concat(config->axis); + if(layer) + layer->config = (void*) config; + return layer; +} + +// concate method +// concate requires more than one input module. aux input will be allocated in model.merge() +nnom_layer_t *Concat(int8_t axis) +{ + nnom_concat_layer_t *layer; + nnom_layer_io_t *in, *out; + size_t mem_size; + + // apply a block memory for all the sub handles. + mem_size = sizeof(nnom_concat_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_concat_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_CONCAT; + layer->super.run = concat_run; + layer->super.build = concat_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. 
+ layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + + // axis + layer->axis = axis; + + return (nnom_layer_t *)layer; +} + + +nnom_status_t concat_build(nnom_layer_t *layer) +{ + nnom_concat_layer_t *cl = (nnom_concat_layer_t *)layer; + nnom_layer_io_t *in; + uint32_t in_num = 0; + int32_t num_dim; + + // for each input module, copy the shape from the output of last layer + in = layer->in; + while (in != NULL) + { + //get the last layer's output as input shape + in->tensor = in->hook.io->tensor; + in = in->aux; + in_num++; + } + + // allocate new tensor for output, keep the same dimension lenght + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // convert the axis. + if (cl->axis < 0) + cl->axis = (layer->in->tensor->num_dim + cl->axis); + else if (cl->axis >0) + cl->axis = cl->axis -1; // keras use axis start from 1. we are using 0, 1, 2 (check?) + + // find out the concated axis + num_dim = layer->in->tensor->num_dim; + for (uint32_t i = 0; i < num_dim; i ++) + { + // exclue the concat axies + if (i == cl->axis) + { + layer->out->tensor->dim[i] = 0; + + // add the same axis from all input up. + in = layer->in; + while (in != NULL) + { + layer->out->tensor->dim[i] += in->tensor->dim[i]; + in = in->aux; + } + continue; + } + + // check others, all other must be same shape + in = layer->in; + while (in != NULL && in->aux != NULL) + { + if (in->tensor->dim[i] != in->aux->tensor->dim[i]) + return NN_ARGUMENT_ERROR; + in = in->aux; + } + + // now set other axis + layer->out->tensor->dim[i] = layer->in->tensor->dim[i]; + } + + return NN_SUCCESS; +} + + +#ifdef NNOM_USING_CHW +// axis index converter between HWC and CHW +static inline int chw_i(int hwc, int num_dim) +{ + num_dim = num_dim -1; + hwc = hwc + 1; + if(hwc>num_dim) + hwc = 0; + return hwc; +} +static inline int hwc_i(int chw, int num_dim) +{ + num_dim = num_dim -1; + chw = chw - 1; + if(chw=2) input and 1 output. + nnom_concat_layer_t *cl = (nnom_concat_layer_t *)layer; + nnom_layer_io_t *in; + uint32_t dwidth = layer->in->tensor->bitwidth/8; // data width in byte + +#ifdef NNOM_USING_CHW + // Concatenate for HWC + uint8_t *pin; + uint8_t *pout = layer->out->tensor->p_data; + uint32_t block_size; + uint32_t n_block; + uint8_t num_dim = layer->in->tensor->num_dim; + + // calcualte number of block to concat. the other shapes before the concat axis + n_block = 1; + for(int i= 0; i< chw_i(cl->axis, num_dim); i++) + { + n_block *= layer->in->tensor->dim[hwc_i(i, num_dim)]; + } + + // concat all input layers + for(int i=0; iin; + while (in != NULL) + { + // the block size of concat data in this layer + block_size = dwidth; + for(int j= num_dim-1; j >= chw_i(cl->axis, num_dim); j--) + block_size *= in->tensor->dim[hwc_i(j, num_dim)]; + // concat + pin = (uint8_t *)in->tensor->p_data + i * block_size; + nnom_memcpy(pout, pin, block_size); + pout += block_size; + in = in->aux; + } + } + +#else // end of CHW concate + + // Concatenate for HWC + uint8_t* pin; + uint8_t* pout = layer->out->tensor->p_data; + uint32_t block_size; + uint32_t n_block; + uint8_t num_dim = layer->in->tensor->num_dim; + + // calcualte the number of block to concat. 
(the other shapes before the concat axis) + n_block = 1; + for (int i = 0; i < cl->axis; i++) + n_block *= layer->in->tensor->dim[i]; + + // concat all input layers + for (int i = 0; i < n_block; i++) + { + in = layer->in; + while (in != NULL) + { + // the block size of concat data in this layer + block_size = dwidth; + for (int j = cl->axis; j < num_dim; j++) + block_size *= in->tensor->dim[j]; + // concat + pin = (uint8_t*)in->tensor->p_data + i * block_size; + nnom_memcpy(pout, pin, block_size); + pout += block_size; + in = in->aux; + } + } +#endif + return NN_SUCCESS; +} + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_conv2d.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_conv2d.c new file mode 100644 index 000000000..ea553aedf --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_conv2d.c @@ -0,0 +1,434 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_conv2d.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +// a machine friendly api, with suffix _s for structured configuration. +nnom_layer_t *conv2d_s(const nnom_conv2d_config_t *config) +{ + nnom_conv2d_layer_t *layer; + nnom_buf_t *comp; + nnom_layer_io_t *in, *out; + size_t mem_size; + + // allocate a block memory for all the sub handles and shifts. + mem_size = sizeof(nnom_conv2d_layer_t) + sizeof(nnom_layer_io_t) * 2 + sizeof(nnom_buf_t); + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_conv2d_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_CONV_2D; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + #ifdef NNOM_USING_CMSIS_NN + layer->super.comp = comp; + #endif + // set run method & output shape + layer->super.run = conv2d_run; + layer->super.build = conv2d_build; + layer->super.free = conv2d_free; + + // save the config + layer->super.config = (void*) config; + + // get the private parameters + // test: for 1d input, expend h = 1 + if(config->weight->num_dim == 3) + { + layer->kernel = kernel(1, config->kernel_size[0]); + layer->stride = stride(1, config->stride_size[0]); + layer->dilation = dilation(1, config->dilation_size[0]); + } + else + { + layer->kernel = kernel(config->kernel_size[0], config->kernel_size[1]); + layer->stride = stride(config->stride_size[0], config->stride_size[1]); + layer->dilation = dilation(config->dilation_size[0], config->dilation_size[1]); + } + + layer->filter_mult = config->filter_size; // for convs, this means filter number + layer->padding_type = config->padding_type; + + // get bias and weight tensor, this should be created by script. 
+ layer->weight = config->weight; + layer->bias = config->bias; + + // get shifts + layer->output_rshift = (nnom_qformat_param_t *)config->output_shift; + layer->bias_lshift = (nnom_qformat_param_t *)config->bias_shift; + + // padding + if (layer->padding_type == PADDING_SAME) + { + layer->pad.h = layer->dilation.h * (layer->kernel.h - 1) / 2; + layer->pad.w = layer->dilation.w * (layer->kernel.w - 1) / 2; + layer->pad.c = (1 - 1) / 2; + } + + return (nnom_layer_t *)layer; +} + + +// Conv2D +// multiplier of (output/input channel), +// shape of kernal, shape of strides, weight struct, bias struct +nnom_layer_t *Conv2D(uint32_t filters, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad_type, + const nnom_weight_t *w, const nnom_bias_t *b) +{ + nnom_conv2d_layer_t *layer; + nnom_buf_t *comp; + nnom_layer_io_t *in, *out; + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_conv2d_layer_t) + sizeof(nnom_layer_io_t) * 2 + sizeof(nnom_buf_t); + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_conv2d_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_CONV_2D; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + #ifdef NNOM_USING_CMSIS_NN + layer->super.comp = comp; + #endif + // set run method & output shape + layer->super.run = conv2d_run; + layer->super.build = conv2d_build; + + // get the private parameters + layer->kernel = k; + layer->stride = s; + layer->dilation = d; + layer->filter_mult = filters; // for convs, this means filter number + layer->padding_type = pad_type; + + // create weight and bias tensor + layer->weight = new_tensor(NNOM_QTYPE_PER_TENSOR, 4, filters); + layer->bias = new_tensor(NNOM_QTYPE_PER_TENSOR, 1, filters); + + // configure weight tensor manually to support new tensor based backends. + // needs to be very careful + { + // config weight + nnom_shape_data_t dim[4] = {k.h, k.w, k.c, filters}; + *(layer->weight->q_offset) = 0; // we have no support of offset here + *(layer->weight->q_dec) = 0; // not using it + layer->weight->p_data = (void*)w->p_value; + layer->weight->bitwidth = 8; + layer->weight->qtype = NNOM_QTYPE_PER_TENSOR; + nnom_memcpy(layer->weight->dim, dim, layer->weight->num_dim * sizeof(nnom_shape_data_t)); + + // config bias + dim[0] = filters; + *(layer->bias->q_offset) = 0; // we have no support of offset here + *(layer->bias->q_dec) = 0; // not using it + layer->bias->p_data = (void*) b->p_value; + layer->bias->bitwidth = 8; + layer->weight->qtype = NNOM_QTYPE_PER_TENSOR; + nnom_memcpy(layer->bias->dim, dim, layer->bias->num_dim * sizeof(nnom_shape_data_t)); + + // output shift and bias shift + layer->output_rshift = (nnom_qformat_param_t *)&w->shift; + layer->bias_lshift = (nnom_qformat_param_t *)&b->shift; + } + + return (nnom_layer_t *)layer; +} + +// keras's implementation. 
+// source: https://github.com/keras-team/keras/blob/7a39b6c62d43c25472b2c2476bd2a8983ae4f682/keras/utils/conv_utils.py#L85 +uint32_t conv_output_length(uint32_t input_length, uint32_t filter_size, nnom_padding_t padding, uint32_t stride, uint32_t dilation) +{ + if (input_length == 0) + return 0; + uint32_t dilated_filter_size = (filter_size - 1) * dilation + 1; + uint32_t output_length; + if(padding == PADDING_SAME) + output_length = input_length; + else + output_length = input_length - dilated_filter_size + 1; + return (output_length + stride - 1) / stride; +} + +nnom_status_t conv2d_build(nnom_layer_t *layer) +{ + nnom_conv2d_layer_t *cl = (nnom_conv2d_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for the output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, cl->filter_mult); + // copy then change later. + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // calculate the output tensor q format, only support per tensor quantise now + layer->out->tensor->q_dec[0] = layer->in->tensor->q_dec[0] + cl->weight->q_dec[0] - cl->output_rshift[0]; // need some modification for 16bit. + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // now we set up the tensor shape, always HWC format + layer->out->tensor->dim[0] = conv_output_length(layer->in->tensor->dim[0], cl->kernel.h, cl->padding_type, cl->stride.h, cl->dilation.h); + layer->out->tensor->dim[1] = conv_output_length(layer->in->tensor->dim[1], cl->kernel.w, cl->padding_type, cl->stride.w, cl->dilation.w); + layer->out->tensor->dim[2] = cl->filter_mult; // channel stays the same + + // fill padding + if (cl->padding_type == PADDING_SAME) + { + cl->pad.w = cl->dilation.w * (cl->kernel.w - 1) / 2; + cl->pad.h = cl->dilation.h * (cl->kernel.h - 1) / 2; + cl->pad.c = 0; + } + + #ifdef NNOM_USING_CMSIS_NN + // bufferA size: (1D shape) + // 2*ch_im_in*dim_kernel*dim_kernel + layer->comp->size = 2 * 2 * layer->in->tensor->dim[2] * cl->kernel.w * cl->kernel.h; + #endif + // computational cost: K x K x Cin x Hour x Wout x Cout + layer->stat.macc = cl->kernel.w * cl->kernel.h * layer->in->tensor->dim[2] * tensor_size(layer->out->tensor); + return NN_SUCCESS; +} + +nnom_status_t conv2d_free(nnom_layer_t *layer) +{ + // free weight and bias tensor when we are not initialised from structured configuration. 
+ if(!layer->config) + { + nnom_conv2d_layer_t* cl = (nnom_conv2d_layer_t*)layer; + delete_tensor(cl->weight); + delete_tensor(cl->bias); + } + return NN_SUCCESS; +} + + +nnom_status_t conv2d_run(nnom_layer_t *layer) +{ + nnom_conv2d_layer_t *cl = (nnom_conv2d_layer_t *)layer; + +#ifdef NNOM_USING_CHW + // CHW format + if(layer->in->tensor->bitwidth == 16) + local_convolve_CHW_q15_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, cl->dilation.w, cl->dilation.h, + cl->bias->p_data, cl->bias_lshift, cl->output_rshift, cl->weight->qtype, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + else + local_convolve_CHW_q7_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, cl->dilation.w, cl->dilation.h, + cl->bias->p_data, cl->bias_lshift, cl->output_rshift, cl->weight->qtype, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + return NN_SUCCESS; +#else + // HWC format + #ifdef NNOM_USING_CMSIS_NN + // current cmsis nn does not support dilation + if(cl->dilation.w == 1 && cl->dilation.h == 1 && cl->weight->qtype == NNOM_QTYPE_PER_TENSOR) + { + // 8 bit cmsis nn + if(layer->in->tensor->bitwidth == 8) + { + //RGB + // ch_im_in = 3, w = h + if (layer->in->tensor->dim[2] == 3 && layer->in->tensor->dim[0] == layer->in->tensor->dim[1]) + // squared + if((cl->kernel.w == cl->kernel.h) && (cl->pad.w == cl->pad.h) && (cl->stride.w == cl->stride.h)) + return (nnom_status_t)arm_convolve_HWC_q7_RGB( + layer->in->tensor->p_data, layer->in->tensor->dim[1], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], + cl->kernel.w, cl->pad.w, cl->stride.w, + cl->bias->p_data, cl->bias_lshift[0], + cl->output_rshift[0], layer->out->tensor->p_data, layer->out->tensor->dim[1], + (q15_t *)(layer->comp->mem->blk), NULL); + + // check if can use optimized function + // ch_im_in is multiple of 4 + // ch_im_out is multiple of 2 + if ((layer->in->tensor->dim[2] % 4 == 0) && (layer->out->tensor->dim[2] % 2 == 0)) + { + // squared + if((layer->in->tensor->dim[0] == layer->in->tensor->dim[1]) + && (layer->out->tensor->dim[0] == layer->out->tensor->dim[1]) + && (cl->kernel.w == cl->kernel.h) && (cl->pad.w == cl->pad.h) && (cl->stride.w == cl->stride.h)) + { + // 1x1 fast + if (cl->kernel.w == 1 && cl->kernel.h == 1 && cl->stride.w == 1 && cl->stride.h == 1 && cl->pad.w == 0 && cl->pad.h == 0) + return (nnom_status_t)arm_convolve_1x1_HWC_q7_fast_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, + cl->bias->p_data, cl->bias_lshift[0], + cl->output_rshift[0], layer->out->tensor->p_data, layer->out->tensor->dim[1], layer->out->tensor->dim[0], + (q15_t *)(layer->comp->mem->blk), NULL); + // opt square shape + else + return (nnom_status_t)arm_convolve_HWC_q7_fast( + layer->in->tensor->p_data, layer->in->tensor->dim[1], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], cl->kernel.w, 
cl->pad.w, cl->stride.w, + cl->bias->p_data, cl->bias_lshift[0], + cl->output_rshift[0], layer->out->tensor->p_data, + layer->out->tensor->dim[1], (q15_t *)(layer->comp->mem->blk), NULL); + } + // opt none square shape + else + return (nnom_status_t)arm_convolve_HWC_q7_fast_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, + cl->bias->p_data, cl->bias_lshift[0], cl->output_rshift[0], + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], (q15_t *)(layer->comp->mem->blk), NULL); + } + // none optimized + else + { + // none opt square shape + if ((layer->in->tensor->dim[0] == layer->in->tensor->dim[1] && + layer->out->tensor->dim[0] == layer->out->tensor->dim[1]) && + (cl->kernel.w == cl->kernel.h) && (cl->pad.w == cl->pad.h) && (cl->stride.w == cl->stride.h)) + return (nnom_status_t)arm_convolve_HWC_q7_basic( + layer->in->tensor->p_data, layer->in->tensor->dim[1], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], cl->kernel.w, cl->pad.w, cl->stride.w, + cl->bias->p_data, cl->bias_lshift[0], + cl->output_rshift[0], layer->out->tensor->p_data, + layer->out->tensor->dim[1], (q15_t *)(layer->comp->mem->blk), NULL); + // none opt none square shape + else + return (nnom_status_t)arm_convolve_HWC_q7_basic_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, + cl->bias->p_data, cl->bias_lshift[0], cl->output_rshift[0], + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], (q15_t *)(layer->comp->mem->blk), NULL); + } //end of cmsis-nn none-opt + } //end of 8 bit cmsis-nn + else if (layer->in->tensor->bitwidth == 16) + { + // fast opt + if ((layer->in->tensor->dim[2] % 2 == 0) && (layer->out->tensor->dim[2] % 2 == 0)) + { + if((layer->in->tensor->dim[0] == layer->in->tensor->dim[1]) + && (layer->out->tensor->dim[0] == layer->out->tensor->dim[1]) + && (cl->kernel.w == cl->kernel.h) && (cl->pad.w == cl->pad.h) && (cl->stride.w == cl->stride.h)) + return (nnom_status_t)arm_convolve_HWC_q15_fast( + layer->in->tensor->p_data, layer->in->tensor->dim[1], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], cl->kernel.w, cl->pad.w, cl->stride.w, + cl->bias->p_data, cl->bias_lshift[0], + cl->output_rshift[0], layer->out->tensor->p_data, + layer->out->tensor->dim[1], (q15_t *)(layer->comp->mem->blk), NULL); + else + return (nnom_status_t)arm_convolve_HWC_q15_fast_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, + cl->bias->p_data, cl->bias_lshift[0], cl->output_rshift[0], + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], (q15_t *)(layer->comp->mem->blk), NULL); + } + // none opt basic + else + { + local_convolve_HWC_q7_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, 
cl->stride.h, cl->dilation.w, cl->dilation.h, + cl->bias->p_data, cl->bias_lshift, cl->output_rshift, cl->weight->qtype, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + return NN_SUCCESS; + } + + } // end of 16 bit cmsis-nn + } // end of dilation == 1 + else + #endif // NNOM_USING_CMSIS_NN + { + + if(layer->in->tensor->bitwidth == 16) + local_convolve_HWC_q15_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, cl->dilation.w, cl->dilation.h, + cl->bias->p_data, cl->bias_lshift, cl->output_rshift, cl->weight->qtype, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + else + local_convolve_HWC_q7_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, cl->dilation.w, cl->dilation.h, + cl->bias->p_data, cl->bias_lshift, cl->output_rshift, cl->weight->qtype, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + return NN_SUCCESS; + } +#endif // end of CHW/HWC + return NN_SUCCESS; +} + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_conv2d_trans.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_conv2d_trans.c new file mode 100644 index 000000000..5a99380a2 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_conv2d_trans.c @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-05-31 Jianjia Ma The first version + */ + + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_conv2d_trans.h" + +nnom_layer_t *conv2d_trans_s(const nnom_conv2d_config_t *config) +{ + nnom_layer_t *layer; + layer = conv2d_s(config); + if (layer) + { + layer->type = NNOM_CONV2D_TRANS; + layer->run = conv2d_trans_run; + layer->build = conv2d_trans_build; + } + return layer; +} + +nnom_layer_t *Conv2DTrans(uint32_t multiplier, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad_type, + const nnom_weight_t *w, const nnom_bias_t *b) +{ + nnom_layer_t *layer = Conv2D(multiplier, k, s, d, pad_type, w, b); + if (layer != NULL) + { + layer->type = NNOM_CONV2D_TRANS; + layer->run = conv2d_trans_run; + layer->build = conv2d_trans_build; + } + return layer; +} + +// utils, keras method +// https://github.com/keras-team/keras/blob/7a39b6c62d43c25472b2c2476bd2a8983ae4f682/keras/utils/conv_utils.py#L114 +// https://github.com/tensorflow/tensorflow/blob/2b96f3662bd776e277f86997659e61046b56c315/tensorflow/python/layers/utils.py#L156 +uint32_t conv_trans_output_length(uint32_t input_length, uint32_t kernel_size, nnom_padding_t padding, uint32_t stride_size, uint32_t dilation) +{ + input_length *= stride_size; + if (padding == PADDING_VALID) + input_length += MAX(kernel_size - stride_size, 0); + return input_length; +} + +nnom_status_t conv2d_trans_build(nnom_layer_t *layer) +{ + nnom_conv2d_trans_layer_t *cl = (nnom_conv2d_trans_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = 
layer->in->hook.io->tensor; + + // create new tensor for the output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, cl->filter_mult); + // copy then change later. + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // calculate the output tensor q format, only support per tensor quantise now + layer->out->tensor->q_dec[0] = layer->in->tensor->q_dec[0] + cl->weight->q_dec[0] - cl->output_rshift[0]; + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // now we set up the tensor shape, always HWC format + layer->out->tensor->dim[0] = conv_trans_output_length(layer->in->tensor->dim[0], cl->kernel.h, cl->padding_type, cl->stride.h, cl->dilation.h); + layer->out->tensor->dim[1] = conv_trans_output_length(layer->in->tensor->dim[1], cl->kernel.w, cl->padding_type, cl->stride.w, cl->dilation.w); + layer->out->tensor->dim[2] = cl->filter_mult; // channel stays the same + + // fill the correct padding + if(cl->padding_type == PADDING_SAME) + { + cl->pad.h = (cl->kernel.h - cl->stride.h) / 2; // the padding to the output. + cl->pad.w = (cl->kernel.w - cl->stride.w) / 2; +// cl->pad.h = (cl->kernel.h - 1)/2; // the padding to the output. +// cl->pad.w = (cl->kernel.w - 1)/2; + cl->pad.c = 0; + } + else + { + cl->pad.h = 0; + cl->pad.w = 0; + cl->pad.c = 0; + } + + // bufferA size: (1D shape) + // 2*ch_im_in*dim_kernel*dim_kernel + //layer->comp->size = 2 * 2 * layer->in->tensor->dim[2] * cl->kernel.w * cl->kernel.h; + // computational cost: K x K x Cin x Hour x Wout x Cout + layer->stat.macc = cl->kernel.w * cl->kernel.h * layer->in->tensor->dim[2] * tensor_size(layer->out->tensor); + return NN_SUCCESS; +} + + +nnom_status_t conv2d_trans_run(nnom_layer_t *layer) +{ + nnom_conv2d_trans_layer_t *cl = (nnom_conv2d_trans_layer_t *)layer; + +#ifdef NNOM_USING_CHW + // no support for CHW yet + return NN_ARGUMENT_ERROR; +#else + + //return conv2d_run(layer); + + local_conv_trans_HWC_q7_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, cl->pad.w, cl->pad.h, cl->stride.w, cl->stride.h, cl->dilation.w, cl->dilation.h, + cl->bias->p_data, cl->bias_lshift[0], cl->output_rshift[0], + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + return NN_SUCCESS; +#endif +} + + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_cropping.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_cropping.c new file mode 100644 index 000000000..01abe9265 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_cropping.c @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_cropping.h" + +nnom_layer_t * cropping_s(const nnom_cropping_config_t *config) +{ + nnom_layer_t *layer = Cropping(config->pad); + if(layer) + layer->config = (void*) config; + return layer; +} + +// Cropping layer +nnom_layer_t *Cropping(nnom_border_t pad) +{ + nnom_layer_t *layer; + // most setting are the same as zero padding + layer = ZeroPadding(pad); + + // now 
change to cropping + layer->type = NNOM_CROPPING; + layer->run = cropping_run; + layer->build = cropping_build; + + return layer; +} + +nnom_status_t cropping_build(nnom_layer_t* layer) +{ + nnom_cropping_layer_t *cl = (nnom_cropping_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + // copy then change later. + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // output shape + if(layer->in->tensor->dim[1] <= (cl->pad.left + cl->pad.right) || + layer->in->tensor->dim[0] <= (cl->pad.top + cl->pad.bottom)) + return NN_ARGUMENT_ERROR; + + layer->out->tensor->dim[0] = layer->in->tensor->dim[0] - (cl->pad.top + cl->pad.bottom); + layer->out->tensor->dim[1] = layer->in->tensor->dim[1] - (cl->pad.left + cl->pad.right); + layer->out->tensor->dim[2] = layer->in->tensor->dim[2]; + return NN_SUCCESS; +} + + +nnom_status_t cropping_run(nnom_layer_t * layer) +{ + nnom_cropping_layer_t *cl = (nnom_cropping_layer_t*)layer; + +#ifdef NNOM_USING_CHW + local_cropping_CHW_q7( +#else + local_cropping_HWC_q7( +#endif + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->pad.top, + cl->pad.bottom, + cl->pad.left, + cl->pad.right, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0]); + + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_dense.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_dense.c new file mode 100644 index 000000000..17c566c76 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_dense.c @@ -0,0 +1,207 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_dense.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_layer_t *dense_s(const nnom_dense_config_t *config) +{ + nnom_dense_layer_t *layer; + nnom_buf_t *comp; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_dense_layer_t) + sizeof(nnom_layer_io_t) * 2 + sizeof(nnom_buf_t); + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_dense_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_DENSE; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. 
+ layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + layer->super.comp = comp; + // set run and outshape methods + layer->super.run = dense_run; + layer->super.build = dense_build; + layer->super.free = dense_free; + + // set parameters + layer->output_unit = tensor_get_num_channel(config->weight); + layer->bias = config->bias; + layer->weight = config->weight; + // set shifts + layer->output_rshift = (nnom_qformat_param_t *)config->output_shift; + layer->bias_lshift = (nnom_qformat_param_t *)config->bias_shift; + // set config + layer->super.config = (void*) config; + + return (nnom_layer_t *)layer; +} + +nnom_layer_t *Dense(size_t output_unit, const nnom_weight_t *w, const nnom_bias_t *b) +{ + nnom_dense_layer_t *layer; + nnom_buf_t *comp; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_dense_layer_t) + sizeof(nnom_layer_io_t) * 2 + sizeof(nnom_buf_t); + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_dense_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_DENSE; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + layer->super.comp = comp; + // set run and outshape methods + layer->super.run = dense_run; + layer->super.build = dense_build; + + // set parameters + layer->output_unit = output_unit; // this is no longer needed. the information is contained in the weight tensor. + + layer->weight = new_tensor(NNOM_QTYPE_PER_TENSOR, 2, output_unit); + layer->bias = new_tensor(NNOM_QTYPE_PER_TENSOR, 1, output_unit); + + // configure weight tensor manually to support new tensor-based backends. + // needs to be very careful + { + // config weight + nnom_shape_data_t dim[2] = {0, output_unit}; // the first dim doesnt matter here. will be file in later. 
+ *(layer->weight->q_offset) = 0; // we have no support of offset here + *(layer->weight->q_dec) = 0; // this is not even correct + layer->weight->p_data = (void*)w->p_value; + layer->weight->bitwidth = 8; + layer->weight->qtype = NNOM_QTYPE_PER_TENSOR; + nnom_memcpy(layer->weight->dim, dim, layer->weight->num_dim * sizeof(nnom_shape_data_t)); + + // config bias + dim[0] = output_unit; + *(layer->bias->q_offset) = 0; // we have no support of offset here + *(layer->bias->q_dec) = 0; // this is not even correct + layer->bias->p_data = (void*)b->p_value; + layer->bias->bitwidth = 8; + layer->weight->qtype = NNOM_QTYPE_PER_TENSOR; + nnom_memcpy(layer->bias->dim, dim, layer->bias->num_dim * sizeof(nnom_shape_data_t)); + } + + // set output shifts + layer->output_rshift = (nnom_qformat_param_t *)&w->shift; + layer->bias_lshift = (nnom_qformat_param_t *)&b->shift; + + return (nnom_layer_t *)layer; +} + +nnom_status_t dense_build(nnom_layer_t *layer) +{ + nnom_dense_layer_t *cl = (nnom_dense_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, 1, tensor_get_num_channel(layer->in->tensor)); + // setup new tensor + nnom_shape_data_t dim[1] = {cl->output_unit}; + tensor_set_attr(layer->out->tensor, cl->weight->q_dec, cl->weight->q_offset, dim, 1, 8); // test, this is not correct + + // calculate the output tensor q format, only support per tensor quantise now + layer->out->tensor->q_dec[0] = layer->in->tensor->q_dec[0] + cl->weight->q_dec[0] - cl->output_rshift[0]; + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // vec_buffer size: dim_vec (*2, q7->q15) ? I am not sure this is right + layer->comp->size = tensor_size(layer->in->tensor)*2; + + // computational cost: In * out + layer->stat.macc = tensor_size(layer->in->tensor) * tensor_size(layer->out->tensor); + return NN_SUCCESS; +} + +nnom_status_t dense_free(nnom_layer_t *layer) +{ + // free weight and bias tensor when we are not initialised from structured configuration. + if(!layer->config) + { + nnom_dense_layer_t* cl = (nnom_dense_layer_t*)layer; + delete_tensor(cl->weight); + delete_tensor(cl->bias); + } + + return NN_SUCCESS; +} + +nnom_status_t dense_run(nnom_layer_t *layer) +{ + nnom_status_t result = NN_SUCCESS; + nnom_dense_layer_t *cl = (nnom_dense_layer_t *)(layer); + nnom_qformat_param_t bias_shift = cl->bias_lshift[0]; // this is not correct but a temporary fix solution for backward compatibility. 
+ nnom_qformat_param_t output_shift = cl->output_rshift[0]; + + +#if !(DENSE_WEIGHT_OPT) + #ifdef NNOM_USING_CMSIS_NN + result = (nnom_status_t)arm_fully_connected_q7( + #else + local_fully_connected_q7( + #endif +#else + #ifdef NNOM_USING_CMSIS_NN + result = (nnom_status_t)arm_fully_connected_q7_opt( + #else + local_fully_connected_q7_opt( + #endif +#endif + layer->in->tensor->p_data, + cl->weight->p_data, + tensor_size(layer->in->tensor), layer->out->tensor->dim[0], + bias_shift, output_shift, + cl->bias->p_data, + layer->out->tensor->p_data, (q15_t *)(layer->comp->mem->blk)); + return result; +} + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_dw_conv2d.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_dw_conv2d.c new file mode 100644 index 000000000..72ac7754e --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_dw_conv2d.c @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_dw_conv2d.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_layer_t *dw_conv2d_s(const nnom_conv2d_config_t *config) +{ + nnom_layer_t *layer; + layer = conv2d_s(config); + if (layer) + { + layer->type = NNOM_DW_CONV_2D; + layer->run = dw_conv2d_run; + layer->build = dw_conv2d_build; + } + return layer; +} + +nnom_layer_t *DW_Conv2D(uint32_t multiplier, nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_3d_shape_t d, nnom_padding_t pad_type, + const nnom_weight_t *w, const nnom_bias_t *b) +{ + nnom_layer_t *layer = Conv2D(multiplier, k, s, d, pad_type, w, b); // passing multiplier in . + if (layer != NULL) + { + layer->type = NNOM_DW_CONV_2D; + layer->run = dw_conv2d_run; + layer->build = dw_conv2d_build; + } + return layer; +} + +nnom_status_t dw_conv2d_build(nnom_layer_t *layer) +{ + nnom_conv2d_layer_t *cl = (nnom_conv2d_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor) * cl->filter_mult); + // copy then change later. 
+ tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // calculate the output tensor q format, only support per tensor quantise now + layer->out->tensor->q_dec[0] = layer->in->tensor->q_dec[0] + cl->weight->q_dec[0] - cl->output_rshift[0]; + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // now we set up the tensor shape, always HWC format + layer->out->tensor->dim[0] = conv_output_length(layer->in->tensor->dim[0], cl->kernel.h, cl->padding_type, cl->stride.h, cl->dilation.h); + layer->out->tensor->dim[1] = conv_output_length(layer->in->tensor->dim[1], cl->kernel.w, cl->padding_type, cl->stride.w, cl->dilation.w); + layer->out->tensor->dim[2] = layer->in->tensor->dim[2] * cl->filter_mult; // channel stays the same + + // fill padding + if (cl->padding_type == PADDING_SAME) + { + cl->pad.w = cl->dilation.w * (cl->kernel.w - 1) / 2; + cl->pad.h = cl->dilation.h * (cl->kernel.h - 1) / 2; + cl->pad.c = 0; + } + + // bufferA size: + #ifdef NNOM_USING_CMSIS_NN + layer->comp->size = 2 * 2 * (layer->in->tensor->dim[2] / cl->filter_mult) * cl->kernel.w * cl->kernel.h; + #endif + + // computational cost: K x K x Cin x Hout x Wout x Multiplier + // or : K x K x Cout x Hout x Wout + layer->stat.macc = cl->kernel.w * cl->kernel.h * tensor_size(layer->out->tensor); + return NN_SUCCESS; +} + +nnom_status_t dw_conv2d_run(nnom_layer_t *layer) +{ + nnom_status_t result = NN_SUCCESS; + nnom_conv2d_layer_t *cl = (nnom_conv2d_layer_t *)layer; + +#ifndef NNOM_USING_CHW + #ifdef NNOM_USING_CMSIS_NN + // Current CMSIS-NN does not support dilation + if(cl->dilation.w ==1 && cl->dilation.h == 1 && cl->weight->qtype == NNOM_QTYPE_PER_TENSOR && cl->filter_mult == 1) + { + // CMSIS-NN only support 1 mulplipier in depthwise conv + if (layer->in->tensor->dim[2] % 2 != 0 || layer->out->tensor->dim[2] % 2) + return NN_ARGUMENT_ERROR; + result = (nnom_status_t)arm_depthwise_separable_conv_HWC_q7_nonsquare( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + cl->bias->p_data, + cl->bias_lshift[0], cl->output_rshift[0], + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], (q15_t *)(layer->comp->mem->blk), NULL); + } + else + #endif + local_depthwise_separable_conv_HWC_q7_nonsquare( +#else + local_depthwise_separable_conv_CHW_q7_nonsquare( +#endif + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->weight->p_data, + layer->out->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + cl->dilation.w, cl->dilation.h, + cl->bias->p_data, + cl->bias_lshift, cl->output_rshift, cl->weight->qtype, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], NULL, NULL); + return result; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_flatten.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_flatten.c new file mode 100644 index 000000000..c976bca9a --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_flatten.c @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 
2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_flatten.h" + +nnom_layer_t *flatten_s(const nnom_flatten_config_t *config) +{ + nnom_layer_t *layer = Flatten(); + if(layer) + layer->config = (void*) config; + return layer; +} + +nnom_layer_t *Flatten(void) +{ + nnom_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->type = NNOM_FLATTEN; + layer->run = flatten_run; + layer->build = flatten_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + #ifdef NNOM_USING_CHW + out->type = NNOM_TENSOR_BUF_TEMP; // test for CHW format + #else + out->type = NNOM_TENSOR_BUF_NULL; + #endif + // put in & out on the layer. + layer->in = io_init(layer, in); + layer->out = io_init(layer, out); + + return layer; +} + +nnom_status_t flatten_build(nnom_layer_t *layer) +{ + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + // setup new tensor + nnom_shape_data_t dim[1] = {tensor_size(layer->in->tensor)}; + tensor_set_attr(layer->out->tensor, layer->in->tensor->q_dec, layer->in->tensor->q_offset, dim, 1, 8); + + return NN_SUCCESS; +} + +nnom_status_t flatten_run(nnom_layer_t *layer) +{ + #ifdef NNOM_USING_CHW + // CHW format must reorder to HWC for dense layer and all other 1D layer (?) 
+ tensor_chw2hwc_q7(layer->out->tensor, layer->in->tensor); + #endif + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_global_pool.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_global_pool.c new file mode 100644 index 000000000..8e0d1ee64 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_global_pool.c @@ -0,0 +1,145 @@ + +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_global_pool.h" + +nnom_layer_t * global_maxpool_s(const nnom_global_pool_config_t *config) +{ + nnom_maxpool_layer_t * cl = (nnom_maxpool_layer_t *)GlobalMaxPool(); + if(cl) + { + cl->super.config = (void*) config; + cl->output_shift = config->output_shift; + } + return (nnom_layer_t *)cl; +} +nnom_layer_t * global_avgpool_s(const nnom_global_pool_config_t *config) +{ + nnom_maxpool_layer_t * cl = (nnom_maxpool_layer_t *)GlobalAvgPool(); + if(cl) + { + cl->super.config = (void*) config; + cl->output_shift = config->output_shift; + } + return (nnom_layer_t *)cl; +} + +nnom_layer_t * global_sumpool_s(const nnom_global_pool_config_t *config) +{ + nnom_maxpool_layer_t * cl = (nnom_maxpool_layer_t *)GlobalSumPool(); + if(cl) + { + cl->super.config = (void*) config; + cl->output_shift = config->output_shift; + } + return (nnom_layer_t *)cl; +} + + +nnom_layer_t *GlobalMaxPool(void) +{ + // create the normal pooling layer, the parameters are left empty to fill in later. + // parameters will be filled in in global_pooling_build() + nnom_layer_t *layer = MaxPool(kernel(0, 0), stride(0, 0), PADDING_VALID); + + // change to global max pool + if (layer != NULL) + { + layer->type = NNOM_GLOBAL_MAXPOOL; + layer->build = global_pool_build; + } + + return (nnom_layer_t *)layer; +} + +nnom_layer_t *GlobalAvgPool(void) +{ + // create the normal pooling layer, the parameters are left empty to fill in later. + // parameters will be filled in global_pooling_build() remotely + nnom_layer_t *layer = MaxPool(kernel(0, 0), stride(0, 0), PADDING_VALID); + + // change some parameters to be recognised as avg pooling + if (layer != NULL) + { + layer->type = NNOM_GLOBAL_AVGPOOL; + layer->run = avgpool_run; // global and basic pooling share the same runner + layer->build = global_pool_build; + } + + return (nnom_layer_t *)layer; +} + +nnom_layer_t *GlobalSumPool(void) +{ + // create the normal pooling layer, the parameters are left empty to fill in later. 
+ // parameters will be filled in global_pooling_build() remotely + nnom_layer_t *layer = MaxPool(kernel(0, 0), stride(0, 0), PADDING_VALID); + + // change some parameters to be recognised as avg pooling + if (layer != NULL) + { + layer->type = NNOM_GLOBAL_SUMPOOL; + layer->run = sumpool_run; // global and basic pooling share the same runner + layer->build = global_pool_build; + } + + return (nnom_layer_t *)layer; +} + +nnom_status_t global_pool_build(nnom_layer_t *layer) +{ + nnom_maxpool_layer_t *cl = (nnom_maxpool_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, 1, tensor_get_num_channel(layer->in->tensor)); + + nnom_shape_data_t dim[1] = {tensor_get_num_channel(layer->in->tensor)}; // fill the first 2 dim later + tensor_set_attr_v(layer->out->tensor, layer->in->tensor->q_dec[0], 0, dim, sizeof(dim)/sizeof(nnom_shape_data_t), 8); + + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // different from other *_build(), the kernel..padding left by layer API needs to be set in here + // due to the *_run() methods of global pooling are using the normall pooling's. + // fill in the parameters left by layer APIs (GlobalAvgPool and MaxAvgPool) + cl->kernel = shape(layer->in->tensor->dim[0], layer->in->tensor->dim[1], 1); + cl->stride = shape(1, 1, 1); + cl->pad = shape(0, 0, 0); + cl->padding_type = PADDING_VALID; + + // additionally, avg pooling require computational buffer, which is 2*dim_im_out*ch_im_in + if (layer->type == NNOM_AVGPOOL || layer->type == NNOM_GLOBAL_AVGPOOL) + { + // bufferA size: 2*dim_im_out*ch_im_in + layer->comp->size = 2 * layer->out->tensor->dim[0] * layer->in->tensor->dim[2]; + } + + // additional for sumpool + if (layer->type == NNOM_SUMPOOL || layer->type == NNOM_GLOBAL_SUMPOOL) + layer->comp->size = 4 * tensor_size(layer->out->tensor); + + return NN_SUCCESS; +} + + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_gru_cell.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_gru_cell.c new file mode 100644 index 000000000..7e01e9e2a --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_gru_cell.c @@ -0,0 +1,338 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-08-24 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_gru_cell.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_rnn_cell_t *gru_cell_s(const nnom_gru_cell_config_t* config) +{ + nnom_gru_cell_t *cell; + cell = nnom_mem(sizeof(nnom_gru_cell_t)); + if (cell == NULL) + return NULL; + // set methods + cell->super.run = gru_cell_run; + cell->super.build = gru_cell_build; + cell->super.free = gru_cell_free; + cell->super.config = (void*) config; + cell->super.units = config->units; + cell->super.type = NNOM_GRU_CELL; + + // set parameters + cell->bias = config->bias; + cell->weights = config->weights; + cell->recurrent_weights = config->recurrent_weights; + + // q format for intermediate calculation + cell->q_dec_h = config->q_dec_h; + cell->q_dec_z = config->q_dec_z; + + return (nnom_rnn_cell_t *)cell; +} + 
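Editor's note (not part of the patch): the constructor above stores the intermediate Q-format parameters `q_dec_h` and `q_dec_z`, and the `gru_cell_build()` that follows turns them into output shifts using the usual fixed-point rule — a dot product of data with `in_dec` fractional bits and weights with `w_dec` fractional bits carries `in_dec + w_dec` fractional bits, so shifting right by `in_dec + w_dec - out_dec` brings it to the target format (the `oshift_hw` / `oshift_iw` fields). A minimal illustrative sketch of that arithmetic, with hypothetical helper names and assuming the shift is non-negative:

```c
#include <stdint.h>

/* Illustrative only: rescale a Q7 dot product to a target Q format.
 * in_dec / w_dec / out_dec are the fractional bits of input, weight and
 * desired output; 'shift' mirrors the cell's oshift_iw / oshift_hw fields. */
static int8_t q7_dot_rescaled(const int8_t *x, const int8_t *w, int n,
                              int in_dec, int w_dec, int out_dec)
{
    int32_t acc = 0;                      /* accumulator has in_dec + w_dec fractional bits */
    for (int i = 0; i < n; i++)
        acc += (int32_t)x[i] * w[i];

    int shift = in_dec + w_dec - out_dec; /* assumed >= 0 here */
    int32_t y = acc >> shift;

    if (y > 127)  y = 127;                /* saturate back into Q7 range */
    if (y < -128) y = -128;
    return (int8_t)y;
}
```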
+nnom_status_t gru_cell_free(nnom_rnn_cell_t* cell) +{ + return NN_SUCCESS; +} + +// the state buffer and computational buffer shape of the cell +nnom_status_t gru_cell_build(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_gru_cell_t *c = (nnom_gru_cell_t *)cell; + + // calculate output shift for the 2 calculation. + // hw = the product of hidden x weight, iw = the product of input x weight + // due to the addition of them, they must have same q format. + // that is -> c->q_dec_z; + + // for the dots in cell: output shift = input_dec + weight_dec - output_dec + c->oshift_hw = c->q_dec_h + c->recurrent_weights->q_dec[0] - c->q_dec_z; + c->oshift_iw = layer->in->tensor->q_dec[0] + c->weights->q_dec[0] - c->q_dec_z; + + // bias shift = bias_dec - out_dec + c->bias_shift = layer->in->tensor->q_dec[0] + c->weights->q_dec[0] - c->bias->q_dec[0]; + + // state size = one timestamp output size. + cell->state_size = cell->units * 2; // Q15 + + // comp buffer size: not required + cell->comp_buf_size = cell->units * (3*3) * 2 + cell->feature_size * 2; //q15 + input q7->q15 buffer. + + // finally, calculate the MAC for info for each timestamp + cell->macc = cell->feature_size * cell->units *3 // input: feature * state * 3 gates + + cell->units * cell->units *8 // recurrent, state * output_unit * (5 gate + 3 mult) + + cell->units * (3 + 3 + 5); // 3 gates, 3 mult, 5 addition + + return NN_SUCCESS; +} + + +// keras implementation as below. +/* + def step(cell_inputs, cell_states): + """Step function that will be used by Keras RNN backend.""" + h_tm1 = cell_states[0] + + # inputs projected by all gate matrices at once + matrix_x = K.dot(cell_inputs, kernel) + matrix_x = K.bias_add(matrix_x, input_bias) + + x_z, x_r, x_h = array_ops.split(matrix_x, 3, axis=1) + + # hidden state projected by all gate matrices at once + matrix_inner = K.dot(h_tm1, recurrent_kernel) + matrix_inner = K.bias_add(matrix_inner, recurrent_bias) + + recurrent_z, recurrent_r, recurrent_h = array_ops.split(matrix_inner, 3, + axis=1) + z = nn.sigmoid(x_z + recurrent_z) + r = nn.sigmoid(x_r + recurrent_r) + hh = nn.tanh(x_h + r * recurrent_h) + + # previous and candidate state mixed by update gate + h = z * h_tm1 + (1 - z) * hh + return h, [h] +*/ + +// +nnom_status_t gru_cell_run(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_gru_cell_t* c = (nnom_gru_cell_t*) cell; + int act_int_bit = 7 - c->q_dec_z; + // gate data + q15_t* x_z, *x_r, *x_h; + q15_t* recurrent_z, *recurrent_r, *recurrent_h; + q15_t* temp[3]; + + // bias + q7_t* bias = (q7_t*)c->bias->p_data; + q7_t* recurrent_bias = (q7_t*)c->bias->p_data + cell->units*3; + + // state buffer + q15_t* h_tm1 = (q15_t*)cell->in_state; + q15_t* h_t = (q15_t*)cell->out_state; + + // computing buffer + // low |-- buf0 --|-- buf1 --|-- buf2 --|-- input_q15 --| + q15_t *buf[3]; + buf[0] = (q15_t*)layer->comp->mem->blk; + buf[1] = (q15_t*)layer->comp->mem->blk + cell->units*3; + buf[2] = (q15_t*)layer->comp->mem->blk + cell->units*6; + q15_t *in_q15_buf = (q15_t*)layer->comp->mem->blk + cell->units*9; + + // input q7 cast to q15 + local_q7_to_q15(cell->in_data, in_q15_buf, cell->feature_size); + + // matrix_x = K.dot(cell_inputs, kernel) + bias --> buf0 + #ifdef NNOM_USING_CMSIS_NN + arm_fully_connected_mat_q7_vec_q15_opt + #else + local_fully_connected_mat_q7_vec_q15_opt + #endif + (in_q15_buf, c->weights->p_data, cell->feature_size, + cell->units*3, c->bias_shift + 8, c->oshift_iw, bias, buf[0], NULL); + + // matrix_intter = K.dot(h_tm1, 
recurrent_kernel) + bias -> buf1 + #ifdef NNOM_USING_CMSIS_NN + arm_fully_connected_mat_q7_vec_q15_opt + #else + local_fully_connected_mat_q7_vec_q15_opt + #endif + (h_tm1, c->recurrent_weights->p_data, cell->units, + cell->units*3, c->bias_shift + 8, c->oshift_hw, recurrent_bias, buf[1], NULL); + + // split to each gate + x_z = buf[0]; + x_r = buf[0] + cell->units; + x_h = buf[0] + cell->units*2; + recurrent_z = buf[1]; + recurrent_r = buf[1] + cell->units; + recurrent_h = buf[1] + cell->units*2; + // buffers + temp[0] = buf[2]; + temp[1] = buf[2] + cell->units; + temp[2] = buf[2] + cell->units*2; + + /* z = nn.sigmoid(x_z + recurrent_z) */ + // 1. z1 = x_z + recurrent_z ---> temp[0] + local_add_q15(x_z, recurrent_z, temp[0], 0, cell->units); + // 2. z = sigmoid(z1) + local_sigmoid_q15(temp[0], cell->units, act_int_bit); + + /* r = nn.sigmoid(x_r + recurrent_r) */ + // 1. r1 = x_r + recurrent_r ---> temp[1] + local_add_q15(x_r, recurrent_r, temp[1], 0, cell->units); + // 2. r = sigmoid(r1) + local_sigmoid_q15(temp[1], cell->units, act_int_bit); + + /* hh = nn.tanh(x_h + r * recurrent_h) */ + // 1. hh1 = r * recurrent_h ---> temp[2] + local_mult_q15(temp[1], recurrent_h, temp[2], 15, cell->units); + // 2. hh2 = x_h + hh1 ---> temp[1] + local_add_q15(x_h, temp[2], temp[1], 0, cell->units); + // 3. hh = tanh(h2) ---> temp[1] + local_tanh_q15(temp[1], cell->units, act_int_bit); + + /* h = z * h_tm1 + (1 - z) * hh */ + // 1. h1 = z*h_tm1 ---> temp[2] + local_mult_q15(temp[0], h_tm1, temp[2], 15, cell->units); + // 2. h2 = 1 - z ---> h_t state buff + local_1_minor_z_q15(temp[0], h_t, 15, cell->units); + // 3. h3 = h2 * hh ---> temp[0] + local_mult_q15(h_t, temp[1], temp[0], 15, cell->units); + // h = h1 + h3 + local_add_q15(temp[2], temp[0], h_t, 0, cell->units); + + // finally, copy and convert state to output + local_q15_to_q7(h_t, cell->out_data, 8, cell->units); + return NN_SUCCESS; +} + + +// Researve for debugging, printing the intermediate variables/data. 
+#if 0 +// delete after testing completed +static void print_variable_q15(q15_t *data,char*name, int dec_bit, int size) +{ + printf("\n\n"); + printf("%s", name); + for(int i = 0; i < size; i++) + { + if(i%8==0) + printf("\n"); + printf("%f\t", (float) data[i] / (1 << dec_bit)); + } + printf("\n"); +} + +// +nnom_status_t gru_cell_run(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_gru_cell_t* c = (nnom_gru_cell_t*) cell; + int act_int_bit = 7 - c->q_dec_z; + // gate data + q15_t* x_z, *x_r, *x_h; + q15_t* recurrent_z, *recurrent_r, *recurrent_h; + q15_t* temp[3]; + + // test + //nnom_memset(cell->in_data, 5 * (1<in->tensor->q_dec[0]), cell->feature_size); + + // bias + q7_t* bias = (q7_t*)c->bias->p_data; + q7_t* recurrent_bias = (q7_t*)c->bias->p_data + cell->units*3; + + // state buffer + q15_t* h_tm1 = (q15_t*)cell->in_state; + q15_t* h_t = (q15_t*)cell->out_state; + + // computing buffer + // low |-- buf0 --|-- buf1 --|-- buf2 --|-- input_q15 --| + q15_t *buf[3]; + buf[0] = (q15_t*)layer->comp->mem->blk; + buf[1] = (q15_t*)layer->comp->mem->blk + cell->units*3; + buf[2] = (q15_t*)layer->comp->mem->blk + cell->units*6; + q15_t *in_q15_buf = (q15_t*)layer->comp->mem->blk + cell->units*9; + + // input q7 cast to q15 + local_q7_to_q15(cell->in_data, in_q15_buf, cell->feature_size); + + // matrix_x = K.dot(cell_inputs, kernel) + bias --> buf0 + #ifdef NNOM_USING_CMSIS_NN + arm_fully_connected_mat_q7_vec_q15_opt + #else + local_fully_connected_mat_q7_vec_q15_opt + #endif + (in_q15_buf, c->weights->p_data, cell->feature_size, + cell->units*3, c->bias_shift + 8, c->oshift_iw, bias, buf[0], NULL); + + // matrix_intter = K.dot(h_tm1, recurrent_kernel) + bias -> buf1 + #ifdef NNOM_USING_CMSIS_NN + arm_fully_connected_mat_q7_vec_q15_opt + #else + local_fully_connected_mat_q7_vec_q15_opt + #endif + (h_tm1, c->recurrent_weights->p_data, cell->units, + cell->units*3, c->bias_shift + 8, c->oshift_hw, recurrent_bias, buf[1], NULL); + + print_variable_q15(in_q15_buf, "input", layer->in->tensor->q_dec[0]+8, cell->feature_size); + print_variable_q15(buf[0], "matrix_x", c->q_dec_z+8, cell->units*3); + print_variable_q15(buf[1], "matrix_recurrent", c->q_dec_z+8, cell->units*3); + + // split to each gate + x_z = buf[0]; + x_r = buf[0] + cell->units; + x_h = buf[0] + cell->units*2; + recurrent_z = buf[1]; + recurrent_r = buf[1] + cell->units; + recurrent_h = buf[1] + cell->units*2; + // buffers + temp[0] = buf[2]; + temp[1] = buf[2] + cell->units; + temp[2] = buf[2] + cell->units*2; + + // z = nn.sigmoid(x_z + recurrent_z) + // 1. z1 = x_z + recurrent_z ---> temp[0] + local_add_q15(x_z, recurrent_z, temp[0], 0, cell->units); + // 2. z = sigmoid(z1) + local_sigmoid_q15(temp[0], cell->units, act_int_bit); + print_variable_q15(temp[0], "z", 15, cell->units); + + // r = nn.sigmoid(x_r + recurrent_r) + // 1. r1 = x_r + recurrent_r ---> temp[1] + local_add_q15(x_r, recurrent_r, temp[1], 0, cell->units); + // 2. r = sigmoid(r1) + local_sigmoid_q15(temp[1], cell->units, act_int_bit); + print_variable_q15(temp[1], "r", 15, cell->units); + + // hh = nn.tanh(x_h + r * recurrent_h) + // 1. hh1 = r * recurrent_h ---> temp[2] + local_mult_q15(temp[1], recurrent_h, temp[2], 15, cell->units); + // 2. hh2 = x_h + h1 ---> temp[1] + local_add_q15(x_h, temp[2], temp[1], 0, cell->units); + // 3. hh = tanh(h2) ---> temp[1] + local_tanh_q15(temp[1], cell->units, act_int_bit); + print_variable_q15(temp[1], "hh", 15, cell->units); + + // h = z * h_tm1 + (1 - z) * hh + // 1. 
h1 = z*h_tm1 ---> temp[2] + local_mult_q15(temp[0], h_tm1, temp[2], 15, cell->units); + print_variable_q15( temp[2], "h1", 15, cell->units); + // 2. h2 = 1 - z ---> h_t state buff + local_1_minor_z_q15(temp[0], h_t, 15, cell->units); + print_variable_q15( h_t, "h2", 15, cell->units); + // 3. h3 = h2 * hh ---> temp[0] + local_mult_q15(h_t, temp[1], temp[0], 15, cell->units); + print_variable_q15( temp[0], "h3", 15, cell->units); + // h = h1 + h3 + local_add_q15(temp[2], temp[0], h_t, 0, cell->units); + print_variable_q15(h_t, "h", 15, cell->units); + + // finally, copy and convert state to output + local_q15_to_q7(h_t, cell->out_data, 8, cell->units); + return NN_SUCCESS; +} +#endif diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_input.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_input.c new file mode 100644 index 000000000..f1fc3b9c9 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_input.c @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_input.h" + +nnom_layer_t *input_s(const nnom_io_config_t* config) +{ + nnom_io_layer_t *layer; + nnom_layer_io_t *in, *out; + // apply a block memory for all the sub handles. + layer = nnom_mem(sizeof(nnom_io_layer_t) + sizeof(nnom_layer_io_t) * 2); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_io_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_INPUT; + layer->super.run = input_run; + layer->super.build = input_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_NULL; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + + /* + // some other layers (Conv, pooling) are not supporting 12 d input, we still expand the 1,2 dimension to 3 + // test -> native support 1,2,3 D input. + layer->super.in->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, config->tensor->num_dim, tensor_get_num_channel(config->tensor)); + tensor_cpy_attr(layer->super.in->tensor, config->tensor); + layer->buf = config->tensor->p_data; + layer->dec_bit = config->tensor->q_dec[0]; + */ + + // set parameters + if(config->tensor->num_dim == 1) // test for 1d input, expend h = 1 + layer->shape = shape(1, 1, config->tensor->dim[0]); + else if (config->tensor->num_dim == 2) // test for 1d input, expend h = 1 + layer->shape = shape(1, config->tensor->dim[0], config->tensor->dim[1]); + else + layer->shape = shape(config->tensor->dim[0], config->tensor->dim[1], config->tensor->dim[2]); + layer->buf = config->tensor->p_data; + layer->dec_bit = config->tensor->q_dec[0]; + + // experimental: fixed input dim to 3 + // input normally dont have a tensor, so we create one to store the initial data. 
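+    // e.g. a 1-D input of length 128 is stored as (1, 1, 128) and a 2-D input of
+    // shape (49, 10) as (1, 49, 10); a 3-D HWC input keeps its dimensions as-is.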
+ nnom_shape_data_t dim[3] = {layer->shape.h, layer->shape.w, layer->shape.c}; + layer->super.in->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, 3, tensor_get_num_channel(config->tensor)); + tensor_set_attr_v(layer->super.in->tensor, layer->dec_bit, 0, dim, sizeof(dim)/sizeof(nnom_shape_data_t), 8); + return (nnom_layer_t *)layer; +} + +nnom_layer_t *Input(nnom_3d_shape_t input_shape, void *p_buf) +{ + nnom_io_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + layer = nnom_mem(sizeof(nnom_io_layer_t) + sizeof(nnom_layer_io_t) * 2); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_io_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_INPUT; + layer->super.run = input_run; + layer->super.build = input_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_NULL; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + + // set parameters + layer->shape = input_shape; + layer->buf = p_buf; + layer->dec_bit = 7; + + // experimental: fixed input dim to 3 + // input normally dont have a tensor, so we create one to store the initial data. + nnom_shape_data_t dim[3] = { input_shape.h, input_shape.w, input_shape.c }; + layer->super.in->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, 3, input_shape.c); + tensor_set_attr_v(layer->super.in->tensor, layer->dec_bit, 0, dim, sizeof(dim)/sizeof(nnom_shape_data_t), 8); + return (nnom_layer_t *)layer; +} + +nnom_status_t input_build(nnom_layer_t* layer) +{ + // the input tensor of inputlayer has assigned previously + + // output tensor + // 1. allocate a new tensor for output + // 2. set the same dim, qfmt to the new tensor. + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // now this build has passed the input tensors (shapes, formats) to the new tensors. 
+ return NN_SUCCESS; +} + + +nnom_status_t input_run(nnom_layer_t *layer) +{ + nnom_io_layer_t *cl = (nnom_io_layer_t *)layer; +#ifdef NNOM_USING_CHW + if(layer->in->tensor->num_dim == 3) + { + nnom_3d_shape_t shape = {layer->in->tensor->dim[0], layer->in->tensor->dim[1], layer->in->tensor->dim[2]}; + hwc2chw_q7(shape, cl->buf, layer->in->tensor->p_data); + } + else if (layer->in->tensor->num_dim == 2) + { + nnom_3d_shape_t shape = {1, layer->in->tensor->dim[0], layer->in->tensor->dim[1]}; + hwc2chw_q7(shape, cl->buf, layer->in->tensor->p_data); + } + else +#endif + nnom_memcpy(layer->in->tensor->p_data, cl->buf, tensor_size(layer->in->tensor)); + + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_lambda.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_lambda.c new file mode 100644 index 000000000..31e9c7c5e --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_lambda.c @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_lambda.h" + +nnom_layer_t *lambda_s(const nnom_lambda_config_t * config) +{ + nnom_lambda_layer_t *cl = (nnom_lambda_layer_t *)Lambda( + config->run_func_name, + config->build_func_name, + config->free_func_name, + config->parameters); + if(cl) + cl->super.config = (void*) config; + return (nnom_layer_t *)cl; +} + +// TODO: extended to multiple IO layer +nnom_layer_t *Lambda(nnom_status_t (*run)(nnom_layer_t *), + nnom_status_t (*build)(nnom_layer_t *), + nnom_status_t (*free)(nnom_layer_t *), + void *parameters) +{ + nnom_lambda_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_io_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_lambda_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set buf type. + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + + // set io modules to the layer + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + // layer type + layer->super.type = NNOM_LAMBDA; + + // user parameters + layer->parameters = parameters; + + // free method + layer->super.free = free; + + // output shape method. pass NULL in will use the default outshape method, which set the output shape same as input shape. + if (build == NULL) + layer->super.build = default_build; + else + layer->super.build = build; + // run method. default_run() will simply copy data from input tensor to output tensor. 
+ if(run == NULL) + layer->super.run = default_run; + else + layer->super.run = run; + + return (nnom_layer_t *)layer; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_lstm_cell.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_lstm_cell.c new file mode 100644 index 000000000..ed4a120b4 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_lstm_cell.c @@ -0,0 +1,334 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-08-24 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_lstm_cell.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +// LSTM RNN +// unit = output shape +// type of activation +nnom_rnn_cell_t *lstm_cell_s(const nnom_lstm_cell_config_t* config) +{ + nnom_lstm_cell_t *cell; + cell = nnom_mem(sizeof(nnom_lstm_cell_t)); + if (cell == NULL) + return NULL; + // set methods + cell->super.run = lstm_cell_q7_q15_run; + cell->super.build = lstm_cell_q7_q15_build; + cell->super.free = lstm_cell_free; + cell->super.config = (void*) config; + cell->super.units = config->units; + cell->super.type = NNOM_LSTM_CELL; + + // set parameters + cell->bias = config->bias; + cell->weights = config->weights; + cell->recurrent_weights = config->recurrent_weights; + + // q format for intermediate calculation + cell->q_dec_c = config->q_dec_c; + cell->q_dec_h = config->q_dec_h; + cell->q_dec_z = config->q_dec_z; + + return (nnom_rnn_cell_t *)cell; +} + +nnom_status_t lstm_cell_free(nnom_rnn_cell_t* cell) +{ + return NN_SUCCESS; +} + +// keras implementation as below. +/* + def step(cell_inputs, cell_states): + """Step function that will be used by Keras RNN backend.""" + h_tm1 = cell_states[0] # previous memory state + c_tm1 = cell_states[1] # previous carry state + + z = K.dot(cell_inputs, kernel) -> q_iw + z += K.dot(h_tm1, recurrent_kernel) -> q_hw + z = K.bias_add(z, bias) + + z0, z1, z2, z3 = array_ops.split(z, 4, axis=1) + + i = nn.sigmoid(z0) + f = nn.sigmoid(z1) + c = f * c_tm1 + i * nn.tanh(z2) + o = nn.sigmoid(z3) + + h = o * nn.tanh(c) + return h, [h, c] +*/ + + + +// the state buffer and computational buffer shape of the cell +nnom_status_t lstm_cell_q7_q15_build(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_lstm_cell_t *c = (nnom_lstm_cell_t *)cell; + + // calculate output shift for the 2 calculation. + // hw = the product of hidden x weight, iw = the product of input x weight + // due to the addition of them, they must have same q format. + // that is -> c->q_dec_z; + + // for the dots in cell: output shift = input_dec + weight_dec - output_dec + c->oshift_hw = c->q_dec_h + c->recurrent_weights->q_dec[0] - c->q_dec_z; + c->oshift_iw = layer->in->tensor->q_dec[0] + c->weights->q_dec[0] - c->q_dec_z; + + // bias shift = bias_dec - out_dec + c->bias_shift = layer->in->tensor->q_dec[0] + c->weights->q_dec[0] - c->bias->q_dec[0]; + + // state size = one timestamp output size. + cell->state_size = cell->units * 2 * 2; // Q15 + + // // comp buffer size: not required + cell->comp_buf_size = cell->units * 12 * 2 + cell->feature_size * 2; //q15 + input q7->q15 buffer. 
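+    // lstm_cell_q7_q15_run() lays this buffer out as
+    //   low |-- buf0 (4*units) --|-- buf1 (4*units) --|-- buf2 (4*units) --|-- input q15 (feature_size) --| high
+    // all entries are q15, hence (units*12 + feature_size) * 2 bytes.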
+ + // finally, calculate the MAC for info (for each timestamp) + cell->macc = cell->feature_size * cell->units *4 // input: feature * state * 4 gates + + cell->units * cell->units *4 // recurrent, state + + cell->units *10; // output_unit * (5 gate + 3 mult + 2 addition) + + return NN_SUCCESS; +} + +// Q7 input output +// Q7 weights +// Q15 states and intermediate buffer +nnom_status_t lstm_cell_q7_q15_run(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_lstm_cell_t* c = (nnom_lstm_cell_t*) cell; + int act_int_bit = 7 - c->q_dec_z; + + // state buffer + // low |-- hidden --|-- carry --| high + q15_t* h_tm1 = (q15_t*)cell->in_state; + q15_t* c_tm1 = (q15_t*)cell->in_state + cell->units; + q15_t* o_state[2]; + o_state[0] = (q15_t*)cell->out_state; + o_state[1] = (q15_t*)cell->out_state + cell->units; + + // computing buffer + // low |-- buf0 --|-- buf1 --|-- buf2 --|-- input q15 --| + q15_t* z[4]; + q15_t *buf0, *buf1, *buf2, *in_q15_buf; + buf0 = (q15_t*)layer->comp->mem->blk; + buf1 = (q15_t*)layer->comp->mem->blk + cell->units*4; + buf2 = (q15_t*)layer->comp->mem->blk + cell->units*8; + in_q15_buf = (q15_t*)layer->comp->mem->blk + cell->units*12; + + // input q7 -> q15 + local_q7_to_q15(cell->in_data, in_q15_buf, cell->feature_size); + + // z1 = K.dot(cell_inputs, kernel) + bias -> buf1 + #ifdef NNOM_USING_CMSIS_NN + arm_fully_connected_mat_q7_vec_q15_opt + #else + local_fully_connected_mat_q7_vec_q15_opt + #endif + (in_q15_buf, c->weights->p_data, cell->feature_size, cell->units*4, c->bias_shift + 8, c->oshift_iw, c->bias->p_data, buf1, NULL); + + // z2 = K.dot(h_tm1, recurrent_kernel) -> buf2 + // --- arm version must use bias, so we have to use local implementation + local_fully_connected_mat_q7_vec_q15_opt(h_tm1, c->recurrent_weights->p_data, + cell->units, cell->units*4, 0, c->oshift_hw, NULL, buf2, NULL); + + // z = z1 + z2 -> buf0 + local_add_q15(buf1, buf2, buf0, 0, cell->units*4); + + // split the data to each gate + z[0] = buf0; + z[1] = buf0 + cell->units; + z[2] = buf0 + cell->units*2; + z[3] = buf0 + cell->units*3; + + // i = nn.sigmoid(z0) + local_sigmoid_q15(z[0], cell->units, act_int_bit); + // f = nn.sigmoid(z1) + local_sigmoid_q15(z[1], cell->units, act_int_bit); + // o = nn.sigmoid(z3) + local_sigmoid_q15(z[3], cell->units, act_int_bit); + + /* c = f * c_tm1 + i * nn.tanh(z2) for the step 1-3. */ + // 1. i * tanh(z2) -> buf1 + local_tanh_q15(z[2], cell->units, act_int_bit); + local_mult_q15(z[0], z[2], buf1, 30 - (c->q_dec_c+8), cell->units); + // 2. f * c_tm1 -> o_state[0] + local_mult_q15(z[1], c_tm1, o_state[0], 15, cell->units); + // 3. c = i*tanh + f*c_tm1 -> o_state[1] ** fill the upper state (carry) + local_add_q15(buf1, o_state[0], o_state[1], 0, cell->units); + + /* h = o * nn.tanh(c) -> o_state[0] for the step 1-2 */ + // 1. tanh(c) -> buf2 --- first copy then activate. + nnom_memcpy(buf2, o_state[1], cell->units*2); + local_tanh_q15(buf2, cell->units, 7 - c->q_dec_c); // this int bit is under 8bit + // 2. 
h = o*tanh(c) -> o_state[0] ** fill the lower state (memory, hidden) + local_mult_q15(z[3], buf2, o_state[0], 15, cell->units); + + // copy and shift q15 to q7 ** (copy hidden to output) + local_q15_to_q7(o_state[0], cell->out_data, 8, cell->units); + return NN_SUCCESS; +} + + +// researve for debugging, printing the intermediate products and variables +#if 0 +static void print_variable(q7_t* data,char*name, int dec_bit, int size) +{ + printf("\n"); + printf("%s\n", name); + for(int i = 0; i < size; i++) + { + if(i%8==0) + printf("\n"); + printf("%f\t", (float) data[i] / (1 << dec_bit)); + } + printf("\n"); +} + +static void print_variable_q15(q15_t *data,char*name, int dec_bit, int size) +{ + printf("\n\n"); + printf("%s", name); + for(int i = 0; i < size; i++) + { + if(i%8==0) + printf("\n"); + printf("%f\t", (float) data[i] / (1 << dec_bit)); + } + printf("\n"); +} + + +// Q7 input output +// Q7 weights +// Q15 states and intermediate buffer +nnom_status_t lstm_cell_q7_q15_run(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_rnn_layer_t* cl = (nnom_rnn_layer_t *) layer; + nnom_lstm_cell_t* c = (nnom_lstm_cell_t*) cell; + int act_int_bit = 7 - c->q_dec_z; + + // test + //nnom_memset(cell->in_data, 32, cell->feature_size); + + // state buffer + // low |-- hidden --|-- carry --| high + q15_t* h_tm1 = (q15_t*)cell->in_state; + q15_t* c_tm1 = (q15_t*)cell->in_state + cell->units; + q15_t* o_state[2]; + o_state[0] = (q15_t*)cell->out_state; + o_state[1] = (q15_t*)cell->out_state + cell->units; + + // computing buffer + // low |-- buf0 --|-- buf1 --|-- buf2 --|-- input q15 --| + q15_t* z[4]; + q15_t *buf0, *buf1, *buf2, *in_q15_buf; + buf0 = (q15_t*)layer->comp->mem->blk; + buf1 = (q15_t*)layer->comp->mem->blk + cell->units*4; + buf2 = (q15_t*)layer->comp->mem->blk + cell->units*8; + in_q15_buf = (q15_t*)layer->comp->mem->blk + cell->units*12; + + // input q7 -> q15 + //local_q7_to_q15_no_shift(cell->in_data, in_q15_buf, cell->feature_size); + local_q7_to_q15(cell->in_data, in_q15_buf, cell->feature_size); + print_variable_q15(in_q15_buf, "input", layer->in->tensor->q_dec[0] + 8, cell->feature_size); + print_variable_q15(h_tm1, "h_tml", 15, cell->units); + print_variable_q15(c_tm1, "c_tml", c->q_dec_c + 8, cell->units); + + // z1 = K.dot(cell_inputs, kernel) + bias -> buf1 + #ifdef NNOM_USING_CMSIS_NN + arm_fully_connected_mat_q7_vec_q15_opt + #else + local_fully_connected_mat_q7_vec_q15_opt + #endif + (in_q15_buf, c->weights->p_data, cell->feature_size, cell->units*4, c->bias_shift + 8, c->oshift_iw, c->bias->p_data, buf1, NULL); + + // z2 = K.dot(h_tm1, recurrent_kernel) -> buf2 + // arm version must use bias, so we have to use local implementation + local_fully_connected_mat_q7_vec_q15_opt(h_tm1, c->recurrent_weights->p_data, + cell->units, cell->units*4, 0, c->oshift_hw, NULL, buf2, NULL); + + // z = z1 + z2 -> buf0 + local_add_q15(buf1, buf2, buf0, 0, cell->units*4); + + print_variable_q15(buf0, "z", c->q_dec_z + 8, cell->units*4); + print_variable_q15(buf1, "z1", c->q_dec_z + 8, cell->units*4); + print_variable_q15(buf2, "z2", c->q_dec_z + 8, cell->units*4); + + // split the data to each gate + z[0] = buf0; + z[1] = buf0 + cell->units; + z[2] = buf0 + cell->units*2; + z[3] = buf0 + cell->units*3; + + // i = nn.sigmoid(z0) + local_sigmoid_q15(z[0], cell->units, act_int_bit); + // f = nn.sigmoid(z1) + local_sigmoid_q15(z[1], cell->units, act_int_bit); + // o = nn.sigmoid(z3) + local_sigmoid_q15(z[3], cell->units, act_int_bit); + + print_variable_q15(z[0], "z[0] - i", 
15, cell->units); + print_variable_q15(z[1], "z[1] - f", 15, cell->units); + print_variable_q15(z[3], "z[3] - o", 15, cell->units); + + /* c = f * c_tm1 + i * nn.tanh(z2) for the step 1-3. */ + // 1. i * tanh(z2) -> buf1 + local_tanh_q15(z[2], cell->units, act_int_bit); + print_variable_q15(z[2], "z[2] - ?", 15, cell->units); + + local_mult_q15(z[0], z[2], buf1, 30 - (c->q_dec_c+8), cell->units); //q0.15 * q0.15 >> (shift) = (q_c + 8) // i am not very sure + print_variable_q15(buf1, "c2: i * tanh(z2) ", c->q_dec_c+8, cell->units); + + // 2. f * c_tm1 -> o_state[0] + local_mult_q15(z[1], c_tm1, o_state[0], 15, cell->units); + print_variable_q15(o_state[0], "c1: f * c_tm1", c->q_dec_c+8, cell->units); + + // 3. c = i*tanh + f*c_tm1 -> o_state[1] ** fill the upper state (carry) + local_add_q15(buf1, o_state[0], o_state[1], 0, cell->units); + print_variable_q15(o_state[1], "c = c1+c2", c->q_dec_c+8, cell->units); + + /* h = o * nn.tanh(c) -> o_state[0] for the step 1-2 */ + // 1. tanh(c) -> buf2 --- first copy then activate. + nnom_memcpy(buf2, o_state[1], cell->units*2); + local_tanh_q15(buf2, cell->units, 7 - c->q_dec_c); // this int bit is under 8bit + print_variable_q15(buf2, "tanh(c)", 15, cell->units); + + // 2. h = o*tanh(c) -> o_state[0] ** fill the lower state (memory, hidden) + local_mult_q15(z[3], buf2, o_state[0], 15, cell->units); + print_variable_q15(o_state[0], "h = o*tanh(c)", 15, cell->units); + + // copy and shift q15 to q7 ** (copy hidden to output) + local_q15_to_q7(o_state[0], cell->out_data, 8, cell->units); + + print_variable(cell->out_data, "q7 output)", 7, cell->units); + + return NN_SUCCESS; +} +#endif diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_matrix.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_matrix.c new file mode 100644 index 000000000..e011ecc0f --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_matrix.c @@ -0,0 +1,239 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_matrix.h" + +// TODO, completely change this file to local version +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_status_t matrix_build(nnom_layer_t *layer); + +nnom_layer_t *add_s(const nnom_matrix_config_t * config) +{ + nnom_matrix_layer_t *cl = (nnom_matrix_layer_t *) Add(config->output_shift); + if(cl) + cl->super.config = (void*) config; + return (nnom_layer_t *)cl; +} + +nnom_layer_t *sub_s(const nnom_matrix_config_t * config) +{ + nnom_matrix_layer_t *cl = (nnom_matrix_layer_t *) Sub(config->output_shift); + if(cl) + cl->super.config = (void*) config; + return (nnom_layer_t *)cl; +} + +nnom_layer_t *mult_s(const nnom_matrix_config_t * config) +{ + nnom_matrix_layer_t *cl = (nnom_matrix_layer_t *) Mult(config->output_shift); + if(cl) + cl->super.config = (void*) config; + return (nnom_layer_t *)cl; +} + +nnom_layer_t *Add(int16_t oshift) +{ + nnom_matrix_layer_t *cl = (nnom_matrix_layer_t *)_same_shape_matrix_layer(); + if (cl == NULL) + return NULL; + // set type in layer parent + cl->super.type = NNOM_ADD; + cl->super.run = add_run; + cl->oshift = oshift; + return (nnom_layer_t *)cl; +} + +nnom_layer_t *Sub(int16_t oshift) +{ + nnom_matrix_layer_t *cl = (nnom_matrix_layer_t *)_same_shape_matrix_layer(); 
+ if (cl == NULL) + return NULL; + // set type in layer parent + cl->super.type = NNOM_SUB; + cl->super.run = sub_run; + cl->oshift = oshift; + return (nnom_layer_t *)cl; +} + +nnom_layer_t *Mult(int16_t oshift) +{ + nnom_matrix_layer_t *cl = (nnom_matrix_layer_t *)_same_shape_matrix_layer(); + if (cl == NULL) + return NULL; + // set type in layer parent + cl->super.type = NNOM_MULT; + cl->super.run = mult_run; + cl->oshift = oshift; + return (nnom_layer_t *)cl; +} + +// init a base layer instance with same shape 1 in 1 out. More IO can be added later +// mainly used by matrix calculation (add, mult, sub) +nnom_layer_t *_same_shape_matrix_layer() +{ + nnom_matrix_layer_t *layer; + nnom_layer_io_t *in, *out; + //nnom_buf_t *comp; + size_t mem_size; + + // apply a block memory for all the sub handles. + mem_size = sizeof(nnom_matrix_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_matrix_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + //comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.build = matrix_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + //comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + //layer->super.comp = comp; + return (nnom_layer_t*)layer; +} + +nnom_status_t matrix_build(nnom_layer_t *layer) +{ + // get the last layer's output as input shape (if more than one) + nnom_layer_io_t *in = layer->in; + while(in) + { + in->tensor = in->hook.io->tensor; + in = in->aux; + } + // output tensor + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR,layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // now this build has passed the input tensors (shapes, formats) to the new tensors. 
+ return NN_SUCCESS; +} + + +nnom_status_t add_run(nnom_layer_t *layer) +{ + nnom_matrix_layer_t* cl = (nnom_matrix_layer_t*)layer; + nnom_layer_io_t *in = layer->in;; + size_t t_size = tensor_size(layer->out->tensor); + int32_t oshift = cl->oshift; + size_t num_input = nnom_io_length(layer->in); + q7_t *input_mem_blk[MAX_INPUT_LAYER]; + + // if there is only 2 matrix + if(num_input == 2) + { + #ifdef NNOM_USING_CMSIS_NN + if(oshift == 0) + arm_add_q7(layer->in->tensor->p_data, layer->in->aux->tensor->p_data, layer->out->tensor->p_data, t_size); + else + #endif + local_add_q7(layer->in->tensor->p_data, layer->in->aux->tensor->p_data, layer->out->tensor->p_data, oshift, t_size); + } + else + { + for(int i = 0; i < num_input; i++) + { + input_mem_blk[i] = in->tensor->p_data; + in = in->aux; + } + local_multiple_add_q7(layer->out->tensor->p_data, oshift, t_size, num_input, input_mem_blk); + } + + return NN_SUCCESS; +} + +nnom_status_t sub_run(nnom_layer_t *layer) +{ + nnom_matrix_layer_t* cl = (nnom_matrix_layer_t*)layer; + nnom_layer_io_t *in = layer->in; + size_t t_size = tensor_size(layer->out->tensor); + int32_t oshift = cl->oshift; + size_t num_input = nnom_io_length(layer->in); + q7_t *input_mem_blk[MAX_INPUT_LAYER]; + + // if there is only 2 matrix + if(num_input == 2) + { + // the first 2 matrix + #ifdef NNOM_USING_CMSIS_NN + if(oshift == 0) + arm_sub_q7(layer->in->tensor->p_data, layer->in->aux->tensor->p_data, layer->out->tensor->p_data, t_size); + else + #endif + local_sub_q7(layer->in->tensor->p_data, layer->in->aux->tensor->p_data, layer->out->tensor->p_data, oshift, t_size); + } + else + { + for(int i = 0; i < num_input; i++) + { + input_mem_blk[i] = in->tensor->p_data; + in = in->aux; + } + local_multiple_sub_q7(layer->out->tensor->p_data, oshift, t_size, num_input, input_mem_blk); + } + return NN_SUCCESS; +} + +nnom_status_t mult_run(nnom_layer_t *layer) +{ + nnom_matrix_layer_t* cl = (nnom_matrix_layer_t*)layer; + nnom_layer_io_t *in = layer->in; + size_t t_size = tensor_size(layer->out->tensor); + int32_t oshift = cl->oshift; + size_t num_input = nnom_io_length(layer->in); + q7_t *input_mem_blk[MAX_INPUT_LAYER]; + + // if there is only 2 matrix + if(num_input == 2) + { + // the first 2 matrix + #ifdef NNOM_USING_CMSIS_NN + if(oshift == 0) + arm_mult_q7(layer->in->tensor->p_data, layer->in->aux->tensor->p_data, layer->out->tensor->p_data, t_size); + else + #endif + local_mult_q7(layer->in->tensor->p_data, layer->in->aux->tensor->p_data, layer->out->tensor->p_data, oshift, t_size); + } + else + { + for(int i = 0; i < num_input; i++) + { + input_mem_blk[i] = in->tensor->p_data; + in = in->aux; + } + local_multiple_mult_q7(layer->out->tensor->p_data, oshift, t_size, num_input, input_mem_blk); + } + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_maxpool.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_maxpool.c new file mode 100644 index 000000000..fe904bad8 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_maxpool.c @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_maxpool.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_layer_t *maxpool_s(const 
nnom_pool_config_t * config) +{ + nnom_layer_t *layer; + + // test, to accomodate 1d and 2d input + if(config->num_dim == 1) + { + layer = MaxPool(kernel(1, config->kernel_size[0]), + stride(1, config->stride_size[0]), + config->padding_type); + } + else + { + layer = MaxPool(kernel(config->kernel_size[0], config->kernel_size[1]), + stride(config->stride_size[0], config->stride_size[1]), + config->padding_type); + } + + if(layer) + layer->config = (void*) config; + return layer; +} + +nnom_layer_t *MaxPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad_type) +{ + nnom_maxpool_layer_t *layer; + nnom_buf_t *comp; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_maxpool_layer_t) + sizeof(nnom_layer_io_t) * 2 + sizeof(nnom_buf_t); + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_maxpool_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_MAXPOOL; + layer->super.run = maxpool_run; + layer->super.build = maxpool_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + layer->super.comp = comp; + + // set parameters + layer->kernel = k; + layer->stride = s; + layer->padding_type = pad_type; + + // padding + if (layer->padding_type == PADDING_SAME) + { + layer->pad.h = (k.h - 1) / 2; + layer->pad.w = (k.w - 1) / 2; + layer->pad.c = 1; // no meaning + } + else + { + layer->pad.h = 0; + layer->pad.w = 0; + layer->pad.c = 0; + } + return (nnom_layer_t *)layer; +} + +nnom_status_t maxpool_build(nnom_layer_t *layer) +{ + nnom_maxpool_layer_t *cl = (nnom_maxpool_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + // copy then change later. + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // now we set up the tensor shape, always HWC format + if (cl->padding_type == PADDING_SAME) + { + layer->out->tensor->dim[0] = NN_CEILIF(layer->in->tensor->dim[0], cl->stride.h); + layer->out->tensor->dim[1] = NN_CEILIF(layer->in->tensor->dim[1], cl->stride.w); + layer->out->tensor->dim[2] = layer->in->tensor->dim[2]; // channel stays the same + } + else + { + layer->out->tensor->dim[0] = NN_CEILIF(layer->in->tensor->dim[0] - cl->kernel.h + 1, cl->stride.h); + layer->out->tensor->dim[1] = NN_CEILIF(layer->in->tensor->dim[1] - cl->kernel.w + 1, cl->stride.w); + layer->out->tensor->dim[2] = layer->in->tensor->dim[2]; + } + + return NN_SUCCESS; +} + +nnom_status_t maxpool_run(nnom_layer_t *layer) +{ + nnom_maxpool_layer_t *cl = (nnom_maxpool_layer_t *)(layer); + + uint16_t out_x, out_y; + + // if global pooling + if(layer->out->tensor->num_dim == 1) + { + out_x = 1; out_y = 1; + } + else // normal pooling. 
+ { + out_x = layer->out->tensor->dim[1]; //W + out_y = layer->out->tensor->dim[0]; //h + } + +#ifdef NNOM_USING_CHW + local_maxpool_q7_CHW(layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + NULL, + layer->out->tensor->p_data); +#else //end of CHW + // HWC + #ifdef NNOM_USING_CMSIS_NN + // 2D, square + if (layer->in->tensor->dim[1] == layer->in->tensor->dim[0] && + layer->out->tensor->dim[1] == layer->out->tensor->dim[0]) + { + arm_maxpool_q7_HWC( + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[2], + cl->kernel.w, cl->pad.w, cl->stride.w, + layer->out->tensor->dim[1], + NULL, + layer->out->tensor->p_data); + } + // none square 2D, or 1D + else + #endif + { + // CMSIS-NN does not support none-square pooling, we have to use local implementation + local_maxpool_q7_HWC(layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + NULL, + layer->out->tensor->p_data); + } +#endif // CHW/HWC + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_output.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_output.c new file mode 100644 index 000000000..bed1c89cd --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_output.c @@ -0,0 +1,54 @@ + +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_output.h" + +nnom_layer_t *output_s(const nnom_io_config_t* config) +{ + nnom_layer_t *layer = input_s(config); + if(layer) + { + layer->config = (void*) config; + layer->type = NNOM_OUTPUT; + layer->run = output_run; + layer->build = default_build; + } + return layer; +} + +nnom_layer_t *Output(nnom_3d_shape_t output_shape, void *p_buf) +{ + // they are acturally the same.. expect the type defined + nnom_layer_t *layer = Input(output_shape, p_buf); + if (layer != NULL) + { + layer->type = NNOM_OUTPUT; + layer->run = output_run; + layer->build = default_build; + } + return layer; +} + +nnom_status_t output_run(nnom_layer_t *layer) +{ + nnom_io_layer_t *cl = (nnom_io_layer_t *)layer; + nnom_memcpy(cl->buf, layer->in->tensor->p_data, tensor_size(layer->out->tensor)); // in->memory -> user memory + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_reshape.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_reshape.c new file mode 100644 index 000000000..1b6ae82f7 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_reshape.c @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-12-07 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_reshape.h" + + +nnom_layer_t *reshape_s(const nnom_reshape_config_t *config) +{ + nnom_reshape_layer_t *layer; + nnom_layer_io_t *in, *out; + + // allocate a block memory for all the sub handles and shifts. 
+ size_t mem_size = sizeof(nnom_reshape_layer_t) + sizeof(nnom_layer_io_t) * 2 ; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_reshape_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_RESHAPE; + layer->super.run = reshape_run; + layer->super.build = reshape_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_NULL; + + // config + //nnom_memcpy(layer->dim, config->dim, config->num_dim * sizeof(nnom_shape_data_t)); + layer->super.config = config; + layer->dim = config->dim; // temporary use the config directly. (not preferable.) + layer->num_dim = config->num_dim; + + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + + return (nnom_layer_t *)layer; +} + +nnom_status_t reshape_build(nnom_layer_t *layer) +{ + nnom_reshape_layer_t *cl = (nnom_reshape_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_set_attr(layer->out->tensor, layer->in->tensor->q_dec, layer->in->tensor->q_offset, cl->dim, cl->num_dim, 8); + + return NN_SUCCESS; +} + +nnom_status_t reshape_run(nnom_layer_t *layer) +{ + return NN_SUCCESS; +} + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_rnn.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_rnn.c new file mode 100644 index 000000000..6fe9662e0 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_rnn.c @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_rnn.h" + +nnom_status_t rnn_build(nnom_layer_t *layer); +nnom_status_t rnn_run(nnom_layer_t *layer); +nnom_status_t rnn_free(nnom_layer_t* layer); + +// RNN +nnom_layer_t *rnn_s(nnom_rnn_cell_t *cell, const nnom_rnn_config_t* config) +{ + nnom_rnn_layer_t *layer; + nnom_buf_t *comp; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_rnn_layer_t) + sizeof(nnom_layer_io_t) * 2 + sizeof(nnom_buf_t); + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_rnn_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + comp = (void *)((uint8_t*)out + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_RNN; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + comp->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + layer->super.comp = comp; + // set run and outshape methods + layer->super.run = rnn_run; + layer->super.build = rnn_build; + layer->super.free = rnn_free; + + // rnn parameters. 
+ layer->return_sequence = config->return_sequence; + layer->stateful = config->stateful; + layer->go_backwards = config->go_backwards; + layer->super.config = (void*)config; + layer->cell = cell; + + // set this layer to the cell + layer->cell->layer = (nnom_layer_t *)layer; + + return (nnom_layer_t *)layer; +} + +nnom_status_t rnn_free(nnom_layer_t* layer) +{ + nnom_rnn_layer_t* cl = (nnom_rnn_layer_t*)layer; + // free cell + if(cl->cell->free) + cl->cell->free(cl->cell); + + // free state buffer + nnom_free(cl->state_buf); + + return NN_SUCCESS; +} + +nnom_status_t rnn_build(nnom_layer_t* layer) +{ + nnom_rnn_layer_t *cl = (nnom_rnn_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // timestamp size + cl->timestamp_size = layer->in->tensor->num_dim > 2 ? layer->in->tensor->dim[1] : layer->in->tensor->dim[0]; + + if(cl->return_sequence) + { + // create new tensor for the output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, 2, 0); + // shape: timestamp, units + layer->out->tensor->dim[0] = cl->timestamp_size; + layer->out->tensor->dim[1] = cl->cell->units; + } + else + { + // create new tensor for the output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, 1, 0); + // shape: timestamp, units + layer->out->tensor->dim[0] = cl->cell->units; + } + + // output q format - the output of the available activations are both q0.7. + layer->out->tensor->q_dec[0] = layer->in->tensor->bitwidth==16? 15: 7; + layer->out->tensor->bitwidth = layer->in->tensor->bitwidth; + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // get feature size from input tensor + cl->cell->feature_size = tensor_get_num_channel(layer->in->tensor); // vector (feature) size + + // call cell builder to build the cell + cl->cell->build(cl->cell); + + // get the size of computation buffer? + cl->super.comp->size = cl->cell->comp_buf_size; // size of intermediate buffer required by the cell. + cl->state_buf = nnom_mem(cl->cell->state_size * 2); // allocate state buf for upper/lower state buffer. + if(!cl->state_buf) + return NN_NO_MEMORY; + + // get the computational cost provided by Cell + layer->stat.macc = cl->cell->macc * cl->timestamp_size; + return NN_SUCCESS; +} + +nnom_status_t rnn_run(nnom_layer_t* layer) +{ + nnom_status_t result; + nnom_rnn_layer_t* cl = (nnom_rnn_layer_t*)(layer); + size_t timestamps_size = layer->in->tensor->dim[layer->in->tensor->num_dim-2]; + size_t feature_size = tensor_get_num_channel(layer->in->tensor); // feature size = last dimension. + size_t state_size = cl->cell->state_size; + size_t output_growth; + void* upper_state = (q7_t*)cl->state_buf + state_size; + void* lower_state = (q7_t*)cl->state_buf; + + // reset state buffer if not in stateful + if (!cl->stateful) + nnom_memset(cl->state_buf, 0, state_size * 2); + + // set output data + output_growth = cl->return_sequence ? 
cl->cell->units : 0; + + // run timestamp by timestamp + for (uint32_t round = 0; round < timestamps_size; round++) + { + if(cl->go_backwards) + { + // set input data + cl->cell->in_data = (q7_t*)layer->in->tensor->p_data + feature_size*(timestamps_size - 1 - round); + // set output data + cl->cell->out_data = (q7_t*)layer->out->tensor->p_data + output_growth*(timestamps_size - 1 - round); + } + else + { + // set input data + cl->cell->in_data = (q7_t*)layer->in->tensor->p_data + feature_size*round; + // set output data + cl->cell->out_data = (q7_t*)layer->out->tensor->p_data + output_growth*round; + } + + // switch upper/lower state buffer + if(cl->cell->in_state != lower_state) + { + cl->cell->in_state = lower_state; + cl->cell->out_state = upper_state; + } + else + { + cl->cell->in_state = upper_state; + cl->cell->out_state = lower_state; + } + + // run it + result = cl->cell->run(cl->cell); + if(result != NN_SUCCESS) + return result; + } + + return NN_SUCCESS; +} + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_simple_cell.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_simple_cell.c new file mode 100644 index 000000000..b61acbef3 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_simple_cell.c @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2020-08-21 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_simple_cell.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +// Simple RNN +// unit = output shape +// type of activation +nnom_rnn_cell_t *simple_cell_s(const nnom_simple_cell_config_t* config) +{ + nnom_simple_cell_t *cell; + cell = nnom_mem(sizeof(nnom_simple_cell_t)); + if (cell == NULL) + return NULL; + // set methods + cell->super.run = simple_cell_run; + cell->super.build = simple_cell_build; + cell->super.free = simple_cell_free; + cell->super.config = (void*) config; + cell->super.units = config->units; + cell->super.type = NNOM_SIMPLE_CELL; + + // set parameters + cell->bias = config->bias; + cell->weights = config->weights; + cell->recurrent_weights = config->recurrent_weights; + cell->act_type = config->act_type; + // q format for intermediate products + cell->q_dec_iw = config->q_dec_iw; + cell->q_dec_hw = config->q_dec_hw; + cell->q_dec_h = config->q_dec_h; + + return (nnom_rnn_cell_t *)cell; +} + +nnom_status_t simple_cell_free(nnom_rnn_cell_t* cell) +{ + return NN_SUCCESS; +} + +// the state buffer and computational buffer shape of the cell +nnom_status_t simple_cell_build(nnom_rnn_cell_t* cell) +{ + nnom_layer_t *layer = cell->layer; + nnom_simple_cell_t *c = (nnom_simple_cell_t *)cell; + nnom_simple_cell_config_t *config = (nnom_simple_cell_config_t *)cell->config; + int q_hw_iw; + + // activation, check if activation is supported + if(config->act_type != ACT_SIGMOID && config->act_type != ACT_TANH) + return NN_ARGUMENT_ERROR; + + // calculate output shift for the 2 calculation. + // hw = the product of hidden x weight, iw = the product of input x weight + // due to the addition of them, they must have same q format. 
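+    // the smaller dec bit (the format with more integer bits) is used for both,
+    // e.g. q_dec_iw = 6 and q_dec_hw = 4 makes both dot products share a dec-4 output format.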
+ q_hw_iw = MIN(c->q_dec_hw, c->q_dec_iw); + + // for the 2 dot in cell: output shift = input_dec + weight_dec - output_dec + c->oshift_hw = c->q_dec_h + c->recurrent_weights->q_dec[0] - q_hw_iw; + c->oshift_iw = layer->in->tensor->q_dec[0] + c->weights->q_dec[0] - q_hw_iw; + + // bias shift = bias_dec - out_dec + c->bias_shift = layer->in->tensor->q_dec[0] + c->weights->q_dec[0] - c->bias->q_dec[0]; + + // state size = one timestamp output size. + cell->state_size = cell->units; + + // comp buffer size: not required + cell->comp_buf_size = 0; + + // finally, calculate the MAC for info + cell->macc = cell->feature_size * cell->units // input: feature * state + + cell->units * cell->units; // recurrent, state * output_unit + + return NN_SUCCESS; +} + +// This Simple Cell replicate the Keras's SimpleCell as blow +/* + def call(self, inputs, states, training=None): + prev_output = states[0] if nest.is_sequence(states) else states + + h = K.dot(inputs, self.kernel) + h = K.bias_add(h, self.bias) + + h2 = K.dot(prev_output, self.recurrent_kernel) + output = h + H2 + output = self.activation(output) + + new_state = [output] if nest.is_sequence(states) else output + return output, new_state +*/ + +nnom_status_t simple_cell_run(nnom_rnn_cell_t* cell) +{ + nnom_simple_cell_t* c = (nnom_simple_cell_t*) cell; + int act_int_bit = 7 - MIN(c->q_dec_hw, c->q_dec_iw); + + // in_state x recurrent_weight -> h2 (output buf) + local_dot_q7_opt(cell->in_state, c->recurrent_weights->p_data, cell->units, cell->units, c->oshift_hw, cell->out_data); + // (input x weight) + bias -> h (in_state buf) + local_fully_connected_q7_opt(cell->in_data, c->weights->p_data, + cell->feature_size, cell->units, c->bias_shift, c->oshift_iw, c->bias->p_data, cell->in_state, NULL); + // h + h2 -> (out_state buf) + local_add_q7(cell->in_state, cell->out_data, cell->out_state, 0, cell->units); + + // active(out_state buf) + if(c->act_type == ACT_TANH) + local_tanh_q7(cell->out_state, cell->units, act_int_bit); + //local_hard_tanh_q7(cell->out_state, cell->units, act_int_bit); + else + local_sigmoid_q7(cell->out_state, cell->units, act_int_bit); + //local_hard_sigmoid_q7(cell->out_state, cell->units, act_int_bit); + + // (out_state buf) --copy--> (output buf) + nnom_memcpy(cell->out_data, cell->out_state, cell->units); + + return NN_SUCCESS; +} + + diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_softmax.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_softmax.c new file mode 100644 index 000000000..04b009b35 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_softmax.c @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_softmax.h" + +#ifdef NNOM_USING_CMSIS_NN +#include "arm_math.h" +#include "arm_nnfunctions.h" +#endif + +nnom_layer_t *softmax_s(const nnom_softmax_config_t * config) +{ + nnom_layer_t * layer = Softmax(); + if(layer) + layer->config = (void*) config; + return layer; +} + +nnom_layer_t *Softmax(void) +{ + nnom_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. 
+ size_t mem_size = sizeof(nnom_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->type = NNOM_SOFTMAX; + layer->run = softmax_run; + layer->build = softmax_build; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->in = io_init(layer, in); + layer->out = io_init(layer, out); + + return layer; +} + +nnom_status_t softmax_build(nnom_layer_t *layer) +{ + // get the last layer's output as input shape + layer->in->tensor = layer->in->hook.io->tensor; + // output tensor + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + // softmax has fixed output dec bit + layer->out->tensor->q_dec[0] = 7; + return NN_SUCCESS; +} + +nnom_status_t softmax_run(nnom_layer_t *layer) +{ + // looks like the new version cause accuracy drop quite a lot. +// #ifdef NNOM_USING_CMSIS_NN +// // temporary fixed for mutiple dimension input. +// arm_softmax_q7(layer->in->tensor->p_data, tensor_size(layer->out->tensor), layer->out->tensor->p_data); +// #else + local_softmax_q7(layer->in->tensor->p_data, tensor_size(layer->out->tensor), layer->out->tensor->p_data); + //#endif + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_sumpool.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_sumpool.c new file mode 100644 index 000000000..82de147c4 --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_sumpool.c @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_sumpool.h" + +nnom_layer_t *sumpool_s(const nnom_pool_config_t * config) +{ + nnom_sumpool_layer_t *cl; + if(config->num_dim == 1) + { + cl = (nnom_sumpool_layer_t *)SumPool(kernel(1, config->kernel_size[0]), + stride(1, config->stride_size[0]), + config->padding_type); + } + else + { + cl = (nnom_sumpool_layer_t *)SumPool(kernel(config->kernel_size[0], config->kernel_size[1]), + stride(config->stride_size[0], config->stride_size[1]), + config->padding_type); + } + if(cl) + { + cl->super.config = (void*) config; + cl->output_shift = config->output_shift; // no idea if we need it + } + return (nnom_layer_t *)cl; +} + + +nnom_layer_t *SumPool(nnom_3d_shape_t k, nnom_3d_shape_t s, nnom_padding_t pad_type) +{ + nnom_layer_t *layer = MaxPool(k, s, pad_type); + + if (layer != NULL) + { + layer->type = NNOM_SUMPOOL; + layer->run = sumpool_run; + layer->build = sumpool_build; + } + return (nnom_layer_t *)layer; +} + + +nnom_status_t sumpool_build(nnom_layer_t *layer) +{ + // avg pooling share the same output shape, stride, padding setting. + maxpool_build(layer); + + // however, avg pooling require a computational buffer. 
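+    // (presumably one 32-bit accumulator per output element, hence 4 * output tensor size in bytes)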
+ layer->comp->size = 4 * tensor_size(layer->out->tensor); + + return NN_SUCCESS; +} + + +// sum pooling, dynamic change Q format, must be used in the last layer before softmax in current version +nnom_status_t sumpool_run(nnom_layer_t *layer) +{ + nnom_sumpool_layer_t *cl = (nnom_sumpool_layer_t *)(layer); + uint16_t out_x, out_y; + + // if global pooling + if(layer->out->tensor->num_dim == 1) + { + out_x = 1; out_y = 1; + } + else // normal pooling. + { + out_x = layer->out->tensor->dim[1]; //W + out_y = layer->out->tensor->dim[0]; //h + } + +#ifdef NNOM_USING_CHW + local_sumpool_q7_CHW( +#else + local_sumpool_q7_HWC( +#endif + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + cl->pad.w, cl->pad.h, + cl->stride.w, cl->stride.h, + out_x, out_y, + layer->comp->mem->blk, + layer->out->tensor->p_data); + + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_upsample.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_upsample.c new file mode 100644 index 000000000..96472a5ab --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_upsample.c @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_upsample.h" + +nnom_layer_t *upsample_s(const nnom_upsample_config_t *config) +{ + nnom_layer_t *layer = UpSample(kernel(config->kernel[0], config->kernel[1])); + if(layer) + layer->config = (void*) config; + return layer; +} + +// up sampling layer +nnom_layer_t *UpSample(nnom_3d_shape_t kernel) +{ + nnom_upsample_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_upsample_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_upsample_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_UPSAMPLE; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + // set run and outshape methods + layer->super.run = upsample_run; + layer->super.build = upsample_build; + + // set parameters + layer->kernel = kernel; + + return (nnom_layer_t*)layer; +} + +nnom_status_t upsample_build(nnom_layer_t *layer) +{ + nnom_upsample_layer_t* cl = (nnom_upsample_layer_t*)layer; + + // get the last layer's output as input shape + layer->in->tensor = layer->in->hook.io->tensor; + // output tensor + // 1. allocate a new tensor for output + // 2. set the same dim, qfmt to the new tensor. + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // enlarge w and h, c stay the same. 
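+ // dim[0] is height and dim[1] is width (HWC order); each is multiplied by the up-sampling kernel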
+ layer->out->tensor->dim[0] = layer->in->tensor->dim[0] * cl->kernel.h; + layer->out->tensor->dim[1] = layer->in->tensor->dim[1] * cl->kernel.w; + + return NN_SUCCESS; +} + +// up sampling, or so called unpooling +nnom_status_t upsample_run(nnom_layer_t *layer) +{ + nnom_upsample_layer_t *cl = (nnom_upsample_layer_t *)(layer); + +#ifdef NNOM_USING_CHW + local_up_sampling_q7_CHW( +#else + local_up_sampling_q7_HWC( +#endif + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->kernel.w, cl->kernel.h, + layer->out->tensor->dim[1], layer->out->tensor->dim[0], + NULL, + layer->out->tensor->p_data); + return NN_SUCCESS; +} diff --git a/APP_Framework/Framework/knowing/nnom/src/layers/nnom_zero_padding.c b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_zero_padding.c new file mode 100644 index 000000000..2352e614e --- /dev/null +++ b/APP_Framework/Framework/knowing/nnom/src/layers/nnom_zero_padding.c @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2018-2020 + * Jianjia Ma + * majianjia@live.com + * + * SPDX-License-Identifier: Apache-2.0 + * + * Change Logs: + * Date Author Notes + * 2019-07-23 Jianjia Ma The first version + */ + +#include +#include +#include + +#include "nnom.h" +#include "nnom_local.h" +#include "nnom_layers.h" +#include "layers/nnom_zero_padding.h" + +nnom_layer_t * zeropadding_s(const nnom_zero_padding_config_t* config) +{ + nnom_layer_t *layer = ZeroPadding(config->pad); + if(layer) + layer->config = (void*) config; + return (nnom_layer_t*)layer; +} + +// Zero padding layer +nnom_layer_t *ZeroPadding(nnom_border_t pad) +{ + nnom_zero_padding_layer_t *layer; + nnom_layer_io_t *in, *out; + + // apply a block memory for all the sub handles. + size_t mem_size = sizeof(nnom_zero_padding_layer_t) + sizeof(nnom_layer_io_t) * 2; + layer = nnom_mem(mem_size); + if (layer == NULL) + return NULL; + + // distribut the memory to sub handles. + in = (void *)((uint8_t*)layer + sizeof(nnom_zero_padding_layer_t)); + out = (void *)((uint8_t*)in + sizeof(nnom_layer_io_t)); + + // set type in layer parent + layer->super.type = NNOM_ZERO_PADDING; + // set buf state + in->type = NNOM_TENSOR_BUF_TEMP; + out->type = NNOM_TENSOR_BUF_TEMP; + // put in & out on the layer. + layer->super.in = io_init(layer, in); + layer->super.out = io_init(layer, out); + // set run and outshape methods + layer->super.run = zero_padding_run; + layer->super.build = zero_padding_build; + + // set parameters + layer->pad = pad; + + return (nnom_layer_t*)layer; +} + +nnom_status_t zero_padding_build(nnom_layer_t* layer) +{ + nnom_zero_padding_layer_t *cl = (nnom_zero_padding_layer_t *)layer; + + // get the tensor from last layer's output + layer->in->tensor = layer->in->hook.io->tensor; + + // create new tensor for output + layer->out->tensor = new_tensor(NNOM_QTYPE_PER_TENSOR, layer->in->tensor->num_dim, tensor_get_num_channel(layer->in->tensor)); + // copy then change later. 
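+ // inherit q-format and dims from the input tensor; height/width are overwritten with the padded sizes below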
+ tensor_cpy_attr(layer->out->tensor, layer->in->tensor); + + // see if the activation will change the q format + if(layer->actail) + layer->out->tensor->q_dec[0] = act_get_dec_bit(layer->actail->type, layer->out->tensor->q_dec[0]); + + // output shape + layer->out->tensor->dim[1] = layer->in->tensor->dim[1] + cl->pad.left + cl->pad.right; + layer->out->tensor->dim[0] = layer->in->tensor->dim[0] + cl->pad.top + cl->pad.bottom; + layer->out->tensor->dim[2] = layer->in->tensor->dim[2]; + return NN_SUCCESS; +} + +nnom_status_t zero_padding_run(nnom_layer_t * layer) +{ + nnom_zero_padding_layer_t *cl = (nnom_zero_padding_layer_t*)layer; + +#ifdef NNOM_USING_CHW + local_zero_padding_CHW_q7( +#else + local_zero_padding_HWC_q7( +#endif + layer->in->tensor->p_data, + layer->in->tensor->dim[1], layer->in->tensor->dim[0], layer->in->tensor->dim[2], + cl->pad.top, + cl->pad.bottom, + cl->pad.left, + cl->pad.right, + layer->out->tensor->p_data, + layer->out->tensor->dim[1], layer->out->tensor->dim[0]); + + return NN_SUCCESS; +} +
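The Softmax, SumPool, UpSample and ZeroPadding constructors added above all follow the same pattern: allocate the layer struct and its two io handles in one block, register the run/build callbacks, and derive the output tensor shape inside build. A minimal sketch of how such hand-coded constructors are typically chained is shown below; it is not part of the patch, and the new_model/model_add/sequencial_compile/model_run calls plus the Input/shape/PADDING_VALID helpers are assumed from the upstream NNoM headers rather than from the hunks shown here.

```
/* Sketch only -- not part of this patch. Assumes NNoM's sequential API
 * (new_model, model_add, sequencial_compile, model_run) and the Input/shape
 * helpers from nnom.h / nnom_layers.h. The layer order is chosen only to
 * exercise the constructors added in this patch, not as a meaningful network. */
#include "nnom.h"

static int8_t input_data[28 * 28];   /* hypothetical q7 input buffer */

void nnom_layer_demo(void)
{
    nnom_model_t *model = new_model(NULL);

    model_add(model, Input(shape(28, 28, 1), input_data));
    /* pad one pixel on every side: 28x28 -> 30x30 */
    model_add(model, ZeroPadding((nnom_border_t){.top = 1, .bottom = 1, .left = 1, .right = 1}));
    /* double height and width: 30x30 -> 60x60 */
    model_add(model, UpSample(kernel(2, 2)));
    /* 2x2 sum pooling with stride 2; per the comment in nnom_sumpool.c it is
     * meant to sit directly before the softmax output */
    model_add(model, SumPool(kernel(2, 2), stride(2, 2), PADDING_VALID));
    model_add(model, Softmax());

    sequencial_compile(model);
    model_run(model);
}
```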