Sample for operating the OMRON HVC-P2 with GR-PEACH.

Dependencies:   AsciiFont

Information

Please see here for the Japanese version.

What is this?

This is a sample that runs the OMRON HVC-P2 with GR-PEACH. It lets you try the following HVC-P2 functions: Human Body Detection, Face Detection, Age Estimation, Gender Estimation, Expression Estimation, and Face Recognition.
Both GR-PEACH and HVC-P2 use the Renesas RZ/A1H, which includes an ARM® Cortex™-A9 processor.

/media/uploads/dkato/hvcp2_demo_img3.jpg

HVC-P2 (Human Vision Components B5T-007001) is a human-sensing component that recognizes people. It is an integrated module, designed to be built into other devices, that combines a camera module with the ten types of image sensing provided by OKAO Vision.
For details, please refer to the following link.

The HVCApi folder of this sample uses the sample code "SampleCode_rev.2.0.2" available at the link below. (You can download it from "Product Information" -> "Sample Code" in the middle of the page.)
http://www.omron.com/ecb/products/mobile/hvc_p2/

Configuration

  1. HVC-P2 x 1
  2. USB Type-A to micro-USB cable x 2
  3. USB Type-A to micro-USB conversion adapter x 1
  4. GR-PEACH x 1
  5. 4.3-inch LCD shield x 1

/media/uploads/dkato/composition_hvcp2_demo.jpg

/media/uploads/dkato/composition_hvcp2_demo_2.jpg

Please close jumper JP3 on the GR-PEACH.
/media/uploads/RyoheiHagimoto/usb.jpg

How to use

The sample starts when the power-supply USB cable is connected. At startup, all functions are turned off. Pressing the buttons on the right side of the screen switches each function on/off.

  • Function ON: orange or green
  • Function OFF: blue or gray

Only the FACE button cycles through "FACE (blue) -> FACE (orange) -> RECOGNITION (green)". While FACE is blue, the AGE, GENDER, and EXPRESSION buttons are gray and cannot be operated.
The "Response time" shown at the bottom left of the screen is the image-processing time plus the USB transfer time, not the pure image-processing time.

Register Data (Face Recognition)

Set the FACE button to RECOGNITION (green) and touch the screen while exactly one person is on screen to register that face. This sample can register up to 10 people; when an 11th person is registered, the oldest registration is deleted. Registration data is stored in RAM on the HVC-P2 side and is discarded on power-off or reset.

/media/uploads/dkato/hvcp2_demo_img2.jpg
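
The "up to 10 people, overwrite the oldest" bookkeeping described above can be implemented with a simple ring of user IDs, as in the sketch below. register_face_on_hvc() is a placeholder for the registration command provided by HVCApi, not an actual function name in this sample.

#define MAX_REGISTERED_USERS 10

static int next_user_id = 0;   /* next HVC user slot to (re)use: 0..9 */

/* Placeholder for the HVC-P2 registration command in HVCApi;
 * returns 0 on success in this sketch. */
extern int register_face_on_hvc(int user_id);

/* Called when the screen is touched while FACE is set to RECOGNITION. */
void register_current_face(void) {
    if (register_face_on_hvc(next_user_id) == 0) {
        /* Slots wrap around, so the 11th registration overwrites the
         * oldest one. The data lives in RAM on the HVC-P2 and is lost
         * on power-off or reset. */
        next_user_id = (next_user_id + 1) % MAX_REGISTERED_USERS;
    }
}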

Change parameters

Pressing the Config icon at the bottom right of the screen opens the parameter setting screen, where you can change the threshold, detection size, and face angle parameters.

/media/uploads/dkato/hvcp2_demo_config_icon.jpg
/media/uploads/dkato/hvcp2_demo_config.jpg
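
For orientation, the sketch below groups the values editable on this screen into one struct. The struct and the commented ranges are illustrative assumptions; the sample writes the actual values to the module through the threshold, detection size, and face angle commands in HVCApi.

/* Illustrative container for the Config-screen parameters. Confirm the
 * valid ranges against the B5T-007001 command specification. */
typedef struct {
    int body_threshold;   /* human body detection threshold             */
    int face_threshold;   /* face detection threshold                   */
    int body_min_size;    /* minimum body detection size in pixels      */
    int body_max_size;    /* maximum body detection size in pixels      */
    int face_min_size;    /* minimum face detection size in pixels      */
    int face_max_size;    /* maximum face detection size in pixels      */
    int face_angle_yaw;   /* face detection yaw range selector          */
    int face_angle_roll;  /* face detection roll range selector         */
} demo_hvc_params_t;

/* Hypothetical helper: the real sample issues the corresponding
 * set-threshold / set-size-range / set-face-angle commands from HVCApi. */
int apply_hvc_params(const demo_hvc_params_t *params);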

Change transfer image size

Pressing USER_BUTTON0 on the back of the board switches the image transfer size in the order "160 x 120 -> 320 x 240 -> no image".
/media/uploads/dkato/gr-peach_switch2.jpg
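
A minimal sketch of the USER_BUTTON0 handling: a three-state mode variable is cycled on each press and then used when requesting an output image together with the execute command. The enum names are illustrative; HVCApi defines the real image output options.

/* Image transfer modes cycled by USER_BUTTON0 (names are illustrative). */
typedef enum {
    IMG_160x120 = 0,   /* 160 x 120 image        */
    IMG_320x240,       /* 320 x 240 (QVGA) image */
    IMG_NONE           /* no image transferred   */
} demo_image_mode_t;

static volatile demo_image_mode_t image_mode = IMG_160x120;

/* Button handler: 160 x 120 -> 320 x 240 -> no image -> 160 x 120 -> ... */
void on_user_button0(void) {
    image_mode = (demo_image_mode_t)((image_mode + 1) % 3);
}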

HVC sensing result stabilizing library

The header below was added in revision 5:49a61433290a (committer: dkato, Tue Sep 05 10:01:51 2017 +0000, "Add HVC sensing result stabilizing library"). It defines the result structures handled by that library.

/*---------------------------------------------------------------------------*/
/* Copyright(C) 2017 OMRON Corporation                                       */
/*                                                                           */
/* Licensed under the Apache License, Version 2.0 (the "License");           */
/* you may not use this file except in compliance with the License.          */
/* You may obtain a copy of the License at                                   */
/*                                                                           */
/* http://www.apache.org/licenses/LICENSE-2.0                                */
/*                                                                           */
/* Unless required by applicable law or agreed to in writing, software       */
/* distributed under the License is distributed on an "AS IS" BASIS,         */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  */
/* See the License for the specific language governing permissions and       */
/* limitations under the License.                                            */
/*---------------------------------------------------------------------------*/

#ifndef __STB_OKOA_RESULT_H__
#define __STB_OKOA_RESULT_H__

#ifndef VOID
#define VOID void
#endif

typedef signed char    STB_INT8;    /* 8-bit signed integer          */
typedef unsigned char  STB_UINT8;   /* 8-bit unsigned integer        */
typedef signed short   STB_INT16;   /* 16-bit signed integer         */
typedef unsigned short STB_UINT16;  /* 16-bit unsigned integer       */
typedef int            STB_INT32;   /* 32-bit signed integer         */
typedef unsigned int   STB_UINT32;  /* 32-bit unsigned integer       */
typedef float          STB_FLOAT32; /* 32-bit floating point number  */
typedef double         STB_FLOAT64; /* 64-bit floating point number  */

typedef enum {
    STB_Expression_Neutral,
    STB_Expression_Happiness,
    STB_Expression_Surprise,
    STB_Expression_Anger,
    STB_Expression_Sadness,
    STB_Expression_Max
} STB_OKAO_EXPRESSION;

typedef struct {
    STB_INT32 nX;
    STB_INT32 nY;
} STB_POINT;

/* Face direction estimation */
typedef struct {
    STB_INT32 nLR;
    STB_INT32 nUD;
    STB_INT32 nRoll;
    STB_INT32 nConfidence;
} STB_FRAME_RESULT_DIRECTION;

/* Age estimation */
typedef struct {
    STB_INT32 nAge;
    STB_INT32 nConfidence;
} STB_FRAME_RESULT_AGE;

/* Gender estimation */
typedef struct {
    STB_INT32 nGender;
    STB_INT32 nConfidence;
} STB_FRAME_RESULT_GENDER;

/* Gaze estimation */
typedef struct {
    STB_INT32 nLR;
    STB_INT32 nUD;
} STB_FRAME_RESULT_GAZE;

/* Blink estimation */
typedef struct {
    STB_INT32 nLeftEye;
    STB_INT32 nRightEye;
} STB_FRAME_RESULT_BLINK;

/* Facial expression estimation */
typedef struct {
    STB_INT32 anScore[STB_Expression_Max];
    STB_INT32 nDegree;
} STB_FRAME_RESULT_EXPRESSION;

/* Face recognition */
typedef struct {
    STB_INT32 nUID;
    STB_INT32 nScore;
} STB_FRAME_RESULT_RECOGNITION;

/* One detection result */
typedef struct {
    STB_POINT center;
    STB_INT32 nSize;
    STB_INT32 nConfidence;
} STB_FRAME_RESULT_DETECTION;

/* Face detection and post-processing result (1 person) */
typedef struct {
    STB_POINT                    center;
    STB_INT32                    nSize;
    STB_INT32                    nConfidence;
    STB_FRAME_RESULT_DIRECTION   direction;
    STB_FRAME_RESULT_AGE         age;
    STB_FRAME_RESULT_GENDER      gender;
    STB_FRAME_RESULT_GAZE        gaze;
    STB_FRAME_RESULT_BLINK       blink;
    STB_FRAME_RESULT_EXPRESSION  expression;
    STB_FRAME_RESULT_RECOGNITION recognition;
} STB_FRAME_RESULT_FACE;

/* Human body detection result (1 frame) */
typedef struct {
    STB_INT32                  nCount;
    STB_FRAME_RESULT_DETECTION body[35];
} STB_FRAME_RESULT_BODYS;

/* Face detection and post-processing result (1 frame) */
typedef struct {
    STB_INT32             nCount;
    STB_FRAME_RESULT_FACE face[35];
} STB_FRAME_RESULT_FACES;

/* FRAME result (1 frame) */
typedef struct {
    STB_FRAME_RESULT_BODYS bodys;
    STB_FRAME_RESULT_FACES faces;
} STB_FRAME_RESULT;

#endif /* __STB_OKOA_RESULT_H__ */
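
For reference, a short example of reading a filled-in STB_FRAME_RESULT; how the structure is populated by the stabilizing library is not shown here.

#include <stdio.h>

/* Walk through one frame of stabilized results (header above assumed
 * to be included). */
static void print_frame_result(const STB_FRAME_RESULT *result) {
    STB_INT32 i;
    for (i = 0; i < result->faces.nCount; i++) {
        const STB_FRAME_RESULT_FACE *face = &result->faces.face[i];
        printf("face %d: pos=(%d,%d) size=%d age=%d gender=%d\n",
               (int)i, (int)face->center.nX, (int)face->center.nY,
               (int)face->nSize, (int)face->age.nAge,
               (int)face->gender.nGender);
    }
    for (i = 0; i < result->bodys.nCount; i++) {
        const STB_FRAME_RESULT_DETECTION *body = &result->bodys.body[i];
        printf("body %d: pos=(%d,%d) size=%d\n",
               (int)i, (int)body->center.nX, (int)body->center.nY,
               (int)body->nSize);
    }
}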