Selfie Printing Camera using GR-LYCHEE with OpenCV

mbed祭り 2019@春の名古屋

/media/uploads/takehiro/dsc_0627.jpg

デモ機の説明

自分の顔をLCDに表示させ、mbedシールを貼った札を数秒静止させると、サーマルプリンタで印字する自撮りデモです。

  • 画像をグレースケール化
  • 平滑化
  • ハフ変換による円の検出
  • 検出円の中心の少し上のBGR値を取得しHSV値に変換
  • HSV値が青っぽい色だとmbedシール判定

という流れです。GR-LYCHEEのUSR_BUTTON0でサーマルプリンタへの出力をON/OFFできます。GR-LYCHEEには3MBのRAMがあるのですがHEAP領域でないRAM_NC領域1MBも活用しています。

Mbed CLIとOpenCV環境を整える

構成

include the mbed library with this snippet

#include "mbed.h"
#include "EasyAttach_CameraAndLCD.h"
#include "opencv.hpp"
#include "SdUsbConnect.h"
#include "AS289R2.h"
#include <vector>

/* Capture/display resolution: WQVGA, 480 x 272 pixels. */
#define VIDEO_PIXEL_HW			(480u)  /* WQVGA */
#define VIDEO_PIXEL_VW			(272u)  /* WQVGA */

/*! Frame buffer stride: Frame buffer stride should be set to a multiple of 32 or 128
    in accordance with the frame buffer burst transfer mode. */
#define DATA_SIZE_PER_PIC		(2u)  /* bytes per pixel (YCbCr422 = 2 bytes/pixel) */
#define FRAME_BUFFER_STRIDE		(((VIDEO_PIXEL_HW * DATA_SIZE_PER_PIC) + 31u) & ~31u)  /* 2 bpp, rounded up to 32 */
#define FRAME_BUFFER_STRIDE_3	(((VIDEO_PIXEL_HW * 3u) + 31u) & ~31u)  /* 3 bpp (BGR), rounded up to 32 */
#define FRAME_BUFFER_STRIDE_1	((VIDEO_PIXEL_HW + 31u) & ~31u)  /* 1 bpp (gray), rounded up to 32 */
#define FRAME_BUFFER_HEIGHT		(VIDEO_PIXEL_VW)

/* All frame buffers are placed in the "NC_BSS" (non-cacheable RAM) section and
   32-byte aligned, as required for the display/video burst transfers. */
static uint8_t FrameBuffer_Video[FRAME_BUFFER_STRIDE * FRAME_BUFFER_HEIGHT]__attribute((section("NC_BSS"),aligned(32)));    // camera capture, YCbCr422
static uint8_t FrameBuffer_Gray[FRAME_BUFFER_STRIDE_1 * FRAME_BUFFER_HEIGHT]__attribute((section("NC_BSS"),aligned(32)));   // grayscale working image
static uint8_t FrameBuffer_Result[FRAME_BUFFER_STRIDE * FRAME_BUFFER_HEIGHT]__attribute((section("NC_BSS"),aligned(32)));   // overlay drawn on LCD layer 2
static uint8_t FrameBuffer_BGR[FRAME_BUFFER_STRIDE_3 * FRAME_BUFFER_HEIGHT]__attribute((section("NC_BSS"),aligned(32)));    // BGR copy used for colour checks
static DisplayBase Display;

/* cv::Mat headers wrapping the static frame buffers (no extra allocation).
   NOTE(review): result_img is created as CV_8UC2 but the display reads the
   same buffer as ARGB4444 in lcd_start() — both are 2 bytes/pixel. */
static cv::Mat src_img(VIDEO_PIXEL_VW, VIDEO_PIXEL_HW, CV_8UC2, FrameBuffer_Video);
static cv::Mat gray_img(VIDEO_PIXEL_VW, VIDEO_PIXEL_HW, CV_8UC1, FrameBuffer_Gray);
static cv::Mat result_img(VIDEO_PIXEL_VW, VIDEO_PIXEL_HW, CV_8UC2, FrameBuffer_Result);

/* USER_BUTTON0 toggles thermal-printer output (0 = printing enabled). */
static InterruptIn btn0(USER_BUTTON0);
static int btn0_type = 0;
#define BTN0_TYPE_MAX		(1)

DigitalOut myLed1(LED1);
DigitalOut myLed2(LED2);
DigitalOut myLed3(LED3);
DigitalOut myLed4(LED4);  // lit while printing is enabled (see main loop)

/* Previous circle-centre position and the count of consecutive frames the
   target has stayed within the jitter window (see Lockon()). */
int center_px_bak = 0;
int center_py_bak = 0;
int lock_ct = 0;
#define LOCK_CT_MAX			(5)

AS289R2 tp(D1, 57600);  // AS-289R2 thermal printer on D1 @ 57600 baud

Thread thread;  // NOTE(review): not referenced in this file's visible code

/**
 * Initialise the camera and start continuous capture into
 * FrameBuffer_Video as YCbCr422 (progressive, NTSC timing).
 */
static void camera_start(void)
{
	// Camera
	EasyAttach_Init(Display, VIDEO_PIXEL_HW, VIDEO_PIXEL_VW);
	// Video capture setting (progressive form fixed)
	Display.Video_Write_Setting(
		DisplayBase::VIDEO_INPUT_CHANNEL_0,
		DisplayBase::COL_SYS_NTSC_358,
		(void *)FrameBuffer_Video,		// destination buffer (non-cacheable section)
		FRAME_BUFFER_STRIDE,
		DisplayBase::VIDEO_FORMAT_YCBCR422,
		DisplayBase::WR_RD_WRSWA_32_16BIT,	// byte-swap mode for write/read
		VIDEO_PIXEL_VW,				// vertical size first per driver API
		VIDEO_PIXEL_HW
	);
	EasyAttach_CameraStart(Display, DisplayBase::VIDEO_INPUT_CHANNEL_0);
}

/**
 * Configure the LCD: layer 0 shows the live camera image (YCbCr422 from
 * src_img's buffer), layer 2 overlays the detection results
 * (FrameBuffer_Result, read as ARGB4444), then switch the backlight on.
 */
static void lcd_start(void)
{
	DisplayBase::rect_t rect;
	// GRAPHICS_LAYER_0: full-screen live camera image
	rect.vs = 0;
	rect.vw = VIDEO_PIXEL_VW;
	rect.hs = 0;
	rect.hw = VIDEO_PIXEL_HW;
	Display.Graphics_Read_Setting(
		DisplayBase::GRAPHICS_LAYER_0,
		(void *)&src_img.data[0],
		FRAME_BUFFER_STRIDE,
		DisplayBase::GRAPHICS_FORMAT_YCBCR422,
		DisplayBase::WR_RD_WRSWA_32_16BIT,
		&rect
	);
    Display.Graphics_Start(DisplayBase::GRAPHICS_LAYER_0);
    // GRAPHICS_LAYER_2: full-screen overlay for circles/text (drawn in main loop)
	rect.vs = 0;
	rect.vw = VIDEO_PIXEL_VW;
	rect.hs = 0;
	rect.hw = VIDEO_PIXEL_HW;
	Display.Graphics_Read_Setting(
		DisplayBase::GRAPHICS_LAYER_2,
		(void *)&result_img.data[0],
		FRAME_BUFFER_STRIDE,
		DisplayBase::GRAPHICS_FORMAT_ARGB4444,	// 2 bytes/pixel, matches CV_8UC2 buffer
		DisplayBase::WR_RD_WRSWA_32_16BIT,
		&rect
	);
	Display.Graphics_Start(DisplayBase::GRAPHICS_LAYER_2);

	// Let the layers settle before enabling the backlight
	Thread::wait(50);
	EasyAttach_LcdBacklight(true);
}

/**
 * USER_BUTTON0 falling-edge ISR: advances btn0_type by one and wraps
 * back to 0 once it has reached BTN0_TYPE_MAX (printing on/off toggle).
 */
static void btn0_fall(void)
{
	btn0_type = (btn0_type >= BTN0_TYPE_MAX) ? 0 : (btn0_type + 1);
}

/**
 * Image Printing : Mat to AS-289R2
 *
 * Streams a grayscale cv::Mat to the AS-289R2 thermal printer as a
 * 1-bit bitmap.  The print head is 384 dots (48 bytes) wide; the source
 * image is horizontally centred by skipping `offset` columns on each side.
 * A dot is printed (bit = 1) when the pixel is anything other than pure
 * white (0xFF); the image is expected to be pre-dithered to 0/255 by
 * Dithering().
 *
 * @param gray_img  8-bit single-channel image; every row is consumed.
 */
void MatToAS289R2(cv::Mat& gray_img)
{
	const int bytesPerLine = 48;			// 384 printer dots / 8 bits
	const int imgHeight = gray_img.rows;
	// Centre the printable window instead of hard-coding 48
	// ((480 - 384) / 2 == 48, so behaviour is unchanged for WQVGA frames).
	int offset = (gray_img.cols - bytesPerLine * 8) / 2;
	if (offset < 0) {
		offset = 0;				// image narrower than the print head
	}
	tp.printf("\x1C\x2A\x65");	// AS-289R2 CMD: start bitmap
	tp.putc((uint8_t)(imgHeight / 256));	// line count, high byte first
	tp.putc((uint8_t)(imgHeight % 256));
	for (int iy = 0 ; iy < imgHeight; iy++) {
		for (int ix = 0; ix < bytesPerLine; ix ++) {
			uint8_t pixel8 = 0;
			for (int ib = 0; ib < 8; ib ++) {
				// Invert so black (0) becomes a printed dot.
				uint8_t pixel = gray_img.at<uint8_t>( iy, ix * 8 + ib + offset ) ^ 0xFF ;
				pixel8 <<= 1;
				// Original code used `pixel && 0xFF` — a logical-AND typo
				// for `&`.  Both reduce to "pixel != 0"; written explicitly.
				if (pixel != 0) {
					pixel8 |= 1;
				}
			}
			tp.putc( pixel8 );
		}
	}
}

/**
 * Dithering helper: add a signed error term to an 8-bit pixel value,
 * saturating the result into the valid range [0, 255].
 *
 * @param val1  base pixel value (0..255)
 * @param val2  signed error to diffuse (-128..127)
 * @return      val1 + val2 clamped to 0..255
 */
uint8_t saturated_add(uint8_t val1, int8_t val2)
{
	const int sum = (int)val1 + (int)val2;
	if (sum < 0) {
		return 0;
	}
	return (sum > 255) ? 255 : (uint8_t)sum;
}

/**
 * Floyd–Steinberg error-diffusion dithering, in place.
 *
 * Each pixel is snapped to 0 or 255 and the quantisation error is spread
 * to the classic four neighbours (right 7/16, below-right 1/16, below 5/16,
 * below-left 3/16).  Diffusion is skipped on the last row and in the
 * first/last column, where a neighbour write would fall outside the image.
 *
 * @param gray_img  8-bit single-channel image, modified in place
 */
void Dithering(cv::Mat& gray_img)
{
	const int width = gray_img.cols;
	const int height = gray_img.rows;
	for (int y = 0; y < height; y++) {
		for (int x = 0; x < width; x++) {
			uint8_t& px = gray_img.at<uint8_t>(y, x);
			int err;
			if (px > 127) {
				err = px - 255;	// clipped to white, negative error
				px = 255;
			}
			else {
				err = px;	// clipped to black, positive error
				px = 0;
			}
			// Weights kept in int8_t exactly as before (values fit: |err|*7/16 <= 55)
			int8_t e_right      = (err * 7) / 16;
			int8_t e_down_right = (err * 1) / 16;
			int8_t e_down       = (err * 5) / 16;
			int8_t e_down_left  = (err * 3) / 16;
			const bool interior = (y != (height - 1)) && (x != 0) && (x != (width - 1));
			if (interior) {
				gray_img.at<uint8_t>(y,     x + 1) = saturated_add(gray_img.at<uint8_t>(y,     x + 1), e_right);
				gray_img.at<uint8_t>(y + 1, x + 1) = saturated_add(gray_img.at<uint8_t>(y + 1, x + 1), e_down_right);
				gray_img.at<uint8_t>(y + 1, x    ) = saturated_add(gray_img.at<uint8_t>(y + 1, x    ), e_down);
				gray_img.at<uint8_t>(y + 1, x - 1) = saturated_add(gray_img.at<uint8_t>(y + 1, x - 1), e_down_left);
			}
		}
	}
}

/**
 * Target-lock tracker: counts how many consecutive frames the detected
 * circle centre has stayed within a small jitter window ("tebure" =
 * camera shake) of the previous frame's centre.
 *
 * Any jump beyond the window — or a (0, y)/(x, 0) reset call — restarts
 * the counter; otherwise lock_ct climbs until it reaches LOCK_CT_MAX,
 * which triggers printing in the main loop.
 *
 * @param center_px_val  circle centre x (0 forces a reset)
 * @param center_py_val  circle centre y (0 forces a reset)
 */
void Lockon(int center_px_val, int center_py_val)
{
	const int jitter = 10;	// allowed per-frame movement in pixels
	const bool jumped_x = (center_px_val < center_px_bak - jitter) || (center_px_val > center_px_bak + jitter);
	const bool jumped_y = (center_py_val < center_py_bak - jitter) || (center_py_val > center_py_bak + jitter);
	const bool cleared  = (center_px_val == 0) || (center_py_val == 0);
	if (jumped_x || jumped_y || cleared) {
		lock_ct = 0;
	}
	else if (lock_ct < LOCK_CT_MAX) {
		lock_ct++;
	}
	center_px_bak = center_px_val;
	center_py_bak = center_py_val;
}


/**
 * Main loop: grab a camera frame, detect circles with the Hough transform,
 * test the colour just above each circle centre for the blue of an mbed
 * sticker, and once the target has been steady for LOCK_CT_MAX frames,
 * dither the grayscale frame and print it on the AS-289R2 thermal printer.
 * USER_BUTTON0 toggles printing on/off (LED4 shows the enabled state).
 */
int main() {
    btn0.fall(&btn0_fall);
    camera_start();
    lcd_start();

    while (1) {
		// Re-wrap the static NC_BSS buffer every iteration (no allocation).
		cv::Mat bgr_img(VIDEO_PIXEL_VW, VIDEO_PIXEL_HW, CV_8UC3, FrameBuffer_BGR);
		cv::cvtColor(src_img, bgr_img, cv::COLOR_YUV2BGR_YUYV);		// copy camera image to BGR
		cv::cvtColor(src_img, gray_img, cv::COLOR_YUV2GRAY_YUY2);	// copy camera image to Gray
		cv::GaussianBlur(gray_img, gray_img, cv::Size(55, 55), 1);	// smoothing before circle detection

		// Circle detection by Hough transform
		vector<cv::Vec3f> circles;
		cv::HoughCircles(gray_img, circles, CV_HOUGH_GRADIENT, 2, 30, 200, 200, 20, 130);

        // Clear the detection-result overlay buffer (LCD layer 2)
        memset(FrameBuffer_Result, 0, sizeof(FrameBuffer_Result));

		cv::putText(result_img, "Detecting Circles in Images using OpenCV.", cv::Point(5, 20), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(0xF0, 0xF0), 1);

		// Draw the detected circles
		vector<cv::Vec3f>::iterator it;
		for (it = circles.begin(); it != circles.end(); it++)
		{
			cv::Point center(cv::saturate_cast<int>((*it)[0]), cv::saturate_cast<int>((*it)[1]));
			int radius = cv::saturate_cast<int>((*it)[2]);
			// Check the HSV value of a point slightly above the circle centre
			// (note: in OpenCV the H value range is 0-180, not 0-360)
			int py = center.y - (radius * 2 / 3);
			int px = center.x;
			cv::Mat ex1_img(1, 1, CV_8UC3, 3);
			ex1_img.at<cv::Vec3b>(0, 0) = bgr_img.at<cv::Vec3b>(py, px);
			cv::Mat ex2_img(1, 1, CV_8UC3, 3);
			cv::cvtColor(ex1_img, ex2_img, cv::COLOR_BGR2HSV);	// BGR to HSV
			int hsv = ex2_img.at<cv::Vec3b>(0, 0)[0];	// hue only
			printf("xy:%d-%d %d\r\n", center.x, center.y, hsv);
			// Blue of the mbed sticker (hue 90-115)
			if (hsv > 90 && hsv < 115) {
				cv::circle(result_img, center, radius, cv::Scalar(0x19, 0xFF), 3);	// draw circle: red
				cv::putText(result_img, "+", cv::Point(center.x - 10, center.y + 10), cv::FONT_HERSHEY_DUPLEX, 1.0, cv::Scalar(0x19, 0xFF), 2);	// draw "+" at the circle centre
				Lockon(center.x, center.y);
				// Draw the countdown digit (3-2-1 as lock_ct approaches LOCK_CT_MAX)
				switch (lock_ct) {
					case 2:
						cv::putText(result_img, "3", cv::Point(center.x - 10, center.y - 25), cv::FONT_HERSHEY_DUPLEX, 1.0, cv::Scalar(0x19, 0xFF), 2);
						break;
					case 3:
						cv::putText(result_img, "2", cv::Point(center.x - 10, center.y - 25), cv::FONT_HERSHEY_DUPLEX, 1.0, cv::Scalar(0x19, 0xFF), 2);
						break;
					case 4:
						cv::putText(result_img, "1", cv::Point(center.x - 10, center.y - 25), cv::FONT_HERSHEY_DUPLEX, 1.0, cv::Scalar(0x19, 0xFF), 2);
						break;
					default:
						break;
				}
			}
			else {
				cv::circle(result_img, center, radius, cv::Scalar(0xF0, 0xF0), 3);	// draw circle: green (not the sticker colour)
				cv::putText(result_img, "+", cv::Point(center.x - 10, center.y + 10), cv::FONT_HERSHEY_DUPLEX, 1.0, cv::Scalar(0xF0, 0xF0), 2);
				Lockon(0, 0);	// reset the lock counter
			}

		}

		// Printing: enabled (btn0_type == 0) and target held steady long enough
		if (btn0_type == 0 && lock_ct >= LOCK_CT_MAX) {
			tp.printf( "   mbed祭り 2019@春の名古屋\r" );
			tp.putPaperFeed(15);
			tp.printf( "        いぶし銀カメラ\r" );
			tp.putPaperFeed(15);
			tp.printf( "  GR-LYCHEE+カメラ+OpenCV\r" );
			tp.putPaperFeed(15);
			Dithering(gray_img);
			MatToAS289R2(gray_img);
			tp.putPaperFeed(15);
			tp.printf( "#mbed_fest SPRING 2019 NAGOYA" );
			tp.putPaperFeed(15);
			tp.printf( "   Selfie Printing Camera\r" );
			tp.putPaperFeed(15);
			tp.printf( " using GR-LYCHEE with OpenCV\r" );
			tp.putLineFeed(7);
			lock_ct = 0;	// require a fresh lock before the next print
		}

		// LED4 mirrors the printing-enabled state
		if (btn0_type == 0) {
			myLed4 = 1;
		}
		else {
			myLed4 = 0;
		}
	}
}


1 comment on Selfie Printing Camera using GR-LYCHEE with OpenCV:

16 Mar 2019

名古屋、お疲れ様です。 サーマルプリンタ、自宅のおもちゃに繋ぎたくなってきた。。。

Please log in to post comments.