光学迷彩動画
　以前勢いで買ったKinectを色々弄っています。最初はどうすればいいのか途方に暮れていたのですが、色々な先人の方の資産を活用してなんとか動かせています。とりあえず、最初に宣言した光学迷彩っぽい動画（攻殻機動隊のアレね）を作ってみました。
　完全にそれっぽい効果を狙っているだけで、背景が止まっている条件じゃないと使えません。少しはそれっぽいものが出来たんじゃないかと思います。もうちょっとがんばればもっとそれっぽくなると思いますが、こいつはこんなところです。
　多少要領はわかったので、次はもうちょっと面白いものつくりたいと思います。
ソースコードと使い方
　今回はコードがいつも以上に汚いので迷いましたが公開します。起動させてしばらく待って下さい。するとカメラ画出ますから、カメラの中にフレームインして下さい。そこは攻殻機動隊の世界です。スペースキーを押すと動画が生成されて終了します。動画を扱うので、Processingの他にOpenCVのライブラリのダウンロードを忘れないようにして下さい。
　カメラ画を撮る場合は、int shotWait, int delaytimeの値を大きくしてカメラ調整のための待ち時間を長くとって下さい。カメラの起動まで長過ぎて待てない人は、int shotWait, int delaytimeを逆に小さくして下さい。ただし、あまり待ち時間が短すぎるとカメラが露出調整できないので注意して設定下さい。
import SimpleOpenNI.*; SimpleOpenNI context; import hypermedia.video.*; import processing.video.*; int fps = 30; OpenCV opencv; MovieMaker mm; color cBlack = color(0, 0, 0); color cWhite = color(255, 255, 255); PImage firstRGB1; PImage firstRGB2; boolean firstShot=true; // loop count int shot = 0; // wait for camera boot int shotWait = 100; int delaytime = 20; void setup() { context = new SimpleOpenNI(this); if(context.enableDepth() == false) { println("Can't open the depthMap, maybe the camera is not connected!"); exit(); return; } context.enableScene(); if(context.enableRGB() == false) { println("Can't open the rgbMap, maybe the camera is not connected or there is no rgbSensor!"); exit(); return; } // matching depth map with rgb map context.alternativeViewPointDepthToImage(); size(context.depthWidth() , context.depthHeight()); firstRGB1 = createGraphics(context.depthWidth(), context.depthHeight(), P2D); firstRGB2 = createGraphics(context.depthWidth(), context.depthHeight(), P2D); mm = new MovieMaker(this, context.depthWidth(), context.depthHeight(), "OpticalCamouflage.mov", fps, MovieMaker.VIDEO, MovieMaker.LOSSLESS); frameRate(fps); } void draw() { context.update(); delay(delaytime); if(firstShot == true){ background(0, 0, 0); if(shot > shotWait){ image(context.rgbImage(), 0 , 0); save("screenshot.jpg"); firstShot = false; }else{ shot++; } }else{ PImage sceneImg = context.sceneImage(); sceneImg.loadPixels(); PImage maskImg1 = createGraphics(context.depthWidth(), context.depthHeight(), P2D); PImage maskImg2 = createGraphics(context.depthWidth(), context.depthHeight(), P2D); PImage maskedImg1 = createGraphics(context.depthWidth(), context.depthHeight(), P2D); PImage maskedImg2 = createGraphics(context.depthWidth(), context.depthHeight(), P2D); for (int x = 0; x < context.depthWidth(); x++){ for (int y = 0; y < context.depthHeight(); y++){ color c = sceneImg.pixels[x+y*context.depthWidth()]; if (red(c) == green(c) & green(c) == blue(c)){ 
maskImg1.pixels[x+y*context.depthWidth()] = cBlack; // mask black maskImg2.pixels[x+y*context.depthWidth()] = cWhite; // mask white }else{ maskImg1.pixels[x+y*context.depthWidth()] = cWhite; // mask black maskImg2.pixels[x+y*context.depthWidth()] = cBlack; // mask black } } } maskImg1.updatePixels(); maskImg2.updatePixels(); maskedImg1 = loadImage("screenshot.jpg"); maskedImg2 = loadImage("screenshot.jpg"); maskedImg1.mask(maskImg1); maskedImg2.mask(maskImg2); background(maskedImg2); image(maskedImg1, 0, 0, context.depthWidth(), context.depthHeight()); blend(maskedImg2, 0, 0, context.depthWidth(), context.depthHeight() , 0 ,0, context.depthWidth(), context.depthHeight(), SCREEN); mm.addFrame(); } } void keyPressed() { if (key == ' ') { mm.finish(); println("save movie."); exit(); } }
関連記事
参考サイト
「Using the Kinect with Processing」
ã120202-【動画】ProcessingとKinectで合成映像(クロマキー)ã