The QCAR::Tool::projectPoint() method takes a 3D point in target space and transforms it into a 2D point in camera-image space. The camera image typically has a different aspect ratio than the screen, and by default it is aspect-scaled to fill the screen (meaning part of the camera image is cropped at the screen edges). The following code snippet converts the resulting camera-space point into screen space:
// Converts a point from camera (video) image space to screen space.
//
// Accounts for the aspect-fill scaling/cropping of the video background
// (config.mSize may exceed the screen size) and, in portrait mode, for the
// 90-degree rotation of the camera image relative to the screen.
//
// NOTE(review): relies on file/global state — screenWidth, screenHeight and
// isActivityInPortraitMode — which must be kept up to date by the caller.
//
// Fix vs. original: the offsets and rotated coordinates were stored in `int`
// locals even though they are computed from float expressions (a `/ 2.0f`
// half-size and float camera coordinates), silently truncating the sub-pixel
// part before it was added back into the float return value. They are floats
// now; the function signature is unchanged.
QCAR::Vec2F cameraPointToScreenPoint(QCAR::Vec2F cameraPoint)
{
    QCAR::VideoMode videoMode =
        QCAR::CameraDevice::getInstance().getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);
    QCAR::VideoBackgroundConfig config =
        QCAR::Renderer::getInstance().getVideoBackgroundConfig();

    // Offset of the (possibly larger) video background relative to the screen.
    float xOffset = ((int) screenWidth  - config.mSize.data[0]) / 2.0f + config.mPosition.data[0];
    float yOffset = ((int) screenHeight - config.mSize.data[1]) / 2.0f - config.mPosition.data[1];

    if (isActivityInPortraitMode)
    {
        // Camera image is rotated 90 degrees relative to the screen:
        // swap axes and mirror the rotated x across the image height.
        float rotatedX = videoMode.mHeight - cameraPoint.data[1];
        float rotatedY = cameraPoint.data[0];

        return QCAR::Vec2F(
            rotatedX * config.mSize.data[0] / (float) videoMode.mHeight + xOffset,
            rotatedY * config.mSize.data[1] / (float) videoMode.mWidth  + yOffset);
    }

    // Landscape: straight scale from video resolution to background size.
    return QCAR::Vec2F(
        cameraPoint.data[0] * config.mSize.data[0] / (float) videoMode.mWidth  + xOffset,
        cameraPoint.data[1] * config.mSize.data[1] / (float) videoMode.mHeight + yOffset);
}
Here is an implementation that is specific to iOS:
/// Projects a point on the target plane (z = 0, target-local coordinates)
/// into screen space, applying the video-background scale and crop offset.
///
/// @param coord             Point on the target plane (target-local units).
/// @param cameraCalibration Calibration of the active camera.
/// @param pose              Current target pose from the trackable.
/// @param offset            Margin cropped off each side of the scaled video.
/// @param scale             Aspect-fill scale factor (view size / video size).
/// @return The corresponding point in view (screen) coordinates.
- (CGPoint)projectCoord:(CGPoint)coord
                 inView:(const QCAR::CameraCalibration &)cameraCalibration
                andPose:(QCAR::Matrix34F)pose
             withOffset:(CGPoint)offset
               andScale:(CGFloat)scale
{
    QCAR::Vec3F targetPoint(coord.x, coord.y, 0);

    // Project the 3D target-space point into 2D camera-image space,
    // then map camera space -> screen space with the scale/offset.
    QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration, pose, targetPoint);

    CGPoint converted;
    converted.x = cameraPoint.data[0] * scale - offset.x;
    converted.y = cameraPoint.data[1] * scale - offset.y;
    return converted;
}

/// Computes the screen coordinates of the four corners of the target and
/// stores them in the app delegate (s0..s3, counter-clockwise from top-left
/// in target space).
///
/// @param target Target size in target-local units; (0,0) is the centre.
/// @param matrix Unused here. NOTE(review): parameter is never read — confirm
///               whether callers rely on it before removing.
/// @param pose   Current target pose from the trackable.
- (void)calcScreenCoordsOf:(CGSize)target inView:(CGFloat *)matrix inPose:(QCAR::Matrix34F)pose
{
    // (0,0) is at the centre of the target, so extremities are at +/-w/2, +/-h/2.
    CGFloat w = target.width / 2;
    CGFloat h = target.height / 2;

    // Account for interface orientation when reading the view size.
    // Default assumes portrait (width/height swapped relative to the frame).
    CGFloat viewWidth = self.frame.size.height;   // Portrait
    CGFloat viewHeight = self.frame.size.width;   // Portrait
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
    if (UIInterfaceOrientationIsLandscape(orientation))
    {
        viewWidth = self.frame.size.width;
        viewHeight = self.frame.size.height;
    }

    // Calculate any mismatch of screen size to video size: aspect-fill the
    // video into the view and compute the margin cropped off each side.
    QCAR::CameraDevice &cameraDevice = QCAR::CameraDevice::getInstance();
    const QCAR::CameraCalibration &cameraCalibration = cameraDevice.getCameraCalibration();
    QCAR::VideoMode videoMode = cameraDevice.getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);

    CGFloat scale = viewWidth / videoMode.mWidth;
    if (videoMode.mHeight * scale < viewHeight)
    {
        scale = viewHeight / videoMode.mHeight;
    }
    CGFloat scaledWidth = videoMode.mWidth * scale;
    CGFloat scaledHeight = videoMode.mHeight * scale;
    CGPoint margin = {(scaledWidth - viewWidth) / 2, (scaledHeight - viewHeight) / 2};

    // Project the 4 corners of the target.
    // Fix vs. original: -[UIApplication delegate] returns
    // id<UIApplicationDelegate>; assigning it to the concrete class without a
    // cast triggers an incompatible-pointer warning, so cast explicitly.
    ImageTargetsAppDelegate *delegate =
        (ImageTargetsAppDelegate *)[[UIApplication sharedApplication] delegate];

    delegate.s0 = [self projectCoord:CGPointMake(-w, h)
                              inView:cameraCalibration
                             andPose:pose
                          withOffset:margin
                            andScale:scale];
    delegate.s1 = [self projectCoord:CGPointMake(-w, -h)
                              inView:cameraCalibration
                             andPose:pose
                          withOffset:margin
                            andScale:scale];
    delegate.s2 = [self projectCoord:CGPointMake(w, -h)
                              inView:cameraCalibration
                             andPose:pose
                          withOffset:margin
                            andScale:scale];
    delegate.s3 = [self projectCoord:CGPointMake(w, h)
                              inView:cameraCalibration
                             andPose:pose
                          withOffset:margin
                            andScale:scale];
}