
[WebRTC] Android camera internals

WebRTC's camera capturer layer is designed around the following structure so that it can support multiple camera APIs:

Calling createCapturer() on an enumerator creates a CameraVideoCapturer.
In the case of Camera2Enumerator, this creates a Camera2Capturer.
Calling startCapture() on CameraCapturer invokes Camera2Capturer's createCameraSession().

The actual camera work is done in CameraSession. Captured data is rendered through OpenGL ES and handed as a VideoFrame to the CameraSession.Events.onFrameCaptured() callback implemented in CameraCapturer.
From that callback, the VideoFrame is forwarded on to CapturerObserver.onFrameCaptured().
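Wired together from the application side, the flow above looks roughly like this (a sketch: pcFactory stands for an already-created PeerConnectionFactory and is not part of the excerpts below):

EglBase eglBase = EglBase.create();
CameraEnumerator enumerator = new Camera2Enumerator(context);

// Pick the first front-facing camera.
String deviceName = null;
for (String name : enumerator.getDeviceNames()) {
    if (enumerator.isFrontFacing(name)) {
        deviceName = name;
        break;
    }
}

CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, /* eventsHandler= */ null);
SurfaceTextureHelper helper =
    SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
VideoSource videoSource = pcFactory.createVideoSource(capturer.isScreencast());
capturer.initialize(helper, context, videoSource.getCapturerObserver());
capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);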

CameraEnumerator

public String[] getDeviceNames();
public boolean isFrontFacing(String deviceName);
public boolean isBackFacing(String deviceName);
public List<CaptureFormat> getSupportedFormats(String deviceName);

public CameraVideoCapturer createCapturer(
      String deviceName,
      CameraVideoCapturer.CameraEventsHandler eventsHandler);


Camera2Enumerator

@Override
public CameraVideoCapturer createCapturer( String deviceName, 
                                           CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    return new Camera2Capturer(context, deviceName, eventsHandler);
}


// Other utility methods for querying camera information
public static boolean isSupported(Context context) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        return false;
    }
    
    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    try {
      String[] cameraIds = cameraManager.getCameraIdList();
      for (String id : cameraIds) {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
          return false;
        }
      }
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return false;
    }
    return true;
}
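Since isSupported() returns false as soon as one LEGACY-level camera is found, a common pattern is to fall back to the old android.hardware.Camera path on such devices (a sketch; Camera1Enumerator is the Camera1-based counterpart):

CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
        ? new Camera2Enumerator(context)
        : new Camera1Enumerator(/* captureToTexture= */ true);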


static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
    if (fpsRanges.length == 0) {
        return 1000;
    }
    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
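The unit factor exists because some camera HALs report AE fps ranges in plain fps (e.g. [15, 30]) while others report them already multiplied by 1000 (e.g. [15000, 30000]). WebRTC normalizes everything to fps x 1000 internally: if the first range's upper bound is below 1000 it is assumed to be plain fps and convertFramerates() scales it up by 1000, and Camera2Session later divides by the same fpsUnitFactor when it builds the CaptureRequest.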


static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
    final StreamConfigurationMap streamMap =
        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final int supportLevel =
        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
    final List<Size> sizes = convertSizes(nativeSizes);

    // Video may be stretched pre LMR1 on legacy implementations.
    // Filter out formats that have different aspect ratio than the sensor array.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
      final Rect activeArraySize =
          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
      final ArrayList<Size> filteredSizes = new ArrayList<Size>();

      for (Size size : sizes) {
        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
          filteredSizes.add(size);
        }
      }

      return filteredSizes;
    } else {
      return sizes;
    }
}
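The cross-multiplication compares aspect ratios without floating point: for a 4:3 active array of 4000x3000, a 640x480 size passes (4000 * 480 == 3000 * 640 == 1,920,000) while 1280x720 is filtered out (4000 * 720 != 3000 * 1280).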


static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
    CameraManager cameraManager =
        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        
    synchronized (cachedSupportedFormats) {
      if (cachedSupportedFormats.containsKey(cameraId)) {
        return cachedSupportedFormats.get(cameraId);
      }

      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
      final long startTimeMs = SystemClock.elapsedRealtime();

      final CameraCharacteristics cameraCharacteristics;
      try {
        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
      } catch (Exception ex) {
        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
        return new ArrayList<CaptureFormat>();
      }

      final StreamConfigurationMap streamMap =
          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

      Range<Integer>[] fpsRanges =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
      List<CaptureFormat.FramerateRange> framerateRanges =
          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
      List<Size> sizes = getSupportedSizes(cameraCharacteristics);

      int defaultMaxFps = 0;
      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
      }

      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
      for (Size size : sizes) {
        long minFrameDurationNs = 0;
        try {
          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
              SurfaceTexture.class, new android.util.Size(size.width, size.height));
        } catch (Exception e) {
          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
        }
        final int maxFps = (minFrameDurationNs == 0)
            ? defaultMaxFps
            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
      }

      cachedSupportedFormats.put(cameraId, formatList);
      final long endTimeMs = SystemClock.elapsedRealtime();
      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
      return formatList;
    }
}
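A quick way to dump the resulting formats per device (a sketch; TAG is a placeholder):

Camera2Enumerator enumerator = new Camera2Enumerator(context);
for (String deviceName : enumerator.getDeviceNames()) {
    for (CameraEnumerationAndroid.CaptureFormat format : enumerator.getSupportedFormats(deviceName)) {
        Log.d(TAG, deviceName + ": " + format);
    }
}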


private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
    final List<Size> sizes = new ArrayList<Size>();
    for (android.util.Size size : cameraSizes) {
        sizes.add(new Size(size.getWidth(), size.getHeight()));
    }
    return sizes;
}

// Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
static List<CaptureFormat.FramerateRange> convertFramerates(
        Range<Integer>[] arrayRanges, int unitFactor) {
    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
    for (Range<Integer> range : arrayRanges) {
        ranges.add(new CaptureFormat.FramerateRange(
            range.getLower() * unitFactor, range.getUpper() * unitFactor));
    }
    return ranges;
}

VideoCapturer
The WebRTC capturer interface.

void initialize(SurfaceTextureHelper surfaceTextureHelper, 
                Context applicationContext,
                CapturerObserver capturerObserver);
                 
void startCapture(int width, int height, int framerate);
void stopCapture() throws InterruptedException;
void changeCaptureFormat(int width, int height, int framerate);
void dispose();
boolean isScreencast();
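Because VideoSource only depends on this interface, frames can come from anywhere, not just a camera. A minimal custom capturer that pushes blank I420 frames on the SurfaceTextureHelper thread might look like this (an illustrative sketch, not part of webrtc):

public class DummyCapturer implements VideoCapturer {
    private SurfaceTextureHelper helper;
    private CapturerObserver observer;
    private volatile boolean running;

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper,
                           Context applicationContext, CapturerObserver capturerObserver) {
        this.helper = surfaceTextureHelper;
        this.observer = capturerObserver;
    }

    @Override
    public void startCapture(int width, int height, int framerate) {
        running = true;
        observer.onCapturerStarted(/* success= */ true);
        helper.getHandler().post(new Runnable() {
            @Override
            public void run() {
                if (!running) {
                    return;
                }
                // Direct ByteBuffers are zero-filled, so this renders as a flat color.
                VideoFrame.Buffer buffer = JavaI420Buffer.allocate(width, height);
                VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, System.nanoTime());
                observer.onFrameCaptured(frame);
                frame.release();
                helper.getHandler().postDelayed(this, 1000 / framerate);
            }
        });
    }

    @Override
    public void stopCapture() {
        running = false;
        observer.onCapturerStopped();
    }

    @Override
    public void changeCaptureFormat(int width, int height, int framerate) {}

    @Override
    public void dispose() {}

    @Override
    public boolean isScreencast() {
        return false;
    }
}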


CameraVideoCapturer
The WebRTC camera capturer interface; on top of VideoCapturer it provides the CameraEventsHandler and CameraSwitchHandler interfaces.
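Switching cameras at runtime, for example, goes through CameraSwitchHandler (the callbacks may fire on an arbitrary thread):

cameraVideoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
    @Override
    public void onCameraSwitchDone(boolean isFrontCamera) {
        // The new session is up; isFrontCamera reports which way it faces.
    }

    @Override
    public void onCameraSwitchError(String errorDescription) {
    }
});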

CameraCapturer

Defines the callbacks delivered from the camera session.

private final CameraSession.CreateSessionCallback createSessionCallback =
        new CameraSession.CreateSessionCallback() {
    @Override
    public void onDone(CameraSession session) {
    }

    @Override
    public void onFailure(CameraSession.FailureType failureType, String error) {
    }
};

private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
    @Override
    public void onCameraOpening() {
    }
    
    @Override
    public void onCameraError(CameraSession session, String error) {
    }
    
    @Override
    public void onCameraDisconnected(CameraSession session) {
    }
    
    @Override
    public void onCameraClosed(CameraSession session) {
    }
    
    @Override
    public void onFrameCaptured(CameraSession session, VideoFrame frame) {
    if (session != currentSession) {
        Logging.w(TAG, "onFrameCaptured from another session.");
            return;
        }
        if (!firstFrameObserved) {
            eventsHandler.onFirstFrameAvailable();
            firstFrameObserved = true;
        }
        cameraStatistics.addFrame();
        capturerObserver.onFrameCaptured(frame);
    }
    
}
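The eventsHandler used above is the CameraVideoCapturer.CameraEventsHandler passed to createCapturer(); implementing it is the usual way an app observes camera lifecycle events (a sketch of the current method set):

CameraVideoCapturer.CameraEventsHandler eventsHandler =
        new CameraVideoCapturer.CameraEventsHandler() {
    @Override public void onCameraError(String errorDescription) {}
    @Override public void onCameraDisconnected() {}
    @Override public void onCameraFreezed(String errorDescription) {}
    @Override public void onCameraOpening(String cameraName) {}
    @Override public void onFirstFrameAvailable() {}
    @Override public void onCameraClosed() {}
};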


// Method that subclasses capturing from a specific camera API must implement
abstract protected void createCameraSession(
      CameraSession.CreateSessionCallback createSessionCallback, 
      CameraSession.Events events,
      Context applicationContext, 
      SurfaceTextureHelper surfaceTextureHelper, 
      String cameraName,
      int width, 
      int height, 
      int framerate);
      

@Override
public void initialize(@Nullable SurfaceTextureHelper surfaceTextureHelper,
                       Context applicationContext,
                       org.webrtc.CapturerObserver capturerObserver) {
    
    this.applicationContext = applicationContext;
    this.capturerObserver = capturerObserver;
    this.surfaceHelper = surfaceTextureHelper;
    this.cameraThreadHandler = surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
}

@Override
public void startCapture(int width, int height, int framerate) {
    // (simplified) The real implementation also checks session state under a lock;
    // the requested format is stored here for use by createCameraSession().
    this.width = width;
    this.height = height;
    this.framerate = framerate;
    createSessionInternal(0);
}

private void createSessionInternal(int delayMs) {
    uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
    cameraThreadHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            createCameraSession(createSessionCallback,
                              cameraSessionEventsHandler,
                              applicationContext,
                              surfaceHelper,
                              cameraName,
                              width,
                              height,
                              framerate);
        }
    }, delayMs);
}


Camera2Capturer

public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
    super(cameraName, eventsHandler, new Camera2Enumerator(context));

    this.context = context;
    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}


@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
                                   CameraSession.Events events,
                                   Context applicationContext,
                                   SurfaceTextureHelper surfaceTextureHelper,
                                   String cameraName,
                                   int width,
                                   int height,
                                   int framerate) {
                                   
    Camera2Session.create( createSessionCallback, 
                           events,
                           applicationContext,
                           cameraManager,
                           surfaceTextureHelper,
                           cameraName,
                           width,
                           height,
                           framerate);
}



CameraSession

interface CreateSessionCallback {
    void onDone(CameraSession session);
    void onFailure(FailureType failureType, String error);
}

interface Events {
    void onCameraOpening();
    void onCameraError(CameraSession session, String error);
    void onCameraDisconnected(CameraSession session);
    void onCameraClosed(CameraSession session);
    void onFrameCaptured(CameraSession session, VideoFrame frame);
}

void stop();



static int getDeviceOrientation(Context context) {
    final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        return 90;
      case Surface.ROTATION_180:
        return 180;
      case Surface.ROTATION_270:
        return 270;
      case Surface.ROTATION_0:
      default:
        return 0;
    }
}
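Camera2Session combines this display rotation with the sensor orientation to decide the rotation attached to each VideoFrame; its getFrameOrientation() is essentially:

private int getFrameOrientation() {
    int rotation = CameraSession.getDeviceOrientation(applicationContext);
    if (!isCameraFrontFacing) {
        // The back camera rotates against the display, so mirror the rotation.
        rotation = 360 - rotation;
    }
    return (cameraOrientation + rotation) % 360;
}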
  
static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
      TextureBufferImpl buffer, boolean mirror, int rotation) {
      
    final Matrix transformMatrix = new Matrix();
    // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
    transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
    if (mirror) {
      transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
    }
    transformMatrix.preRotate(rotation);
    transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);

    // The width and height are not affected by rotation since Camera2Session has set them to the
    // value they should be after undoing the rotation.
    return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
  }


Camera2Session

public static void create(CreateSessionCallback callback, 
                          Events events,
                          Context applicationContext,
                          CameraManager cameraManager,
                          SurfaceTextureHelper surfaceTextureHelper,
                          String cameraId, 
                          int width, 
                          int height,
                          int framerate) {
    
    new Camera2Session( callback,
                        events,
                        applicationContext,
                        cameraManager,
                        surfaceTextureHelper,
                        cameraId,
                        width,
                        height,
                        framerate);
}


// Start the camera
private void start() {
    checkIsOnCameraThread();
    Logging.d(TAG, "start");

    try {
        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
    } catch (final CameraAccessException e) {
        reportError("getCameraCharacteristics(): " + e.getMessage());
        return;
    }
    
    cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_FRONT;

    findCaptureFormat();
    openCamera();
}


// Find the capture format
private void findCaptureFormat() {
    checkIsOnCameraThread();

    Range<Integer>[] fpsRanges =
        cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
        Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);

    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
      reportError("No supported capture formats.");
      return;
    }

    final CaptureFormat.FramerateRange bestFpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
}
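getClosestSupportedSize() in CameraEnumerationAndroid picks the size minimizing the summed width/height distance, roughly:

public static Size getClosestSupportedSize(
        List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
        @Override
        int diff(Size size) {
            return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
        }
    });
}

getClosestSupportedFramerateRange() works the same way but scores ranges with a progressive penalty that prefers a low minimum and a maximum at or above the requested rate.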


// Open the camera
private void openCamera() {
    checkIsOnCameraThread();

    Logging.d(TAG, "Opening camera " + cameraId);
    events.onCameraOpening();

    try {
        cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
    } catch (CameraAccessException e) {
        reportError("Failed to open camera: " + e);
        return;
    }
}
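Note that openCamera() throws a SecurityException when the CAMERA runtime permission has not been granted, so the app layer should check before starting capture (REQUEST_CODE_CAMERA here is a placeholder):

if (ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(
        activity, new String[] {Manifest.permission.CAMERA}, REQUEST_CODE_CAMERA);
}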


private class CameraStateCallback extends CameraDevice.StateCallback {
    @Override
    public void onDisconnected(CameraDevice camera) {
    }
    
    @Override
    public void onError(CameraDevice camera, int errorCode) {
    }
    
    @Override
    public void onOpened(CameraDevice camera) {
        cameraDevice = camera;
        surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
        try {
            camera.createCaptureSession(
                Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
        } catch (CameraAccessException e) {
            reportError("Failed to create capture session. " + e);
        }
    }
    
    @Override
    public void onClosed(CameraDevice camera) {
    }
    
}

private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
    }
    
    @Override
    public void onConfigured(CameraCaptureSession session) {
        captureSession = session;
        try {
            final CaptureRequest.Builder captureRequestBuilder =
                cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
                                   captureFormat.framerate.max / fpsUnitFactor));
            captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
            chooseStabilizationMode(captureRequestBuilder);
            chooseFocusMode(captureRequestBuilder);

            captureRequestBuilder.addTarget(surface);
            session.setRepeatingRequest(
                captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
        } catch (CameraAccessException e) {
            reportError("Failed to start capture request. " + e);
            return;
        }

        surfaceTextureHelper.startListening((VideoFrame frame) -> {
            final int startTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
            camera2StartTimeMsHistogram.addSample(startTimeMs);
            
            final VideoFrame modifiedFrame = 
                     new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
                                            (TextureBufferImpl) frame.getBuffer(),
                                            /* mirror= */ isCameraFrontFacing,
                                            /* rotation= */ -cameraOrientation),
                     /* rotation= */ getFrameOrientation(),
                     frame.getTimestampNs());
                     
             events.onFrameCaptured(Camera2Session.this, modifiedFrame);
             modifiedFrame.release();
        });
        callback.onDone(Camera2Session.this);
    }
    
    
    
    
    private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableOpticalStabilization = cameraCharacteristics.get(
          CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
      if (availableOpticalStabilization != null) {
        for (int mode : availableOpticalStabilization) {
          if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
            Logging.d(TAG, "Using optical stabilization.");
            return;
          }
        }
      }
      // If no optical mode is available, try software.
      final int[] availableVideoStabilization = cameraCharacteristics.get(
          CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
      for (int mode : availableVideoStabilization) {
        if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
          captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
              CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
          captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
              CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
          Logging.d(TAG, "Using video stabilization.");
          return;
        }
      }
      Logging.d(TAG, "Stabilization not available.");
    }

    private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableFocusModes =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
      for (int mode : availableFocusModes) {
        if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
          captureRequestBuilder.set(
              CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
          Logging.d(TAG, "Using continuous video auto-focus.");
          return;
        }
      }
      Logging.d(TAG, "Auto-focus is not available.");
    }
}
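For completeness, tearing the pipeline down mirrors the setup above (same placeholder names as the first sketch):

try {
    capturer.stopCapture(); // blocks until the camera session has stopped
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}
capturer.dispose();
helper.dispose();
eglBase.release();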