
I'm using the Google sample to build an Android camera app. For several days now I haven't been able to solve a problem with the quality of the output image: how can I improve the Camera2 image quality on Android? This is the base fragment for taking photos/video.

public class Camera2BaseFragment extends Fragment implements View.OnTouchListener { 

    protected static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray(); 

    protected final String FRAGMENT_DIALOG = "dialog"; 
    protected Integer[] mExposureCompensation = {}; 
    protected String mCameraId = "0"; 
    protected float mFingerSpacing = 0f; 
    protected double mZoomLevel = 1; 
    protected int mCurrentProgress = 0; 
    protected int mSensorOrientation; 
    protected ImageView mToolbarBackIcon; 
    protected TextView mTvZoomLevel; 
    protected Rect mZoomRect; 
    protected View mView; 
    protected Camera2FitTextureView mTextureView; 
    protected CameraCaptureSession mCaptureSession; 
    protected CameraDevice mCameraDevice; 
    protected Size mPreviewSize; 
    protected HandlerThread mBackgroundThread; 
    protected Handler mBackgroundHandler; 
    protected CaptureRequest.Builder mPreviewRequestBuilder; 
    protected Semaphore mCameraOpenCloseLock = new Semaphore(1); 
    private CameraCharacteristics mCharacteristics; 
    private float mRatio = 1.0f; 
    @Override 
    public View onCreateView(LayoutInflater inflater, ViewGroup container, 
          Bundle savedInstanceState) { 
     return inflater.inflate(R.layout.fragment_camera2, container, false); 
    } 

    @Override 
    public boolean onTouch(View v, MotionEvent event) { 
     try { 
      mCharacteristics = MainFragment.getCameraCharacteristics(getActivity()); 
      if (mCharacteristics == null) return true; 
      if (mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) == null) 
       return true; 
      float maxZoom = mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) * 10; 
      Rect m = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); 
      if (m == null) return true; 
      float current_finger_spacing; 
      if (event.getPointerCount() > 1) { 
       // Multi touch logic 
       current_finger_spacing = getFingerSpacing(event); 
       if (mFingerSpacing != 0) { 
        if (current_finger_spacing > mFingerSpacing && maxZoom > mZoomLevel) { 
         //mZoomLevel++; 
         mZoomLevel = mZoomLevel + .5; 
        } else if (current_finger_spacing < mFingerSpacing && mZoomLevel > 1) { 
         //mZoomLevel--; 
         mZoomLevel = mZoomLevel - .5; 
        } 
        int minW = (int) (m.width()/maxZoom); 
        int minH = (int) (m.height()/maxZoom); 
        int difW = m.width() - minW; 
        int difH = m.height() - minH; 
        int cropW = difW/100 * (int)mZoomLevel; 
        int cropH = difH/100 * (int)mZoomLevel; 
        cropW -= cropW & 3; 
        cropH -= cropH & 3; 
        mZoomRect = new Rect(cropW, cropH, m.width() - cropW, m.height() - cropH); 
        mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, mZoomRect); 
        mRatio = (int)mZoomLevel >= 1 ? ((float)mZoomLevel/10) : (float) mZoomLevel; 
        setZoomLevelText(mRatio); 
       } 
       mFingerSpacing = current_finger_spacing; 
       mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null); 
      } else { 
       // Single touch logic 
      } 
     } catch (CameraAccessException | NullPointerException e) { 
      //.. 
     } 
     return true; 
    } 

    private void setZoomLevelText(float ratio) { 
     if(ratio <= 1.0f) ratio = 1.0f; 
     mTvZoomLevel.setText(String.format(Locale.US, "%.1fX", ratio)); 
    } 

    private float getFingerSpacing(MotionEvent event) { 
     float x = event.getX(0) - event.getX(1); 
     float y = event.getY(0) - event.getY(1); 
     return (float) Math.sqrt(x * x + y * y); 
    } 

    static class CompareSizesByArea implements Comparator<Size> { 
     @Override 
     public int compare(Size lhs, Size rhs) { 
      return Long.signum((long) lhs.getWidth() * lhs.getHeight() - 
        (long) rhs.getWidth() * rhs.getHeight()); 
     } 
    } 
} 
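
The crop arithmetic in onTouch works, but it is easier to reason about (and to keep preview and still capture consistent) as a centered crop of the sensor's active array. This is only a sketch under the assumptions of the fragment above; computeZoomRect is a hypothetical helper, not part of the original code.

    // Sketch: centered SCALER_CROP_REGION for a given zoom factor, clamped to the 
    // range [1, SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] reported by the device. 
    private Rect computeZoomRect(CameraCharacteristics characteristics, float zoomFactor) { 
     Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); 
     Float maxZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); 
     if (active == null || maxZoom == null) return null; 
     float zoom = Math.max(1f, Math.min(zoomFactor, maxZoom)); 
     int cropWidth = (int) (active.width()/zoom); 
     int cropHeight = (int) (active.height()/zoom); 
     int left = (active.width() - cropWidth)/2; 
     int top = (active.height() - cropHeight)/2; 
     return new Rect(left, top, left + cropWidth, top + cropHeight); 
    } 

Setting the same Rect on both the repeating preview request and the still-capture request (as captureStillPicture() below already does with mZoomRect) keeps the framing of the saved image identical to the preview.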

My fragment for photo capture:

public class Camera2PhotoFragment extends Camera2BaseFragment 
     implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback { 

    private static final int STATE_PREVIEW = 0; 
    private static final int STATE_WAITING_LOCK = 1; 
    private static final int STATE_WAITING_PRECAPTURE = 2; 
    private static final int STATE_WAITING_NON_PRECAPTURE = 3; 
    private static final int STATE_PICTURE_TAKEN = 4; 
    private static final int MAX_PREVIEW_WIDTH = 1920; 
    private static final int MAX_PREVIEW_HEIGHT = 1080; 
    private ImageReader mImageReader; 
    private File mFile = new File("ImagePath"); 
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener 
      = new ImageReader.OnImageAvailableListener() { 
     @Override 
     public void onImageAvailable(ImageReader reader) { 
      mBackgroundHandler.post(new ImageSaver(reader.acquireLatestImage(), mFile, 
        mOnImageSavedListener)); 
     } 
    }; 
    private int mState = STATE_PREVIEW; 
    private boolean isEnabledCameraImg = true; 
    private final OnImageSavedListener mOnImageSavedListener = new OnImageSavedListener() { 
     @Override 
     public void onImageSavedSuccessfully() { 
      // Do something with saved image 
      isEnabledCameraImg = true; 
     } 
    }; 
    private CameraCaptureSession.CaptureCallback mCaptureCallback 
      = new CameraCaptureSession.CaptureCallback() { 

     private void process(CaptureResult result) { 
      switch (mState) { 
       case STATE_PREVIEW: { 
        break; 
       } 
       case STATE_WAITING_LOCK: { 
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); 
        if (afState == null) { 
         captureStillPicture(); 
        } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState || 
          CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) { 
         Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 
         if (aeState == null || 
           aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) { 
          mState = STATE_PICTURE_TAKEN; 
          captureStillPicture(); 
         } else { 
          runPrecaptureSequence(); 
         } 
        } else { 
         mState = STATE_PICTURE_TAKEN; 
         captureStillPicture(); 
        } 
        break; 
       } 
       case STATE_WAITING_PRECAPTURE: { 
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 
        if (aeState == null || 
          aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE || 
          aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { 
         mState = STATE_WAITING_NON_PRECAPTURE; 
        } 
        break; 
       } 
       case STATE_WAITING_NON_PRECAPTURE: { 
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 
        if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) { 
         mState = STATE_PICTURE_TAKEN; 
         captureStillPicture(); 
        } 
        break; 
       } 
      } 
     } 

     @Override 
     public void onCaptureProgressed(@NonNull CameraCaptureSession session, 
             @NonNull CaptureRequest request, 
             @NonNull CaptureResult partialResult) { 
      process(partialResult); 
     } 

     @Override 
     public void onCaptureCompleted(@NonNull CameraCaptureSession session, 
             @NonNull CaptureRequest request, 
             @NonNull TotalCaptureResult result) { 
      process(result); 
     } 
    }; 
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { 
     @Override 
     public void onOpened(@NonNull CameraDevice cameraDevice) { 
      mCameraOpenCloseLock.release(); 
      mCameraDevice = cameraDevice; 
      createCameraPreviewSession(); 
     } 

     @Override 
     public void onDisconnected(@NonNull CameraDevice cameraDevice) { 
      mCameraOpenCloseLock.release(); 
      cameraDevice.close(); 
      mCameraDevice = null; 
     } 

     @Override 
     public void onError(@NonNull CameraDevice cameraDevice, int error) { 
      onDisconnected(cameraDevice); 
     } 
    }; 
    private final TextureView.SurfaceTextureListener mSurfaceTextureListener 
      = new TextureView.SurfaceTextureListener() { 
     @Override 
     public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) { 
      openCamera(width, height); 
     } 
     @Override 
     public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) { 
      configureTransform(width, height); 
     } 

     @Override 
     public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) { 
      return true; 
     } 

     @Override 
     public void onSurfaceTextureUpdated(SurfaceTexture texture) { 
     } 
    }; 

    public static Camera2PhotoFragment newInstance() { 
     return new Camera2PhotoFragment(); 
    } 

    @Override 
    public View onCreateView(LayoutInflater inflater, ViewGroup container, 
          Bundle savedInstanceState) { 
     super.onCreateView(inflater, container, savedInstanceState); 
     return mView; 
    } 

    @Override 
    public void onResume() { 
     super.onResume(); 
     isEnabledCameraImg = true; 
     if (mTextureView.isAvailable()) { 
      openCamera(mTextureView.getWidth(), mTextureView.getHeight()); 
     } else { 
      mTextureView.setSurfaceTextureListener(mSurfaceTextureListener); 
     } 
    } 

    @Override 
    public void onClick(View view) { 
     switch (view.getId()) { 
      case R.id.imgCameraTakePicture: { 
        takePicture(); 
       break; 
      } 
     } 
    } 

    private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, 
             int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) { 
     List<Size> bigEnough = new ArrayList<>(); 
     List<Size> notBigEnough = new ArrayList<>(); 
     int w = aspectRatio.getWidth(); 
     int h = aspectRatio.getHeight(); 
     for (Size option : choices) { 
      if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight && 
        option.getHeight() == option.getWidth() * h/w) { 
       if (option.getWidth() >= textureViewWidth && 
         option.getHeight() >= textureViewHeight) { 
        bigEnough.add(option); 
       } else { 
        notBigEnough.add(option); 
       } 
      } 
     } 
     if (bigEnough.size() > 0) { 
      return Collections.min(bigEnough, new CompareSizesByArea()); 
     } else if (notBigEnough.size() > 0) { 
      return Collections.max(notBigEnough, new CompareSizesByArea()); 
     } else { 
      return choices[0]; 
     } 
    } 

    private void setUpCameraOutputs(int width, int height) { 
     Activity activity = getActivity(); 
     CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 
     try { 
      for (String cameraId : manager.getCameraIdList()) { 
       CameraCharacteristics characteristics 
         = manager.getCameraCharacteristics(cameraId); 
       Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); 
       if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) { 
        continue; 
       } 
       StreamConfigurationMap map = characteristics.get(
         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 
       if (map == null) { 
        continue; 
       } 
       Size largest = Collections.max(
         Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), 
         new CompareSizesByArea()); 
       mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), 
         ImageFormat.JPEG, /*maxImages*/2); 
       mImageReader.setOnImageAvailableListener(
         mOnImageAvailableListener, mBackgroundHandler); 
       int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 
       mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); 
       boolean swappedDimensions = false; 
       switch (displayRotation) { 
        case Surface.ROTATION_0: { 
         // do nothing 
        } 
        case Surface.ROTATION_180: { 
         if (mSensorOrientation == 90 || mSensorOrientation == 270) { 
          swappedDimensions = true; 
         } 
         break; 
        } 
        case Surface.ROTATION_90: { 
         // do nothing 
        } 
        case Surface.ROTATION_270: { 
         if (mSensorOrientation == 0 || mSensorOrientation == 180) { 
          swappedDimensions = true; 
         } 
         break; 
        } 
       } 

       Point displaySize = new Point(); 
       activity.getWindowManager().getDefaultDisplay().getSize(displaySize); 
       int rotatedPreviewWidth = width; 
       int rotatedPreviewHeight = height; 
       int maxPreviewWidth = displaySize.x; 
       int maxPreviewHeight = displaySize.y; 
       if (swappedDimensions) { 
        rotatedPreviewWidth = height; 
        rotatedPreviewHeight = width; 
        maxPreviewWidth = displaySize.y; 
        maxPreviewHeight = displaySize.x; 
       } 
       if (maxPreviewWidth > MAX_PREVIEW_WIDTH) { 
        maxPreviewWidth = MAX_PREVIEW_WIDTH; 
       } 
       if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) { 
        maxPreviewHeight = MAX_PREVIEW_HEIGHT; 
       } 
       mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), 
         rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, 
         maxPreviewHeight, largest); 
       int orientation = getResources().getConfiguration().orientation; 
       if (orientation == Configuration.ORIENTATION_LANDSCAPE) { 
        mTextureView.setAspectRatio(
          mPreviewSize.getWidth(), mPreviewSize.getHeight()); 
       } else { 
        mTextureView.setAspectRatio(
          mPreviewSize.getHeight(), mPreviewSize.getWidth()); 
       } 
       mCameraId = cameraId; 
       return; 
      } 
     } catch (CameraAccessException e) { 
      //.. 
     } 
    } 

    private void openCamera(int width, int height) { 
     setUpCameraOutputs(width, height); 
     configureTransform(width, height); 
     Activity activity = getActivity(); 
     CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 
     try { 
      if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { 
       throw new RuntimeException("Time out waiting to lock camera opening."); 
      } 
      if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { 
       ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.CAMERA}, REQUEST_OPEN_CAMERA); 
       return; 
      } 
      manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler); 
     } catch (CameraAccessException | InterruptedException e) { 
      e.printStackTrace(); 
     } 
    } 

    private void closeCamera() { 
     //... 
    } 

    private void createCameraPreviewSession() { 
     try { 
      SurfaceTexture texture = mTextureView.getSurfaceTexture(); 
      assert texture != null; 
      texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); 
      Surface surface = new Surface(texture); 
      mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 
      mPreviewRequestBuilder.addTarget(surface); 
      if (mZoomRect != null) { 
       mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, mZoomRect); 
      } 
      mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), 
        new CameraCaptureSession.StateCallback() { 

         @Override 
         public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { 
          if (null == mCameraDevice) { 
           return; 
          } 
          mCaptureSession = cameraCaptureSession; 
          try { 
           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, 
             CaptureRequest.CONTROL_AF_MODE_AUTO); 
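           // Note: with CONTROL_AF_MODE_AUTO the lens only refocuses when an AF trigger 
           // is sent. Google's Camera2Basic sample sets CONTROL_AF_MODE_CONTINUOUS_PICTURE 
           // on the repeating preview request so focus is adjusted continuously. 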
           mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), 
             mCaptureCallback, mBackgroundHandler); 
          } catch (CameraAccessException e) { 
           e.printStackTrace(); 
          } 
         } 

         @Override 
         public void onConfigureFailed(
           @NonNull CameraCaptureSession cameraCaptureSession) { 
          //... 
         } 
        }, null 
      ); 
     } catch (CameraAccessException e) { 
      //.. 
     } 
    } 

    private void configureTransform(int viewWidth, int viewHeight) { 
     Activity activity = getActivity(); 
     if (null == mTextureView || null == mPreviewSize || null == activity) { 
      return; 
     } 
     int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 
     Matrix matrix = new Matrix(); 
     RectF viewRect = new RectF(0, 0, viewWidth, viewHeight); 
     RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth()); 
     float centerX = viewRect.centerX(); 
     float centerY = viewRect.centerY(); 
     if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { 
      bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); 
      matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); 
      float scale = Math.max(
        (float) viewHeight/mPreviewSize.getHeight(), 
        (float) viewWidth/mPreviewSize.getWidth()); 
      matrix.postScale(scale, scale, centerX, centerY); 
      matrix.postRotate(90 * (rotation - 2), centerX, centerY); 
     } else if (Surface.ROTATION_180 == rotation) { 
      matrix.postRotate(180, centerX, centerY); 
     } 
     mTextureView.setTransform(matrix); 
    } 

    private void takePicture() { 
     lockFocus(); 
    } 

    private void lockFocus() { 
     try { 
      mState = STATE_WAITING_LOCK; 
      mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler); 
     } catch (CameraAccessException e) { 
      //.. 
     } 
    } 

    private void runPrecaptureSequence() { 
     try { 
      mState = STATE_WAITING_PRECAPTURE; 
      mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 
        mBackgroundHandler); 
     } catch (CameraAccessException | NullPointerException e) { 
      //.. 
     } 
    } 

    private void captureStillPicture() { 
     try { 
      final Activity activity = getActivity(); 
      if (null == activity || null == mCameraDevice) { 
       return; 
      } 
      final CaptureRequest.Builder captureBuilder = 
        mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 
      captureBuilder.addTarget(mImageReader.getSurface()); 
      if (mZoomRect != null) captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, mZoomRect); 
      if (mCurrentProgress != 0) { 
       captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, mExposureCompensation[mCurrentProgress]); 
      } 
      captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO); 
      // Orientation 
      int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 
      captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation)); 
      captureBuilder.set(CaptureRequest.JPEG_QUALITY, (byte) 100); 
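       // Note: JPEG_QUALITY only controls JPEG compression strength; it does not 
       // affect focus, noise reduction, or the upscaling introduced by SCALER_CROP_REGION. 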
      CameraCaptureSession.CaptureCallback CaptureCallback 
        = new CameraCaptureSession.CaptureCallback() { 

       @Override 
       public void onCaptureCompleted(@NonNull CameraCaptureSession session, 
               @NonNull CaptureRequest request, 
               @NonNull TotalCaptureResult result) { 
        isEnabledCameraImg = true; 
       } 
      }; 

      mCaptureSession.stopRepeating(); 
      mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null); 
      unlockFocus(); 
      if (mZoomRect != null) 
       mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, mZoomRect); 
      mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, 
        CaptureRequest.CONTROL_AF_MODE_AUTO); 
      mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), 
        mCaptureCallback, mBackgroundHandler); 
     } catch (Throwable e) { 
      //.. 
     } 
    } 

    private int getOrientation(int rotation) { 
     return (DEFAULT_ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360; 
    } 

    private void unlockFocus() { 
     try { 
      mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 
        CameraMetadata.CONTROL_AF_TRIGGER_IDLE); 
      mState = STATE_PREVIEW; 
      mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, 
        mBackgroundHandler); 
     } catch (Throwable e) { 
      //.. 
     } 
    } 

    interface OnImageSavedListener { 
     void onImageSavedSuccessfully(); 
    } 

    private static class ImageSaver implements Runnable { 

     private final Image mImage; 
     private final File mFile; 
     private final OnImageSavedListener mOnImageSavedListener; 

     public ImageSaver(Image image, File file, OnImageSavedListener onImageSavedListener) { 
      mImage = image; 
      mFile = file; 
      mOnImageSavedListener = onImageSavedListener; 
     } 

     @Override 
     public void run() { 
      ByteBuffer buffer = mImage.getPlanes()[0].getBuffer(); 
      byte[] bytes = new byte[buffer.remaining()]; 
      buffer.get(bytes); 
      FileOutputStream output = null; 
      try { 
       output = new FileOutputStream(mFile); 
       output.write(bytes); 
       mOnImageSavedListener.onImageSavedSuccessfully(); 
      } catch (IOException e) { 
       //.. 
      } finally { 
       mImage.close(); 
       if (null != output) { 
        try { 
         output.close(); 
        } catch (IOException e) { 
         //.. 
        } 
       } 
      } 
     } 
    } 
} 
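
One difference from Google's Camera2Basic sample that can directly affect sharpness: in the sample, lockFocus() fires an autofocus trigger, runPrecaptureSequence() fires an auto-exposure precapture trigger, and unlockFocus() cancels the trigger, while the methods above only change mState. Below is a sketch of that trigger sequence, adapted to the field names used in this fragment (mPreviewRequestBuilder, mCaptureSession, mCaptureCallback, mBackgroundHandler); treat it as a reference under those assumptions rather than a guaranteed fix.

    private void lockFocus() { 
     try { 
      // Start an autofocus sweep and wait for it to lock in mCaptureCallback. 
      mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 
        CameraMetadata.CONTROL_AF_TRIGGER_START); 
      mState = STATE_WAITING_LOCK; 
      mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 
        mBackgroundHandler); 
     } catch (CameraAccessException e) { 
      //.. 
     } 
    } 

    private void runPrecaptureSequence() { 
     try { 
      // Ask the auto-exposure routine to run a precapture metering sequence. 
      mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 
        CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 
      mState = STATE_WAITING_PRECAPTURE; 
      mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 
        mBackgroundHandler); 
     } catch (CameraAccessException e) { 
      //.. 
     } 
    } 

    private void unlockFocus() { 
     try { 
      // Cancel the autofocus trigger and return to the normal repeating preview. 
      mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 
        CameraMetadata.CONTROL_AF_TRIGGER_CANCEL); 
      mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 
        mBackgroundHandler); 
      mState = STATE_PREVIEW; 
      mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, 
        mBackgroundHandler); 
     } catch (CameraAccessException e) { 
      //.. 
     } 
    } 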

There is a difference in image quality between the camera preview and the saved image. [side-by-side comparison image] On the left: a screenshot of the camera preview (with zoom applied). On the right: the captured/saved image. The image on the left looks better.

How can I get good quality in the output image? Could the problem be in the zoom?

In the Camera2 documentation I found the following CaptureRequest constants for photos:

captureBuilder.set(CaptureRequest.EDGE_MODE, CameraMetadata.EDGE_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.SHADING_MODE, CameraMetadata.SHADING_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, CameraMetadata.COLOR_CORRECTION_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, CameraMetadata.HOT_PIXEL_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY); 
captureBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON); 

For testing I added all of these constants together, right below this line from my code: captureBuilder.set(CaptureRequest.JPEG_QUALITY, (byte) 100);

but this did not solve my problem.
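
These HIGH_QUALITY values are only honored when the device actually advertises them, so it can help to check the corresponding *_AVAILABLE_* lists in CameraCharacteristics first. A minimal sketch, where applyHighQualityModes and contains are hypothetical helpers and captureBuilder/characteristics come from the surrounding capture code:

    // Sketch: request HIGH_QUALITY post-processing only when the device supports it. 
    private void applyHighQualityModes(CameraCharacteristics characteristics, 
              CaptureRequest.Builder captureBuilder) { 
     int[] nrModes = characteristics.get(
       CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES); 
     if (contains(nrModes, CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY)) { 
      captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, 
        CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY); 
     } 
     int[] edgeModes = characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES); 
     if (contains(edgeModes, CameraMetadata.EDGE_MODE_HIGH_QUALITY)) { 
      captureBuilder.set(CaptureRequest.EDGE_MODE, CameraMetadata.EDGE_MODE_HIGH_QUALITY); 
     } 
    } 

    private static boolean contains(int[] modes, int mode) { 
     if (modes == null) return false; 
     for (int m : modes) { 
      if (m == mode) return true; 
     } 
     return false; 
    } 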

Has anyone had a similar problem with image quality on Camera2, or does anyone know the cause of this problem?

Thanks for any answer!

I can upload my code to GitHub if needed.


Did you find a solution? – imudin07

Answer


It may be a hack, but I got better results by setting CameraDevice.TEMPLATE_PREVIEW instead of CameraDevice.TEMPLATE_STILL_CAPTURE in captureStillPicture().
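
As a rough illustration of that change, assuming the captureStillPicture() from the question, only the template passed to createCaptureRequest() is swapped:

    // Same still-capture path as in the question, but built from the preview template. 
    final CaptureRequest.Builder captureBuilder = 
      mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 
    captureBuilder.addTarget(mImageReader.getSurface()); 
    // ...the rest of the JPEG_ORIENTATION/JPEG_QUALITY configuration stays as in the question. 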


It's not clear what you mean by "better". Is that green figure a random shot, or does it really represent the kind of input you expect to process in this project? If so, your "hack" is quite legitimate. –


@AlexCohn I'm not sure whether he's satisfied or not, but in my case the image is less blurry. – da1
