diff --git a/code/AutoFitTextureView.java b/code/AutoFitTextureView.java
new file mode 100644
index 0000000..8f782a3
--- /dev/null
+++ b/code/AutoFitTextureView.java
@@ -0,0 +1,52 @@
+package com.yeyupiaoling.teststadiometry;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+
+public class AutoFitTextureView extends TextureView {
+
+    private int mRatioWidth = 0;
+    private int mRatioHeight = 0;
+
+    public AutoFitTextureView(Context context) {
+        this(context, null);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
+        super(context, attrs, defStyle);
+    }
+
+    /**
+     * Sets the desired size for this view. Unlike the camera2 sample class of the same name,
+     * this implementation measures the view to exactly width x height pixels, so callers
+     * should pass the chosen preview size in pixels rather than a reduced aspect ratio.
+     *
+     * @param width  Width in pixels
+     * @param height Height in pixels
+     */
+    public void setAspectRatio(int width, int height) {
+        if (width < 0 || height < 0) {
+            throw new IllegalArgumentException("Size cannot be negative.");
+        }
+        mRatioWidth = width;
+        mRatioHeight = height;
+        requestLayout();
+    }
+
+    @Override
+    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+        if (mRatioWidth == 0 || mRatioHeight == 0) {
+            // No preview size has been requested yet; keep the default measured size.
+            return;
+        }
+        setMeasuredDimension(mRatioWidth, mRatioHeight);
+    }
+
+}
diff --git a/code/CameraActivity.java b/code/CameraActivity.java
new file mode 100644
index 0000000..9396cbb
--- /dev/null
+++ b/code/CameraActivity.java
@@ -0,0 +1,398 @@
+package com.yeyupiaoling.teststadiometry;
+
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+
+import android.Manifest;
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.cam2.CaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.ImageReader;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+import android.view.TextureView;
+import android.view.View;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+import org.opencv.android.OpenCVLoader;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class CameraActivity extends AppCompatActivity {
+    private static final String TAG = CameraActivity.class.getSimpleName();
+    private CameraCaptureSession mCaptureSession;
+    private CameraDevice mCameraDevice;
+
+    private HandlerThread mCaptureThread;
+    private Handler mCaptureHandler;
+
+    private ImageReader mImageReader;
+    private boolean isFront = false;
+    private Size mPreviewSize;
+    private boolean mCapturing;
+
+    private AutoFitTextureView mTextureView;
+    private ImageView imageViewLeft;
+    private ImageView imageViewRight;
+    private ImageView imageViewResult;
+    private int sensorOrientation = 0;
+    private View bgView;
+    private LinearLayout ll;
+    private TextView textView;
+    private StereoBMUtil stereoBMUtil;
+    private Bitmap leftBitmap;
+    private Bitmap rightBitmap;
+
+    @SuppressLint("ClickableViewAccessibility")
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_camera);
+
+        // Request the camera permission if it has not been granted yet
+        if (!hasPermission()) {
+            requestPermission();
+        }
+        // Initialize OpenCV
+        if (OpenCVLoader.initDebug()) {
+            Log.d(TAG, "OpenCVLoader initialized successfully");
+        }
+
+        stereoBMUtil = new StereoBMUtil();
+        mTextureView = findViewById(R.id.texture_view);
+        imageViewLeft = findViewById(R.id.imageViewLeft);
+        imageViewRight = findViewById(R.id.imageViewRight);
+        imageViewResult = findViewById(R.id.imageViewResult);
+        bgView = findViewById(R.id.bg_view);
+        ll = findViewById(R.id.show_ll);
+        textView = findViewById(R.id.result_tv);
+
+        Button button2 = findViewById(R.id.button2);
+        Button button3 = findViewById(R.id.button3);
+        Button button4 = findViewById(R.id.button4);
+
+        // Take a snapshot and split it into the left- and right-camera images
+        button2.setOnClickListener(v -> {
+            bgView.setVisibility(View.VISIBLE);
+            ll.setVisibility(View.VISIBLE);
+            Bitmap imgBitmap = mTextureView.getBitmap();
+            Bitmap b = Utils.rotateBitmap(imgBitmap, 360 - sensorOrientation);
+            List<Bitmap> bitmapList = Utils.bisectionBitmap(b);
+            // Images from the left and right cameras
+            leftBitmap = bitmapList.get(0);
+            rightBitmap = bitmapList.get(1);
+            imageViewLeft.setImageBitmap(leftBitmap);
+            imageViewRight.setImageBitmap(rightBitmap);
+
+        });
+
+        // Show the camera preview again
+        button3.setOnClickListener(v -> {
+            bgView.setVisibility(View.GONE);
+            ll.setVisibility(View.GONE);
+        });
+
+        // Run the StereoBM algorithm
+        button4.setOnClickListener(v -> {
+            Bitmap result = stereoBMUtil.compute(leftBitmap, rightBitmap);
+            imageViewResult.setImageBitmap(result);
+        });
+
+        // Tap the computed disparity image to get the 3D coordinates of that point
+        imageViewResult.setOnTouchListener((v, event) -> {
+            // Coordinates of the touch point
+            float x = event.getX();
+            float y = event.getY();
+            float[] dst = new float[2];
+            Matrix imageMatrix = imageViewResult.getImageMatrix();
+            Matrix inverseMatrix = new Matrix();
+            imageMatrix.invert(inverseMatrix);
+            inverseMatrix.mapPoints(dst, new float[]{x, y});
+            int dstX = (int) dst[0];
+            int dstY = (int) dst[1];
+            // Get the 3D coordinates of this point
+            double[] c = stereoBMUtil.getCoordinate(dstX, dstY);
+            String s = String.format("Point (%d, %d) 3D coordinates: [%.2f, %.2f, %.2f]", dstX, dstY, c[0], c[1], c[2]);
+            Log.d(TAG, s);
+            textView.setText(s);
+            return true;
+        });
+    }
+
+    // Initialize the capture thread and preview state
+    private void initStatus() {
+        // Start the background capture thread
+        startCaptureThread();
+
+        // If the SurfaceTexture is already available, start capturing right away
+        if (mTextureView.isAvailable()) {
+            startCapture();
+        } else {
+            mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
+                @Override
+                public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+                    startCapture();
+                }
+
+                @Override
+                public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+                }
+
+                @Override
+                public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+                    return true;
+                }
+
+                @Override
+                public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+                }
+            });
+        }
+    }
+
+    // Start capturing preview images
+    private void startCapture() {
+        // Do nothing if capture is already being set up
+        if (mCapturing) return;
+        mCapturing = true;
+
+        final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+
+        String cameraIdAvailable = null;
+        try {
+            assert manager != null;
+            for (final String cameraId : manager.getCameraIdList()) {
+                final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+
+                final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
+                // Select the front or the back camera
+                if (isFront) {
+                    if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
+                        cameraIdAvailable = cameraId;
+                        break;
+                    }
+                } else {
+                    if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
+                        cameraIdAvailable = cameraId;
+                        break;
+                    }
+                }
+            }
+        } catch (CameraAccessException e) {
+            Log.e(TAG, "Exception while starting image capture ", e);
+        }
+
+        try {
+            assert cameraIdAvailable != null;
+            final CameraCharacteristics characteristics =
+                    manager.getCameraCharacteristics(cameraIdAvailable);
+
+            final StreamConfigurationMap map =
+                    characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+            try {
+                sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+                Log.i(TAG, "orientation: " + sensorOrientation);
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+            assert map != null;
+            mPreviewSize = Utils.chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
+                    mTextureView.getWidth(),
+                    mTextureView.getHeight());
+            Log.d("mPreviewSize", String.valueOf(mPreviewSize));
+            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
+
+            manager.openCamera(cameraIdAvailable, new CameraDevice.StateCallback() {
+                @Override
+                public void onOpened(@NonNull CameraDevice camera) {
+                    mCameraDevice = camera;
+                    createCaptureSession();
+                }
+
+                @Override
+                public void onDisconnected(@NonNull CameraDevice camera) {
+                    camera.close();
+                    mCameraDevice = null;
+                    mCapturing = false;
+                }
+
+                @Override
+                public void onError(@NonNull CameraDevice camera, final int error) {
+                    Log.e(TAG, "Error opening camera = " + error);
+                    camera.close();
+                    mCameraDevice = null;
+                    mCapturing = false;
+                }
+            }, mCaptureHandler);
+        } catch (CameraAccessException | SecurityException e) {
+            mCapturing = false;
+            Log.e(TAG, "Exception while starting image capture ", e);
+        }
+    }
+
+    // Create the capture session
+    private void createCaptureSession() {
+        try {
+            final SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+
+            final Surface surface = new Surface(texture);
+            final CaptureRequest.Builder captureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+            captureRequestBuilder.addTarget(surface);
+
+            mImageReader = ImageReader.newInstance(
+                    mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.JPEG, 10);
+
+            mCameraDevice.createCaptureSession(
+                    Arrays.asList(surface, mImageReader.getSurface()),
+                    new CameraCaptureSession.StateCallback() {
+
+                        @Override
+                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
+                            if (null == mCameraDevice) {
+                                return;
+                            }
+
+                            mCaptureSession = cameraCaptureSession;
+                            try {
+                                captureRequestBuilder.set(
+                                        CaptureRequest.CONTROL_AF_MODE,
+                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+                                captureRequestBuilder.set(
+                                        CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
+
+                                CaptureRequest previewRequest = captureRequestBuilder.build();
+                                mCaptureSession.setRepeatingRequest(
+                                        previewRequest, new CameraCaptureSession.CaptureCallback() {
+                                            @Override
+                                            public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
+                                                super.onCaptureProgressed(session, request, partialResult);
+                                            }
+
+                                            @Override
+                                            public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
+                                                super.onCaptureFailed(session, request, failure);
+                                                Log.d(TAG, "onCaptureFailed = " + failure.getReason());
+                                            }
+
+                                            @Override
+                                            public void onCaptureSequenceCompleted(@NonNull CameraCaptureSession session, int sequenceId, long frameNumber) {
+                                                super.onCaptureSequenceCompleted(session, sequenceId, frameNumber);
+                                                Log.d(TAG, "onCaptureSequenceCompleted");
+                                            }
+                                        }, mCaptureHandler);
+                            } catch (final CameraAccessException e) {
+                                Log.e(TAG, "onConfigured exception ", e);
+                            }
+                        }
+
+                        @Override
+                        public void onConfigureFailed(@NonNull final CameraCaptureSession cameraCaptureSession) {
+                            Log.e(TAG, "onConfigureFailed ");
+                        }
+                    },
+                    null);
+        } catch (final CameraAccessException e) {
+            Log.e(TAG, "Exception while creating the capture session ", e);
+        }
+    }
+
+    // Close the camera
+    private void closeCamera() {
+        if (mCaptureSession != null) {
+            mCaptureSession.close();
+            mCaptureSession = null;
+        }
+        if (mCameraDevice != null) {
+            mCameraDevice.close();
+            mCameraDevice = null;
+        }
+        if (mImageReader != null) {
+            mImageReader.close();
+            mImageReader = null;
+        }
+        mCapturing = false;
+    }
+
+    // Stop the capture thread
+    private void stopCaptureThread() {
+        try {
+            if (mCaptureThread != null) {
+                mCaptureThread.quitSafely();
+                mCaptureThread.join();
+            }
+            mCaptureThread = null;
+            mCaptureHandler = null;
+        } catch (final Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    protected void onPause() {
+        // Close the camera
+        closeCamera();
+        stopCaptureThread();
+        super.onPause();
+    }
+
+    @Override
+    protected void onStop() {
+        // Close the camera
+        closeCamera();
+        stopCaptureThread();
+        super.onStop();
+    }
+
+    // Start the capture thread
+    private void startCaptureThread() {
+        mCaptureThread = new HandlerThread("capture");
+        mCaptureThread.start();
+        mCaptureHandler = new Handler(mCaptureThread.getLooper());
+    }
+
+    @Override
+    protected void onResume() {
+        initStatus();
+        super.onResume();
+    }
+
+    // Check whether the camera permission has been granted
+    private boolean hasPermission() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+            return checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
+        } else {
+            return true;
+        }
+    }
+
+    // Request the camera permission
+    private void requestPermission() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+            requestPermissions(new String[]{Manifest.permission.CAMERA}, 1);
+        }
+    }
+
+}
diff --git a/code/MainActivity.java b/code/MainActivity.java
new file mode 100644
index 0000000..b44e274
--- /dev/null
+++ b/code/MainActivity.java
@@ -0,0 +1,94 @@
+package com.yeyupiaoling.teststadiometry;
+
+import android.annotation.SuppressLint;
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Matrix;
+import android.os.Bundle;
+import android.util.Log;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.TextView;
+
+import androidx.appcompat.app.AppCompatActivity;
+
+import org.opencv.android.OpenCVLoader;
+
+import java.io.IOException;
+
+public class MainActivity extends AppCompatActivity {
+    private static final String TAG = MainActivity.class.getName();
+    private ImageView imageViewResult;
+    private StereoBMUtil stereoBMUtil;
+    private TextView textView;
+    private Bitmap leftBitmap;
+    private Bitmap rightBitmap;
+
+    @SuppressLint("ClickableViewAccessibility")
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_main);
+
+        // Initialize OpenCV
+        if (OpenCVLoader.initDebug()) {
+            Log.d(TAG, "OpenCVLoader initialized successfully");
+        }
+
+        stereoBMUtil = new StereoBMUtil();
+        ImageView imageViewLeft = findViewById(R.id.imageViewLeft);
+        ImageView imageViewRight = findViewById(R.id.imageViewRight);
+        imageViewResult = findViewById(R.id.imageViewResult);
+        textView = findViewById(R.id.result_tv);
+        Button button = findViewById(R.id.button);
+        Button button1 = findViewById(R.id.button1);
+
+        // Load the sample images
+        try {
+            leftBitmap = BitmapFactory.decodeStream(getAssets().open("Left3.bmp"));
+            rightBitmap = BitmapFactory.decodeStream(getAssets().open("Right3.bmp"));
+            imageViewLeft.setImageBitmap(leftBitmap);
+            imageViewRight.setImageBitmap(rightBitmap);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        // Run the StereoBM algorithm
+        button.setOnClickListener(v -> {
+            try {
+                Bitmap result = stereoBMUtil.compute(leftBitmap, rightBitmap);
+                imageViewResult.setImageBitmap(result);
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        });
+
+        // Open the camera activity
+        button1.setOnClickListener(v -> {
+            Intent intent = new Intent(MainActivity.this, CameraActivity.class);
+            startActivity(intent);
+        });
+
+        // Tap the computed disparity image to get the 3D coordinates of that point
+        imageViewResult.setOnTouchListener((v, event) -> {
+            // Coordinates of the touch point
+            float x = event.getX();
+            float y = event.getY();
+            // Touch point mapped back into image coordinates
+            float[] dst = new float[2];
+            Matrix imageMatrix = imageViewResult.getImageMatrix();
+            Matrix inverseMatrix = new Matrix();
+            imageMatrix.invert(inverseMatrix);
+            inverseMatrix.mapPoints(dst, new float[]{x, y});
+            int dstX = (int) dst[0];
+            int dstY = (int) dst[1];
+            // Get the 3D coordinates of this point
+            double[] c = stereoBMUtil.getCoordinate(dstX, dstY);
+            String s = String.format("Point (%d, %d) 3D coordinates: [%.2f, %.2f, %.2f]", dstX, dstY, c[0], c[1], c[2]);
+            Log.d(TAG, s);
+            textView.setText(s);
+            return true;
+        });
+    }
+}
diff --git a/code/StereoBMUtil.java b/code/StereoBMUtil.java
new file mode 100644
index 0000000..2f22708
--- /dev/null
+++ b/code/StereoBMUtil.java
@@ -0,0 +1,123 @@
+package com.yeyupiaoling.teststadiometry;
+
+import android.graphics.Bitmap;
+
+import org.opencv.calib3d.Calib3d;
+import org.opencv.calib3d.StereoBM;
+import org.opencv.core.Core;
+import org.opencv.core.CvType;
+import org.opencv.core.Mat;
+import org.opencv.core.Rect;
+import org.opencv.core.Scalar;
+import org.opencv.core.Size;
+import org.opencv.imgproc.Imgproc;
+
+import static org.opencv.android.Utils.bitmapToMat;
+import static org.opencv.android.Utils.matToBitmap;
+
+public class StereoBMUtil {
+    private static final String TAG = StereoBMUtil.class.getName();
+    // [Adjust these parameters for your camera]
+    private final int imageWidth = 1280;  // Width of a single camera image
+    private final int imageHeight = 720;  // Height of a single camera image
+    private Mat Q = new Mat();
+
+    // Rectification maps
+    private Mat mapLx = new Mat();
+    private Mat mapLy = new Mat();
+    private Mat mapRx = new Mat();
+    private Mat mapRy = new Mat();
+
+    private StereoBM bm = StereoBM.create();
+    private Mat xyz;
+
+    public StereoBMUtil() {
+        Mat cameraMatrixL = new Mat(3, 3, CvType.CV_64F);
+        Mat distCoeffL = new Mat(5, 1, CvType.CV_64F);
+        Mat cameraMatrixR = new Mat(3, 3, CvType.CV_64F);
+        Mat distCoeffR = new Mat(5, 1, CvType.CV_64F);
+        Mat T = new Mat(3, 1, CvType.CV_64F);
+        Mat rec = new Mat(3, 1, CvType.CV_64F);
+        // [Adjust for your camera] Left camera intrinsics: fc_left_x 0 cc_left_x; 0 fc_left_y cc_left_y; 0 0 1
+        cameraMatrixL.put(0, 0, 849.38718, 0, 720.28472, 0, 850.60613, 373.88887, 0, 0, 1);
+        // [Adjust for your camera] Left camera distortion coefficients kc_left_01, kc_left_02, kc_left_03, kc_left_04, kc_left_05
+        distCoeffL.put(0, 0, 0.01053, 0.02881, 0.00144, 0.00192, 0.00000);
+        // [Adjust for your camera] Right camera intrinsics: fc_right_x 0 cc_right_x; 0 fc_right_y cc_right_y; 0 0 1
+        cameraMatrixR.put(0, 0, 847.54814, 0, 664.36648, 0, 847.75828, 368.46946, 0, 0, 1);
+        // [Adjust for your camera] Right camera distortion coefficients kc_right_01, kc_right_02, kc_right_03, kc_right_04, kc_right_05
+        distCoeffR.put(0, 0, 0.00905, 0.02094, 0.00082, 0.00183, 0.00000);
+        // [Adjust for your camera] Translation vector T
+        T.put(0, 0, -59.32102, 0.27563, -0.79807);
+        // [Adjust for your camera] Rotation vector rec
+        rec.put(0, 0, -0.00927, -0.00228, -0.00070);
+
+        Size imageSize = new Size(imageWidth, imageHeight);
+        Mat R = new Mat();
+        Mat Rl = new Mat();
+        Mat Rr = new Mat();
+        Mat Pl = new Mat();
+        Mat Pr = new Mat();
+        Rect validROIL = new Rect();
+        Rect validROIR = new Rect();
+        Calib3d.Rodrigues(rec, R); // Rodrigues transform: rotation vector to rotation matrix
+        // After rectification the images are cropped; validROI is the cropped region
+        Calib3d.stereoRectify(cameraMatrixL, distCoeffL, cameraMatrixR, distCoeffR, imageSize, R, T, Rl, Rr, Pl, Pr, Q, Calib3d.CALIB_ZERO_DISPARITY,
+                0, imageSize, validROIL, validROIR);
+        Imgproc.initUndistortRectifyMap(cameraMatrixL, distCoeffL, Rl, Pl, imageSize, CvType.CV_32FC1, mapLx, mapLy);
+        Imgproc.initUndistortRectifyMap(cameraMatrixR, distCoeffR, Rr, Pr, imageSize, CvType.CV_32FC1, mapRx, mapRy);
+
+        int blockSize = 18;
+        int numDisparities = 11;
+        int uniquenessRatio = 5;
+        bm.setBlockSize(2 * blockSize + 5); // SAD window size
+        bm.setROI1(validROIL); // Valid pixel regions of the left and right views
+        bm.setROI2(validROIR);
+        bm.setPreFilterCap(61); // Pre-filter cap
+        bm.setMinDisparity(32); // Minimum disparity; defaults to 0 and may be negative (int)
+        bm.setNumDisparities(numDisparities * 16); // Disparity range (max - min); must be a multiple of 16
+        bm.setTextureThreshold(10);
+        bm.setUniquenessRatio(uniquenessRatio); // Uniqueness ratio in percent; helps reject false matches
+        bm.setSpeckleWindowSize(100); // Window size for checking disparity variation in connected regions
+        bm.setSpeckleRange(32); // Disparity variation threshold; regions exceeding it are zeroed
+        bm.setDisp12MaxDiff(-1);
+    }
+
+    public Bitmap compute(Bitmap left, Bitmap right) {
+        Mat rgbImageL = new Mat();
+        Mat rgbImageR = new Mat();
+        Mat grayImageL = new Mat();
+        Mat rectifyImageL = new Mat();
+        Mat rectifyImageR = new Mat();
+        Mat grayImageR = new Mat();
+        // Holds the 3D coordinates of every pixel relative to the camera
+        xyz = new Mat();
+        Mat disp = new Mat();
+        bitmapToMat(left, rgbImageL);
+        bitmapToMat(right, rgbImageR);
+        Imgproc.cvtColor(rgbImageL, grayImageL, Imgproc.COLOR_BGR2GRAY);
+        Imgproc.cvtColor(rgbImageR, grayImageR, Imgproc.COLOR_BGR2GRAY);
+
+        Imgproc.remap(grayImageL, rectifyImageL, mapLx, mapLy, Imgproc.INTER_LINEAR);
+        Imgproc.remap(grayImageR, rectifyImageR, mapRx, mapRy, Imgproc.INTER_LINEAR);
+
+        bm.compute(rectifyImageL, rectifyImageR, disp); // Input images must be grayscale
+        Calib3d.reprojectImageTo3D(disp, xyz, Q, true); // The X/W, Y/W, Z/W from reprojectImageTo3D must be multiplied by 16 to get real distances
+        Core.multiply(xyz, new Mat(xyz.size(), CvType.CV_32FC3, new Scalar(16, 16, 16)), xyz);
+
+        // Prepare the disparity map for display
+        Mat disp8U = new Mat(disp.rows(), disp.cols(), CvType.CV_8UC1);
+        disp.convertTo(disp, CvType.CV_32F, 1.0 / 16); // Divide by 16 to get the true disparity values
+        Core.normalize(disp, disp8U, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);
+        Imgproc.medianBlur(disp8U, disp8U, 9);
+        Bitmap resultBitmap = Bitmap.createBitmap(disp8U.cols(), disp8U.rows(), Bitmap.Config.ARGB_8888);
+        matToBitmap(disp8U, resultBitmap);
+        return resultBitmap;
+    }
+
+    public double[] getCoordinate(int dstX, int dstY) {
+        double x = xyz.get(dstY, dstX)[0];
+        double y = xyz.get(dstY, dstX)[1];
+        double z = xyz.get(dstY, dstX)[2];
+        return new double[]{x, y, z};
+    }
+}
diff --git a/code/Utils.java b/code/Utils.java
new file mode 100644
index 0000000..2704956
--- /dev/null
+++ b/code/Utils.java
@@ -0,0 +1,78 @@
+package com.yeyupiaoling.teststadiometry;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.util.Size;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+public class Utils {
+    private static final String TAG = Utils.class.getName();
+
+    // Choose the optimal preview size
+    public static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
+        final Size desiredSize = new Size(width, height);
+
+        // Collect the supported resolutions that are at least as big as the preview Surface
+        boolean exactSizeFound = false;
+        float desiredAspectRatio = width * 1.0f / height; // in landscape perspective
+        float bestAspectRatio = 0;
+        final List<Size> bigEnough = new ArrayList<>();
+        for (final Size option : choices) {
+            if (option.equals(desiredSize)) {
+                // An exact match was found; stop searching.
+                exactSizeFound = true;
+                break;
+            }
+
+            float aspectRatio = option.getWidth() * 1.0f / option.getHeight();
+            if (aspectRatio > desiredAspectRatio) continue; // smaller than the screen
+            // Try to find the best aspect ratio that fits the screen
+            if (aspectRatio > bestAspectRatio) {
+                if (option.getHeight() >= height && option.getWidth() >= width) {
+                    bigEnough.clear();
+                    bigEnough.add(option);
+                    bestAspectRatio = aspectRatio;
+                }
+            } else if (aspectRatio == bestAspectRatio) {
+                if (option.getHeight() >= height && option.getWidth() >= width) {
+                    bigEnough.add(option);
+                }
+            }
+        }
+        if (exactSizeFound) {
+            return desiredSize;
+        }
+
+        if (bigEnough.size() > 0) {
+            final Size chosenSize = Collections.min(bigEnough, new Comparator<Size>() {
+                @Override
+                public int compare(Size lhs, Size rhs) {
+                    return Long.signum(
+                            (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
+                }
+            });
+            return chosenSize;
+        } else {
+            return choices[0];
+        }
+    }
+
+    public static Bitmap rotateBitmap(Bitmap bitmap, int angle) {
+        Matrix matrix = new Matrix();
+        matrix.postRotate(angle);
+        return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
+    }
+
+    public static List<Bitmap> bisectionBitmap(Bitmap bitmap) {
+        List<Bitmap> bitmapList = new ArrayList<>();
+        Bitmap left = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth() / 2, bitmap.getHeight(), null, true);
+        bitmapList.add(left);
+        Bitmap right = Bitmap.createBitmap(bitmap, bitmap.getWidth() / 2, 0, bitmap.getWidth() / 2, bitmap.getHeight(), null, true);
+        bitmapList.add(right);
+        return bitmapList;
+    }
+}
diff --git a/doc/原型设计.docx b/doc/原型设计.docx
new file mode 100644
index 0000000..c201162
Binary files /dev/null and b/doc/原型设计.docx differ
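
Usage note (not part of the patch): the sketch below shows one way the point returned by StereoBMUtil.getCoordinate() could be turned into a straight-line distance from the left camera, which is what a ranging app ultimately reports. It assumes the coordinates come back in the same units as the calibration translation vector T (millimetres for the values above); the helper name distanceToCameraMm is hypothetical and does not exist in the repository.

    // Minimal sketch, assuming getCoordinate() returns {X, Y, Z} in the units of T (mm).
    public static double distanceToCameraMm(double[] xyz) {
        return Math.sqrt(xyz[0] * xyz[0] + xyz[1] * xyz[1] + xyz[2] * xyz[2]);
    }

    // Example: double mm = distanceToCameraMm(stereoBMUtil.getCoordinate(dstX, dstY));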