master
Dana 3 years ago
parent c39174e409
commit 3601b83c9f

@ -1,10 +1,9 @@
package com.yeyupiaoling.teststadiometry;
package com.example.otherclass;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
public class AutoFitTextureView extends TextureView {
private int mRatioWidth = 0;
@ -45,4 +44,7 @@ public class AutoFitTextureView extends TextureView {
setMeasuredDimension(mRatioWidth, mRatioHeight);
}
public void setAspectRatio(float height, float width) {
}
}

@ -1,11 +1,13 @@
package com.yeyupiaoling.teststadiometry;
package com.example.smartglasses;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
@ -25,6 +27,7 @@ import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.provider.MediaStore;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
@ -35,11 +38,16 @@ import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.example.otherclass.AutoFitTextureView;
import com.example.otherclass.StereoBMUtil;
import com.example.otherclass.Utils;
import org.opencv.android.OpenCVLoader;
import java.util.Arrays;
import java.util.List;
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public class CameraActivity extends AppCompatActivity {
private static final String TAG = CameraActivity.class.getSimpleName();
private CameraCaptureSession mCaptureSession;
@ -69,7 +77,9 @@ public class CameraActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d("Test", "未引入.xml ");
setContentView(R.layout.activity_camera);
Log.d("Test", "进入C活动成功 ");
// 权限申请
if (!hasPermission()) {
@ -77,7 +87,7 @@ public class CameraActivity extends AppCompatActivity {
}
//初始化
if (OpenCVLoader.initDebug()) {
Log.d(TAG, "OpenCVLoader初始化成功");
Log.d("Test", "OpenCVLoader初始化成功");
}
stereoBMUtil = new StereoBMUtil();
@ -95,6 +105,7 @@ public class CameraActivity extends AppCompatActivity {
// 拍照获取左右摄像头的图像
button2.setOnClickListener(v -> {
Log.d("Test", "点击打开摄像头 ");
bgView.setVisibility(View.VISIBLE);
ll.setVisibility(View.VISIBLE);
Bitmap imgBitmap = mTextureView.getBitmap();
@ -103,6 +114,7 @@ public class CameraActivity extends AppCompatActivity {
// 左右目摄像头的图像
leftBitmap = bitmapList.get(0);
rightBitmap = bitmapList.get(1);
Log.d("Test", "getbitmap成功 ");
imageViewLeft.setImageBitmap(leftBitmap);
imageViewRight.setImageBitmap(rightBitmap);

@ -0,0 +1,428 @@
package com.example.fragment;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.os.Build;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.fragment.app.Fragment;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.example.smartglasses.R;
import com.example.otherclass.AutoFitTextureView;
import com.example.otherclass.StereoBMUtil;
import com.example.otherclass.Utils;
import com.example.otherclass.AutoFitTextureView;
import org.opencv.android.OpenCVLoader;
import java.util.Arrays;
import java.util.List;
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public class CameraFragment extends Fragment {
    // Camera2 handles: the open capture session, the opened device, and the
    // background thread/handler that services all camera callbacks.
    private CameraCaptureSession mCaptureSession;
    private CameraDevice mCameraDevice;
    private HandlerThread mCaptureThread;
    private Handler mCaptureHandler;
    private ImageReader mImageReader;
    // false => select the back-facing camera in startCapture() (field name is a
    // typo for "isFront").
    private boolean isFont = false;
    // Preview size chosen by Utils.chooseOptimalSize() for the TextureView.
    private Size mPreviewSize;
    // Re-entrancy guard: true while a camera is being opened or is streaming.
    private boolean mCapturing;
    private AutoFitTextureView mTextureView;
    private ImageView imageViewLeft;
    private ImageView imageViewRight;
    private ImageView imageViewResult;
    // Sensor rotation in degrees from CameraCharacteristics.SENSOR_ORIENTATION;
    // used to un-rotate the bitmap grabbed from the preview.
    private int sensorOrientation = 0;
    private View bgView;
    private LinearLayout ll;
    private TextView textView;
    // OpenCV StereoBM helper: computes the disparity map and 3-D coordinates.
    private StereoBMUtil stereoBMUtil;
    // Left/right halves of the last captured side-by-side stereo frame.
    private Bitmap leftBitmap;
    private Bitmap rightBitmap;

    public CameraFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    /**
     * Inflates the camera UI, requests the CAMERA permission if missing,
     * initializes OpenCV and StereoBM, and wires up the three buttons:
     * capture (button2), back-to-preview (button3), compute disparity (button4).
     * Touching the result image prints the 3-D coordinate of the touched pixel.
     */
    @SuppressLint("ClickableViewAccessibility")
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_camera, container, false);
        // Request camera permission if not yet granted.
        if (!hasPermission()) {
            requestPermission();
        }
        // Initialize OpenCV (static/debug loader).
        if (OpenCVLoader.initDebug()) {
            Log.d("Test", "OpenCVLoader初始化成功");
        }
        stereoBMUtil = new StereoBMUtil();
        mTextureView = view.findViewById(R.id.texture_view);
        imageViewLeft = view.findViewById(R.id.imageViewLeft);
        imageViewRight = view.findViewById(R.id.imageViewRight);
        imageViewResult = view.findViewById(R.id.imageViewResult);
        bgView = view.findViewById(R.id.bg_view);
        ll = view.findViewById(R.id.show_ll);
        textView = view.findViewById(R.id.result_tv);
        Button button2 = view.findViewById(R.id.button2);
        Button button3 = view.findViewById(R.id.button3);
        Button button4 = view.findViewById(R.id.button4);
        // Capture: grab the current preview frame and split it into the
        // left-eye and right-eye images.
        button2.setOnClickListener(v -> {
            Log.d("Test", "点击打开摄像头 ");
            bgView.setVisibility(View.VISIBLE);
            ll.setVisibility(View.VISIBLE);
            Bitmap imgBitmap = mTextureView.getBitmap();
            Log.d("Test", "getBitmap()v ");
            // Undo the sensor rotation so the bitmap is upright.
            Bitmap b = Utils.rotateBitmap(imgBitmap, 360 - sensorOrientation);
            Log.d("Test", " rotateBitmap");
            List<Bitmap> bitmapList = Utils.bisectionBitmap(b);
            // Left and right camera images (index 0 = left, 1 = right).
            leftBitmap = bitmapList.get(0);
            rightBitmap = bitmapList.get(1);
            Log.d("Test", "getbitmap成功 ");
            imageViewLeft.setImageBitmap(leftBitmap);
            imageViewRight.setImageBitmap(rightBitmap);
        });
        // Return to the live camera preview (hide the capture overlay).
        button3.setOnClickListener(v -> {
            bgView.setVisibility(View.GONE);
            ll.setVisibility(View.GONE);
        });
        // Run the StereoBM algorithm on the captured pair.
        // NOTE(review): leftBitmap/rightBitmap are null until button2 has been
        // pressed once — compute() would then receive nulls; confirm upstream.
        button4.setOnClickListener(v -> {
            Bitmap result = stereoBMUtil.compute(leftBitmap, rightBitmap);
            imageViewResult.setImageBitmap(result);
        });
        // Touch the computed image to read the 3-D coordinate of that pixel.
        imageViewResult.setOnTouchListener((v, event) -> {
            // Touch coordinates in view space.
            float x = event.getX();
            float y = event.getY();
            float[] dst = new float[2];
            // Map view coordinates back to image/bitmap coordinates by
            // inverting the ImageView's display matrix.
            Matrix imageMatrix = imageViewResult.getImageMatrix();
            Matrix inverseMatrix = new Matrix();
            imageMatrix.invert(inverseMatrix);
            inverseMatrix.mapPoints(dst, new float[]{x, y});
            int dstX = (int) dst[0];
            int dstY = (int) dst[1];
            // Look up the 3-D coordinate for that pixel.
            double[] c = stereoBMUtil.getCoordinate(dstX, dstY);
            String s = String.format("点(%d, %d) 三维坐标:[%.2f, %.2f, %.2f]", dstX, dstY, c[0], c[1], c[2]);
            Log.d("TAG", s);
            textView.setText(s);
            return true;
        });
        return view;
    }

    // Start the capture thread, then start capturing as soon as the
    // SurfaceTexture is available (immediately if it already is).
    private void initStatus() {
        // Start the background capture thread.
        startCaptureThread();
        // If the SurfaceTexture is already available, start capturing right
        // away; otherwise wait for the availability callback.
        if (mTextureView.isAvailable()) {
            startCapture();
        } else {
            mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
                @Override
                public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                    startCapture();
                }

                @Override
                public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
                }

                @Override
                public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                    return true;
                }

                @Override
                public void onSurfaceTextureUpdated(SurfaceTexture surface) {
                }
            });
        }
    }

    // Select a camera (front/back per isFont), choose an optimal preview size,
    // and open the device; createCaptureSession() is called from onOpened().
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void startCapture() {
        // Bail out if a capture is already in progress.
        if (mCapturing) return;
        mCapturing = true;
        // final CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
        // NOTE(review): getActivity() may be null if the fragment is detached —
        // confirm this is only reached while attached.
        final CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
        String cameraIdAvailable = null;
        try {
            assert manager != null;
            for (final String cameraId : manager.getCameraIdList()) {
                final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                // Pick the front or back camera depending on isFont.
                if (isFont) {
                    if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                        cameraIdAvailable = cameraId;
                        break;
                    }
                } else {
                    if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                        cameraIdAvailable = cameraId;
                        break;
                    }
                }
            }
        } catch (CameraAccessException e) {
            Log.e("TAG", "启动图片捕获异常 ", e);
        }
        try {
            assert cameraIdAvailable != null;
            final CameraCharacteristics characteristics =
                    manager.getCameraCharacteristics(cameraIdAvailable);
            final StreamConfigurationMap map =
                    characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            try {
                // Remember the sensor rotation for un-rotating captured frames.
                sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
                Log.i("TAG", "orientation: " + sensorOrientation);
            } catch (Exception e) {
                e.printStackTrace();
            }
            assert map != null;
            mPreviewSize = Utils.chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    mTextureView.getWidth(),
                    mTextureView.getHeight());
            Log.d("mPreviewSize", String.valueOf(mPreviewSize));
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            manager.openCamera(cameraIdAvailable, new CameraDevice.StateCallback() {
                @Override
                public void onOpened(@NonNull CameraDevice camera) {
                    mCameraDevice = camera;
                    createCaptureSession();
                }

                @Override
                public void onDisconnected(@NonNull CameraDevice camera) {
                    camera.close();
                    mCameraDevice = null;
                    mCapturing = false;
                }

                @Override
                public void onError(@NonNull CameraDevice camera, final int error) {
                    Log.e("TAG", "打开相机错误 = " + error);
                    camera.close();
                    mCameraDevice = null;
                    mCapturing = false;
                }
            }, mCaptureHandler);
        } catch (CameraAccessException | SecurityException e) {
            mCapturing = false;
            Log.e("TAG", "启动图片捕获异常 ", e);
        }
    }

    // Build the repeating preview request (continuous AF + auto-flash AE) and
    // create the capture session targeting the TextureView surface.
    private void createCaptureSession() {
        try {
            final SurfaceTexture texture = mTextureView.getSurfaceTexture();
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            final Surface surface = new Surface(texture);
            final CaptureRequest.Builder captureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(surface);
            // NOTE(review): the ImageReader surface is added to the session but
            // no listener/request ever targets it — looks unused; confirm.
            mImageReader = ImageReader.newInstance(
                    mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.JPEG, 10);
            mCameraDevice.createCaptureSession(
                    Arrays.asList(surface, mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            if (null == mCameraDevice) {
                                return;
                            }
                            mCaptureSession = cameraCaptureSession;
                            try {
                                captureRequestBuilder.set(
                                        CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                captureRequestBuilder.set(
                                        CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                                CaptureRequest previewRequest = captureRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(
                                        previewRequest, new CameraCaptureSession.CaptureCallback() {
                                            @Override
                                            public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
                                                super.onCaptureProgressed(session, request, partialResult);
                                            }

                                            @Override
                                            public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
                                                super.onCaptureFailed(session, request, failure);
                                                Log.d("TAG", "onCaptureFailed = " + failure.getReason());
                                            }

                                            @Override
                                            public void onCaptureSequenceCompleted(@NonNull CameraCaptureSession session, int sequenceId, long frameNumber) {
                                                super.onCaptureSequenceCompleted(session, sequenceId, frameNumber);
                                                Log.d("TAG", "onCaptureSequenceCompleted");
                                            }
                                        }, mCaptureHandler);
                            } catch (final CameraAccessException e) {
                                Log.e("TAG", "onConfigured exception ", e);
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull final CameraCaptureSession cameraCaptureSession) {
                            Log.e("TAG", "onConfigureFailed ");
                        }
                    },
                    null);
        } catch (final CameraAccessException e) {
            Log.e("TAG", "创建捕获图片session异常 ", e);
        }
    }

    // Close the session, the device, and the ImageReader (in that order) and
    // clear the capturing flag so startCapture() can run again.
    private void closeCamera() {
        if (mCaptureSession != null) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mImageReader != null) {
            mImageReader.close();
            mImageReader = null;
        }
        mCapturing = false;
    }

    // Stop the background capture thread and wait for it to finish.
    private void stopCaptureThread() {
        try {
            if (mCaptureThread != null) {
                mCaptureThread.quitSafely();
                mCaptureThread.join();
            }
            mCaptureThread = null;
            mCaptureHandler = null;
        } catch (final Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onPause() {
        // Release the camera before the fragment is paused.
        closeCamera();
        stopCaptureThread();
        super.onPause();
    }

    @Override
    public void onStop() {
        // Release the camera when the fragment is stopped.
        closeCamera();
        stopCaptureThread();
        super.onStop();
    }

    // Start the background thread whose Looper services camera callbacks.
    private void startCaptureThread() {
        mCaptureThread = new HandlerThread("capture");
        mCaptureThread.start();
        mCaptureHandler = new Handler(mCaptureThread.getLooper());
    }

    @Override
    public void onResume() {
        // NOTE(review): initStatus() runs before super.onResume(); convention
        // is to call super first — confirm intentional.
        initStatus();
        super.onResume();
    }

    // check had permission
    private boolean hasPermission() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            return getActivity().checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
        } else {
            return true;
        }
    }

    // request permission
    private void requestPermission() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            requestPermissions(new String[]{Manifest.permission.CAMERA}, 1);
        }
    }
}

@ -0,0 +1,38 @@
package com.example.adapter;
import androidx.annotation.NonNull;
import com.example.smartglasses.R;
import com.xuexiang.xui.adapter.recyclerview.BaseRecyclerAdapter;
import com.xuexiang.xui.adapter.recyclerview.RecyclerViewHolder;
import com.xuexiang.xui.adapter.simple.AdapterItem;
import com.xuexiang.xui.widget.imageview.ImageLoader;
import com.xuexiang.xui.widget.imageview.RadiusImageView;
/**
 * Grid adapter that renders an {@link AdapterItem} as a (possibly circular)
 * icon plus a one-character title abbreviation and the full title below it.
 */
public class CommonGridAdapter extends BaseRecyclerAdapter<AdapterItem> {
    // Whether the item image is clipped to a circle.
    private final boolean mIsCircle;

    /**
     * @param isCircle true to render item icons as circles.
     */
    public CommonGridAdapter(boolean isCircle) {
        super();
        mIsCircle = isCircle;
    }

    @Override
    protected int getItemLayoutId(int viewType) {
        return R.layout.adapter_common_grid_item;
    }

    @Override
    protected void bindData(@NonNull RecyclerViewHolder holder, int position, AdapterItem item) {
        if (item != null) {
            RadiusImageView imageView = holder.findViewById(R.id.riv_item);
            imageView.setCircle(mIsCircle);
            ImageLoader.get().loadImage(imageView, item.getIcon());
            // Fix: substring(0, 1) on a null/empty title threw
            // StringIndexOutOfBoundsException; fall back to an empty string.
            CharSequence rawTitle = item.getTitle();
            String title = rawTitle == null ? "" : rawTitle.toString();
            holder.text(R.id.tv_title, title.isEmpty() ? "" : title.substring(0, 1));
            holder.text(R.id.tv_sub_title, rawTitle);
        }
    }
}

@ -0,0 +1,55 @@
package com.example.fragment;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.smartglasses.R;
import com.example.smartglasses.databinding.FragmentDeviseBinding;
/**
 * Fragment showing a single card that, when clicked, navigates to either the
 * {@link LocationFragment} or the {@link TraceFragment} depending on the
 * "jumpPage" argument passed at creation time.
 */
public class DeviseFragment extends Fragment {
    // View binding for fragment_devise.xml.
    FragmentDeviseBinding binding;
    // Navigation target read from the arguments bundle: "location" or "trace".
    private String jumpPage;
    private static final String ARG_PARAM1 = "jumpPage";

    public DeviseFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            jumpPage = getArguments().getString(ARG_PARAM1);
            Log.i("devise", "onCreate: 在碎片中获得devise" + jumpPage);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        binding = FragmentDeviseBinding.inflate(inflater, container, false);
        binding.cardView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Log.i("devise", "onClick: 我点击了");
                // Fix: jumpPage is null when the fragment is created without
                // arguments; compare constant-first to avoid an NPE on click.
                if ("location".equals(jumpPage)) {
                    LocationFragment fragment = new LocationFragment();
                    getFragmentManager().beginTransaction().replace(R.id.frag_layout, fragment).commit();
                } else if ("trace".equals(jumpPage)) {
                    TraceFragment fragment = new TraceFragment();
                    getFragmentManager().beginTransaction().replace(R.id.frag_layout, fragment).commit();
                }
            }
        });
        return binding.getRoot();
    }
}

@ -0,0 +1,117 @@
package com.example.fragment;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.fragment.app.Fragment;
import com.example.activity.BarActivity;
import com.example.smartglasses.MainActivity;
import com.example.smartglasses.R;
import com.example.otherclass.StereoBMUtil;
import org.opencv.android.OpenCVLoader;
import java.io.IOException;
/**
 * Demo fragment: loads a bundled stereo image pair from assets, runs the
 * StereoBM disparity computation on demand, and reports the 3-D coordinate of
 * any pixel touched on the result image.
 */
public class HomeFragment extends Fragment {
    private ImageView imageViewResult;
    // OpenCV StereoBM helper: computes the disparity map and 3-D coordinates.
    private StereoBMUtil stereoBMUtil;
    private TextView textView;
    // Sample stereo pair loaded from assets (Left3.bmp / Right3.bmp).
    private Bitmap leftBitmap;
    private Bitmap rightBitmap;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @SuppressLint("ClickableViewAccessibility")
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_home, container, false);
        // Initialize OpenCV (static/debug loader).
        if (OpenCVLoader.initDebug()) {
            Log.d("TAG", "OpenCVLoader初始化成功");
        }
        stereoBMUtil = new StereoBMUtil();
        ImageView imageViewLeft = view.findViewById(R.id.imageViewLeft);
        ImageView imageViewRight = view.findViewById(R.id.imageViewRight);
        imageViewResult = view.findViewById(R.id.imageViewResult);
        textView = view.findViewById(R.id.result_tv);
        Button button = view.findViewById(R.id.button);
        Button button1 = view.findViewById(R.id.button1);
        // Load the bundled sample stereo pair from assets.
        try {
            leftBitmap = BitmapFactory.decodeStream(getContext().getAssets().open("Left3.bmp"));
            rightBitmap = BitmapFactory.decodeStream(getContext().getAssets().open("Right3.bmp"));
            imageViewLeft.setImageBitmap(leftBitmap);
            imageViewRight.setImageBitmap(rightBitmap);
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Run the StereoBM algorithm and display the disparity map.
        button.setOnClickListener(v -> {
            try {
                Bitmap result = stereoBMUtil.compute(leftBitmap, rightBitmap);
                imageViewResult.setImageBitmap(result);
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
        // Switch to the camera screen (fragment index 4 on the host activity,
        // replacing the old CameraActivity intent).
        button1.setOnClickListener(v -> {
            // Intent intent = new Intent(MainActivity.this, CameraActivity.class);
            Log.d("Test", "启动C活动");
            BarActivity activity = (BarActivity) getActivity();
            // startActivity(intent);
            activity.setCurrentFragment(4);
        });
        // Touch the computed image to read the 3-D coordinate of that pixel.
        imageViewResult.setOnTouchListener((v, event) -> {
            // Touch coordinates in view space.
            float x = event.getX();
            float y = event.getY();
            // Target point in image/bitmap coordinates.
            float[] dst = new float[2];
            // Invert the ImageView's display matrix to map view -> image space.
            Matrix imageMatrix = imageViewResult.getImageMatrix();
            Matrix inverseMatrix = new Matrix();
            imageMatrix.invert(inverseMatrix);
            inverseMatrix.mapPoints(dst, new float[]{x, y});
            int dstX = (int) dst[0];
            int dstY = (int) dst[1];
            // Look up the 3-D coordinate for that pixel.
            double[] c = stereoBMUtil.getCoordinate(dstX, dstY);
            String s = String.format("点(%d, %d) 三维坐标:[%.2f, %.2f, %.2f]", dstX, dstY, c[0], c[1], c[2]);
            // Log.d(TAG, s);
            textView.setText(s);
            return true;
        });
        return view;
    }
}

@ -0,0 +1,62 @@
package com.example.fragment;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.smartglasses.R;
/**
 * Placeholder fragment for the "location" screen. Carries two optional string
 * parameters supplied through the standard {@link #newInstance} factory.
 */
public class LocationFragment extends Fragment {
    // Argument-bundle keys for the two fragment parameters.
    private static final String ARG_PARAM1 = "param1";
    private static final String ARG_PARAM2 = "param2";

    // Parameter values restored from the arguments bundle in onCreate().
    private String mParam1;
    private String mParam2;

    public LocationFragment() {
        // Required empty public constructor
    }

    /**
     * Builds a new instance carrying the two supplied parameters in its
     * arguments bundle.
     *
     * @param param1 Parameter 1.
     * @param param2 Parameter 2.
     * @return A new instance of fragment LocationFragment.
     */
    public static LocationFragment newInstance(String param1, String param2) {
        Bundle args = new Bundle();
        args.putString(ARG_PARAM1, param1);
        args.putString(ARG_PARAM2, param2);
        LocationFragment fragment = new LocationFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Bundle args = getArguments();
        if (args != null) {
            mParam1 = args.getString(ARG_PARAM1);
            mParam2 = args.getString(ARG_PARAM2);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_location, container, false);
    }
}

@ -1,4 +1,6 @@
package com.yeyupiaoling.teststadiometry;
package com.example.smartglasses;
import androidx.appcompat.app.AppCompatActivity;
import android.annotation.SuppressLint;
import android.content.Intent;
@ -7,12 +9,15 @@ import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.appcompat.app.AppCompatActivity;
import com.example.otherclass.StereoBMUtil;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.OpenCVLoader;
import java.io.IOException;
@ -67,6 +72,7 @@ public class MainActivity extends AppCompatActivity {
// 打开相机activity
button1.setOnClickListener(v -> {
Intent intent = new Intent(MainActivity.this, CameraActivity.class);
Log.d("Test", "启动C活动");
startActivity(intent);
});

@ -0,0 +1,66 @@
package com.example.fragment;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.adapter.CommonGridAdapter;
import com.example.smartglasses.R;
import com.example.smartglasses.databinding.FragmentMineBinding;
import com.xuexiang.xui.adapter.simple.AdapterItem;
import com.xuexiang.xui.utils.ResUtils;
import com.xuexiang.xui.utils.WidgetUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * "Mine" tab fragment: shows a 4-column grid of tool entries whose titles and
 * icons come from the grid_titles_entry / grid_icons_entry resource arrays.
 */
public class MineFragment extends Fragment {
    RecyclerView recyclerHead;
    // View binding for fragment_mine.xml.
    FragmentMineBinding binding;
    private CommonGridAdapter mGridAdapter;

    public MineFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        binding = FragmentMineBinding.inflate(inflater, container, false);
        recyclerHead = binding.recyclerHead;
        initTools();
        return binding.getRoot();
    }

    /** Configures the grid RecyclerView (4 columns) and fills its adapter. */
    public void initTools() {
        WidgetUtils.initGridRecyclerView(recyclerHead, 4, 0);
        recyclerHead.setAdapter(mGridAdapter = new CommonGridAdapter(true));
        mGridAdapter.refresh(getGridItems(this.getActivity(), R.array.grid_titles_entry, R.array.grid_icons_entry));
    }

    /**
     * Pairs each title with its icon from the two resource arrays.
     *
     * @param context      context used to resolve the drawable array.
     * @param titleArrayId string-array resource of entry titles.
     * @param iconArrayId  drawable-array resource of entry icons.
     * @return one {@link AdapterItem} per title/icon pair.
     */
    private static List<AdapterItem> getGridItems(Context context, int titleArrayId, int iconArrayId) {
        List<AdapterItem> list = new ArrayList<>();
        String[] titles = ResUtils.getStringArray(titleArrayId);
        Drawable[] icons = ResUtils.getDrawableArray(context, iconArrayId);
        // Fix: iterating to titles.length indexed icons[i] out of bounds when
        // the two resource arrays differ in length; clamp to the shorter one.
        int count = Math.min(titles.length, icons.length);
        for (int i = 0; i < count; i++) {
            list.add(new AdapterItem(titles[i], icons[i]));
        }
        Log.i("TAB", "getGridItems: " + list.size());
        return list;
    }
}

@ -0,0 +1,15 @@
package com.example;
import android.app.Application;
import com.xuexiang.xui.XUI;
/**
 * Application subclass whose only job is to initialize the XUI widget library
 * at process start.
 */
public class MyApplication extends Application {
    @Override
    public void onCreate() {
        super.onCreate();
        // Initialize XUI before any XUI widget is inflated.
        XUI.init(this);
        // NOTE(review): debug logging is unconditionally enabled — consider
        // gating on BuildConfig.DEBUG for release builds.
        XUI.debug(true);
    }
}

@ -1,4 +1,4 @@
package com.yeyupiaoling.teststadiometry;
package com.example.otherclass;
import android.graphics.Bitmap;
@ -16,108 +16,108 @@ import static org.opencv.android.Utils.bitmapToMat;
import static org.opencv.android.Utils.matToBitmap;
public class StereoBMUtil {
private static final String TAG = StereoBMUtil.class.getName();
// 【需要根据摄像头修改参数】
private final int imageWidth = 1280; // 单目图像的宽度
private final int imageHeight = 720; // 单目图像的高度
private Mat Q = new Mat();
private static final String TAG = StereoBMUtil.class.getName();
// 【需要根据摄像头修改参数】
private final int imageWidth = 1280; // 单目图像的宽度
private final int imageHeight = 720; // 单目图像的高度
private Mat Q = new Mat();
//映射表
private Mat mapLx = new Mat();
private Mat mapLy = new Mat();
private Mat mapRx = new Mat();
private Mat mapRy = new Mat();
//映射表
private Mat mapLx = new Mat();
private Mat mapLy = new Mat();
private Mat mapRx = new Mat();
private Mat mapRy = new Mat();
private StereoBM bm = StereoBM.create();
private Mat xyz;
private StereoBM bm = StereoBM.create();
private Mat xyz;
public StereoBMUtil() {
Mat cameraMatrixL = new Mat(3, 3, CvType.CV_64F);
Mat distCoeffL = new Mat(5, 1, CvType.CV_64F);
Mat cameraMatrixR = new Mat(3, 3, CvType.CV_64F);
Mat distCoeffR = new Mat(5, 1, CvType.CV_64F);
Mat T = new Mat(3, 1, CvType.CV_64F);
Mat rec = new Mat(3, 1, CvType.CV_64F);
// 【需要根据摄像头修改参数】左目相机标定参数 fc_left_x 0 cc_left_x 0 fc_left_y cc_left_y 0 0 1
cameraMatrixL.put(0, 0, 849.38718, 0, 720.28472, 0, 850.60613, 373.88887, 0, 0, 1);
//【需要根据摄像头修改参数】左目相机标定参数 kc_left_01, kc_left_02, kc_left_03, kc_left_04, kc_left_05
distCoeffL.put(0, 0, 0.01053, 0.02881, 0.00144, 0.00192, 0.00000);
//【需要根据摄像头修改参数】右目相机标定参数 fc_right_x 0 cc_right_x 0 fc_right_y cc_right_y 0 0 1
cameraMatrixR.put(0, 0, 847.54814, 0, 664.36648, 0, 847.75828, 368.46946, 0, 0, 1);
//【需要根据摄像头修改参数】右目相机标定参数 kc_right_01, kc_right_02, kc_right_03, kc_right_04, kc_right_05
distCoeffR.put(0, 0, 0.00905, 0.02094, 0.00082, 0.00183, 0.00000);
//【需要根据摄像头修改参数】T平移向量
T.put(0, 0, -59.32102, 0.27563, -0.79807);
// 【需要根据摄像头修改参数】rec旋转向量
rec.put(0, 0, -0.00927, -0.00228, -0.00070);
public StereoBMUtil() {
Mat cameraMatrixL = new Mat(3, 3, CvType.CV_64F);
Mat distCoeffL = new Mat(5, 1, CvType.CV_64F);
Mat cameraMatrixR = new Mat(3, 3, CvType.CV_64F);
Mat distCoeffR = new Mat(5, 1, CvType.CV_64F);
Mat T = new Mat(3, 1, CvType.CV_64F);
Mat rec = new Mat(3, 1, CvType.CV_64F);
// 【需要根据摄像头修改参数】左目相机标定参数 fc_left_x 0 cc_left_x 0 fc_left_y cc_left_y 0 0 1
cameraMatrixL.put(0, 0, 849.38718, 0, 720.28472, 0, 850.60613, 373.88887, 0, 0, 1);
//【需要根据摄像头修改参数】左目相机标定参数 kc_left_01, kc_left_02, kc_left_03, kc_left_04, kc_left_05
distCoeffL.put(0, 0, 0.01053, 0.02881, 0.00144, 0.00192, 0.00000);
//【需要根据摄像头修改参数】右目相机标定参数 fc_right_x 0 cc_right_x 0 fc_right_y cc_right_y 0 0 1
cameraMatrixR.put(0, 0, 847.54814, 0, 664.36648, 0, 847.75828, 368.46946, 0, 0, 1);
//【需要根据摄像头修改参数】右目相机标定参数 kc_right_01, kc_right_02, kc_right_03, kc_right_04, kc_right_05
distCoeffR.put(0, 0, 0.00905, 0.02094, 0.00082, 0.00183, 0.00000);
//【需要根据摄像头修改参数】T平移向量
T.put(0, 0, -59.32102, 0.27563, -0.79807);
// 【需要根据摄像头修改参数】rec旋转向量
rec.put(0, 0, -0.00927, -0.00228, -0.00070);
Size imageSize = new Size(imageWidth, imageHeight);
Mat R = new Mat();
Mat Rl = new Mat();
Mat Rr = new Mat();
Mat Pl = new Mat();
Mat Pr = new Mat();
Rect validROIL = new Rect();
Rect validROIR = new Rect();
Calib3d.Rodrigues(rec, R); //Rodrigues变换
//图像校正之后会对图像进行裁剪这里的validROI就是指裁剪之后的区域
Calib3d.stereoRectify(cameraMatrixL, distCoeffL, cameraMatrixR, distCoeffR, imageSize, R, T, Rl, Rr, Pl, Pr, Q, Calib3d.CALIB_ZERO_DISPARITY,
0, imageSize, validROIL, validROIR);
Imgproc.initUndistortRectifyMap(cameraMatrixL, distCoeffL, Rl, Pl, imageSize, CvType.CV_32FC1, mapLx, mapLy);
Imgproc.initUndistortRectifyMap(cameraMatrixR, distCoeffR, Rr, Pr, imageSize, CvType.CV_32FC1, mapRx, mapRy);
Size imageSize = new Size(imageWidth, imageHeight);
Mat R = new Mat();
Mat Rl = new Mat();
Mat Rr = new Mat();
Mat Pl = new Mat();
Mat Pr = new Mat();
Rect validROIL = new Rect();
Rect validROIR = new Rect();
Calib3d.Rodrigues(rec, R); //Rodrigues变换
//图像校正之后会对图像进行裁剪这里的validROI就是指裁剪之后的区域
Calib3d.stereoRectify(cameraMatrixL, distCoeffL, cameraMatrixR, distCoeffR, imageSize, R, T, Rl, Rr, Pl, Pr, Q, Calib3d.CALIB_ZERO_DISPARITY,
0, imageSize, validROIL, validROIR);
Imgproc.initUndistortRectifyMap(cameraMatrixL, distCoeffL, Rl, Pl, imageSize, CvType.CV_32FC1, mapLx, mapLy);
Imgproc.initUndistortRectifyMap(cameraMatrixR, distCoeffR, Rr, Pr, imageSize, CvType.CV_32FC1, mapRx, mapRy);
int blockSize = 18;
int numDisparities = 11;
int uniquenessRatio = 5;
bm.setBlockSize(2 * blockSize + 5); //SAD窗口大小
bm.setROI1(validROIL); //左右视图的有效像素区域
bm.setROI2(validROIR);
bm.setPreFilterCap(61); //预处理滤波器
bm.setMinDisparity(32); //最小视差默认值为0, 可以是负值int型
bm.setNumDisparities(numDisparities * 16); //视差窗口,即最大视差值与最小视差值之差,16的整数倍
bm.setTextureThreshold(10);
bm.setUniquenessRatio(uniquenessRatio); //视差唯一性百分比,uniquenessRatio主要可以防止误匹配
bm.setSpeckleWindowSize(100); //检查视差连通区域变化度的窗口大小
bm.setSpeckleRange(32); //32视差变化阈值当窗口内视差变化大于阈值时该窗口内的视差清零
bm.setDisp12MaxDiff(-1);
}
int blockSize = 18;
int numDisparities = 11;
int uniquenessRatio = 5;
bm.setBlockSize(2 * blockSize + 5); //SAD窗口大小
bm.setROI1(validROIL); //左右视图的有效像素区域
bm.setROI2(validROIR);
bm.setPreFilterCap(61); //预处理滤波器
bm.setMinDisparity(32); //最小视差默认值为0, 可以是负值int型
bm.setNumDisparities(numDisparities * 16); //视差窗口,即最大视差值与最小视差值之差,16的整数倍
bm.setTextureThreshold(10);
bm.setUniquenessRatio(uniquenessRatio); //视差唯一性百分比,uniquenessRatio主要可以防止误匹配
bm.setSpeckleWindowSize(100); //检查视差连通区域变化度的窗口大小
bm.setSpeckleRange(32); //32视差变化阈值当窗口内视差变化大于阈值时该窗口内的视差清零
bm.setDisp12MaxDiff(-1);
}
public Bitmap compute(Bitmap left, Bitmap right) {
Mat rgbImageL = new Mat();
Mat rgbImageR = new Mat();
Mat grayImageL = new Mat();
Mat rectifyImageL = new Mat();
Mat rectifyImageR = new Mat();
Mat grayImageR = new Mat();
//用于存放每个像素点距离相机镜头的三维坐标
xyz = new Mat();
Mat disp = new Mat();
bitmapToMat(left, rgbImageL);
bitmapToMat(right, rgbImageR);
Imgproc.cvtColor(rgbImageL, grayImageL, Imgproc.COLOR_BGR2GRAY);
Imgproc.cvtColor(rgbImageR, grayImageR, Imgproc.COLOR_BGR2GRAY);
public Bitmap compute(Bitmap left, Bitmap right) {
Mat rgbImageL = new Mat();
Mat rgbImageR = new Mat();
Mat grayImageL = new Mat();
Mat rectifyImageL = new Mat();
Mat rectifyImageR = new Mat();
Mat grayImageR = new Mat();
//用于存放每个像素点距离相机镜头的三维坐标
xyz = new Mat();
Mat disp = new Mat();
bitmapToMat(left, rgbImageL);
bitmapToMat(right, rgbImageR);
Imgproc.cvtColor(rgbImageL, grayImageL, Imgproc.COLOR_BGR2GRAY);
Imgproc.cvtColor(rgbImageR, grayImageR, Imgproc.COLOR_BGR2GRAY);
Imgproc.remap(grayImageL, rectifyImageL, mapLx, mapLy, Imgproc.INTER_LINEAR);
Imgproc.remap(grayImageR, rectifyImageR, mapRx, mapRy, Imgproc.INTER_LINEAR);
Imgproc.remap(grayImageL, rectifyImageL, mapLx, mapLy, Imgproc.INTER_LINEAR);
Imgproc.remap(grayImageR, rectifyImageR, mapRx, mapRy, Imgproc.INTER_LINEAR);
bm.compute(rectifyImageL, rectifyImageR, disp); //输入图像必须为灰度图
Calib3d.reprojectImageTo3D(disp, xyz, Q, true); //在实际求距离时ReprojectTo3D出来的X / W, Y / W, Z / W都要乘以16
Core.multiply(xyz, new Mat(xyz.size(), CvType.CV_32FC3, new Scalar(16, 16, 16)), xyz);
bm.compute(rectifyImageL, rectifyImageR, disp); //输入图像必须为灰度图
Calib3d.reprojectImageTo3D(disp, xyz, Q, true); //在实际求距离时ReprojectTo3D出来的X / W, Y / W, Z / W都要乘以16
Core.multiply(xyz, new Mat(xyz.size(), CvType.CV_32FC3, new Scalar(16, 16, 16)), xyz);
// 用于显示处理
Mat disp8U = new Mat(disp.rows(), disp.cols(), CvType.CV_8UC1);
disp.convertTo(disp, CvType.CV_32F, 1.0 / 16); //除以16得到真实视差值
Core.normalize(disp, disp8U, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);
Imgproc.medianBlur(disp8U, disp8U, 9);
Bitmap resultBitmap = Bitmap.createBitmap(disp8U.cols(), disp8U.rows(), Bitmap.Config.ARGB_8888);
matToBitmap(disp8U, resultBitmap);
return resultBitmap;
}
// 用于显示处理
Mat disp8U = new Mat(disp.rows(), disp.cols(), CvType.CV_8UC1);
disp.convertTo(disp, CvType.CV_32F, 1.0 / 16); //除以16得到真实视差值
Core.normalize(disp, disp8U, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);
Imgproc.medianBlur(disp8U, disp8U, 9);
Bitmap resultBitmap = Bitmap.createBitmap(disp8U.cols(), disp8U.rows(), Bitmap.Config.ARGB_8888);
matToBitmap(disp8U, resultBitmap);
return resultBitmap;
}
public double[] getCoordinate(int dstX, int dstY) {
double x = xyz.get(dstY, dstX)[0];
double y = xyz.get(dstY, dstX)[1];
double z = xyz.get(dstY, dstX)[2];
return new double[]{x, y, z};
}
}
public double[] getCoordinate(int dstX, int dstY) {
double x = xyz.get(dstY, dstX)[0];
double y = xyz.get(dstY, dstX)[1];
double z = xyz.get(dstY, dstX)[2];
return new double[]{x, y, z};
}
}

@ -0,0 +1,35 @@
package com.example.fragment;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.smartglasses.R;
import com.example.smartglasses.databinding.FragmentTraceBinding;
/**
 * "Trace" screen fragment: a two-tab indicator ("今天" / "昨天") backed by
 * fragment_trace.xml via view binding.
 */
public class TraceFragment extends Fragment {
    // View binding for fragment_trace.xml.
    FragmentTraceBinding binding;
    // Tab titles are fixed; promoted to a shared immutable constant.
    private static final String[] TITLES = {"今天", "昨天"};

    public TraceFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        binding = FragmentTraceBinding.inflate(inflater, container, false);
        binding.easyIndicator2.setTabTitles(TITLES);
        return binding.getRoot();
    }
}

@ -1,4 +1,4 @@
package com.yeyupiaoling.teststadiometry;
package com.example.otherclass;
import android.graphics.Bitmap;
import android.graphics.Matrix;

Loading…
Cancel
Save