pull/8/head
root 3 days ago
parent cb52c09b01
commit 8bcbbc68ad

Binary file not shown.

@ -0,0 +1,20 @@
cmake_minimum_required(VERSION 3.5)
project(mediamodule LANGUAGES CXX)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
find_package(Qt5 REQUIRED COMPONENTS Core Widgets)
find_package(OpenCV REQUIRED)
add_library(mediamodule
camera_streamer.cpp
)
target_include_directories(mediamodule PUBLIC ${CMAKE_CURRENT_SOURCE_DIR} ${OpenCV_INCLUDE_DIRS})
target_link_libraries(mediamodule PUBLIC Qt5::Core Qt5::Widgets ${OpenCV_LIBS})
add_executable(example_viewer example_viewer.cpp)
target_link_libraries(example_viewer PRIVATE mediamodule)

@ -0,0 +1,105 @@
#include "camera_streamer.h"
#include <QDebug>
#include <QHostAddress>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
CameraStreamer::CameraStreamer(QObject *parent)
: QObject(parent)
{
}
CameraStreamer::~CameraStreamer()
{
stopStreaming();
}
bool CameraStreamer::startStreaming(const QString &remoteUser,
const QString &remoteHost,
const QString &remoteCommand,
int localPort)
{
if (m_running.load())
return true; // already running
// Start the ssh process that launches the video stream on the remote side
if (!m_sshProcess) {
m_sshProcess = new QProcess(this);
// Keep the ssh session alive without reading stdin; -T disables pseudo-terminal allocation.
// Options must come before the destination, otherwise ssh treats them as part of the remote command.
QStringList args;
args << "-T"
<< QString("%1@%2").arg(remoteUser, remoteHost)
<< remoteCommand;
m_sshProcess->start("ssh", args);
if (!m_sshProcess->waitForStarted(3000)) {
qWarning() << "Failed to start ssh process:" << m_sshProcess->errorString();
delete m_sshProcess;
m_sshProcess = nullptr;
return false;
}
}
// Start the local receive thread
m_running = true;
m_captureThread = std::thread(&CameraStreamer::captureLoop, this, localPort);
return true;
}
void CameraStreamer::stopStreaming()
{
if (!m_running.load())
return;
// Stop the capture thread
m_running = false;
if (m_captureThread.joinable())
m_captureThread.join();
// Terminate the remote ssh process
if (m_sshProcess) {
m_sshProcess->terminate();
if (!m_sshProcess->waitForFinished(3000)) {
m_sshProcess->kill();
m_sshProcess->waitForFinished();
}
delete m_sshProcess;
m_sshProcess = nullptr;
}
}
void CameraStreamer::captureLoop(int localPort)
{
// GStreamer UDP receive pipeline (requires OpenCV built with GStreamer support; see the note after this file)
QString pipeline = QString("udpsrc address=0.0.0.0 port=%1 ! application/x-rtp,media=video,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! appsink max-buffers=1 drop=true").arg(localPort);
cv::VideoCapture cap(pipeline.toStdString(), cv::CAP_GSTREAMER);
if (!cap.isOpened()) {
qWarning() << "Failed to open capture pipeline" << pipeline;
return;
}
cv::Mat frame;
while (m_running.load()) {
if (!cap.read(frame) || frame.empty()) {
std::this_thread::sleep_for(std::chrono::milliseconds(10));
continue;
}
cv::Mat rgb;
if (frame.channels() == 3) {
cv::cvtColor(frame, rgb, cv::COLOR_BGR2RGB);
} else if (frame.channels() == 4) {
cv::cvtColor(frame, rgb, cv::COLOR_BGRA2RGB);
} else {
// Single-channel (e.g. grayscale) frame: expand to RGB so Format_RGB888 below stays valid
cv::cvtColor(frame, rgb, cv::COLOR_GRAY2RGB);
}
QImage image(rgb.data, rgb.cols, rgb.rows, static_cast<int>(rgb.step), QImage::Format_RGB888);
emit newFrame(image.copy()); // copy to detach from cv::Mat memory
// Frame-rate throttling; adjust as needed
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
cap.release();
}
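
captureLoop() opens its pipeline with cv::CAP_GSTREAMER, so frames only arrive when the local OpenCV build was compiled with GStreamer support. The stand-alone sketch below (not part of the module) uses the standard cv::getBuildInformation() call to print the relevant line; the string search is only a heuristic, so inspect the full output if in doubt.

// Stand-alone sketch: print the GStreamer line from OpenCV's build information so it can be
// checked for "YES" before relying on the cv::CAP_GSTREAMER pipeline in captureLoop().
#include <opencv2/core.hpp>
#include <iostream>
#include <string>

int main()
{
    const std::string info = cv::getBuildInformation();
    const std::string::size_type pos = info.find("GStreamer");
    if (pos == std::string::npos) {
        std::cout << "No GStreamer entry found in cv::getBuildInformation()" << std::endl;
    } else {
        std::cout << info.substr(pos, info.find('\n', pos) - pos) << std::endl;
    }
    return 0;
}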

@ -0,0 +1,35 @@
#pragma once
#include <QObject>
#include <QImage>
#include <QProcess>
#include <atomic>
#include <thread>
#include <opencv2/opencv.hpp>
class CameraStreamer : public QObject
{
Q_OBJECT
public:
explicit CameraStreamer(QObject *parent = nullptr);
~CameraStreamer();
// remoteUser@remoteHost: remote ssh login; remoteCommand: the command that starts the camera stream on the remote side
// localPort: local UDP port to receive on (SDK default: 9201)
bool startStreaming(const QString &remoteUser,
const QString &remoteHost,
const QString &remoteCommand = "cd ~/UnitreecameraSDK && ./bins/example_putImagetrans",
int localPort = 9201);
void stopStreaming();
signals:
// Emitted for every received frame, for display by the Qt front end (see the sketch after this header)
void newFrame(const QImage &image);
private:
void captureLoop(int localPort);
QProcess *m_sshProcess {nullptr};
std::thread m_captureThread;
std::atomic<bool> m_running {false};
};
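
Because newFrame() is emitted from the std::thread that runs captureLoop() rather than from a Qt thread, a receiver living in the GUI thread gets the signal through a queued connection. The sketch below (a hypothetical helper, not part of the module) makes that queued delivery explicit; example_viewer.cpp further down relies on the default Qt::AutoConnection, which resolves to the same behaviour.

// Hypothetical helper, not part of this module: connect newFrame() to a QLabel with an
// explicit Qt::QueuedConnection, so the QImage is always handled in the label's (GUI) thread
// even though the signal is emitted from the capture std::thread.
#include <QLabel>
#include <QPixmap>
#include "camera_streamer.h"

static void attachViewer(CameraStreamer *streamer, QLabel *label)
{
    QObject::connect(streamer, &CameraStreamer::newFrame, label,
                     [label](const QImage &img) {
                         label->setPixmap(QPixmap::fromImage(img));
                     },
                     Qt::QueuedConnection);
}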

@ -0,0 +1,39 @@
#include <QApplication>
#include <QLabel>
#include <QVBoxLayout>
#include <QPixmap>
#include "camera_streamer.h"
int main(int argc, char *argv[])
{
QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
QApplication app(argc, argv);
// Main window
QWidget window;
window.setWindowTitle("Unitree Camera Viewer");
auto *layout = new QVBoxLayout(&window);
QLabel *label = new QLabel(&window);
label->setAlignment(Qt::AlignCenter);
layout->addWidget(label);
CameraStreamer streamer;
QObject::connect(&streamer, &CameraStreamer::newFrame, &window, [label](const QImage &img){
label->setPixmap(QPixmap::fromImage(img).scaled(label->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation));
});
// Adjust the remote user name, IP and port to match your setup
QString remoteUser = "unitree";
QString remoteHost = "192.168.123.10"; // IP of the robot (dog) side
streamer.startStreaming(remoteUser, remoteHost);
window.resize(960, 540);
window.show();
int ret = app.exec();
streamer.stopStreaming();
return ret;
}

@ -0,0 +1,66 @@
add_executable(example_getRawFrame ./example_getRawFrame.cc)
target_link_libraries(example_getRawFrame ${SDKLIBS})
add_executable(example_getDepthFrame ./example_getDepthFrame.cc)
target_link_libraries(example_getDepthFrame ${SDKLIBS})
add_executable(example_getRectFrame ./example_getRectFrame.cc)
target_link_libraries(example_getRectFrame ${SDKLIBS})
add_executable(example_getCalibParamsFile ./example_getCalibParamsFile.cc)
target_link_libraries(example_getCalibParamsFile ${SDKLIBS})
add_executable(example_putImagetrans ./example_putImagetrans.cc)
target_link_libraries(example_putImagetrans ${SDKLIBS})
add_executable(example_getimagetrans ./example_getimagetrans.cc)
target_link_libraries(example_getimagetrans ${SDKLIBS})
# add_executable(example_share ./example_share.cc)
# target_link_libraries(example_share ${SDKLIBS})
find_package(OpenGL REQUIRED)
if(OpenGL_FOUND)
include_directories(${OPENGL_INCLUDE_DIR})
message(STATUS ${OPENGL_INCLUDE_DIR})
message(STATUS ${OPENGL_LIBRARIES})
else()
message(WARNING "OpenGL Library Not Found")
endif()
find_package(GLUT REQUIRED)
if(GLUT_FOUND)
include_directories(${GLUT_INCLUDE_DIR})
message(STATUS ${GLUT_INCLUDE_DIR})
message(STATUS ${GLUT_LIBRARY})
else()
message(WARNING "GLUT Library Not Found")
endif()
find_package(X11 REQUIRED)
if(X11_FOUND)
include_directories(${X11_INCLUDE_DIR})
message(${X11_INCLUDE_DIR})
message(${X11_LIBRARIES})
else()
message(WARNING "X11 Library Not Found")
endif()
if(X11_FOUND AND OpenGL_FOUND AND GLUT_FOUND)
set(ShowPointCloud true)
message(STATUS "Point Cloud Example Enabled")
else()
set(ShowPointCloud false)
message(WARNING "Point Cloud Example Disabled")
endif()
if(${ShowPointCloud})
add_executable(example_getPointCloud ./example_getPointCloud.cc ./glViewer/glwindow_x11.cpp ./glViewer/scenewindow.cpp)
target_link_libraries(example_getPointCloud ${SDKLIBS} ${OPENGL_LIBRARIES} ${GLUT_LIBRARY} ${X11_LIBRARIES} )
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -pthread")

@ -0,0 +1,27 @@
/**
* @file example_getCalibParamsFile.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to get the camera's internal (calibration) parameters
* @author ZhangChunyang
* @date 2021.07.31
* @version 1.0.1
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
#include <UnitreeCameraSDK.hpp>
#include <unistd.h>
int main(int argc, char *argv[]){
UnitreeCamera cam("stereo_camera_config.yaml"); ///< init UnitreeCamera object by config file
if(!cam.isOpened()) ///< get camera open state
exit(EXIT_FAILURE);
cam.startCapture(); ///< start capturing (image H.264 encoding and share-memory sharing are disabled by default)
usleep(100000); ///< wait for parameter initialization to finish
cam.saveCalibParams("output_camCalibParams.yaml"); ///< save parameters to output_camCalibParams.yaml
std::cout << cam.getSerialNumber() << " " << cam.getPosNumber() << std::endl;
usleep(100000);
cam.stopCapture(); ///< stop camera capturing
return 0;
}

@ -0,0 +1,42 @@
/**
* @file example_getDepthFrame.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to get a depth frame
* @author SunMingzhe
* @date 2021.12.07
* @version 1.1.0
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
#include <UnitreeCameraSDK.hpp>
#include <unistd.h>
int main(int argc, char *argv[]){
UnitreeCamera cam("stereo_camera_config.yaml"); ///< init UnitreeCamera object by config file
if(!cam.isOpened()) ///< get camera open state
exit(EXIT_FAILURE);
cam.startCapture(); ///< start capturing (image H.264 encoding and share-memory sharing are disabled by default)
cam.startStereoCompute(); ///< start disparity computing
while(cam.isOpened()){
cv::Mat depth;
std::chrono::microseconds t;
if(!cam.getDepthFrame(depth, true, t)){ ///< get stereo camera depth image
usleep(1000);
continue;
}
if(!depth.empty()){
cv::imshow("UnitreeCamera-Depth", depth);
}
char key = cv::waitKey(10);
if(key == 27) // press ESC key
break;
}
cam.stopStereoCompute(); ///< stop disparity computing
cam.stopCapture(); ///< stop camera capturing
return 0;
}

@ -0,0 +1,97 @@
/**
* @file example_getPointCloud.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to get the camera point cloud.
* @author ZhangChunyang
* @date 2021.07.31
* @version 1.0.1
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
#include <GL/gl.h>
#include <signal.h>
#include <cerrno>
#include <cfenv>
#include <unistd.h>
#include "glViewer/scenewindow.hpp"
#include <UnitreeCameraSDK.hpp>
#define RGB_PCL true ///< Color Point Cloud Enable Flag
void DrawScene(const std::vector<PCLType>& pcl_vec) {
glBegin(GL_POINTS);
for (uint i = 0; i < pcl_vec.size(); ++i) {
PCLType pcl = pcl_vec[i];
glColor3ub(pcl.clr(2), pcl.clr(1), pcl.clr(0));
glVertex3f(-pcl.pts(0), -pcl.pts(1), pcl.pts(2));
}
glEnd();
}
void DrawScene(const std::vector<cv::Vec3f>& pcl_vec) {
glBegin(GL_POINTS);
for (uint i = 0; i < pcl_vec.size(); ++i) {
cv::Vec3f pcl = pcl_vec[i];
glColor3ub(255, 255, 0);
glVertex3f(-pcl(0), -pcl(1), pcl(2));
}
glEnd();
}
bool killSignalFlag = false;
void ctrl_c_handler(int s){
killSignalFlag = true;
return ;
}
int main(int argc, char *argv[]){
UnitreeCamera cam("stereo_camera_config.yaml");
if(!cam.isOpened())
exit(EXIT_FAILURE);
cam.startCapture();
cam.startStereoCompute();
struct sigaction sigIntHandler;
sigIntHandler.sa_handler = ctrl_c_handler;
sigemptyset(&sigIntHandler.sa_mask);
sigIntHandler.sa_flags = 0;
sigaction(SIGINT, &sigIntHandler, NULL);
std::cout << cam.getSerialNumber() << " " << cam.getPosNumber() << std::endl;
glwindow::SceneWindow scene(960, 720, "Panorama 3D Scene");
while(cam.isOpened()){
if(killSignalFlag){
break;
}
std::chrono::microseconds t;
#if RGB_PCL
std::vector<PCLType> pcl_vec;
if(!cam.getPointCloud(pcl_vec, t)){
usleep(1000);
continue;
}
#else
std::vector<cv::Vec3f> pcl_vec;
if(!cam.getPointCloud(pcl_vec, t)){
usleep(1000);
continue;
}
#endif
if (scene.win.alive()) {
if (scene.start_draw()) {
DrawScene(pcl_vec);
scene.finish_draw();
}
}
}
cam.stopStereoCompute();
cam.stopCapture();
return 0;
}

@ -0,0 +1,63 @@
/**
* @file example_getRawFrame.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to get the camera raw frame.
* @author ZhangChunyang
* @date 2021.07.31
* @version 1.0.1
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
#include <UnitreeCameraSDK.hpp>
#include <unistd.h>
int main(int argc, char *argv[]){
int deviceNode = 0; // default 0 -> /dev/video0
cv::Size frameSize(1856, 800); // default image size: 1856 x 800
int fps = 30;
if(argc >= 2){
deviceNode = std::atoi(argv[1]);
if(argc >= 4){
frameSize = cv::Size(std::atoi(argv[2]), std::atoi(argv[3]));
}
if(argc >=5)
fps = std::atoi(argv[4]);
}
UnitreeCamera cam(deviceNode); ///< init camera by device node number
if(!cam.isOpened())
exit(EXIT_FAILURE);
cam.setRawFrameSize(frameSize); ///< set camera frame size
cam.setRawFrameRate(fps); ///< set camera frame rate
std::cout << "Device Position Number:" << cam.getPosNumber() << std::endl;
cam.startCapture(); ///< start camera capturing
while(cam.isOpened())
{
cv::Mat frame;
std::chrono::microseconds t;
if(!cam.getRawFrame(frame, t)){ ///< get camera raw image
usleep(1000);
continue;
}
cv::Mat left,right;
frame(cv::Rect(0, 0, frame.size().width/2, frame.size().height)).copyTo(right);
frame(cv::Rect(frame.size().width/2,0, frame.size().width/2, frame.size().height)).copyTo(left);
cv::hconcat(left, right, frame);
cv::imshow("UnitreeCamera_Left-Right", frame);
char key = cv::waitKey(10);
if(key == 27) // press ESC key
break;
}
cam.stopCapture(); ///< stop camera capturing
return 0;
}

@ -0,0 +1,60 @@
/**
* @file example_getRectFrame.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to get rectified stereo frames
* @author ZhangChunyang
* @date 2021.07.31
* @version 1.0.1
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
#include <UnitreeCameraSDK.hpp>
#include <unistd.h>
int main(int argc, char *argv[]){
int deviceNode = 0; ///< default 0 -> /dev/video0
cv::Size frameSize(1856, 800); ///< default frame size 1856x800
int fps = 30; ///< default camera fps: 30
if(argc >= 2){
deviceNode = std::atoi(argv[1]);
if(argc >= 4){
frameSize = cv::Size(std::atoi(argv[2]), std::atoi(argv[3]));
}
if(argc >=5)
fps = std::atoi(argv[4]);
}
UnitreeCamera cam("stereo_camera_config.yaml"); ///< init camera by device node number
if(!cam.isOpened()) ///< get camera open state
exit(EXIT_FAILURE);
cam.setRawFrameSize(frameSize); ///< set camera frame size
cam.setRawFrameRate(fps); ///< set camera frame rate
cam.setRectFrameSize(cv::Size(frameSize.width >> 2, frameSize.height >> 1)); ///< set camera rectify frame size
cam.startCapture(); ///< start capturing (image H.264 encoding and share-memory sharing are disabled by default)
usleep(500000);
while(cam.isOpened()){
cv::Mat left,right;
if(!cam.getRectStereoFrame(left,right)){ ///< get rectify left,right frame
usleep(1000);
continue;
}
cv::Mat stereo;
// cv::flip(left,left, -1);
// cv::flip(right,right, -1);
cv::hconcat(left, right, stereo);
cv::flip(stereo,stereo, -1);
cv::imshow("Longlat_Rect", stereo);
char key = cv::waitKey(10);
if(key == 27) // press ESC key
break;
}
cam.stopCapture(); ///< stop camera capturing
return 0;
}

@ -0,0 +1,63 @@
/**
* @file example_getimagetrans.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to receive images transmitted over UDP
* @author SunMingzhe
* @date 2021.12.07
* @version 1.1.0
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
/*
Listener
Introduction: this program receives images over directed UDP; the images must be sent by another
program, for example example_putImagetrans.cc.
Ports: 9201~9205 -> front, chin, left, right, abdomen cameras.
The local IP must be set to 192.168.123.IpLastSegment, and IpLastSegment must be set in the
sender's configuration yaml.
*/
/*
local ip config
ip 192.168.123.IpLastSegment
netmask 255.255.255.0
gateway 192.168.123.1
*/
#include <opencv2/opencv.hpp>
#include <iostream>
int main(int argc,char** argv)
{
std::string IpLastSegment = "161";
int cam = 1;
if (argc>=2)
cam = std::atoi(argv[1]);
std::string udpstrPrevData = "udpsrc address=192.168.123."+ IpLastSegment + " port=";
// Ports: front, chin, left, right, abdomen
std::array<int,5> udpPORT = std::array<int, 5>{9201, 9202, 9203, 9204, 9205};
//std::string udpstrBehindData = " ! application/x-rtp,media=video,encoding-name=H264 ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! appsink";
std::string udpstrBehindData = " ! application/x-rtp,media=video,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! appsink";
std::string udpSendIntegratedPipe = udpstrPrevData + std::to_string(udpPORT[cam-1]) + udpstrBehindData;
std::cout<<"udpSendIntegratedPipe:"<<udpSendIntegratedPipe<<std::endl;
cv::VideoCapture cap(udpSendIntegratedPipe);
if(!cap.isOpened())
return 0 ;
cv::Mat frame;
while(1)
{
cap >> frame;
if(frame.empty())
break;
imshow("video", frame);
cv::waitKey(20);
}
cap.release(); // release the capture resources
return 0;
}

@ -0,0 +1,118 @@
/**
* @file example_putImagetrans.cc
* @brief This file is part of UnitreeCameraSDK.
* @details This example shows how to transmit images over UDP
* @author SunMingzhe
* @date 2021.12.07
* @version 1.1.0
* @copyright Copyright (c) 2020-2021, Hangzhou Yushu Technology Stock CO.LTD. All Rights Reserved.
*/
#include <UnitreeCameraSDK.hpp>
#include <unistd.h>
/*
Sender
Introduction: this program transmits images over directed UDP; the transfer is built into the
SDK: in StereoCameraCommon::startCapture(true,false) the first parameter switches UDP/H.264
transmission on.
IpLastSegment (config yaml) is the listener's IP suffix; the listener IP is 192.168.123.IpLastSegment.
Transrate (config yaml) is the transmission rate; it must be lower than FrameRate.
Ports: 9201~9205 -> front, chin, left, right, abdomen cameras.
Before starting this program, run kill.sh to stop the auto-started camera programs:
Unitree/autostart/02camerarosnode/kill.sh and Unitree/autostart/04imageai/kill.sh.
You can also use the following commands:
ps -A | grep point | awk '{print $1}' | xargs kill -9
ps -aux|grep mqttControlNode|grep -v grep|head -n 1|awk '{print $2}'|xargs kill -9
ps -aux|grep live_human_pose|grep -v grep|head -n 1|awk '{print $2}'|xargs kill -9
Transfer picture mode: decided by Transmode in the configuration yaml, which the program reads
at startup.
0: original left image
1: original stereo image
2: rectified left image
3: rectified stereo image
   warning: the left/right output positions of the stereo picture are swapped
   (left camera -> right half, right camera -> left half)
4: not recommended; requires Depthmode = 2 and outputs the rectified left image plus the depth image
(A hedged sketch of reading these configuration keys follows after this file.)
*/
/*
local ip config
ip 192.168.123.x
netmask 255.255.255.0
gateway 192.168.123.1
*/
int main(int argc, char *argv[])
{
UnitreeCamera cam("trans_rect_config.yaml"); ///< init camera by device node number
if(!cam.isOpened()) ///< get camera open state
exit(EXIT_FAILURE);
cam.startCapture(true,false); ///< enable H.264 image transmission over UDP, disable share-memory sharing
usleep(500000);
while(cam.isOpened())
{
cv::Mat left,right,feim;
if(!cam.getRectStereoFrame(left,right))
{
usleep(1000);
continue;
}
char key = cv::waitKey(10);
if(key == 27) // press ESC key
break;
}
cam.stopCapture(); ///< stop camera capturing
return 0;
}
/*
int main(int argc, char *argv[])
{
UnitreeCamera cam("trans_rect_config.yaml"); ///< init camera by device node number
if(!cam.isOpened()) ///< get camera open state
exit(EXIT_FAILURE);
cam.startCapture(true,false); ///< enable H.264 image transmission over UDP, disable share-memory sharing
usleep(500000);
while(cam.isOpened())
{
cv::Mat frame;
std::chrono::microseconds t;
if(!cam.getRawFrame(frame, t)){ ///< get camera raw image
usleep(1000);
continue;
}
char key = cv::waitKey(10);
if(key == 27) // press ESC key
break;
}
cam.stopCapture(); ///< stop camera capturing
return 0;
}
*/
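
The comments above name several keys in trans_rect_config.yaml (Transmode, Transrate, IpLastSegment, Depthmode). The sketch below shows how such keys could be read with OpenCV's cv::FileStorage; the key names and flat layout are assumptions taken from those comments, so the real config schema may differ.

// Hypothetical sketch: read the transmission-related keys mentioned in the comments above
// from trans_rect_config.yaml. Key names and layout are assumptions; the real schema may differ.
#include <opencv2/core.hpp>
#include <iostream>

int main()
{
    cv::FileStorage fs("trans_rect_config.yaml", cv::FileStorage::READ);
    if (!fs.isOpened()) {
        std::cerr << "cannot open trans_rect_config.yaml" << std::endl;
        return 1;
    }
    int transMode = 0, transRate = 0, ipLastSegment = 0, depthMode = 0;
    fs["Transmode"] >> transMode;          // assumed key name
    fs["Transrate"] >> transRate;          // assumed key name
    fs["IpLastSegment"] >> ipLastSegment;  // assumed key name
    fs["Depthmode"] >> depthMode;          // assumed key name
    std::cout << "Transmode=" << transMode << " Transrate=" << transRate
              << " IpLastSegment=" << ipLastSegment << " Depthmode=" << depthMode << std::endl;
    return 0;
}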

@ -0,0 +1,29 @@
#include <UnitreeCameraSDK.hpp>
#include <unistd.h>
int main(int argc, char *argv[])
{
UnitreeCamera cam("trans_rect_config.yaml"); ///< init camera by device node number
if(!cam.isOpened()) ///< get camera open state
exit(EXIT_FAILURE);
cam.startCapture(false, true);
usleep(500000);
while(cam.isOpened())
{
cv::Mat left,right,feim;
if(!cam.getRectStereoFrame(left,right))
{
usleep(1000);
continue;
}
char key = cv::waitKey(10);
if(key == 27) // press ESC key
break;
}
cam.stopCapture(); ///< stop camera capturing
return 0;
}

@ -0,0 +1,124 @@
// Copyright (c) Ethan Eade, https://bitbucket.org/ethaneade/glwindow
#pragma once
#include <vector>
namespace glwindow {
namespace ButtonEvent {
enum Buttons {
LEFT=1, MIDDLE=2, RIGHT=4, WHEEL=8,
MODKEY_CTRL=16, MODKEY_SHIFT=32,
};
};
namespace KeyCode {
enum Codes {
BACKSPACE=0x8,
TAB=0x9,
ENTER=0xD,
ESCAPE=0x1B,
DEL=0x7F,
SHIFT=0xFF00,
CTRL,
ALT,
SUPER,
CAPSLOCK,
LEFT,
UP,
RIGHT,
DOWN,
};
};
class GLWindow;
struct EventHandler
{
public:
virtual ~EventHandler() {}
virtual bool on_key_down(GLWindow& win, int key) { return false; }
virtual bool on_key_up(GLWindow& win, int key) { return false; }
virtual bool on_text(GLWindow& win, const char *text, int len) { return false; }
virtual bool on_button_down(GLWindow& win, int btn, int state, int x, int y) { return false; }
virtual bool on_button_up(GLWindow& win, int btn, int state, int x, int y) { return false; }
virtual bool on_mouse_move(GLWindow& win, int state, int x, int y) { return false; }
virtual bool on_mouse_wheel(GLWindow& win, int state, int x, int y, int dx, int dy) { return false; }
virtual bool on_resize(GLWindow& win, int x, int y, int w, int h) { return false; }
virtual bool on_close(GLWindow& win) { return false; }
};
// Dispatches to each handler in reverse order until one returns true
class EventDispatcher : public EventHandler
{
public:
const std::vector<EventHandler*> &handlers;
EventDispatcher(const std::vector<EventHandler*> &h) :
handlers(h) {}
bool on_key_down(GLWindow& win, int key);
bool on_key_up(GLWindow& win, int key);
bool on_text(GLWindow& win, const char *text, int len);
bool on_button_down(GLWindow& win, int btn, int state, int x, int y);
bool on_button_up(GLWindow& win, int btn, int state, int x, int y);
bool on_mouse_move(GLWindow& win, int state, int x, int y);
bool on_mouse_wheel(GLWindow& win, int state, int x, int y, int dx, int dy);
bool on_resize(GLWindow& win, int x, int y, int w, int h);
bool on_close(GLWindow& win);
};
class GLWindow
{
public:
GLWindow(int w=-1, int h=-1, const char *title=0);
virtual ~GLWindow();
int width() const;
int height() const;
bool visible() const;
bool alive() const;
bool make_current();
bool push_context();
void pop_context();
struct ScopedContext {
GLWindow &win;
ScopedContext(GLWindow &w) : win(w) {
win.push_context();
}
~ScopedContext() {
win.pop_context();
}
};
void swap_buffers();
void set_size(int w, int h);
void set_position(int x, int y);
void set_title(const char* title);
void add_handler(EventHandler* handler);
bool remove_handler(EventHandler *handler);
void handle_events();
static void handle_all_events();
void destroy();
void draw_text(double x, double y, const char *text, int xywh[4]=0);
protected:
struct SystemState;
SystemState *sys_state;
std::vector<EventHandler*> handlers;
GLWindow *prev_active;
static GLWindow *active_context;
static std::vector<GLWindow*> all_windows;
static void add_window(GLWindow *win);
static bool remove_window(GLWindow *win);
};
}

@ -0,0 +1,518 @@
// Copyright (c) Ethan Eade, https://bitbucket.org/ethaneade/glwindow
#include "glwindow.hpp"
#include <X11/Xlib.h>
#include <X11/keysym.h>
#include <GL/glx.h>
#include <iostream>
using namespace glwindow;
std::vector<GLWindow*> GLWindow::all_windows;
void GLWindow::add_window(GLWindow *win)
{
all_windows.push_back(win);
}
bool GLWindow::remove_window(GLWindow *win)
{
for (size_t i=0; i<all_windows.size(); ++i) {
if (all_windows[i] == win) {
all_windows[i] = all_windows.back();
all_windows.pop_back();
return true;
}
}
return false;
}
void GLWindow::handle_all_events()
{
for (size_t i=0; i<all_windows.size(); ++i)
all_windows[i]->handle_events();
}
GLWindow *GLWindow::active_context = 0;
bool GLWindow::push_context()
{
prev_active = active_context;
return make_current();
}
void GLWindow::pop_context()
{
if (active_context != this)
return;
if (prev_active) {
prev_active->make_current();
} else {
active_context = 0;
}
}
void GLWindow::add_handler(EventHandler* handler)
{
handlers.push_back(handler);
}
bool GLWindow::remove_handler(EventHandler* handler)
{
std::vector<EventHandler*>::reverse_iterator it;
for (it = handlers.rbegin(); it != handlers.rend(); ++it) {
if (*it == handler) {
handlers.erase((it + 1).base()); // (it + 1).base() points at *it; it.base() would erase the wrong element
return true;
}
}
return false;
}
bool EventDispatcher::on_key_down(GLWindow& win, int key) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_key_down(win, key))
return true;
return false;
}
bool EventDispatcher::on_key_up(GLWindow& win, int key) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_key_up(win, key))
return true;
return false;
}
bool EventDispatcher::on_text(GLWindow& win, const char *text, int len) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_text(win, text, len))
return true;
return false;
}
bool EventDispatcher::on_button_down(GLWindow& win, int btn, int state, int x, int y) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_button_down(win, btn, state, x, y))
return true;
return false;
}
bool EventDispatcher::on_button_up(GLWindow& win, int btn, int state, int x, int y) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_button_up(win, btn, state, x, y))
return true;
return false;
}
bool EventDispatcher::on_mouse_move(GLWindow& win, int state, int x, int y) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_mouse_move(win, state, x, y))
return true;
return false;
}
bool EventDispatcher::on_mouse_wheel(GLWindow& win, int state, int x, int y, int dx, int dy) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_mouse_wheel(win, state, x, y, dx, dy))
return true;
return false;
}
bool EventDispatcher::on_resize(GLWindow &win, int x, int y, int w, int h) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_resize(win, x, y, w, h))
return true;
return false;
}
bool EventDispatcher::on_close(GLWindow &win) {
for (int i=handlers.size()-1; i>=0; --i)
if (handlers[i]->on_close(win))
return true;
return false;
}
static XVisualInfo *makeVisualInfo(Display *display)
{
int visualAttributes[] = {
GLX_RED_SIZE, 8,
GLX_GREEN_SIZE, 8,
GLX_BLUE_SIZE, 8,
GLX_DEPTH_SIZE, 16,
GLX_STENCIL_SIZE, 8,
GLX_RGBA,
GLX_DOUBLEBUFFER,
None
};
XVisualInfo *vi = glXChooseVisual(display, DefaultScreen(display), visualAttributes);
return vi;
}
static Window makeWindow(Display *display, XVisualInfo *vi, int width, int height)
{
Window rootWindow = RootWindow(display, vi->screen);
XSetWindowAttributes attributes;
attributes.border_pixel = 0;
attributes.colormap = XCreateColormap(display, rootWindow, vi->visual, AllocNone);
attributes.event_mask = (KeyPressMask | KeyReleaseMask |
ButtonPressMask | ButtonReleaseMask |
PointerMotionMask |
VisibilityChangeMask |
StructureNotifyMask |
ExposureMask);
Window window = XCreateWindow(display,
rootWindow,
0, 0, width, height,
0, vi->depth,
InputOutput,
vi->visual,
CWBorderPixel | CWColormap | CWEventMask,
&attributes);
return window;
}
struct GLWindow::SystemState
{
Display* display;
Window window;
GLXContext context;
Atom delete_atom;
Cursor cursor;
int width, height;
bool visible;
SystemState() {
display = 0;
window = 0;
width = 0;
height = 0;
visible = false;
}
~SystemState() {
if (!display)
return;
if (context) {
destroy();
glXMakeCurrent(display, None, 0);
glXDestroyContext(display, context);
}
XCloseDisplay(display);
}
bool init(int w, int h, const char *title)
{
display = XOpenDisplay(0);
if (!display)
return false;
XVisualInfo *vi = makeVisualInfo(display);
if (!vi)
return false;
context = glXCreateContext(display, vi, 0, True);
if (!context)
return false;
width = w;
height = h;
window = makeWindow(display, vi, width, height);
if (!window)
return false;
XStoreName(display, window, title);
{
XClassHint classHint;
classHint.res_name = const_cast<char*>(title);
char classname[] = "glwindow";
classHint.res_class = classname;
XSetClassHint(display, window, &classHint);
XMapWindow(display, window);
}
XEvent ev;
do {
XNextEvent(display, &ev);
} while (ev.type != MapNotify);
visible = true;
delete_atom = XInternAtom(display, "WM_DELETE_WINDOW", True);
XSetWMProtocols(display, window, &delete_atom, 1);
cursor = XCreateFontCursor(display, ' ');
return true;
}
void destroy()
{
if (window) {
XUnmapWindow(display, window);
XDestroyWindow(display, window);
window = 0;
}
}
void swap_buffers()
{
if (window) {
glXSwapBuffers(display, window);
}
}
void set_title(const char *title)
{
if (window) {
XStoreName(display, window, title);
}
}
bool make_current()
{
if (!window)
return false;
glXMakeCurrent(display, window, context);
return true;
}
};
GLWindow::GLWindow(int w, int h, const char *title)
{
sys_state = new SystemState();
sys_state->init(w, h, title);
all_windows.push_back(this);
}
GLWindow::~GLWindow()
{
for (size_t i=0; i<all_windows.size(); ++i) {
if (all_windows[i] == this) {
all_windows[i] = all_windows.back();
all_windows.pop_back();
break;
}
}
delete sys_state;
}
int GLWindow::width() const
{
return sys_state->width;
}
int GLWindow::height() const
{
return sys_state->height;
}
bool GLWindow::visible() const
{
return sys_state->visible;
}
bool GLWindow::alive() const
{
return 0 != sys_state->window;
}
bool GLWindow::make_current()
{
if (!sys_state->make_current())
return false;
active_context = this;
return true;
}
void GLWindow::swap_buffers()
{
sys_state->swap_buffers();
}
void GLWindow::set_size(int w, int h)
{
if (!alive())
return;
XWindowChanges c;
c.width = w;
c.height = h;
XConfigureWindow(sys_state->display,
sys_state->window,
CWWidth | CWHeight,
&c);
}
void GLWindow::set_position(int x, int y)
{
if (!alive())
return;
XWindowChanges c;
c.x = x;
c.y = y;
XConfigureWindow(sys_state->display,
sys_state->window,
CWX | CWY,
&c);
}
void GLWindow::set_title(const char* title)
{
if (!alive())
return;
sys_state->set_title(title);
}
static int convert_button_state(unsigned int state)
{
int s = 0;
if (state & Button1Mask) s |= ButtonEvent::LEFT;
if (state & Button2Mask) s |= ButtonEvent::MIDDLE;
if (state & Button3Mask) s |= ButtonEvent::RIGHT;
if (state & ControlMask) s |= ButtonEvent::MODKEY_CTRL;
if (state & ShiftMask) s |= ButtonEvent::MODKEY_SHIFT;
return s;
}
static int convert_button(int button)
{
switch (button) {
case Button1: return ButtonEvent::LEFT;
case Button2: return ButtonEvent::MIDDLE;
case Button3: return ButtonEvent::RIGHT;
default: return 0;
}
}
static int convert_keycode(int key)
{
switch (key) {
case XK_BackSpace: return KeyCode::BACKSPACE;
case XK_Tab: return KeyCode::TAB;
case XK_Return: return KeyCode::ENTER;
case XK_Shift_L: return KeyCode::SHIFT;
case XK_Shift_R: return KeyCode::SHIFT;
case XK_Control_L: return KeyCode::CTRL;
case XK_Control_R: return KeyCode::CTRL;
case XK_Alt_L: return KeyCode::ALT;
case XK_Alt_R: return KeyCode::ALT;
case XK_Super_L: return KeyCode::SUPER;
case XK_Super_R: return KeyCode::SUPER;
case XK_Caps_Lock: return KeyCode::CAPSLOCK;
case XK_Delete: return KeyCode::DEL;
case XK_Escape: return KeyCode::ESCAPE;
case XK_Left: return KeyCode::LEFT;
case XK_Up: return KeyCode::UP;
case XK_Right: return KeyCode::RIGHT;
case XK_Down: return KeyCode::DOWN;
}
return key;
}
void GLWindow::handle_events()
{
if (!alive())
return;
XEvent event;
KeySym key;
const int text_size = 64;
char text[text_size];
int len;
EventDispatcher dispatcher(handlers);
int btn, state;
while (XPending(sys_state->display))
{
XNextEvent(sys_state->display, &event);
//std::cerr << "event " << event.type << std::endl;
switch (event.type) {
case ButtonPress:
state = convert_button_state(event.xbutton.state);
if (event.xbutton.button == Button4) {
// MouseWheel down
dispatcher.on_mouse_wheel(*this, state, event.xbutton.x, event.xbutton.y, 0, 1);
} else if (event.xbutton.button == Button5) {
// MouseWheel up
dispatcher.on_mouse_wheel(*this, state, event.xbutton.x, event.xbutton.y, 0, -1);
} else {
btn = convert_button(event.xbutton.button);
dispatcher.on_button_down(*this, btn, state, event.xbutton.x, event.xbutton.y);
}
break;
case ButtonRelease:
if (event.xbutton.button == Button4 ||
event.xbutton.button == Button5)
break;
btn = convert_button(event.xbutton.button);
state = convert_button_state(event.xbutton.state);
dispatcher.on_button_up(*this, btn, state, event.xbutton.x, event.xbutton.y);
break;
case MotionNotify:
state = convert_button_state(event.xbutton.state);
dispatcher.on_mouse_move(*this, state, event.xmotion.x, event.xmotion.y);
break;
case KeyPress:
len = XLookupString(&event.xkey, text, text_size-1, &key, 0);
dispatcher.on_key_down(*this, convert_keycode(key));
if (len > 0) {
text[len] = 0;
dispatcher.on_text(*this, text, len);
}
break;
case KeyRelease:
XLookupString(&event.xkey, 0, 0, &key, 0);
dispatcher.on_key_up(*this, convert_keycode(key));
break;
case ConfigureNotify:
sys_state->width = event.xconfigure.width;
sys_state->height = event.xconfigure.height;
dispatcher.on_resize(*this, event.xconfigure.x, event.xconfigure.y,
sys_state->width, sys_state->height);
break;
case VisibilityNotify:
if (event.xvisibility.state == VisibilityFullyObscured)
sys_state->visible = false;
else
sys_state->visible = true;
break;
case DestroyNotify:
//std::cerr << "DestroyNotify" << std::endl;
//sys_state->window = 0;
break;
case Expose:
//std::cerr << "Expose" << std::endl;
break;
case ClientMessage:
if (event.xclient.data.l[0] == (int)sys_state->delete_atom) {
if (!dispatcher.on_close(*this))
destroy();
}
break;
default:
break;
}
}
}
void GLWindow::destroy()
{
sys_state->destroy();
}

@ -0,0 +1,181 @@
// Copyright (c) Ethan Eade, https://bitbucket.org/ethaneade/glwindow
#include "scenewindow.hpp"
#include <GL/gl.h>
#include <cmath>
#include <iostream>
using namespace glwindow;
SceneWindow::SceneWindow(int width, int height, const char *title)
: win(width, height, title)
{
dragging = false;
drawing = false;
win.add_handler(this);
}
SceneWindow::~SceneWindow()
{
}
void SceneWindow::update()
{
win.handle_events();
}
SceneWindow::Viewpoint::Viewpoint()
{
target[0] = -0.05;
target[1] = -0.75;
target[2] = 0.;
azimuth = 0.;
elevation = 0.0;
distance = 8.0;
}
static void set_viewpoint(const SceneWindow::Viewpoint &vp)
{
const double RAD_TO_DEG = 180.0 / 3.141592653589793;
glTranslated(0,0,vp.distance);
glRotated(vp.elevation * RAD_TO_DEG, 1, 0, 0);
glRotated(vp.azimuth * RAD_TO_DEG, 0, 1, 0);
glTranslated(-vp.target[0], -vp.target[1], -vp.target[2]);
}
bool SceneWindow::start_draw()
{
if (!win.alive() || drawing)
return false;
drawing = true;
win.push_context();
glPushAttrib(GL_COLOR_BUFFER_BIT | GL_CURRENT_BIT | GL_ENABLE_BIT);
glViewport(0, 0, win.width(), win.height());
double aspect = (double)win.width() / (double)std::max(win.height(),1);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
double znear = 0.01;
double zfar = 100.0;
double fy = 0.6 * znear;
double fx = aspect * fy;
glFrustum(-fx,fx,-fy,fy, znear, zfar);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glScaled(1, -1, -1);
set_viewpoint(viewpoint);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glDisable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
return true;
}
void SceneWindow::finish_draw()
{
if (!drawing)
return;
glPopAttrib();
glFlush();
win.swap_buffers();
win.handle_events();
win.pop_context();
drawing = false;
}
bool SceneWindow::on_key_down(GLWindow& win, int key)
{
return true;
}
bool SceneWindow::on_button_down(GLWindow& win, int btn, int state, int x, int y)
{
if (dragging)
return false;
//std::cerr << "down " << btn << std::endl;
drag_btn = btn;
x0 = x;
y0 = y;
vp0 = viewpoint;
inv_w0 = 1.0 / win.width();
inv_h0 = 1.0 / win.height();
dragging = true;
return true;
}
bool SceneWindow::on_button_up(GLWindow& win, int btn, int state, int x, int y)
{
//std::cerr << "up " << btn << std::endl;
dragging = false;
return true;
}
bool SceneWindow::on_mouse_move(GLWindow& win, int state, int x, int y)
{
int idx = x - x0;
int idy = y - y0;
double dx = idx * inv_w0;
double dy = idy * inv_w0;
//std::cerr << dx << ", " << dy << std::endl;
if (!dragging)
return false;
if (drag_btn == ButtonEvent::LEFT) {
viewpoint.azimuth = vp0.azimuth - dx * 4.0;
viewpoint.elevation = vp0.elevation + dy * 4.0;
return true;
} else if (drag_btn == ButtonEvent::RIGHT) {
viewpoint.distance = ::exp(dy * 4.0) * vp0.distance;
return true;
} else if (drag_btn == ButtonEvent::MIDDLE) {
double sa = ::sin(-vp0.azimuth);
double ca = ::cos(-vp0.azimuth);
double se = ::sin(vp0.elevation);
double ce = ::cos(vp0.elevation);
double tx = -idx * 0.003;
double ty = -idy * 0.003;
double dtx = ca * tx - se*sa*ty;
double dty = ce * ty;
double dtz = -sa*tx - se*ca*ty;
double r = vp0.distance;
viewpoint.target[0] = vp0.target[0] + r*dtx;
viewpoint.target[1] = vp0.target[1] + r*dty;
viewpoint.target[2] = vp0.target[2] + r*dtz;
return true;
}
return false;
}
bool SceneWindow::on_mouse_wheel(GLWindow& win, int state, int x, int y, int dx, int dy)
{
viewpoint.distance *= ::exp(dy * -0.08);
return true;
}
bool SceneWindow::on_resize(GLWindow& win, int x, int y, int w, int h)
{
return false;
}

@ -0,0 +1,49 @@
// Copyright (c) Ethan Eade, https://bitbucket.org/ethaneade/glwindow
#pragma once
#include "glwindow.hpp"
namespace glwindow
{
class SceneWindow : public EventHandler
{
public:
struct Viewpoint
{
double target[3];
double azimuth, elevation, distance;
Viewpoint();
};
SceneWindow(int width, int height, const char *title);
virtual ~SceneWindow();
void update();
bool start_draw();
void finish_draw();
GLWindow win;
Viewpoint viewpoint;
protected:
bool on_key_down(GLWindow& win, int key);
bool on_button_down(GLWindow& win, int btn, int state, int x, int y);
bool on_button_up(GLWindow& win, int btn, int state, int x, int y);
bool on_mouse_move(GLWindow& win, int state, int x, int y);
bool on_mouse_wheel(GLWindow& win, int state, int x, int y, int dx, int dy);
bool on_resize(GLWindow& win, int x, int y, int w, int h);
bool dragging;
int drag_btn;
int x0, y0;
double inv_w0, inv_h0;
Viewpoint vp0;
bool drawing;
};
}