Compare commits


No commits in common. '3b83e60352f2dd6914d13afc1c16182213b24cdb' and 'a529e8291263447531b03b27b0e232e9925e0be6' have entirely different histories.

@@ -1,4 +1,11 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#include "GPSManager.h"

@@ -1,3 +1,11 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#pragma once

@@ -1,4 +1,11 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#pragma once

@@ -1,4 +1,11 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#include "GPSProvider.h"

@@ -1,3 +1,12 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#pragma once

@@ -1,4 +1,11 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#include "RTCMMavlink.h"

@@ -1,4 +1,11 @@
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#pragma once

@@ -1,4 +1,35 @@
/****************************************************************************
*
* Copyright (c) 2016 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/**
* @file definitions.h

@@ -1,4 +1,35 @@
/****************************************************************************
*
* Copyright (c) 2012-2014 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
#pragma once
#include <stdint.h>

@@ -1,4 +1,37 @@
/****************************************************************************
*
* Copyright (c) 2012-2014 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/* Auto-generated by genmsg_cpp from file /home/beat/px4/src/Firmware/msg/vehicle_gps_position.msg */
#pragma once

@@ -1,575 +0,0 @@
/****************************************************************************
*
* (c) 2019 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#include "PairingManager.h"
#include "SettingsManager.h"
#include "MicrohardManager.h"
#include "QGCApplication.h"
#include "QGCCorePlugin.h"
#include <QSettings>
#include <QJsonObject>
#include <QStandardPaths>
#include <QMutexLocker>
QGC_LOGGING_CATEGORY(PairingManagerLog, "PairingManagerLog")
static const char* jsonFileName = "pairing.json";
//-----------------------------------------------------------------------------
static QString
random_string(uint length)
{
auto randchar = []() -> char
{
const char charset[] =
"0123456789"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz";
const uint max_index = (sizeof(charset) - 1);
return charset[static_cast<uint>(rand()) % max_index];
};
std::string str(length, 0);
std::generate_n(str.begin(), length, randchar);
return QString::fromStdString(str);
}
//-----------------------------------------------------------------------------
PairingManager::PairingManager(QGCApplication* app, QGCToolbox* toolbox)
: QGCTool(app, toolbox)
, _aes("J6+KuWh9K2!hG(F'", 0x368de30e8ec063ce)
{
_jsonFileName = QDir::temp().filePath(jsonFileName);
connect(this, &PairingManager::parsePairingJson, this, &PairingManager::_parsePairingJson);
connect(this, &PairingManager::setPairingStatus, this, &PairingManager::_setPairingStatus);
connect(this, &PairingManager::startUpload, this, &PairingManager::_startUpload);
}
//-----------------------------------------------------------------------------
PairingManager::~PairingManager()
{
}
//-----------------------------------------------------------------------------
void
PairingManager::setToolbox(QGCToolbox *toolbox)
{
QGCTool::setToolbox(toolbox);
_updatePairedDeviceNameList();
emit pairedListChanged();
}
//-----------------------------------------------------------------------------
void
PairingManager::_pairingCompleted(QString name)
{
_writeJson(_jsonDoc, _pairingCacheFile(name));
_remotePairingMap["NM"] = name;
_lastPaired = name;
_updatePairedDeviceNameList();
emit pairedListChanged();
emit pairedVehicleChanged();
//_app->informationMessageBoxOnMainThread("", tr("Paired with %1").arg(name));
setPairingStatus(PairingSuccess, tr("Pairing Successful"));
}
//-----------------------------------------------------------------------------
void
PairingManager::_connectionCompleted(QString /*name*/)
{
//QString pwd = _remotePairingMap["PWD"].toString();
//_toolbox->microhardManager()->switchToConnectionEncryptionKey(pwd);
//_app->informationMessageBoxOnMainThread("", tr("Connected to %1").arg(name));
setPairingStatus(PairingConnected, tr("Connection Successful"));
}
//-----------------------------------------------------------------------------
void
PairingManager::_startUpload(QString pairURL, QJsonDocument jsonDoc)
{
QMutexLocker lock(&_uploadMutex);
if (_uploadManager != nullptr) {
return;
}
_uploadManager = new QNetworkAccessManager(this);
QString str = jsonDoc.toJson(QJsonDocument::JsonFormat::Compact);
qCDebug(PairingManagerLog) << "Starting upload to: " << pairURL << " " << str;
_uploadData = QString::fromStdString(_aes.encrypt(str.toStdString()));
_uploadURL = pairURL;
_startUploadRequest();
}
//-----------------------------------------------------------------------------
void
PairingManager::_startUploadRequest()
{
QNetworkRequest req;
req.setUrl(QUrl(_uploadURL));
req.setHeader(QNetworkRequest::ContentTypeHeader, "application/x-www-form-urlencoded");
QNetworkReply *reply = _uploadManager->post(req, _uploadData.toUtf8());
connect(reply, &QNetworkReply::finished, this, &PairingManager::_uploadFinished);
}
//-----------------------------------------------------------------------------
void
PairingManager::_stopUpload()
{
QMutexLocker lock(&_uploadMutex);
if (_uploadManager != nullptr) {
delete _uploadManager;
_uploadManager = nullptr;
}
}
//-----------------------------------------------------------------------------
void
PairingManager::_uploadFinished()
{
QMutexLocker lock(&_uploadMutex);
QNetworkReply* reply = qobject_cast<QNetworkReply*>(QObject::sender());
if (reply) {
if (_uploadManager != nullptr) {
if (reply->error() == QNetworkReply::NoError) {
qCDebug(PairingManagerLog) << "Upload finished.";
QByteArray bytes = reply->readAll();
QString str = QString::fromUtf8(bytes.data(), bytes.size());
qCDebug(PairingManagerLog) << "Reply: " << str;
auto a = str.split(QRegExp("\\s+"));
if (a[0] == "Accepted" && a.length() > 1) {
_pairingCompleted(a[1]);
} else if (a[0] == "Connected" && a.length() > 1) {
_connectionCompleted(a[1]);
} else if (a[0] == "Connection" && a.length() > 1) {
setPairingStatus(PairingConnectionRejected, tr("Connection Rejected"));
qCDebug(PairingManagerLog) << "Connection error: " << str;
} else {
setPairingStatus(PairingRejected, tr("Pairing Rejected"));
qCDebug(PairingManagerLog) << "Pairing error: " << str;
}
_uploadManager->deleteLater();
_uploadManager = nullptr;
} else {
if(++_pairRetryCount > 3) {
qCDebug(PairingManagerLog) << "Giving up";
setPairingStatus(PairingError, tr("No Response From Vehicle"));
_uploadManager->deleteLater();
_uploadManager = nullptr;
} else {
qCDebug(PairingManagerLog) << "Upload error: " + reply->errorString();
_startUploadRequest();
}
}
}
}
}
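// Reply format assumed by _uploadFinished() above (reconstructed from the parsing code,
// not from a protocol spec): the vehicle answers with a short whitespace-separated
// status line, e.g.
//     "Accepted <vehicle-name>"   -> pairing accepted, the pairing data is cached locally
//     "Connected <vehicle-name>"  -> connection to an already-paired vehicle succeeded
//     "Connection ..."            -> connection rejected
// Any other first token is treated as a rejected pairing attempt.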
//-----------------------------------------------------------------------------
void
PairingManager::_parsePairingJsonFile()
{
QFile file(_jsonFileName);
file.open(QIODevice::ReadOnly | QIODevice::Text);
QString json = file.readAll();
file.remove();
file.close();
jsonReceived(json);
}
//-----------------------------------------------------------------------------
void
PairingManager::connectToPairedDevice(QString name)
{
setPairingStatus(PairingConnecting, tr("Connecting to %1").arg(name));
QFile file(_pairingCacheFile(name));
file.open(QIODevice::ReadOnly | QIODevice::Text);
QString json = file.readAll();
jsonReceived(json);
}
//-----------------------------------------------------------------------------
void
PairingManager::removePairedDevice(QString name)
{
QFile file(_pairingCacheFile(name));
file.remove();
_updatePairedDeviceNameList();
emit pairedListChanged();
}
//-----------------------------------------------------------------------------
void
PairingManager::_updatePairedDeviceNameList()
{
_deviceList.clear();
QDirIterator it(_pairingCacheDir().absolutePath(), QDir::Files);
while (it.hasNext()) {
QFileInfo fileInfo(it.next());
_deviceList.append(fileInfo.fileName());
qCDebug(PairingManagerLog) << "Listing: " << fileInfo.fileName();
}
}
//-----------------------------------------------------------------------------
QString
PairingManager::_assumeMicrohardPairingJson()
{
QJsonDocument json;
QJsonObject jsonObject;
jsonObject.insert("LT", "MH");
jsonObject.insert("IP", "192.168.168.10");
jsonObject.insert("AIP", _toolbox->microhardManager()->remoteIPAddr());
jsonObject.insert("CU", _toolbox->microhardManager()->configUserName());
jsonObject.insert("CP", _toolbox->microhardManager()->configPassword());
jsonObject.insert("EK", _toolbox->microhardManager()->encryptionKey());
json.setObject(jsonObject);
return QString(json.toJson(QJsonDocument::Compact));
}
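// For reference (an illustrative sketch, not taken from the original sources): the
// compact JSON produced above uses the short keys parsed by _parsePairingJson(), with
// placeholder values shown here:
//   {"LT":"MH","IP":"192.168.168.10","AIP":"<remote ip>","CU":"<config user>",
//    "CP":"<config password>","EK":"<encryption key>"}
// LT = link type, IP = vehicle pairing address, AIP = remote Microhard IP
// (MicrohardManager::remoteIPAddr()), CU/CP = Microhard configuration credentials,
// EK = encryption key.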
//-----------------------------------------------------------------------------
void
PairingManager::_parsePairingJson(QString jsonEnc)
{
QString json = QString::fromStdString(_aes.decrypt(jsonEnc.toStdString()));
if (json == "") {
json = jsonEnc;
}
qCDebug(PairingManagerLog) << "Parsing JSON: " << json;
_jsonDoc = QJsonDocument::fromJson(json.toUtf8());
if (_jsonDoc.isNull()) {
setPairingStatus(PairingError, tr("Invalid Pairing File"));
qCDebug(PairingManagerLog) << "Failed to create Pairing JSON doc.";
return;
}
if (!_jsonDoc.isObject()) {
setPairingStatus(PairingError, tr("Error Parsing Pairing File"));
qCDebug(PairingManagerLog) << "Pairing JSON is not an object.";
return;
}
QJsonObject jsonObj = _jsonDoc.object();
if (jsonObj.isEmpty()) {
setPairingStatus(PairingError, tr("Error Parsing Pairing File"));
qCDebug(PairingManagerLog) << "Pairing JSON object is empty.";
return;
}
_remotePairingMap = jsonObj.toVariantMap();
QString linkType = _remotePairingMap["LT"].toString();
QString pport = _remotePairingMap["PP"].toString();
if (pport.length()==0) {
pport = "29351";
}
if (linkType.length()==0) {
setPairingStatus(PairingError, tr("Error Parsing Pairing File"));
qCDebug(PairingManagerLog) << "Pairing JSON is malformed.";
return;
}
_toolbox->microhardManager()->switchToPairingEncryptionKey();
QString pairURL = "http://" + _remotePairingMap["IP"].toString() + ":" + pport;
bool connecting = jsonObj.contains("PWD");
QJsonDocument jsonDoc;
if (!connecting) {
pairURL += "/pair";
QString pwd = random_string(8);
// TODO generate certificates
QString cert1 = "";
QString cert2 = "";
jsonObj.insert("PWD", pwd);
jsonObj.insert("CERT1", cert1);
jsonObj.insert("CERT2", cert2);
_jsonDoc.setObject(jsonObj);
if (linkType == "ZT") {
jsonDoc = _createZeroTierPairingJson(cert1);
} else if (linkType == "MH") {
jsonDoc = _createMicrohardPairingJson(pwd, cert1);
}
} else {
pairURL += "/connect";
QString cert2 = _remotePairingMap["CERT2"].toString();
if (linkType == "ZT") {
jsonDoc = _createZeroTierConnectJson(cert2);
} else if (linkType == "MH") {
jsonDoc = _createMicrohardConnectJson(cert2);
}
}
if (linkType == "ZT") {
_toolbox->settingsManager()->appSettings()->enableMicrohard()->setRawValue(false);
_toolbox->settingsManager()->appSettings()->enableTaisync()->setRawValue(false);
emit startUpload(pairURL, jsonDoc);
} else if (linkType == "MH") {
_toolbox->settingsManager()->appSettings()->enableMicrohard()->setRawValue(true);
_toolbox->settingsManager()->appSettings()->enableTaisync()->setRawValue(false);
if (_remotePairingMap.contains("AIP")) {
_toolbox->microhardManager()->setRemoteIPAddr(_remotePairingMap["AIP"].toString());
}
if (_remotePairingMap.contains("CU")) {
_toolbox->microhardManager()->setConfigUserName(_remotePairingMap["CU"].toString());
}
if (_remotePairingMap.contains("CP")) {
_toolbox->microhardManager()->setConfigPassword(_remotePairingMap["CP"].toString());
}
if (_remotePairingMap.contains("EK") && !connecting) {
_toolbox->microhardManager()->setEncryptionKey(_remotePairingMap["EK"].toString());
}
_toolbox->microhardManager()->updateSettings();
emit startUpload(pairURL, jsonDoc);
}
}
//-----------------------------------------------------------------------------
QString
PairingManager::_getLocalIPInNetwork(QString remoteIP, int num)
{
QStringList pieces = remoteIP.split(".");
QString ipPrefix = "";
for (int i = 0; i<num && i<pieces.length(); i++) {
ipPrefix += pieces[i] + ".";
}
const QHostAddress &localhost = QHostAddress(QHostAddress::LocalHost);
for (const QHostAddress &address: QNetworkInterface::allAddresses()) {
if (address.protocol() == QAbstractSocket::IPv4Protocol && address != localhost) {
if (address.toString().startsWith(ipPrefix)) {
return address.toString();
}
}
}
return "";
}
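// Usage sketch (for illustration only, values are assumptions): with remoteIP
// "192.168.168.10", num = 3 matches a local IPv4 address that starts with
// "192.168.168." (used for the Microhard link), while num = 2 only requires the
// "192.168." prefix (used for the ZeroTier link). An empty string is returned when
// no interface matches the prefix.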
//-----------------------------------------------------------------------------
QDir
PairingManager::_pairingCacheDir()
{
const QString spath(QFileInfo(QSettings().fileName()).dir().absolutePath());
QDir dir = spath + QDir::separator() + "PairingCache";
if (!dir.exists()) {
dir.mkpath(".");
}
return dir;
}
//-----------------------------------------------------------------------------
QString
PairingManager::_pairingCacheFile(QString uavName)
{
return _pairingCacheDir().filePath(uavName);
}
//-----------------------------------------------------------------------------
void
PairingManager::_writeJson(QJsonDocument &jsonDoc, QString fileName)
{
QString val = jsonDoc.toJson(QJsonDocument::JsonFormat::Compact);
qCDebug(PairingManagerLog) << "Write json " << val;
QString enc = QString::fromStdString(_aes.encrypt(val.toStdString()));
QFile file(fileName);
file.open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Truncate);
file.write(enc.toUtf8());
file.close();
}
//-----------------------------------------------------------------------------
QJsonDocument
PairingManager::_createZeroTierPairingJson(QString cert1)
{
QString localIP = _getLocalIPInNetwork(_remotePairingMap["IP"].toString(), 2);
QJsonObject jsonObj;
jsonObj.insert("LT", "ZT");
jsonObj.insert("IP", localIP);
jsonObj.insert("P", 14550);
jsonObj.insert("CERT1", cert1);
return QJsonDocument(jsonObj);
}
//-----------------------------------------------------------------------------
QJsonDocument
PairingManager::_createMicrohardPairingJson(QString pwd, QString cert1)
{
QString localIP = _getLocalIPInNetwork(_remotePairingMap["IP"].toString(), 3);
QJsonObject jsonObj;
jsonObj.insert("LT", "MH");
jsonObj.insert("IP", localIP);
jsonObj.insert("P", 14550);
jsonObj.insert("PWD", pwd);
jsonObj.insert("CERT1", cert1);
return QJsonDocument(jsonObj);
}
//-----------------------------------------------------------------------------
QJsonDocument
PairingManager::_createZeroTierConnectJson(QString cert2)
{
QString localIP = _getLocalIPInNetwork(_remotePairingMap["IP"].toString(), 2);
QJsonObject jsonObj;
jsonObj.insert("LT", "ZT");
jsonObj.insert("IP", localIP);
jsonObj.insert("P", 14550);
jsonObj.insert("CERT2", cert2);
return QJsonDocument(jsonObj);
}
//-----------------------------------------------------------------------------
QJsonDocument
PairingManager::_createMicrohardConnectJson(QString cert2)
{
QString localIP = _getLocalIPInNetwork(_remotePairingMap["IP"].toString(), 3);
QJsonObject jsonObj;
jsonObj.insert("LT", "MH");
jsonObj.insert("IP", localIP);
jsonObj.insert("P", 14550);
jsonObj.insert("CERT2", cert2);
return QJsonDocument(jsonObj);
}
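// The four _create*Json() helpers above share one key scheme (summarized here for
// readability; the abbreviations come from the code, the sample value is made up):
//   LT              link type, "ZT" (ZeroTier) or "MH" (Microhard)
//   IP              local IP address in the selected network, e.g. "192.168.168.20"
//   P               port advertised to the vehicle (14550)
//   PWD             password generated during pairing (Microhard pairing request only)
//   CERT1 / CERT2   placeholder certificates (their generation is still a TODO above)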
//-----------------------------------------------------------------------------
QStringList
PairingManager::pairingLinkTypeStrings()
{
//-- Must follow same order as enum LinkType in LinkConfiguration.h
static QStringList list;
int i = 0;
if (!list.size()) {
#if defined QGC_ENABLE_QTNFC
list += tr("NFC");
_nfcIndex = i++;
#endif
#if defined QGC_GST_MICROHARD_ENABLED
list += tr("Microhard");
_microhardIndex = i++;
#endif
}
return list;
}
//-----------------------------------------------------------------------------
void
PairingManager::_setPairingStatus(PairingStatus status, QString statusStr)
{
_status = status;
_statusString = statusStr;
emit pairingStatusChanged();
}
//-----------------------------------------------------------------------------
QString
PairingManager::pairingStatusStr() const
{
return _statusString;
}
#if QGC_GST_MICROHARD_ENABLED
//-----------------------------------------------------------------------------
void
PairingManager::startMicrohardPairing()
{
stopPairing();
_pairRetryCount = 0;
setPairingStatus(PairingActive, tr("Pairing..."));
_parsePairingJson(_assumeMicrohardPairingJson());
}
#endif
//-----------------------------------------------------------------------------
void
PairingManager::stopPairing()
{
#if defined QGC_ENABLE_QTNFC
pairingNFC.stop();
#endif
_stopUpload();
setPairingStatus(PairingIdle, "");
}
#if defined QGC_ENABLE_QTNFC
//-----------------------------------------------------------------------------
void
PairingManager::startNFCScan()
{
stopPairing();
setPairingStatus(PairingActive, tr("Pairing..."));
pairingNFC.start();
}
#endif
//-----------------------------------------------------------------------------
#ifdef __android__
static const char kJniClassName[] {"org/mavlink/qgroundcontrol/QGCActivity"};
//-----------------------------------------------------------------------------
static void jniNFCTagReceived(JNIEnv *envA, jobject thizA, jstring messageA)
{
Q_UNUSED(thizA);
const char *stringL = envA->GetStringUTFChars(messageA, nullptr);
QString ndef = QString::fromUtf8(stringL);
envA->ReleaseStringUTFChars(messageA, stringL);
if (envA->ExceptionCheck())
envA->ExceptionClear();
qCDebug(PairingManagerLog) << "NDEF Tag Received: " << ndef;
qgcApp()->toolbox()->pairingManager()->jsonReceived(ndef);
}
//-----------------------------------------------------------------------------
void PairingManager::setNativeMethods(void)
{
// REGISTER THE C++ FUNCTION WITH JNI
JNINativeMethod javaMethods[] {
{"nativeNFCTagReceived", "(Ljava/lang/String;)V", reinterpret_cast<void *>(jniNFCTagReceived)}
};
QAndroidJniEnvironment jniEnv;
if (jniEnv->ExceptionCheck()) {
jniEnv->ExceptionDescribe();
jniEnv->ExceptionClear();
}
jclass objectClass = jniEnv->FindClass(kJniClassName);
if(!objectClass) {
qWarning() << "Couldn't find class:" << kJniClassName;
return;
}
jint val = jniEnv->RegisterNatives(objectClass, javaMethods, sizeof(javaMethods) / sizeof(javaMethods[0]));
if (val < 0) {
qWarning() << "Error registering methods: " << val;
} else {
qCDebug(PairingManagerLog) << "Native Functions Registered";
}
if (jniEnv->ExceptionCheck()) {
jniEnv->ExceptionDescribe();
jniEnv->ExceptionClear();
}
}
#endif
//-----------------------------------------------------------------------------

@@ -1,153 +0,0 @@
/****************************************************************************
*
* (c) 2019 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#pragma once
#include <QJsonDocument>
#include <QMutex>
#include <QNetworkReply>
#include <QTimer>
#include <QTime>
#include <QVariantMap>
#include "aes.h"
#include "QGCToolbox.h"
#include "QGCLoggingCategory.h"
#include "Fact.h"
#if defined QGC_ENABLE_QTNFC
#include "QtNFC.h"
#endif
#ifdef __android__
#include <jni.h>
#include <QtAndroidExtras/QtAndroidExtras>
#include <QtAndroidExtras/QAndroidJniObject>
#endif
Q_DECLARE_LOGGING_CATEGORY(PairingManagerLog)
class AppSettings;
class QGCApplication;
//-----------------------------------------------------------------------------
class PairingManager : public QGCTool
{
Q_OBJECT
public:
explicit PairingManager (QGCApplication* app, QGCToolbox* toolbox);
~PairingManager () override;
// Override from QGCTool
virtual void setToolbox(QGCToolbox *toolbox) override;
enum PairingStatus {
PairingIdle,
PairingActive,
PairingSuccess,
PairingConnecting,
PairingConnected,
PairingRejected,
PairingConnectionRejected,
PairingError
};
Q_ENUM(PairingStatus)
QStringList pairingLinkTypeStrings ();
QString pairingStatusStr () const;
QStringList pairedDeviceNameList () { return _deviceList; }
PairingStatus pairingStatus () { return _status; }
QString pairedVehicle () { return _lastPaired; }
int nfcIndex () { return _nfcIndex; }
int microhardIndex () { return _microhardIndex; }
bool firstBoot () { return _firstBoot; }
bool errorState () { return _status == PairingRejected || _status == PairingConnectionRejected || _status == PairingError; }
void setStatusMessage (PairingStatus status, QString statusStr) { emit setPairingStatus(status, statusStr); }
void jsonReceived (QString json) { emit parsePairingJson(json); }
void setFirstBoot (bool set) { _firstBoot = set; emit firstBootChanged(); }
#ifdef __android__
static void setNativeMethods (void);
#endif
Q_INVOKABLE void connectToPairedDevice (QString name);
Q_INVOKABLE void removePairedDevice (QString name);
#if defined QGC_ENABLE_QTNFC
Q_INVOKABLE void startNFCScan();
#endif
#if QGC_GST_MICROHARD_ENABLED
Q_INVOKABLE void startMicrohardPairing();
#endif
Q_INVOKABLE void stopPairing();
Q_PROPERTY(QString pairingStatusStr READ pairingStatusStr NOTIFY pairingStatusChanged)
Q_PROPERTY(PairingStatus pairingStatus READ pairingStatus NOTIFY pairingStatusChanged)
Q_PROPERTY(QStringList pairedDeviceNameList READ pairedDeviceNameList NOTIFY pairedListChanged)
Q_PROPERTY(QStringList pairingLinkTypeStrings READ pairingLinkTypeStrings CONSTANT)
Q_PROPERTY(QString pairedVehicle READ pairedVehicle NOTIFY pairedVehicleChanged)
Q_PROPERTY(bool errorState READ errorState NOTIFY pairingStatusChanged)
Q_PROPERTY(int nfcIndex READ nfcIndex CONSTANT)
Q_PROPERTY(int microhardIndex READ microhardIndex CONSTANT)
Q_PROPERTY(bool firstBoot READ firstBoot WRITE setFirstBoot NOTIFY firstBootChanged)
signals:
void startUpload (QString pairURL, QJsonDocument);
void closeConnection ();
void pairingConfigurationsChanged ();
void nameListChanged ();
void pairingStatusChanged ();
void parsePairingJson (QString json);
void setPairingStatus (PairingStatus status, QString pairingStatus);
void pairedListChanged ();
void pairedVehicleChanged ();
void firstBootChanged ();
private slots:
void _startUpload (QString pairURL, QJsonDocument);
void _stopUpload ();
void _startUploadRequest ();
void _parsePairingJson (QString jsonEnc);
void _setPairingStatus (PairingStatus status, QString pairingStatus);
private:
QString _statusString;
QString _jsonFileName;
QString _lastPaired;
QVariantMap _remotePairingMap;
int _nfcIndex = -1;
int _microhardIndex = -1;
int _pairRetryCount = 0;
PairingStatus _status = PairingIdle;
AES _aes;
QJsonDocument _jsonDoc{};
QMutex _uploadMutex{};
QNetworkAccessManager* _uploadManager = nullptr;
QString _uploadURL{};
QString _uploadData{};
bool _firstBoot = true;
QStringList _deviceList;
void _parsePairingJsonFile ();
QJsonDocument _createZeroTierConnectJson (QString cert2);
QJsonDocument _createMicrohardConnectJson (QString cert2);
QJsonDocument _createZeroTierPairingJson (QString cert1);
QJsonDocument _createMicrohardPairingJson (QString pwd, QString cert1);
QString _assumeMicrohardPairingJson ();
void _writeJson (QJsonDocument &jsonDoc, QString fileName);
QString _getLocalIPInNetwork (QString remoteIP, int num);
void _uploadFinished ();
void _uploadError (QNetworkReply::NetworkError code);
void _pairingCompleted (QString name);
void _connectionCompleted (QString name);
QDir _pairingCacheDir ();
QString _pairingCacheFile (QString uavName);
void _updatePairedDeviceNameList ();
#if defined QGC_ENABLE_QTNFC
PairingNFC pairingNFC;
#endif
};

@@ -1,135 +0,0 @@
/****************************************************************************
*
* (c) 2019 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#include "PairingManager.h"
#include "QtNFC.h"
#include "QGCApplication.h"
#include <QSoundEffect>
QGC_LOGGING_CATEGORY(PairingNFCLog, "PairingNFCLog")
#include <QNdefNfcTextRecord>
//-----------------------------------------------------------------------------
PairingNFC::PairingNFC()
{
}
//-----------------------------------------------------------------------------
void
PairingNFC::start()
{
if (manager != nullptr) {
return;
}
qgcApp()->toolbox()->pairingManager()->setStatusMessage(tr("Waiting for NFC connection"));
qCDebug(PairingNFCLog) << "Waiting for NFC connection";
manager = new QNearFieldManager(this);
if (!manager->isAvailable()) {
qWarning() << "NFC not available";
delete manager;
manager = nullptr;
return;
}
QNdefFilter filter;
filter.setOrderMatch(false);
filter.appendRecord<QNdefNfcTextRecord>(1, UINT_MAX);
// type parameter cannot specify substring so filter for "image/" below
filter.appendRecord(QNdefRecord::Mime, QByteArray(), 0, 1);
int result = manager->registerNdefMessageHandler(filter, this, SLOT(handleMessage(QNdefMessage, QNearFieldTarget*)));
if (result < 0)
qWarning() << "Platform does not support NDEF message handler registration";
manager->startTargetDetection();
connect(manager, &QNearFieldManager::targetDetected, this, &PairingNFC::targetDetected);
connect(manager, &QNearFieldManager::targetLost, this, &PairingNFC::targetLost);
}
//-----------------------------------------------------------------------------
void
PairingNFC::stop()
{
if (manager != nullptr) {
qgcApp()->toolbox()->pairingManager()->setStatusMessage("");
qCDebug(PairingNFCLog) << "NFC: Stop";
manager->stopTargetDetection();
delete manager;
manager = nullptr;
}
}
//-----------------------------------------------------------------------------
void
PairingNFC::targetDetected(QNearFieldTarget *target)
{
if (!target) {
return;
}
qgcApp()->toolbox()->pairingManager()->setStatusMessage(tr("Device detected"));
qCDebug(PairingNFCLog) << "NFC: Device detected";
connect(target, &QNearFieldTarget::ndefMessageRead, this, &PairingNFC::handlePolledNdefMessage);
connect(target, SIGNAL(error(QNearFieldTarget::Error,QNearFieldTarget::RequestId)),
this, SLOT(targetError(QNearFieldTarget::Error,QNearFieldTarget::RequestId)));
connect(target, &QNearFieldTarget::requestCompleted, this, &PairingNFC::handleRequestCompleted);
manager->setTargetAccessModes(QNearFieldManager::NdefReadTargetAccess);
QNearFieldTarget::RequestId id = target->readNdefMessages();
if (target->waitForRequestCompleted(id)) {
qCDebug(PairingNFCLog) << "requestCompleted ";
QVariant res = target->requestResponse(id);
qCDebug(PairingNFCLog) << "Response: " << res.toString();
}
}
//-----------------------------------------------------------------------------
void
PairingNFC::handleRequestCompleted(const QNearFieldTarget::RequestId& id)
{
Q_UNUSED(id);
qCDebug(PairingNFCLog) << "handleRequestCompleted ";
}
//-----------------------------------------------------------------------------
void
PairingNFC::targetError(QNearFieldTarget::Error error, const QNearFieldTarget::RequestId& id)
{
Q_UNUSED(id);
qCDebug(PairingNFCLog) << "Error: " << error;
}
//-----------------------------------------------------------------------------
void
PairingNFC::targetLost(QNearFieldTarget *target)
{
qgcApp()->toolbox()->pairingManager()->setStatusMessage(tr("Device removed"));
qCDebug(PairingNFCLog) << "NFC: Device removed";
if (target) {
target->deleteLater();
}
}
//-----------------------------------------------------------------------------
void
PairingNFC::handlePolledNdefMessage(QNdefMessage message)
{
qCDebug(PairingNFCLog) << "NFC: Handle NDEF message";
// QNearFieldTarget *target = qobject_cast<QNearFieldTarget *>(sender());
for (const QNdefRecord &record : message) {
if (record.isRecordType<QNdefNfcTextRecord>()) {
QNdefNfcTextRecord textRecord(record);
qgcApp()->toolbox()->pairingManager()->jsonReceived(textRecord.text());
}
}
}
//-----------------------------------------------------------------------------

@@ -1,45 +0,0 @@
/****************************************************************************
*
* (c) 2019 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#pragma once
#include <QObject>
#include <QNdefMessage>
#include <QNearFieldManager>
#include <QNearFieldTarget>
#include "QGCLoggingCategory.h"
Q_DECLARE_LOGGING_CATEGORY(PairingNFCLog)
class PairingNFC : public QObject
{
Q_OBJECT
public:
PairingNFC();
void start();
void stop();
signals:
void parsePairingJson(QString json);
public slots:
void targetDetected(QNearFieldTarget *target);
void targetLost(QNearFieldTarget *target);
void handlePolledNdefMessage(QNdefMessage message);
void targetError(QNearFieldTarget::Error error, const QNearFieldTarget::RequestId& id);
void handleRequestCompleted(const QNearFieldTarget::RequestId& id);
private:
bool _exitThread = false; ///< true: signal thread to exit
QNearFieldManager *manager = nullptr;
};

@@ -1,154 +0,0 @@
#include "aes.h"
#include <memory>
#include <openssl/aes.h>
#include <openssl/bio.h>
#include <zlib.h>
//-----------------------------------------------------------------------------
AES::AES(std::string password, unsigned long long salt)
{
int nrounds = 5;
unsigned char key[32], iv[32];
/*
* Gen key & IV for AES 256 CBC mode. A SHA1 digest is used to hash the supplied key material.
* nrounds is the number of times we hash the material. More rounds are more secure but
* slower.
*/
EVP_BytesToKey(EVP_aes_256_cbc(), EVP_sha1(),
reinterpret_cast<const unsigned char*>(&salt),
reinterpret_cast<const unsigned char*>(password.c_str()),
static_cast<int>(password.length()),
nrounds, key, iv);
#if OPENSSL_VERSION_NUMBER >= 0x1010000fL
encCipherContext = EVP_CIPHER_CTX_new();
decCipherContext = EVP_CIPHER_CTX_new();
EVP_CIPHER_CTX_init(encCipherContext);
EVP_EncryptInit_ex(encCipherContext, EVP_aes_256_cbc(), nullptr, key, iv);
EVP_CIPHER_CTX_init(decCipherContext);
EVP_DecryptInit_ex(decCipherContext, EVP_aes_256_cbc(), nullptr, key, iv);
#else
EVP_CIPHER_CTX_init(&encCipherContext);
EVP_EncryptInit_ex(&encCipherContext, EVP_aes_256_cbc(), nullptr, key, iv);
EVP_CIPHER_CTX_init(&decCipherContext);
EVP_DecryptInit_ex(&decCipherContext, EVP_aes_256_cbc(), nullptr, key, iv);
#endif
}
//-----------------------------------------------------------------------------
AES::~AES()
{
#if OPENSSL_VERSION_NUMBER >= 0x1010000fL
EVP_CIPHER_CTX_free(encCipherContext);
EVP_CIPHER_CTX_free(decCipherContext);
#else
EVP_CIPHER_CTX_cleanup(&encCipherContext);
EVP_CIPHER_CTX_cleanup(&decCipherContext);
#endif
}
//-----------------------------------------------------------------------------
std::string
AES::encrypt(std::string plainText)
{
unsigned long sourceLen = static_cast<unsigned long>(plainText.length() + 1);
unsigned long destLen = sourceLen * 2;
unsigned char* compressed = new unsigned char[destLen];
int err = compress2(compressed, &destLen,
reinterpret_cast<const unsigned char *>(plainText.c_str()),
sourceLen, 9);
if (err != Z_OK) {
return {};
}
int pLen = static_cast<int>(destLen);
int cLen = pLen + AES_BLOCK_SIZE;
int fLen = 0;
unsigned char* cipherText = new unsigned char[cLen];
#if OPENSSL_VERSION_NUMBER >= 0x1010000fL
EVP_EncryptInit_ex(encCipherContext, nullptr, nullptr, nullptr, nullptr);
EVP_EncryptUpdate(encCipherContext, cipherText, &cLen, compressed, pLen);
EVP_EncryptFinal_ex(encCipherContext, cipherText + cLen, &fLen);
#else
EVP_EncryptInit_ex(&encCipherContext, nullptr, nullptr, nullptr, nullptr);
EVP_EncryptUpdate(&encCipherContext, cipherText, &cLen, compressed, pLen);
EVP_EncryptFinal_ex(&encCipherContext, cipherText + cLen, &fLen);
#endif
std::vector<unsigned char> data(cipherText, cipherText + cLen + fLen);
std::string res = base64Encode(data);
delete[] cipherText;
delete[] compressed;
return res;
}
//-----------------------------------------------------------------------------
std::string
AES::decrypt(std::string cipherText)
{
int fLen = 0;
std::vector<unsigned char> text = base64Decode(cipherText);
int pLen = static_cast<int>(text.size());
unsigned char* plainText = new unsigned char[pLen];
#if OPENSSL_VERSION_NUMBER >= 0x1010000fL
EVP_DecryptInit_ex(decCipherContext, nullptr, nullptr, nullptr, nullptr);
EVP_DecryptUpdate(decCipherContext, plainText, &pLen, text.data(), pLen);
EVP_DecryptFinal_ex(decCipherContext, plainText + pLen, &fLen);
#else
EVP_DecryptInit_ex(&decCipherContext, nullptr, nullptr, nullptr, nullptr);
EVP_DecryptUpdate(&decCipherContext, plainText, &pLen, text.data(), pLen);
EVP_DecryptFinal_ex(&decCipherContext, plainText + pLen, &fLen);
#endif
unsigned long destLen = static_cast<unsigned long>((pLen + fLen) * 2);
unsigned char* uncompressed = new unsigned char[destLen];
int err = uncompress(uncompressed, &destLen, plainText, static_cast<unsigned long>(pLen + fLen));
if (err != Z_OK) {
return {};
}
std::string res(reinterpret_cast<char*>(uncompressed));
delete[] uncompressed;
return res;
}
//-----------------------------------------------------------------------------
struct BIOFreeAll { void operator()(BIO* p) { BIO_free_all(p); } };
std::string
AES::base64Encode(const std::vector<unsigned char>& binary)
{
std::unique_ptr<BIO, BIOFreeAll> b64(BIO_new(BIO_f_base64()));
BIO_set_flags(b64.get(), BIO_FLAGS_BASE64_NO_NL);
BIO* sink = BIO_new(BIO_s_mem());
BIO_push(b64.get(), sink);
BIO_write(b64.get(), binary.data(), static_cast<int>(binary.size()));
BIO_ctrl(b64.get(), BIO_CTRL_FLUSH, 0, nullptr);
const char* encoded;
const unsigned long len = static_cast<unsigned long>(BIO_ctrl(sink, BIO_CTRL_INFO, 0, &encoded));
return std::string(encoded, len);
}
//-----------------------------------------------------------------------------
std::vector<unsigned char>
AES::base64Decode(std::string encoded)
{
std::unique_ptr<BIO, BIOFreeAll> b64(BIO_new(BIO_f_base64()));
BIO_set_flags(b64.get(), BIO_FLAGS_BASE64_NO_NL);
BIO* source = BIO_new_mem_buf(encoded.c_str(), -1); // read-only source
BIO_push(b64.get(), source);
const unsigned long maxlen = encoded.length() / 4 * 3 + 1;
std::vector<unsigned char> decoded(maxlen);
const unsigned long len = static_cast<unsigned long>(BIO_read(b64.get(), decoded.data(), static_cast<int>(maxlen)));
decoded.resize(len);
return decoded;
}
//-----------------------------------------------------------------------------
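// Minimal round-trip sketch (not part of the original file; the password and salt are
// made-up examples). Both peers must construct AES with the same password/salt pair,
// since EVP_BytesToKey derives the AES-256-CBC key and IV from them.
static inline bool aesRoundTripExample()
{
    AES codec("example-password", 0x0123456789abcdefULL);  // hypothetical credentials
    const std::string original = "{\"LT\":\"MH\"}";
    const std::string cipher = codec.encrypt(original);     // zlib-compress, encrypt, base64-encode
    return codec.decrypt(cipher) == original;                // decrypt reverses all three steps
}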

@@ -1,35 +0,0 @@
#ifndef AES_H
#define AES_H
#pragma once
#include <string>
#include <vector>
#include <openssl/evp.h>
class AES
{
public:
AES(std::string password, unsigned long long salt);
~AES();
std::string encrypt(std::string plainText);
std::string decrypt(std::string cipherText);
private:
#if OPENSSL_VERSION_NUMBER >= 0x1010000fL
EVP_CIPHER_CTX *encCipherContext = nullptr;
EVP_CIPHER_CTX *decCipherContext = nullptr;
#else
EVP_CIPHER_CTX encCipherContext;
EVP_CIPHER_CTX decCipherContext;
#endif
std::string base64Encode(const std::vector<unsigned char>& binary);
std::vector<unsigned char> base64Decode(std::string encoded);
};
#endif // AES_H

@@ -1,3 +1,43 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2010-2013, University of Nizhny Novgorod, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
//Modified from latentsvm module's "lsvmc_featurepyramid.cpp".
@@ -37,92 +77,60 @@
// RESULT
// Error status
*/
// Extract the feature map from the input image
int getFeatureMaps(const IplImage* image, const int k, CvLSVMFeatureMapCaskade **map)
{
// feature map width and height (in cells)
int sizeX, sizeY;
// number of features per cell
int p, px;
// size of one feature-map row
int stringSize;
// image height, width and number of channels
int p, px, stringSize;
int height, width, numChannels;
// loop variables
int i, j, kk, c, ii, jj, d;
// gradient data pointers
float * datadx, * datady;
// angle-related variables
int ch;
float magnitude, x, y, tx, ty;
// gradient images
IplImage * dx, * dy;
// indices of the nearest cell centers
int *nearest;
// weight vector
float *w;
// linear interpolation coefficients
float a_x, b_x;
float *w, a_x, b_x;
// derivative kernel
float kernel[3] = {-1.f, 0.f, 1.f};
// derivative kernels in OpenCV matrix form
CvMat kernel_dx = cvMat(1, 3, CV_32F, kernel);
CvMat kernel_dy = cvMat(3, 1, CV_32F, kernel);
// gradient magnitude buffer
float * r;
// quantized orientation indices used in later processing
int * alfa;
// sector boundary angle tables
float boundary_x[NUM_SECTOR + 1];
float boundary_y[NUM_SECTOR + 1];
// variables used to find the maximum
float max, dotProd;
int maxi;
// get the image height and width
height = image->height;
width = image->width;
width = image->width ;
// get the number of channels
numChannels = image->nChannels;
// create containers for the gradient images
dx = cvCreateImage(cvSize(image->width, image->height),
IPL_DEPTH_32F, 3);
dy = cvCreateImage(cvSize(image->width, image->height),
IPL_DEPTH_32F, 3);
// feature map width and height derived from the cell size k
sizeX = width / k;
sizeY = height / k;
// number of features per cell, based on NUM_SECTOR
px = 3 * NUM_SECTOR;
p = px;
// total storage needed for one row of the feature map
stringSize = sizeX * p;
// allocate the feature map object
allocFeatureMapObject(map, sizeX, sizeY, p);
// compute the image gradients with the [-1, 0, 1] derivative kernels
cvFilter2D(image, dx, &kernel_dx, cvPoint(-1, 0));
cvFilter2D(image, dy, &kernel_dy, cvPoint(0, -1));
// compute the sector boundary angles
float arg_vector;
for(i = 0; i <= NUM_SECTOR; i++)
{
// angle of boundary i: PI is split into NUM_SECTOR equal parts
arg_vector = ( (float) i ) * ( (float)(PI) / (float)(NUM_SECTOR) );
// corresponding cosine and sine values
boundary_x[i] = cosf(arg_vector);
boundary_y[i] = sinf(arg_vector);
}
}/*for(i = 0; i <= NUM_SECTOR; i++) */
r = (float *)malloc( sizeof(float) * (width * height));
alfa = (int *)malloc( sizeof(int ) * (width * height * 2));
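// Worked example (illustrative; NUM_SECTOR = 9 is the usual FHOG setting and is an
// assumption here): the loop above splits the half circle PI into boundaries of
// PI/9 = 20 degrees each, and every cell later stores px = 3 * NUM_SECTOR = 27
// orientation features (contrast-sensitive plus contrast-insensitive bins).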
@@ -279,63 +287,43 @@ int getFeatureMaps(const IplImage* image, const int k, CvLSVMFeatureMapCaskade *
// RESULT
// Error status
*/
// Normalize and truncate the feature map
int normalizeAndTruncate(CvLSVMFeatureMapCaskade *map, const float alfa)
{
int i, j, ii; // loop variables
int sizeX, sizeY, p, pos, pp, xp; // working variables
float * partOfNorm; // norm of C(i, j)
float * newData; // normalized data
float valOfNorm; // norm value
int i,j, ii;
int sizeX, sizeY, p, pos, pp, xp, pos1, pos2;
float * partOfNorm; // norm of C(i, j)
float * newData;
float valOfNorm;
sizeX = map->sizeX; // feature map width
sizeY = map->sizeY; // feature map height
partOfNorm = (float *)malloc (sizeof(float) * (sizeX * sizeY)); // storage for the cell norms
sizeX = map->sizeX;
sizeY = map->sizeY;
partOfNorm = (float *)malloc (sizeof(float) * (sizeX * sizeY));
p = NUM_SECTOR; // number of orientation sectors
xp = NUM_SECTOR * 3; // features per cell before normalization
pp = NUM_SECTOR * 12; // features per cell after normalization
p = NUM_SECTOR;
xp = NUM_SECTOR * 3;
pp = NUM_SECTOR * 12;
// compute the norm of every cell
for(i = 0; i < sizeX * sizeY; i++)
{
valOfNorm = 0.0f; // reset the norm
pos = i * map->numFeatures; // start position of this cell
for(j = 0; j < p; j++) // iterate over the cell's features
valOfNorm = 0.0f;
pos = i * map->numFeatures;
for(j = 0; j < p; j++)
{
valOfNorm += map->map[pos + j] * map->map[pos + j]; // accumulate the squared features
valOfNorm += map->map[pos + j] * map->map[pos + j];
}/*for(j = 0; j < p; j++)*/
partOfNorm[i] = valOfNorm; // store the cell norm
partOfNorm[i] = valOfNorm;
}/*for(i = 0; i < sizeX * sizeY; i++)*/
// shrink the feature map (drop the border cells)
sizeX -= 2;
sizeY -= 2;
// allocate storage for the normalized data
newData = (float *)malloc (sizeof(float) * (sizeX * sizeY * pp));
// normalization
//normalization
for(i = 1; i <= sizeY; i++)
{
for(j = 1; j <= sizeX; j++)
{
valOfNorm = sqrtf( // norm of the current 2x2 block
partOfNorm[(i )*(sizeX + 2) + (j )] +
partOfNorm[(i )*(sizeX + 2) + (j + 1)] +
partOfNorm[(i + 1)*(sizeX + 2) + (j )] +
partOfNorm[(i + 1)*(sizeX + 2) + (j + 1)]) + FLT_EPSILON;
pos1 = (i ) * (sizeX + 2) * xp + (j ) * xp;
pos2 = (i-1) * (sizeX ) * pp + (j-1) * pp;
for(ii = 0; ii < p; ii++)
{
newData[pos2 + ii ] = map->map[pos1 + ii ] / valOfNorm; // normalize
}/*for(ii = 0; ii < p; ii++)*/
for(ii = 0; ii < 2 * p; ii++)
{
newData[pos2 + ii + p * 4] = map->map[pos1 + ii + p] / valOfNorm; // normalize
}/*for(ii = 0; ii < 2 * p; ii++)*/
valOfNorm = sqrtf( // norm of the current 2x2 block
valOfNorm = sqrtf(
partOfNorm[(i )*(sizeX + 2) + (j )] +
partOfNorm[(i )*(sizeX + 2) + (j + 1)] +
partOfNorm[(i + 1)*(sizeX + 2) + (j )] +
@@ -423,159 +411,102 @@ int normalizeAndTruncate(CvLSVMFeatureMapCaskade *map, const float alfa)
// RESULT
// Error status
*/
// Apply the PCA-style dimensionality reduction to the input CvLSVMFeatureMapCaskade object
int PCAFeatureMaps(CvLSVMFeatureMapCaskade *map)
{
int i, j, ii, jj, k;
int sizeX, sizeY, p, pp, xp, yp, pos1, pos2;
int i,j, ii, jj, k;
int sizeX, sizeY, p, pp, xp, yp, pos1, pos2;
float * newData;
float val;
float nx, ny;
// feature map width, height and feature count
sizeX = map->sizeX;
sizeY = map->sizeY;
p = map->numFeatures;
// new feature count pp and the xp, yp parameters
pp = NUM_SECTOR * 3 + 4;
yp = 4;
xp = NUM_SECTOR;
// normalization factors nx and ny
nx = 1.0f / sqrtf((float)(xp * 2));
ny = 1.0f / sqrtf((float)(yp));
ny = 1.0f / sqrtf((float)(yp ));
// allocate memory for the new feature map data
newData = (float *)malloc (sizeof(float) * (sizeX * sizeY * pp));
if (newData == NULL) {
// if the allocation fails, return an error code
// (error handling is kept minimal here)
return LATENT_SVM_MEM_ERROR; // memory allocation error code
}
// iterate over every position of the feature map
for(i = 0; i < sizeY; i++)
{
for(j = 0; j < sizeX; j++)
{
// index pos1 of the original data at this position
pos1 = ((i)*sizeX + j)*p;
// index pos2 of the new data at this position
pos2 = ((i)*sizeX + j)*pp;
k = 0; // k indexes the next slot in the new data
// first group of new features
k = 0;
for(jj = 0; jj < xp * 2; jj++)
{
val = 0; // reset the accumulator
// sum yp original feature values
val = 0;
for(ii = 0; ii < yp; ii++)
{
val += map->map[pos1 + yp * xp + ii * xp * 2 + jj];
}
// scale the sum by ny and store it in the new data
}/*for(ii = 0; ii < yp; ii++)*/
newData[pos2 + k] = val * ny;
k++;
}
// second group of new features
}/*for(jj = 0; jj < xp * 2; jj++)*/
for(jj = 0; jj < xp; jj++)
{
val = 0; // reset the accumulator
// sum yp original feature values
val = 0;
for(ii = 0; ii < yp; ii++)
{
val += map->map[pos1 + ii * xp + jj];
}
// scale the sum by ny and store it in the new data
}/*for(ii = 0; ii < yp; ii++)*/
newData[pos2 + k] = val * ny;
k++;
}
// third group of new features
}/*for(jj = 0; jj < xp; jj++)*/
for(ii = 0; ii < yp; ii++)
{
val = 0; // reset the accumulator
// sum 2 * xp original feature values
val = 0;
for(jj = 0; jj < 2 * xp; jj++)
{
val += map->map[pos1 + yp * xp + ii * xp * 2 + jj];
}
// scale the sum by nx and store it in the new data
}/*for(jj = 0; jj < xp; jj++)*/
newData[pos2 + k] = val * nx;
k++;
}
}
}
// "swap data": no actual swap happens here; the note is kept from the original source
} /*for(ii = 0; ii < yp; ii++)*/
}/*for(j = 0; j < sizeX; j++)*/
}/*for(i = 0; i < sizeY; i++)*/
//swop data
// update the feature count
map->numFeatures = pp;
// free the original feature map data
free (map->map);
// point map->map at the new data
map->map = newData;
// return success
return LATENT_SVM_OK;
}
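// Dimension check (illustrative, assuming NUM_SECTOR = 9 as in the standard FHOG
// descriptor): after normalizeAndTruncate() the input map carries 12 * NUM_SECTOR = 108
// features per cell, and the projection above reduces them to pp = 3 * NUM_SECTOR + 4 = 31
// features: 2*9 contrast-sensitive sums + 9 contrast-insensitive sums + 4 energy terms.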
// Allocate a CvLSVMFeatureMapCaskade object and initialize its members
//modified from "lsvmc_routine.cpp"
// These two functions allocate and free the CvLSVMFeatureMapCaskade structure.
int allocFeatureMapObject(CvLSVMFeatureMapCaskade **obj, const int sizeX,
const int sizeY, const int numFeatures)
{
int i;
// allocate the CvLSVMFeatureMapCaskade object with malloc
(*obj) = (CvLSVMFeatureMapCaskade *)malloc(sizeof(CvLSVMFeatureMapCaskade));
if (!(*obj)) { // if the allocation fails an error code should be returned; success is assumed here
// real code should add error handling
}
// initialize the object's members
(*obj)->sizeX = sizeX;
(*obj)->sizeY = sizeY;
(*obj)->numFeatures = numFeatures;
// allocate the map member, which stores the feature map data
(*obj)->map = (float *) malloc(sizeof (float) *
(sizeX * sizeY * numFeatures));
if (!(*obj)->map) { // if the allocation fails, return an error code
// and free the memory already allocated for the object
free(*obj);
*obj = NULL;
return LATENT_SVM_MEM_ERROR; // memory allocation error code
}
// initialize every element of map to 0.0f
for(i = 0; i < sizeX * sizeY * numFeatures; i++)
{
(*obj)->map[i] = 0.0f;
}
// return success
return LATENT_SVM_OK;
}
// Free the memory owned by a CvLSVMFeatureMapCaskade object
int freeFeatureMapObject (CvLSVMFeatureMapCaskade **obj)
{
// if obj is NULL, return the null-memory error code
if(*obj == NULL) return LATENT_SVM_MEM_NULL;
// free the map member
free((*obj)->map);
// free the object itself
free(*obj);
// set the pointer to NULL to avoid a dangling pointer
(*obj) = NULL;
// return success
return LATENT_SVM_OK;
}

@@ -1,363 +0,0 @@
// Track Object---advanced by Wangmingqiang -----------------------------------------
// 2024.6.01 at Hunan Changsha.
#include <math.h>
#include <string>
#include <vector>
#include <iostream>
#include <pthread.h>
#include <thread>
#include <chrono>
#include <boost/thread/mutex.hpp>
#include <boost/thread/shared_mutex.hpp>
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/Pose2D.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/ml.hpp>
#include <std_srvs/SetBool.h>
#include "kcftracker.hpp"
#include "prometheus_gimbal_control/VisionDiff.h"
#include "gimbal_track/WindowPosition.h"
using namespace std;
using namespace cv;
#define MARKER_SIZE 0.18
#define F1 300
#define F2 300
#define C1 320
#define C2 240
static const std::string RGB_WINDOW = "RGB Image window";
//! Camera related parameters.
int frameWidth_;
int frameHeight_;
float get_ros_time(ros::Time begin); // get the elapsed ROS time since begin
std_msgs::Header imageHeader_;
cv::Mat camImageCopy_;
boost::shared_mutex mutexImageCallback_;
bool imageStatus_ = false;
boost::shared_mutex mutexImageStatus_;
/* Called whenever a message arrives on the ROS image topic: converts the image data
received from the camera (or another image source) into OpenCV format. */
// Image topic callback, processes the received image messages
void cameraCallback(const sensor_msgs::ImageConstPtr &msg)
{
ROS_DEBUG("[EllipseDetector] USB image received."); // debug message: an image was received
cv_bridge::CvImagePtr cam_image; // OpenCV image pointer
// try to convert the ROS image message into an OpenCV image
try
{
cam_image = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8); // convert to a BGR8 OpenCV image
imageHeader_ = msg->header; // keep the message header
}
catch (cv_bridge::Exception &e) // catch cv_bridge exceptions
{
ROS_ERROR("cv_bridge exception: %s", e.what()); // report the error
return; // give up on this message
}
// if the conversion succeeded
if (cam_image)
{
// protect the image copy with its mutex
{
boost::unique_lock<boost::shared_mutex> lockImageCallback(mutexImageCallback_);
camImageCopy_ = cam_image->image.clone(); // clone the image into the global variable
}
// update the image status under its mutex
{
boost::unique_lock<boost::shared_mutex> lockImageStatus(mutexImageStatus_);
imageStatus_ = true; // mark that an image has been received
}
frameWidth_ = cam_image->image.size().width; // store the image width
frameHeight_ = cam_image->image.size().height; // store the image height
}
return;
}
// Use this function to check whether the image topic has delivered a frame
bool getImageStatus(void)
{
boost::shared_lock<boost::shared_mutex> lock(mutexImageStatus_);
return imageStatus_;
}
//! ROS subscriber and publisher.
image_transport::Subscriber imageSubscriber_;
image_transport::Publisher image_vision_pub;
ros::Publisher pose_pub;
cv::Rect selectRect;
cv::Point origin;
cv::Rect result;
bool select_flag = false;
bool bRenewROI = false; // the flag to enable the implementation of KCF algorithm for the new chosen ROI
bool bBeginKCF = false;
int g_control_gimbal = 1;
float get_ros_time(ros::Time begin)
{
ros::Time time_now = ros::Time::now();
float currTimeSec = time_now.sec - begin.sec;
float currTimenSec = time_now.nsec / 1e9 - begin.nsec / 1e9;
return (currTimeSec + currTimenSec);
}
void bboxDrawCb(const gimbal_track::WindowPosition::ConstPtr &msg)
{
if (msg->mode != 0)
{
selectRect.x = msg->origin_x;
selectRect.y = msg->origin_y;
selectRect.width = msg->width;
selectRect.height = msg->height;
selectRect &= cv::Rect(0, 0, frameWidth_, frameHeight_);
if (selectRect.width * selectRect.height > 64)
{
bRenewROI = true;
}
g_control_gimbal = 1;
}
else
{
g_control_gimbal = 0;
}
}
/* OpenCV mouse event handler: called when the user interacts with the image window
to select the ROI. */
// Mouse event callback
void onMouse(int event, int x, int y, int, void *)
{
// while a region is being selected
if (select_flag)
{
// update the position and size of the selection
selectRect.x = MIN(origin.x, x); // top-left x is the smaller of the two x values
selectRect.y = MIN(origin.y, y); // top-left y is the smaller of the two y values
selectRect.width = abs(x - origin.x); // width is the horizontal mouse travel
selectRect.height = abs(y - origin.y); // height is the vertical mouse travel
// clamp the selection to the image bounds
selectRect &= cv::Rect(0, 0, frameWidth_, frameHeight_);
}
// left mouse button pressed
if (event == CV_EVENT_LBUTTONDOWN)
{
bBeginKCF = false; // do not run KCF tracking yet
select_flag = true; // a selection is now in progress
origin = cv::Point(x, y); // remember where the drag started
selectRect = cv::Rect(x, y, 0, 0); // reset the selection rectangle
}
// left mouse button released
else if (event == CV_EVENT_LBUTTONUP)
{
// a selection smaller than 64 pixels is treated as invalid
if (selectRect.width * selectRect.height < 64)
{
// do nothing
}
else
{
select_flag = false; // selection finished
bRenewROI = true; // the ROI needs to be (re)initialized
}
}
}
/* ROS service callback gimbalSer: handles requests on the /detection/gimbal_control
service that enable or disable gimbal control. */
// Service callback for gimbal control requests
bool gimbalSer(std_srvs::SetBool::Request &req, std_srvs::SetBool::Response &resp)
{
// request data true: close gimbal control
if (req.data)
{
g_control_gimbal = 0; // global flag: gimbal control closed
}
// request data false: open gimbal control (only if a selection exists)
else if (selectRect.width * selectRect.height > 0)
{
bRenewROI = true; // the tracked ROI needs to be reinitialized
g_control_gimbal = 1; // global flag: gimbal control open
}
else
{
bRenewROI = false; // no ROI to refresh
bBeginKCF = false; // and no KCF tracking
}
// report success
resp.success = true;
// choose the response message from the request data
resp.message = req.data ? "Gimbal Control Close" : "Gimbal Control Open";
// service handled successfully
return true;
}
bool HOG = true;
bool FIXEDWINDOW = false;
bool MULTISCALE = true;
bool SILENT = true;
bool LAB = false;
// Create KCFTracker object
KCFTracker tracker(HOG, FIXEDWINDOW, MULTISCALE, LAB);
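// Note (an illustrative summary, not taken from the tracker's documentation): the four
// flags above select the feature and scale options of the bundled KCF implementation
// declared in kcftracker.hpp. The main loop below only needs two calls:
//   tracker.init(selectRect, frame);   // start tracking the selected ROI
//   result = tracker.update(frame);    // returns the tracked cv::Rect for each frame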
int main(int argc, char **argv)
{
ros::init(argc, argv, "tracker_ros"); // initialise the ROS node
ros::NodeHandle nh("~"); // private node handle
image_transport::ImageTransport it(nh); // image transport helper
ros::Rate loop_rate(30); // main loop rate (Hz)
bool auto_zoom, show_ui;
float max_size, min_size;
nh.param<bool>("auto_zoom", auto_zoom, false); // 从参数服务器获取参数
nh.param<bool>("show_ui", show_ui, true);
nh.param<float>("max_size", max_size, 0.0);
nh.param<float>("min_size", min_size, 0.0);
std::cout << "auto_zoom: " << auto_zoom << " "
<< "max_size: " << max_size << " "
<< "min_size: " << min_size << std::endl;
// Subscribe to the camera image topic
imageSubscriber_ = it.subscribe("/gimbal/image_raw", 1, cameraCallback);
// Publisher for the annotated image
image_vision_pub = it.advertise("/detection/image", 1);
// Publisher for the tracking error (VisionDiff) consumed by the gimbal controller
ros::Publisher position_diff_pub = nh.advertise<prometheus_gimbal_control::VisionDiff>("/gimbal/track", 10);
// ros::Publisher auto_zoom_pub = nh.advertise<prometheus_gimbal_control::Diff>("/gimbal_server/auto_zoom", 10);
ros::Subscriber sub_bbox_draw = nh.subscribe("/detection/bbox_draw", 10, bboxDrawCb);
ros::ServiceServer server = nh.advertiseService("/detection/gimbal_control", gimbalSer);
sensor_msgs::ImagePtr msg_ellipse;
const auto wait_duration = std::chrono::milliseconds(2000);
if (show_ui)
{
cv::namedWindow(RGB_WINDOW);
cv::setMouseCallback(RGB_WINDOW, onMouse, 0);
}
float cur_time;
float last_time;
float last_error_x, last_error_y;
float dt;
prometheus_gimbal_control::VisionDiff error_pixels;
ros::Time begin_time = ros::Time::now();
while (ros::ok()) // main loop: run while the ROS node is alive
{
// Elapsed time since the node started
cur_time = get_ros_time(begin_time);
dt = (cur_time - last_time); // time step since the previous iteration
if (dt > 1.0 || dt < 0.0) // clamp implausible time steps to 0.05 s
{
dt = 0.05;
}
last_time = cur_time; // remember the timestamp for the next iteration
// Block until the first image has been received
while (!getImageStatus())
{
printf("Waiting for image.\n"); // report that no image has arrived yet
std::this_thread::sleep_for(wait_duration); // sleep before polling again
ros::spinOnce(); // process pending callbacks
}
Mat frame; // current frame
{
boost::unique_lock<boost::shared_mutex> lockImageCallback(mutexImageCallback_);
frame = camImageCopy_.clone(); // copy the latest frame under the image mutex
}
if (bRenewROI) // a new ROI was requested
{
tracker.init(selectRect, frame); // (re-)initialise the tracker with the selection
cv::rectangle(frame, selectRect, cv::Scalar(255, 0, 0), 2, 8, 0); // draw the selected ROI
bRenewROI = false; // clear the request flag
bBeginKCF = true; // start KCF tracking
}
else if (bBeginKCF) // tracking is active
{
result = tracker.update(frame); // track the target in the current frame
error_pixels.detect = 1; // target detected
error_pixels.objectX = result.x; // target position in the image
error_pixels.objectY = result.y;
error_pixels.objectWidth = result.width;
error_pixels.objectHeight = result.height;
error_pixels.frameWidth = frameWidth_; // image width
error_pixels.frameHeight = frameHeight_; // image height
error_pixels.currSize = (float)result.width * (float)result.height / (frameHeight_ * frameWidth_); // current target size relative to the frame
error_pixels.maxSize = (float)selectRect.width * (float)selectRect.height / (frameHeight_ * frameWidth_); // size of the original selection relative to the frame
cv::rectangle(frame, result, cv::Scalar(255, 0, 0), 2, 8, 0); // draw the tracking result
}
else // tracking not active
{
error_pixels.detect = 0; // no target
}
error_pixels.kp = 0.2; // proportional gain
error_pixels.ki = 0.0001; // integral gain
error_pixels.kd = 0.003; // derivative gain
if (max_size != 0 && min_size != 0 && auto_zoom) // auto-zoom enabled with explicit size bounds
{
error_pixels.maxSize = max_size; // upper bound on the target size
error_pixels.minSize = min_size; // lower bound on the target size
}
error_pixels.autoZoom = auto_zoom; // forward the auto-zoom flag
error_pixels.trackIgnoreError = 35; // pixel dead band ignored by the gimbal controller
if (g_control_gimbal == 0) // gimbal control disabled
{
error_pixels.detect = 0; // report no detection so the gimbal holds position
}
position_diff_pub.publish(error_pixels); // publish the tracking error
// auto_zoom_pub.publish(error_pixels);
float left_point = frame.cols / 2 - 20;
float right_point = frame.cols / 2 + 20;
float up_point = frame.rows / 2 + 20;
float down_point = frame.rows / 2 - 20;
// Draw a crosshair at the image centre
line(frame, Point(left_point, frame.rows / 2), Point(right_point, frame.rows / 2), Scalar(0, 255, 0), 1, 8);
line(frame, Point(frame.cols / 2, down_point), Point(frame.cols / 2, up_point), Scalar(0, 255, 0), 1, 8);
putText(frame, "x:", Point(50, 60), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 23, 0), 3, 8);
putText(frame, "y:", Point(50, 90), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 23, 0), 3, 8);
// Print the pixel offset of the tracked target centre from the image centre
char s[20] = "";
sprintf(s, "%.2f", float(result.x + result.width / 2 - frame.cols / 2));
putText(frame, s, Point(100, 60), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 23, 0), 2, 8);
sprintf(s, "%.2f", float(result.y + result.height / 2 - frame.rows / 2));
putText(frame, s, Point(100, 90), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 23, 0), 2, 8);
if (show_ui)
{
imshow(RGB_WINDOW, frame);
waitKey(20);
}
image_vision_pub.publish(cv_bridge::CvImage(std_msgs::Header(), "bgr8", frame).toImageMsg());
ros::spinOnce();
loop_rate.sleep();
}
}

@ -1,4 +1,84 @@
/*
Tracker based on Kernelized Correlation Filter (KCF) [1] and Circulant Structure with Kernels (CSK) [2].
CSK is implemented by using raw gray level features, since it is a single-channel filter.
KCF is implemented by using HOG features (the default), since it extends CSK to multiple channels.
[1] J. F. Henriques, R. Caseiro, P. Martins, J. Batista,
"High-Speed Tracking with Kernelized Correlation Filters", TPAMI 2015.
[2] J. F. Henriques, R. Caseiro, P. Martins, J. Batista,
"Exploiting the Circulant Structure of Tracking-by-detection with Kernels", ECCV 2012.
Authors: Joao Faro, Christian Bailer, Joao F. Henriques
Contacts: joaopfaro@gmail.com, Christian.Bailer@dfki.de, henriques@isr.uc.pt
Institute of Systems and Robotics - University of Coimbra / Department Augmented Vision DFKI
Constructor parameters, all boolean:
hog: use HOG features (default), otherwise use raw pixels
fixed_window: fix window size (default), otherwise use ROI size (slower but more accurate)
multiscale: use multi-scale tracking (default; cannot be used with fixed_window = true)
Default values are set for all properties of the tracker depending on the above choices.
Their values can be customized further before calling init():
interp_factor: linear interpolation factor for adaptation
sigma: gaussian kernel bandwidth
lambda: regularization
cell_size: HOG cell size
padding: area surrounding the target, relative to its size
output_sigma_factor: bandwidth of gaussian target
template_size: template size in pixels, 0 to use ROI size
scale_step: scale step for multi-scale estimation, 1 to disable it
scale_weight: to downweight detection scores of other scales for added stability
For speed, the value (template_size/cell_size) should be a power of 2 or a product of small prime numbers.
Inputs to init():
image is the initial frame.
roi is a cv::Rect with the target positions in the initial frame
Inputs to update():
image is the current frame.
Outputs of update():
cv::Rect with target positions for the current frame
By downloading, copying, installing or using the software you agree to this license.
If you do not agree to this license, do not download, install,
copy or use the software.
License Agreement
For Open Source Computer Vision Library
(3-clause BSD License)
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of the copyright holders nor the names of the contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
This software is provided by the copyright holders and contributors "as is" and
any express or implied warranties, including, but not limited to, the implied
warranties of merchantability and fitness for a particular purpose are disclaimed.
In no event shall copyright holders or contributors be liable for any direct,
indirect, incidental, special, exemplary, or consequential damages
(including, but not limited to, procurement of substitute goods or services;
loss of use, data, or profits; or business interruption) however caused
and on any theory of liability, whether in contract, strict liability,
or tort (including negligence or otherwise) arising in any way out of
the use of this software, even if advised of the possibility of such damage.
*/
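The comment block above documents the constructor flags and the init()/update() contract. A minimal standalone usage sketch under those assumptions follows; the camera source, window handling and cv::selectROI call (OpenCV 3+) are mine, not part of this file.
// Minimal sketch of the documented KCFTracker API, assuming an OpenCV camera source.
#include <opencv2/opencv.hpp>
#include "kcftracker.hpp"

int main()
{
    KCFTracker tracker(true /*hog*/, false /*fixed_window*/, true /*multiscale*/, false /*lab*/);
    cv::VideoCapture cap(0); // hypothetical camera index
    cv::Mat frame;
    if (!cap.read(frame)) return 1;
    cv::Rect roi = cv::selectROI("init", frame); // target position in the initial frame
    tracker.init(roi, frame);                    // init(): initial frame + ROI
    while (cap.read(frame)) {
        cv::Rect result = tracker.update(frame); // update(): target position in the current frame
        cv::rectangle(frame, result, cv::Scalar(255, 0, 0), 2);
        cv::imshow("track", frame);
        if (cv::waitKey(30) == 27) break; // Esc quits
    }
    return 0;
}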
#include <iostream>
#ifndef _KCFTRACKER_HEADERS
#include "kcftracker.hpp"
@ -8,74 +88,75 @@
#include "labdata.hpp"
#endif
// Constructor: sets the KCF tracker parameters according to the chosen feature and scale options
KCFTracker::KCFTracker(bool hog, bool fixed_window, bool multiscale, bool lab)
{
// Parameters equal in all cases
lambda = 0.0001; // regularization
padding = 2.5; // area surrounding the target, relative to its size
//output_sigma_factor = 0.1;
output_sigma_factor = 0.125; // bandwidth of the Gaussian target
if (hog) { // HOG features
// VOT
interp_factor = 0.012; // linear interpolation factor for adaptation
sigma = 0.6; // Gaussian kernel bandwidth
// TPAMI
//interp_factor = 0.02;
//sigma = 0.5;
cell_size = 4; // HOG cell size
_hogfeatures = true;
// Lab colour features (only available together with HOG)
if (lab) {
interp_factor = 0.005;
sigma = 0.4;
//output_sigma_factor = 0.025;
output_sigma_factor = 0.1;
_labfeatures = true;
_labCentroids = cv::Mat(nClusters, 3, CV_32FC1, &data); // Lab cluster centroids
cell_sizeQ = cell_size * cell_size;
}
else {
_labfeatures = false;
}
}
else { // RAW pixel features
interp_factor = 0.075;
sigma = 0.2;
cell_size = 1;
_hogfeatures = false;
// Lab features require HOG features
if (lab) {
printf("Lab features are only used with HOG features.\n");
_labfeatures = false;
}
}
if (multiscale) { // multiscale
template_size = 96;
//template_size = 100;
scale_step = 1.05;
scale_weight = 0.95;
if (!fixed_window) {
//printf("Multiscale does not support non-fixed window.\n");
fixed_window = true;
}
}
else if (fixed_window) { // fit correction without multiscale
template_size = 96;
//template_size = 100;
scale_step = 1;
}
else {
template_size = 1;
scale_step = 1;
}
}
// Initialize tracker
void KCFTracker::init(const cv::Rect &roi, cv::Mat image)
{
@ -91,51 +172,51 @@ void KCFTracker::init(const cv::Rect &roi, cv::Mat image)
// Update position based on the new frame
cv::Rect KCFTracker::update(cv::Mat image)
{
// Keep the ROI at least partially inside the image
if (_roi.x + _roi.width <= 0) _roi.x = -_roi.width + 1;
if (_roi.y + _roi.height <= 0) _roi.y = -_roi.height + 1;
if (_roi.x >= image.cols - 1) _roi.x = image.cols - 2;
if (_roi.y >= image.rows - 1) _roi.y = image.rows - 2;
// Centre of the current ROI
float cx = _roi.x + _roi.width / 2.0f;
float cy = _roi.y + _roi.height / 2.0f;
float peak_value;
cv::Point2f res = detect(_tmpl, getFeatures(image, 0, 1.0f), peak_value);
if (scale_step != 1) {
// Test at a smaller _scale
float new_peak_value;
cv::Point2f new_res = detect(_tmpl, getFeatures(image, 0, 1.0f / scale_step), new_peak_value);
if (scale_weight * new_peak_value > peak_value) { // a higher (down-weighted) peak wins
res = new_res;
peak_value = new_peak_value;
_scale /= scale_step;
_roi.width /= scale_step;
_roi.height /= scale_step;
}
// Test at a bigger _scale
new_res = detect(_tmpl, getFeatures(image, 0, scale_step), new_peak_value);
if (scale_weight * new_peak_value > peak_value) {
res = new_res;
peak_value = new_peak_value;
_scale *= scale_step;
_roi.width *= scale_step;
_roi.height *= scale_step;
}
}
// Adjust by cell size and _scale
_roi.x = cx - _roi.width / 2.0f + ((float) res.x * cell_size * _scale);
_roi.y = cy - _roi.height / 2.0f + ((float) res.y * cell_size * _scale);
if (_roi.x >= image.cols - 1) _roi.x = image.cols - 1;
if (_roi.y >= image.rows - 1) _roi.y = image.rows - 1;
if (_roi.x + _roi.width <= 0) _roi.x = -_roi.width + 2;
if (_roi.y + _roi.height <= 0) _roi.y = -_roi.height + 2;
assert(_roi.width >= 0 && _roi.height >= 0);
cv::Mat x = getFeatures(image, 0);
@ -180,19 +261,29 @@ cv::Point2f KCFTracker::detect(cv::Mat z, cv::Mat x, float &peak_value)
return p;
}
// train tracker with a single image
void KCFTracker::train(cv::Mat x, float train_interp_factor)
{
using namespace FFTTools;
cv::Mat k = gaussianCorrelation(x, x); // kernel correlation of the sample with itself
cv::Mat alphaf = complexDivision(_prob, (fftd(k) + lambda)); // solve for the filter in the Fourier domain
_tmpl = (1 - train_interp_factor) * _tmpl + (train_interp_factor) * x; // running average of the template
_alphaf = (1 - train_interp_factor) * _alphaf + (train_interp_factor) * alphaf; // running average of the filter
/*cv::Mat kf = fftd(gaussianCorrelation(x, x));
cv::Mat num = complexMultiplication(kf, _prob);
cv::Mat den = complexMultiplication(kf, kf + lambda);
_tmpl = (1 - train_interp_factor) * _tmpl + (train_interp_factor) * x;
_num = (1 - train_interp_factor) * _num + (train_interp_factor) * num;
_den = (1 - train_interp_factor) * _den + (train_interp_factor) * den;
_alphaf = complexDivision(_num, _den);*/
}
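Reading the body of train() in the notation of the KCF paper cited in the header (a paraphrase of the code above, not text from this file): with \hat{y} the Fourier-domain Gaussian label (_prob), \hat{k}^{xx} the self-correlation kernel and \eta = train_interp_factor, the update is

\hat{\alpha} = \frac{\hat{y}}{\hat{k}^{xx} + \lambda}, \qquad x_t = (1-\eta)\,x_{t-1} + \eta\,x, \qquad \hat{\alpha}_t = (1-\eta)\,\hat{\alpha}_{t-1} + \eta\,\hat{\alpha},

i.e. a ridge-regression solve in the Fourier domain followed by linear interpolation of the template (_tmpl) and the filter (_alphaf).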
// Evaluates a Gaussian kernel with bandwidth SIGMA for all relative shifts between input images X and Y, which must both be MxN. They must also be periodic (ie., pre-processed with a cosine window).
cv::Mat KCFTracker::gaussianCorrelation(cv::Mat x1, cv::Mat x2)
@ -230,28 +321,27 @@ cv::Mat KCFTracker::gaussianCorrelation(cv::Mat x1, cv::Mat x2)
return k;
}
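The body of gaussianCorrelation() is elided by this hunk; per the KCF paper referenced in the header, the multi-channel Gaussian kernel it evaluates is, up to the implementation's normalisation and clamping at zero (a sketch, not code from this file),

k^{xx'} = \exp\!\left(-\frac{1}{\sigma^2}\Big(\|x\|^2 + \|x'\|^2 - 2\,\mathcal{F}^{-1}\big(\textstyle\sum_c \hat{x}_c \odot \hat{x}'^{\,*}_c\big)\Big)\right),

where the sum runs over feature channels, \mathcal{F}^{-1} is the inverse DFT and \odot is element-wise multiplication.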
// Create Gaussian Peak. Function called only in the first frame.
cv::Mat KCFTracker::createGaussianPeak(int sizey, int sizex)
{
cv::Mat_<float> res(sizey, sizex);
int syh = (sizey) / 2; // peak centre in y
int sxh = (sizex) / 2; // peak centre in x
float output_sigma = std::sqrt((float) sizex * sizey) / padding * output_sigma_factor; // bandwidth of the Gaussian label
float mult = -0.5 / (output_sigma * output_sigma);
for (int i = 0; i < sizey; i++)
for (int j = 0; j < sizex; j++)
{
int ih = i - syh; // offset from the peak centre in y
int jh = j - sxh; // offset from the peak centre in x
res(i, j) = std::exp(mult * (float) (ih * ih + jh * jh));
}
return FFTTools::fftd(res); // return the Fourier transform of the Gaussian label
}
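Written out, the label built above is a Gaussian centred on the patch, with a bandwidth derived from the patch size:

y(i,j) = \exp\!\left(-\frac{(i - \lfloor s_y/2 \rfloor)^2 + (j - \lfloor s_x/2 \rfloor)^2}{2\,\sigma_o^2}\right), \qquad \sigma_o = \frac{\sqrt{s_x\,s_y}}{\text{padding}}\cdot\text{output\_sigma\_factor},

and the function returns its DFT, which train() consumes as the target response _prob.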
// Obtain sub-window from image, with replication-padding and extract features
cv::Mat KCFTracker::getFeatures(const cv::Mat & image, bool inithann, float scale_adjust)
{
@ -396,24 +486,21 @@ cv::Mat KCFTracker::getFeatures(const cv::Mat & image, bool inithann, float scal
return FeaturesMap;
}
// Initialize Hanning window. Function called only in the first frame.
void KCFTracker::createHanningMats()
{
cv::Mat hann1t = cv::Mat(cv::Size(size_patch[1],1), CV_32F, cv::Scalar(0));
cv::Mat hann2t = cv::Mat(cv::Size(1,size_patch[0]), CV_32F, cv::Scalar(0));
// Build the 1-D Hanning windows
for (int i = 0; i < hann1t.cols; i++)
hann1t.at<float>(0, i) = 0.5 * (1 - std::cos(2 * 3.14159265358979323846 * i / (hann1t.cols - 1)));
for (int i = 0; i < hann2t.rows; i++)
hann2t.at<float>(i, 0) = 0.5 * (1 - std::cos(2 * 3.14159265358979323846 * i / (hann2t.rows - 1)));
// Combine the 1-D windows into a 2-D Hanning window
cv::Mat hann2d = hann2t * hann1t;
// HOG features
if (_hogfeatures) {
cv::Mat hann1d = hann2d.reshape(1,1); // Procedure to deal with cv::Mat multichannel bug
hann = cv::Mat(cv::Size(size_patch[0]*size_patch[1], size_patch[2]), CV_32F, cv::Scalar(0));
for (int i = 0; i < size_patch[2]; i++) {
@ -422,13 +509,13 @@ void KCFTracker::createHanningMats()
}
}
}
// Gray features
else {
hann = hann2d;
}
}
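For reference, the two 1-D windows built above are standard Hann windows, combined by an outer product into the 2-D cosine window applied to the feature patch:

w(n) = \tfrac{1}{2}\left(1 - \cos\frac{2\pi n}{N-1}\right), \qquad W = w_{\text{rows}}\,w_{\text{cols}}^{\mathsf{T}},

replicated across all feature channels when HOG features are enabled.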
// Calculate sub-pixel peak for one dimension
float KCFTracker::subPixelPeak(float left, float center, float right)
{
float divisor = 2 * center - right - left;
