diff --git a/models/qrcode_wechatqrcode/CMakeLists.txt b/models/qrcode_wechatqrcode/CMakeLists.txt
new file mode 100644
index 00000000..823d7e17
--- /dev/null
+++ b/models/qrcode_wechatqrcode/CMakeLists.txt
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 3.24.0)
+project(opencv_zoo_qrcode_wechatqrcode)
+
+set(OPENCV_VERSION "4.10.0")
+set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation")
+
+# Find OpenCV
+find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH})
+
+add_executable(demo demo.cpp)
+target_link_libraries(demo ${OpenCV_LIBS})
diff --git a/models/qrcode_wechatqrcode/README.md b/models/qrcode_wechatqrcode/README.md
index f310b48d..786490b3 100644
--- a/models/qrcode_wechatqrcode/README.md
+++ b/models/qrcode_wechatqrcode/README.md
@@ -9,6 +9,8 @@ Notes:
 
 ## Demo
 
+### Python
+
 Run the following command to try the demo:
 
 ```shell
@@ -21,6 +23,23 @@ python demo.py --input /path/to/image -v
 python demo.py --help
 ```
 
+### C++
+
+Install the latest OpenCV (with opencv_contrib) and CMake >= 3.24.0, then get started with:
+
+```shell
+# A typical and default installation path of OpenCV is /usr/local
+cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation .
+cmake --build build
+
+# detect on camera input
+./build/demo
+# detect on an image
+./build/demo -i=/path/to/image -v
+# get help messages
+./build/demo -h
+```
+
 ### Example outputs
 
 ![webcam demo](./example_outputs/wechat_qrcode_demo.gif)
diff --git a/models/qrcode_wechatqrcode/demo.cpp b/models/qrcode_wechatqrcode/demo.cpp
new file mode 100644
index 00000000..5f915a83
--- /dev/null
+++ b/models/qrcode_wechatqrcode/demo.cpp
@@ -0,0 +1,192 @@
+#include <iostream>
+#include <stdexcept>
+#include <string>
+#include <vector>
+#include <opencv2/opencv.hpp>
+#include <opencv2/wechat_qrcode.hpp>
+
+class WeChatQRCode {
+ public:
+  WeChatQRCode(const std::string& detect_prototxt,
+               const std::string& detect_model,
+               const std::string& sr_prototxt, const std::string& sr_model,
+               int backend_target_index)
+      : backend_target_index_(backend_target_index) {
+    // Supported backend-target pairs; the index passed on the command line
+    // is validated against this table.
+    const std::vector<std::pair<cv::dnn::Backend, cv::dnn::Target>>
+        backend_target_pairs = {
+            {cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU},
+            {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA},
+            {cv::dnn::DNN_BACKEND_CUDA, cv::dnn::DNN_TARGET_CUDA_FP16},
+            {cv::dnn::DNN_BACKEND_TIMVX, cv::dnn::DNN_TARGET_NPU},
+            {cv::dnn::DNN_BACKEND_CANN, cv::dnn::DNN_TARGET_NPU}};
+
+    if (backend_target_index_ < 0 ||
+        backend_target_index_ >=
+            static_cast<int>(backend_target_pairs.size())) {
+      throw std::invalid_argument("Invalid backend-target index");
+    }
+
+    // initialize detector
+    detector_ = cv::makePtr<cv::wechat_qrcode::WeChatQRCode>(
+        detect_prototxt, detect_model, sr_prototxt, sr_model);
+  }
+
+  std::pair<std::vector<std::string>, std::vector<cv::Mat>> detect(
+      const cv::Mat& image) {
+    std::vector<std::string> results;
+    std::vector<cv::Mat> points;
+    results = detector_->detectAndDecode(image, points);
+    return {results, points};
+  }
+
+  cv::Mat visualize(const cv::Mat& image,
+                    const std::vector<std::string>& results,
+                    const std::vector<cv::Mat>& points,
+                    cv::Scalar points_color = cv::Scalar(0, 255, 0),
+                    cv::Scalar text_color = cv::Scalar(0, 255, 0),
+                    double fps = -1) const {
+    cv::Mat output = image.clone();
+
+    if (fps >= 0) {
+      cv::putText(output, "FPS: " + std::to_string(fps), cv::Point(0, 15),
+                  cv::FONT_HERSHEY_SIMPLEX, 0.5, text_color);
+    }
+
+    double fontScale = 0.5;
+    int fontSize = 1;
+
+    for (size_t i = 0; i < results.size(); ++i) {
+      const auto& p = points[i];
+
+      // draw the four corner points of each detected QR code
+      for (int r = 0; r < p.rows; ++r) {
+        cv::Point point(p.at<float>(r, 0), p.at<float>(r, 1));
+        cv::circle(output, point, 10, points_color, -1);
+      }
+
+      // place the decoded text at the center of the QR code
+      int qrcode_center_x = (p.at<float>(0, 0) + p.at<float>(2, 0)) / 2;
+      int qrcode_center_y = (p.at<float>(0, 1) + p.at<float>(2, 1)) / 2;
+
+      int baseline = 0;
+      cv::Size text_size =
+          cv::getTextSize(results[i], cv::FONT_HERSHEY_DUPLEX, fontScale,
+                          fontSize, &baseline);
+
+      cv::Point text_pos(qrcode_center_x - text_size.width / 2,
+                         qrcode_center_y + text_size.height / 2);
+
+      cv::putText(output, results[i], text_pos, cv::FONT_HERSHEY_DUPLEX,
+                  fontScale, text_color, fontSize);
+    }
+
+    return output;
+  }
+
+ private:
+  int backend_target_index_;
+  cv::Ptr<cv::wechat_qrcode::WeChatQRCode> detector_;
+};
+
+int main(int argc, char** argv) {
+  cv::CommandLineParser parser(
+      argc, argv,
+      "{help h | | Show this help message.}"
+      "{input i | | Set path to the input image. Omit for using default camera.}"
+      "{detect_prototxt_path | detect_2021nov.prototxt | Set path to detect.prototxt.}"
+      "{detect_model_path | detect_2021nov.caffemodel | Set path to detect.caffemodel.}"
+      "{sr_prototxt_path | sr_2021nov.prototxt | Set path to sr.prototxt.}"
+      "{sr_model_path | sr_2021nov.caffemodel | Set path to sr.caffemodel.}"
+      "{backend_target bt | 0 | Choose one of the backend-target pairs to run this demo.}"
+      "{save s | false | Specify to save file with results.}"
+      "{vis v | false | Specify to open a new window to show results.}");
+
+  if (parser.has("help")) {
+    parser.printMessage();
+    return 0;
+  }
+
+  // get paths
+  std::string detect_prototxt = parser.get<std::string>("detect_prototxt_path");
+  std::string detect_model = parser.get<std::string>("detect_model_path");
+  std::string sr_prototxt = parser.get<std::string>("sr_prototxt_path");
+  std::string sr_model = parser.get<std::string>("sr_model_path");
+  int backend_target_index = parser.get<int>("backend_target");
+
+  // input check
+  std::string input_path = parser.get<std::string>("input");
+  bool save_result = parser.get<bool>("save");
+  bool visualize_result = parser.get<bool>("vis");
+
+  try {
+    WeChatQRCode qrDetector(detect_prototxt, detect_model, sr_prototxt,
+                            sr_model, backend_target_index);
+
+    if (!input_path.empty()) {
+      // process image
+      cv::Mat image = cv::imread(input_path);
+      if (image.empty()) {
+        std::cerr << "Could not read the image" << std::endl;
+        return -1;
+      }
+
+      std::pair<std::vector<std::string>, std::vector<cv::Mat>>
+          detectionResult = qrDetector.detect(image);
+      auto& results = detectionResult.first;
+      auto& points = detectionResult.second;
+
+      for (const auto& result : results) {
+        std::cout << result << std::endl;
+      }
+
+      cv::Mat result_image = qrDetector.visualize(image, results, points);
+
+      if (save_result) {
+        cv::imwrite("result.jpg", result_image);
+        std::cout << "Results saved to result.jpg" << std::endl;
+      }
+
+      if (visualize_result) {
+        cv::imshow(input_path, result_image);
+        cv::waitKey(0);
+      }
+    } else {
+      // process camera
+      cv::VideoCapture cap(0);
+      if (!cap.isOpened()) {
+        std::cerr << "Error opening camera" << std::endl;
+        return -1;
+      }
+
+      cv::Mat frame;
+      cv::TickMeter tm;
+
+      while (true) {
+        cap >> frame;
+        if (frame.empty()) {
+          std::cout << "No frames grabbed" << std::endl;
+          break;
+        }
+
+        // time only the detection so the reported FPS is meaningful
+        tm.start();
+        std::pair<std::vector<std::string>, std::vector<cv::Mat>>
+            detectionResult = qrDetector.detect(frame);
+        tm.stop();
+        double fps = tm.getFPS();
+
+        auto& results = detectionResult.first;
+        auto& points = detectionResult.second;
+
+        cv::Mat result_frame = qrDetector.visualize(
+            frame, results, points, cv::Scalar(0, 255, 0),
+            cv::Scalar(0, 255, 0), fps);
+        cv::imshow("WeChatQRCode Demo", result_frame);
+
+        tm.reset();
+
+        if (cv::waitKey(1) >= 0) break;
+      }
+    }
+
+  } catch (const std::exception& ex) {
+    std::cerr << "Error: " << ex.what() << std::endl;
+    return -1;
+  }
+
+  return 0;
+}