Product: 顔認証 SDK

顔トラッキング、フレーム選択、顔検知、顔特徴量抽出、顔特徴の1:N検索機能、生体検知、などの機能を、世界最高峰のクオリティで。単眼/複眼カメラそれぞれの特性に合わせた、独自の顔認証アプリケーションが、思いのままに開発いただけます。

手順

  • 1
  • 2
  • 3
  • 1

    ライセンス発行

    メールに記載のURLからライセンス・ライセンス利用のご案内をダウンロードし、オンラインでライセンス認証を行います。ライセンス認証APIを使用します。

  • 2

    環境構築

    OpenCV ライブラリをインストール、環境変数を追加、コンパイルします。

    SDK本体はOpenCVライブラリに依存しておらず、サンプルデモでのみ使用されています。

  • 3

    サンプルコードのテスト

    テストプロジェクトのヘッダファイルとライブラリを設定、SDK および opencv ライブラリファイルの依存ファイルを追加しコンパイル、SDK ライセンス認証を設定し、サンプルを実行します。

開発イメージ

DEMOサンプルコード

  • Swift
  • Java
  • C++
import UIKit

/// 1:1 face comparison demo screen: the user picks two photos, a face is
/// detected in each, a feature is extracted per image, and the two features
/// are compared; the similarity score is rendered by `mainView`.
class STFacePro1VS1ViewController: STFaceProBaseViewController, STFaceProImagePickerDelegate, STFacePro1VS1ViewDelegate {
    let imagePicker = STFaceProImagePicker()

    // MARK: - Tools in SDK
    let convertTool = STFaceProConvertTool()
    let faceDetector = STFaceProFaceDetector()
    let featureExtraction = STFaceProFeatureExtraction()
    let featureComparison = STFaceProFeatureComparison()

    // The first image
    var oneImage: UIImage?
    // The feature of first image (owned; must be freed via featureExtraction.releaseFeature)
    var oneFeature: UnsafeMutablePointer<Int8>?
    // The second image
    var anotherImage: UIImage?
    // The feature of second image (owned; must be freed via featureExtraction.releaseFeature)
    var anotherFeature: UnsafeMutablePointer<Int8>?
    // Is the first photo on the left selected
    var isTheFirstSelected: Bool = false

    // Loading View displayed when clicking on compare
    let loadingView = STFaceProLoadingView(frame: .zero)
    // Serial background queue so detection/extraction never blocks the main thread
    private let queue = DispatchQueue(label: "com.sensetime.senseid.facepro.compare.queue")

    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = bgBlackColor
        self.navigationItem.title = NSLocalizedString("oneVSone", comment: "")

        addViews()
        createDetector()
    }

    deinit {
        // At the end of the life cycle, remember to destroy the feature value to avoid memory leaks
        self.clearFeature()
        print("STFacePro1VS1ViewController deinit")
    }

    /// Releases both SDK-owned feature buffers (idempotent: pointers are nilled after release).
    func clearFeature() {
        if let feature = self.oneFeature {
            self.featureExtraction.releaseFeature(feature)
        }
        self.oneFeature = nil
        if let feature = self.anotherFeature {
            self.featureExtraction.releaseFeature(feature)
        }
        self.anotherFeature = nil
    }

    // MARK: - STFaceProImagePickerDelegate
    /// Stores the picked image in the slot selected by `isTheFirstSelected`
    /// and refreshes the corresponding upload button.
    func didSelectedImage(with img: UIImage) {
        let image = STFaceProTool.fixOrientation(img)
        if self.isTheFirstSelected {
            self.oneImage = image
            self.mainView.refreshUploadButton(index: 0, image: image)
        } else {
            self.anotherImage = image
            self.mainView.refreshUploadButton(index: 1, image: image)
        }
        self.mainView.resetCompareBtn()
    }

    func didCancel() {
        print("STFaceProImagePickerDelegate didCancel")
    }

    // MARK: - STFacePro1VS1ViewDelegate
    func firstButtonAction(button: UIButton) {
        self.isTheFirstSelected = true
        self.imagePicker.showActionSheet(with: button)
    }

    func secondButtonAction(button: UIButton) {
        self.isTheFirstSelected = false
        self.imagePicker.showActionSheet(with: button)
    }

    /// Drops both images and their cached features.
    func didResetButton() {
        self.oneImage = nil
        self.anotherImage = nil
        self.clearFeature()
    }

    /// Runs the full 1:1 pipeline (detect both images, then compare) on the
    /// background queue, keeping the loading indicator up until it finishes.
    func compareButtonAction(button: UIButton) {
        guard let oneImage = self.oneImage, let anotherImage = self.anotherImage else {
            self.showAlert(title: NSLocalizedString("tips", comment: ""),
                            message: NSLocalizedString("reuploadValidPhoto", comment: ""),
                            handler: nil)
            return
        }

        // Clear feature
        self.clearFeature()
        self.loadingView.startAnimating()
        self.queue.async {
            let imageArr = [oneImage, anotherImage]
            var detectResult = true
            for (index, image) in imageArr.enumerated() {
                let result = self.detect(image: image, index: index)
                if !result {
                    DispatchQueue.main.async {
                        // Reset UI and data for the failed slot
                        self.mainView.refreshUploadButton(index: index, image: nil)
                        if index == 0 {
                            self.oneImage = nil
                        } else {
                            self.anotherImage = nil
                        }
                    }
                }
                detectResult = (detectResult && result)
            }

            // Image detect failed
            if !detectResult {
                DispatchQueue.main.async {
                    self.loadingView.stopAnimating()
                }
                return
            }

            // Feature Comparison 1:1
            self.compare()
            DispatchQueue.main.async {
                self.loadingView.stopAnimating()
            }
        }
    }

    /// Detects a face in `image`, extracts its feature and updates the button
    /// at `index`. Returns false when no face was found.
    /// NOTE(review): the return value relies on the SDK invoking both callbacks
    /// synchronously before `detect(...)` returns — confirm against SDK docs.
    func detect(image: UIImage, index: Int) -> Bool {
        let inputImage = self.convertTool.inputImage(with: image)
        var isDetectSuccess = false
        // Face detector
        apiResult = self.faceDetector.detect(withInputImage: inputImage, faceOrientation: .UP) { [weak self] (detectionResult, landMarks, count) in
            // FIX: the original force-unwrapped `self!` below; use a guarded
            // strong reference so a deallocated controller cannot crash here.
            guard let self = self else { return }
            if count == 0 { // NO Face
                DispatchQueue.main.async {
                    self.mainView.refreshHint(type: .reload)
                }
                return
            } else {
                isDetectSuccess = true
            }

            // Feature extraction
            self.apiResult = self.featureExtraction.extract(withInputImage: inputImage, landmarks: landMarks, count: count) { [weak self] (featureArrayPointer) in
                guard let self = self else { return }
                let buffer = UnsafeBufferPointer(start: featureArrayPointer, count: Int(count))
                let featureArray = Array(buffer)

                // Get the face frame from the detection result
                let detectionResults = self.convertTool.detectionResultArray(withDetectionResult: detectionResult, count: count)
                let faceRect = self.faceRect(with: detectionResults)

                // Save feature
                self.saveFeature((index == 0), value: featureArray[0])

                // Update button ui
                self.updateBtn(index: index, faceRect: faceRect, image: image)
            }
        }
        // After using the data, remember to destroy it to avoid memory leaks
        self.convertTool.releaseInputImageFromImage()
        return isDetectSuccess
    }

    /// Stores `value` into the first or second feature slot, releasing any
    /// previously held buffer first so ownership is never leaked.
    func saveFeature(_ isTheFirstSelected: Bool, value: UnsafeMutablePointer<Int8>) {
        if isTheFirstSelected {
            if let feature = self.oneFeature {
                self.featureExtraction.releaseFeature(feature)
            }
            self.oneFeature = value
            print("this is one")
        } else {
            if let feature = self.anotherFeature {
                self.featureExtraction.releaseFeature(feature)
            }
            self.anotherFeature = value
            print("this is another")
        }
    }

    // Feature Comparison 1:1 — no-op unless both features are present
    func compare() {
        if let feature = self.oneFeature, let anotherFeature = self.anotherFeature {
            apiResult = self.featureComparison.compare(withFeature: feature, anotherFeature: anotherFeature, scoreBlock: {[weak self] (score) in
                DispatchQueue.main.async {
                    self?.mainView.refreshScore(score)
                }
                print("---featureComparison---1:1--- score \(score)")
            })
        }
    }

    /// Draws the detected face frame onto `image` and refreshes the button at `index`.
    func updateBtn(index: Int, faceRect: CGRect, image: UIImage) {
        DispatchQueue.main.async {
            // Draw the face frame on the image
            let image = STFaceProTool.draw(faceRect, image: image, color: textYellowColor)
            self.mainView.refreshUploadButton(index: index, image: image)
        }
    }

    // Get the first face's frame as a CGRect (left/top/right/bottom -> origin/size)
    func faceRect(with detectionResult: [STFaceProDetectionResult]) -> CGRect {
        let resultFaceRect = detectionResult[0].faceRect
        let faceRect = CGRect(x: resultFaceRect.left,
                              y: resultFaceRect.top,
                              width: abs(resultFaceRect.right - resultFaceRect.left),
                              height: abs(resultFaceRect.bottom - resultFaceRect.top))
        return faceRect
    }

    // Create detector / extraction / comparison handles from bundled model files
    func createDetector() {
        let bundlePath = Bundle.main.path(forResource: "st_facepro_resource", ofType: "bundle") ?? ""
        let occlusionModelPath = bundlePath + "/model" + "/M_Align_occlusion.model"
        let verifyModelPath = bundlePath + "/model" + "/M_Verify_Mobilenetv2Pruned.model"

        apiResult = self.faceDetector.create(withModelPath: occlusionModelPath, config: .ANY_FACE)

        apiResult = self.featureExtraction.create(withModelPath: verifyModelPath)
        apiResult = self.featureComparison.create(withModelPath: verifyModelPath)
    }

    /// Wires delegates and installs the main and loading views.
    func addViews() {
        self.imagePicker.vcForShow = self
        self.imagePicker.delegate = self

        self.mainView.viewDelegate = self
        self.view.addSubview(self.mainView)

        self.view.addSubview(self.loadingView)
    }

    lazy var mainView: STFacePro1VS1View = {
        let view = STFacePro1VS1View(frame: CGRect(x: 0, y: 0, width: screenWidth, height: screenHeight))
        return view
    }()
}
                  
package com.sensetime.senseid.facepro;

import org.opencv.core.*;
import org.opencv.imgcodecs.*;

import com.sensetime.senseid.facepro.common.*;
import com.sensetime.senseid.facepro.liveness.*;

public class Detect_OneVSOne {
  public final static int[] face_orientation_list = { FaceproLibrary.STID_ORIENTATION_UP, FaceproLibrary.STID_ORIENTATION_LEFT,
      FaceproLibrary.STID_ORIENTATION_DOWN, FaceproLibrary.STID_ORIENTATION_RIGHT };
  public static void main(String[] args) {
    // TODO Auto-generated method stub
    if (args.length < 3) {
      System.out.println("test_sample_face_detect_1vs1 [ini file path] [input image1] [image2 input] (or add [image face orientation])");
      System.out.println("orientation index : 0:up, 1:left, 2:down, 3:right");
      System.out.println("for example: ./test_sample_face_detect_1vs1 ./test.int face_01.jpg face_02.jpg 0");
      return;
    }

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    FaceproLibrary mLibrary = new FaceproLibrary();

    IniReader mIniReader = null;
    try {
      mIniReader = new IniReader(args[0]);
    } catch (Exception e) {
      e.printStackTrace();
    }
    String license_file = mIniReader.getValue("resource", "license_filepath");
    if (license_file == null) {
      System.out.println("ini file has no license_filepath");
      return;
    }
    String activationcode_path = mIniReader.getValue("resource", "activation_code_dir");
    if (activationcode_path == null) {
      System.out.println("ini file has no activationcode_path");
      return;
    }
    int err = LoadLicense.loadLicense(mLibrary, license_file, activationcode_path);
    if (err != 0) {
      System.out.println("load license error");
      return;
    }
    else {
      System.out.println("load license ok");
    }

    int iamge_orientation = FaceproLibrary.STID_ORIENTATION_UP;
    if (args.length == 4) {
      int orientation_index = Integer.parseInt(args[3]);
      if (orientation_index >= 0 && orientation_index <= 3) {
        iamge_orientation = face_orientation_list[orientation_index];
      } else {
        iamge_orientation = FaceproLibrary.STID_ORIENTATION_UP;
      }
    }

    int detect_config = 0;
    String detect_config_str = mIniReader.getValue("others", "detector_config");
    if (detect_config_str == null) {
      detect_config = FaceproLibrary.STID_FACEPRO_DETECTOR_CONFIG_LARGE_FACE;
    } else {
      if (detect_config_str == "large") {
        detect_config = FaceproLibrary.STID_FACEPRO_DETECTOR_CONFIG_LARGE_FACE;
      }
      else if (detect_config_str == "small") {
        detect_config = FaceproLibrary.STID_FACEPRO_DETECTOR_CONFIG_SMALL_FACE;
      }
      else if (detect_config_str == "any") {
        detect_config = FaceproLibrary.STID_FACEPRO_DETECTOR_CONFIG_ANY_FACE;
      }
      else {
        detect_config = FaceproLibrary.STID_FACEPRO_DETECTOR_CONFIG_LARGE_FACE;
      }
    }

    int alignment_count = 0;
    String alignment_count_str = mIniReader.getValue("others", "alignment_count");
    if(alignment_count_str == null){
      alignment_count = 1;
    }
    else{
      alignment_count = Integer.parseInt(alignment_count_str);
      if(alignment_count < 0 || alignment_count > 0xff) {
        alignment_count = 1;
      }
    }

    HandleResult mDetectorHandle = null;
    HandleResult mVerifyHandle = null;
    HandleResult mCompareHandle = null;

    do {
      String alignment_model_path = mIniReader.getValue("resource", "alignment_model_filepath");
      mDetectorHandle = mLibrary.createDetector(alignment_model_path, detect_config | alignment_count);
      if (mDetectorHandle.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Create detect handle error: " + mDetectorHandle.getResultCode());
        break;
      }

      String verify_model_path = mIniReader.getValue("resource", "verify_model_filepath");
      mVerifyHandle = mLibrary.featureExtractionCreateHandle(verify_model_path);
      if (mVerifyHandle.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Create verify handle error: " + mVerifyHandle.getResultCode());
        break;
      }

      String compare_model_path = mIniReader.getValue("resource", "compare_model_filepath");
      mCompareHandle = mLibrary.featureComparisonCreateHandle(compare_model_path);
      if (mCompareHandle.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Create compare handle error: " + mCompareHandle.getResultCode());
        break;
      }

      Mat image_buff1 = null;
      try {
        image_buff1 = Imgcodecs.imread(args[1]);
      } catch (Exception e) {
        e.printStackTrace();
      }

      if(image_buff1 == null) {
        System.out.printf("Load image %s fails.", args[1]);
        break;
      }
      StidImage image1 = ImageConvert.imageCvToStid(image_buff1);

      Mat image_buff2 = null;
      try {
        image_buff2 = Imgcodecs.imread(args[2]);
      } catch (Exception e) {
        e.printStackTrace();
      }

      if(image_buff2 == null) {
        System.out.printf("Load image %s fails.", args[2]);
        break;
      }
      StidImage image2 = ImageConvert.imageCvToStid(image_buff2);

      DetectorResults mImg1DetectorResults = mLibrary.detector(
          mDetectorHandle.getHandle(),
          image1,
          iamge_orientation);
      if(mImg1DetectorResults.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Detector error: " + mImg1DetectorResults.getResultCode());
        break;
      }
      if (mImg1DetectorResults.getDetectionResults() == null || mImg1DetectorResults.getDetectionResults().isEmpty() ||
        mImg1DetectorResults.getLandmarksResults() == null || mImg1DetectorResults.getLandmarksResults().isEmpty()) {
        System.out.println("Img1 detect no face");
        break;
      }

      DetectorResults mImg2DetectorResults = mLibrary.detector(
          mDetectorHandle.getHandle(),
          image2,
          iamge_orientation);
      if(mImg2DetectorResults.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Detector error: " + mImg2DetectorResults.getResultCode());
        break;
      }
      if (mImg2DetectorResults.getDetectionResults() == null || mImg2DetectorResults.getDetectionResults().isEmpty() ||
        mImg2DetectorResults.getLandmarksResults() == null || mImg2DetectorResults.getLandmarksResults().isEmpty()) {
        System.out.println("Img2 detect no face");
        break;
      }

      Landmarks img1Landmarks = mImg1DetectorResults.getLandmarksResults().get(0);
      Landmarks img2Landmarks = mImg2DetectorResults.getLandmarksResults().get(0);

      StringResult img1VerifyResult = mLibrary.getFeature(
          mVerifyHandle.getHandle(),
          image1,
          img1Landmarks);
      if(img1VerifyResult.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Get img1 feature failed, err : " + img1VerifyResult.getResultCode());
        break;
      }
      StringResult img2VerifyResult = mLibrary.getFeature(
          mVerifyHandle.getHandle(),
          image2,
          img2Landmarks);
      if(img2VerifyResult.getResultCode() != FaceproLibrary.STID_OK) {
        System.out.println("Get img2 feature failed, err : " + img2VerifyResult.getResultCode());
        break;
      }

      FloatResult compareResult = null;
      if (img1VerifyResult.getString() != null && img2VerifyResult.getString() != null) {
        compareResult = mLibrary.featureComparisonCompare(mCompareHandle.getHandle(), img1VerifyResult.getString(),
            img2VerifyResult.getString());
        if(compareResult.getResultCode() != FaceproLibrary.STID_OK) {
          System.out.println("Compare two img failed, err : " + compareResult.getResultCode());
          break;
        }
        String compartStr = String.format("%.3f", compareResult == null ? 0.0F : compareResult.getFloat());
        System.out.println("score : " + compartStr);
        if (compareResult.getFloat() > 0.5) {
          System.out.println("The same person");
        } else {
          System.out.println("Different person");
        }
      }
    }while(false);

    if(mDetectorHandle != null){
      mLibrary.destroyDetector(mDetectorHandle.getHandle());
    }
    if(mVerifyHandle != null){
      mLibrary.featureExtractionDestroyHandle(mVerifyHandle.getHandle());
    }
    if(mCompareHandle != null){
      mLibrary.featureComparisonDestroyHandle(mCompareHandle.getHandle());
    }
    System.out.println("Test finish!");
  }
}
                  
#include <opencv2/opencv.hpp>
#include <stdio.h>
#include "helper.h"

using namespace std;
using namespace cv;

// Orientation values indexed by the command-line orientation argument (0:up, 1:left, 2:down, 3:right).
const vector<stid_orientation_t> face_orientation_list = {STID_ORIENTATION_UP, STID_ORIENTATION_LEFT, STID_ORIENTATION_DOWN, STID_ORIENTATION_RIGHT};

int main(int argc, char *argv[]) {
  if (argc < 4) {
    fprintf(stderr, "test_sample_facepro_detector_1vs1 [ini file path] [input image1] [image2 input] (or add [image face orientation])\n");
    fprintf(stderr, "orientation index : 0:up, 1:left, 2:right, 3:down\n");
    fprintf(stderr, "for example: ./test_sample_facepro_detector_1vs1 ./test.ini face_01.jpg face_02.jpg 0\n");
    return -1;
  }

  std::string ini_file_name = string(argv[1]);
  stid_orientation_t iamge_orientation;
  if(argc >= 5){
    int orientation_index = atoi(argv[4]);
    if(orientation_index >= 0 && orientation_index <=3){
      iamge_orientation = face_orientation_list[orientation_index];
    }
    else{
      iamge_orientation = STID_ORIENTATION_UP;
    }
  }
  else{
    iamge_orientation = STID_ORIENTATION_UP;
  }

  IniParser ini_parser;
  if(0 != ini_parser.open(ini_file_name)){
    fprintf(stderr, "ini file is invalid\n");
    return -1;
  }
  std::string license_file = "";
  std::string activationcode_path = "";
  int err = ini_parser.getStringValue("license_filepath", license_file);
  if(0 != err){
    fprintf(stderr, "ini file has no license_filepath\n");
    return -1;
  }
  err = ini_parser.getStringValue("activation_code_dir", activationcode_path);
  if(0 != err){
    fprintf(stderr, "ini file has no activation_code_filepath\n");
    return -1;
  }

  int load_ret = LoadLicense(license_file, activationcode_path);
  if(STID_OK != load_ret){
    fprintf(stderr, "load licesne fail, err is %d\n", load_ret);
    return -1;
  }

  stid_result_t ret = STID_OK;

  // Use opencv's function to load image,bgr_image has a format
  // STID_PIX_FMT_BGR888
  Mat bgr_image_1, bgr_image_2;
  bgr_image_1 = imread(argv[2]); // STID_PIX_FMT_BGR888
  if (!bgr_image_1.data) {
    fprintf(stderr, "fail to read %s\n", argv[2]);
    return -1;
  }

  // Use opencv's function to load image,bgr_image has a format
  // STID_PIX_FMT_BGR888
  bgr_image_2 = imread(argv[3]);
  if (!bgr_image_2.data) {
    fprintf(stderr, "fail to read %s\n", argv[3]);
    return -1;
  }

  stid_image_t image1 = {bgr_image_1.data, STID_PIX_FMT_BGR888, (int)bgr_image_1.cols, (int)bgr_image_1.rows, (int)bgr_image_1.step};
  stid_image_t image2 = {bgr_image_2.data, STID_PIX_FMT_BGR888, (int)bgr_image_2.cols, (int)bgr_image_2.rows, (int)bgr_image_2.step};

  stid_handle_t detector_handle = nullptr;
  stid_handle_t verify_handle = nullptr;
  stid_handle_t compare_handle = nullptr;
  stid_detection_result_t* detector_result1 = nullptr;
  stid_landmarks_t* landmarks_result1 = nullptr;
  stid_detection_result_t* detector_result2 = nullptr;
  stid_landmarks_t* landmarks_result2 = nullptr;
  int face_count_1 = 0;
  int face_count_2 = 0;

  do{
    // Init face detector handle
    string detector_config_str = "";
    int detector_config = 0;
    err = ini_parser.getStringValue("detector_config", detector_config_str);
    if(0 != err){
      detector_config = STID_FACEPRO_DETECTOR_CONFIG_LARGE_FACE;
    }
    else{
      if(detector_config_str == "large") detector_config = STID_FACEPRO_DETECTOR_CONFIG_LARGE_FACE;
      else if(detector_config_str == "small") detector_config = STID_FACEPRO_DETECTOR_CONFIG_SMALL_FACE;
      else if(detector_config_str == "any") detector_config = STID_FACEPRO_DETECTOR_CONFIG_ANY_FACE;
      else detector_config = STID_FACEPRO_DETECTOR_CONFIG_LARGE_FACE;
    }

    int alignment_count = 0;
    err = ini_parser.getIntValue("alignment_count", alignment_count);
    if(0 != err){
      alignment_count = 0;
    }
    else if(alignment_count < 0 || alignment_count > 0xff){
      alignment_count = 1;
    }

    double compare_threshold = 0.0;
    err = ini_parser.getDoubleValue("compare_threshold", compare_threshold);
    if (0 != err){
      compare_threshold = 0.5;
    }
    if (compare_threshold <= 0.0 || compare_threshold >= 1.0){
      compare_threshold = 0.5;
    }

    std::string alignment_model_filepath = "";
    err = ini_parser.getStringValue("alignment_model_filepath", alignment_model_filepath);
    if(0 != err){
      fprintf(stderr, "ini file has no alignment model path\n");
      break;
    }
    else{
      ret = stid_facepro_detector_create_handle(&detector_handle, alignment_model_filepath.c_str(), detector_config | alignment_count);
    }
    if (ret != STID_OK) {
      fprintf(stderr, "fail to init detector handle, error code %d\n", ret);
      break;
    }

    // Face detect
    ret = stid_facepro_detector_detect(detector_handle, &image1, iamge_orientation, &detector_result1, &landmarks_result1, &face_count_1);
    if (ret != STID_OK) {
      fprintf(stderr, "stid_facepro_detector_detect failed, error code : %d\n", ret);
      break;
    }
    // Face detect
    ret = stid_facepro_detector_detect(detector_handle, &image2, iamge_orientation, &detector_result2, &landmarks_result2, &face_count_2);
    if (ret != STID_OK) {
      fprintf(stderr, "stid_facepro_detector_detect failed, error code : %d\n", ret);
      break;
    }

    if (face_count_1 > 0 && face_count_2 > 0) {
      std::string verify_model_filepath = "";
      err = ini_parser.getStringValue("verify_model_filepath", verify_model_filepath);
      if(0 != err){
        fprintf(stderr, "ini file has no verify model path\n");
        break;
      }
      else{
        ret = stid_facepro_feature_extraction_create_handle(&verify_handle, verify_model_filepath.c_str());
      }
      if (ret != STID_OK) {
        fprintf(stderr, "create verify handle failed, error code %d\n", ret);
        break;
      }

      std::string compare_model_filepath = "";
      err = ini_parser.getStringValue("compare_model_filepath", compare_model_filepath);
      if(0 != err){
        fprintf(stderr, "ini file has no compare model path\n");
        break;
      }
      else{
        ret = stid_facepro_feature_comparison_create_handle(&compare_handle, compare_model_filepath.c_str());
      }
      if (ret != STID_OK) {
        fprintf(stderr, "create compare handle failed, error code %d\n", ret);
        break;
      }

      if (verify_handle && compare_handle) {
        int version = 0;
        ret = stid_facepro_feature_extraction_get_version(verify_handle, &version);
        if(STID_OK == ret){
          fprintf(stderr, "version is %d\n", version);
        }
        char* image_feature1 = nullptr;
        ret = stid_facepro_feature_extraction_extract(verify_handle, &image1, &landmarks_result1[0], &image_feature1);
        if (ret != STID_OK) {
          fprintf(stderr, "stid_facepro_verify_get_feature failed, error code %d\n", ret);
          break;
        }

        char* image_feature2 = nullptr;
        ret = stid_facepro_feature_extraction_extract(verify_handle, &image2, &landmarks_result2[0], &image_feature2);
        if (ret != STID_OK) {
          fprintf(stderr, "stid_facepro_verify_get_feature failed, error code %d\n", ret);
          stid_facepro_feature_release_feature(image_feature1);
          break;
        }

        if(image_feature1 && image_feature2){
          float compare_ret = 0.0;
          ret = stid_facepro_feature_comparison_compare(compare_handle, image_feature1, image_feature2, &compare_ret);

          if (ret == STID_OK) {
            fprintf(stderr, "score: %f\n", compare_ret);
            // comapre score with compare_threshold
            if (compare_ret > (float)compare_threshold)
              fprintf(stderr, "the same person.\n");
            else
              fprintf(stderr, "different people.\n");
          } else {
            fprintf(stderr, "stid_facepro_feature_comparison_compare failed, error code : %d\n", ret);
          }
        }
        stid_facepro_feature_release_feature(image_feature1);
        stid_facepro_feature_release_feature(image_feature2);
      }
    }
    else{
      if (face_count_1 == 0) {
        fprintf(stderr, "can't find face in %s\n", argv[2]);
      }
      if (face_count_2 == 0) {
        fprintf(stderr, "can't find face in %s\n", argv[3]);
      }
    }
  }while(0);

  stid_facepro_release_detection_results_array(detector_result1, face_count_1);
  stid_facepro_release_landmarks_array(landmarks_result1, face_count_1);
  stid_facepro_release_detection_results_array(detector_result2, face_count_2);
  stid_facepro_release_landmarks_array(landmarks_result2, face_count_2);

  stid_facepro_feature_extraction_destroy_handle(verify_handle);
  stid_facepro_feature_comparison_destroy_handle(compare_handle);
  stid_facepro_detector_destroy_handle(detector_handle);

  fprintf(stderr, "test finish!\n");
  fprintf(stderr, "press any key to exit.\n");
  getchar();

  return 0;
}
                  
  • 1:1認証 画面イメージ