inference_engine.hpp

// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#ifndef OPENCV_DNN_UTILS_INF_ENGINE_HPP
#define OPENCV_DNN_UTILS_INF_ENGINE_HPP

#include "../dnn.hpp"

namespace cv { namespace dnn {
CV__DNN_EXPERIMENTAL_NS_BEGIN

/* Values for 'OPENCV_DNN_BACKEND_INFERENCE_ENGINE_TYPE' parameter */
#define CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API "NN_BUILDER"
#define CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH "NGRAPH"

/** @brief Returns Inference Engine internal backend API.
 *
 * See values of `CV_DNN_BACKEND_INFERENCE_ENGINE_*` macros.
 *
 * Default value is controlled through `OPENCV_DNN_BACKEND_INFERENCE_ENGINE_TYPE` runtime parameter (environment variable).
 */
CV_EXPORTS_W cv::String getInferenceEngineBackendType();

/** @brief Specify Inference Engine internal backend API.
 *
 * See values of `CV_DNN_BACKEND_INFERENCE_ENGINE_*` macros.
 *
 * @returns previous value of internal backend API
 */
CV_EXPORTS_W cv::String setInferenceEngineBackendType(const cv::String& newBackendType);
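
// Usage sketch (illustrative, not part of the upstream header): query the
// current Inference Engine backend API and switch it before loading a model.
// The model file names below are hypothetical; assumes OpenCV was built with
// Inference Engine / OpenVINO support.
//
//     cv::String prev = cv::dnn::getInferenceEngineBackendType();
//     cv::dnn::setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
//     cv::dnn::Net net = cv::dnn::readNet("model.xml", "model.bin");
//     net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
//     // ... run inference ...
//     cv::dnn::setInferenceEngineBackendType(prev);  // restore the previous API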

/** @brief Release a Myriad device (bound by OpenCV).
 *
 * A single Myriad device cannot be shared across multiple processes that use
 * Inference Engine's Myriad plugin.
 */
CV_EXPORTS_W void resetMyriadDevice();
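
// Usage sketch (illustrative, not part of the upstream header): release the
// Myriad stick once this process is done with it, so another process can bind
// the device. Assumes the network ran with DNN_TARGET_MYRIAD; `net` and
// `blob` are hypothetical.
//
//     net.setPreferableTarget(cv::dnn::DNN_TARGET_MYRIAD);
//     net.setInput(blob);
//     cv::Mat out = net.forward();
//     // ... done with the device in this process ...
//     cv::dnn::resetMyriadDevice();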

/* Values for 'OPENCV_DNN_IE_VPU_TYPE' parameter */
#define CV_DNN_INFERENCE_ENGINE_VPU_TYPE_UNSPECIFIED ""
/// Intel(R) Movidius(TM) Neural Compute Stick, NCS (USB 03e7:2150), Myriad2 (https://software.intel.com/en-us/movidius-ncs)
#define CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2 "Myriad2"
/// Intel(R) Neural Compute Stick 2, NCS2 (USB 03e7:2485), MyriadX (https://software.intel.com/ru-ru/neural-compute-stick)
#define CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X "MyriadX"

#define CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE "ARM_COMPUTE"
#define CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86 "X86"

/** @brief Returns Inference Engine VPU type.
 *
 * See values of `CV_DNN_INFERENCE_ENGINE_VPU_TYPE_*` macros.
 */
CV_EXPORTS_W cv::String getInferenceEngineVPUType();
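
// Usage sketch (illustrative, not part of the upstream header): branch on the
// detected VPU generation, e.g. to enable a code path that is only supported
// on MyriadX hardware.
//
//     cv::String vpu = cv::dnn::getInferenceEngineVPUType();
//     if (vpu == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
//     {
//         // NCS2 / MyriadX specific path
//     }
//     else if (vpu == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_UNSPECIFIED)
//     {
//         // no VPU detected or its type could not be determined
//     }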

/** @brief Returns Inference Engine CPU type.
 *
 * Specifies the OpenVINO plugin: CPU or ARM.
 * See values of `CV_DNN_INFERENCE_ENGINE_CPU_TYPE_*` macros.
 */
CV_EXPORTS_W cv::String getInferenceEngineCPUType();
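
// Usage sketch (illustrative, not part of the upstream header): check whether
// the DNN module will use the ARM Compute based OpenVINO plugin or the
// regular x86 CPU plugin.
//
//     cv::String cpu = cv::dnn::getInferenceEngineCPUType();
//     bool usesArmPlugin = (cpu == CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE);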

CV__DNN_EXPERIMENTAL_NS_END
}}  // namespace

#endif  // OPENCV_DNN_UTILS_INF_ENGINE_HPP