Publishing 2019 R2 content (#223)
asuhov authored and openvino-pushbot committed Aug 9, 2019
1 parent c585b53 commit ba6e22b
Showing 3,651 changed files with 294,517 additions and 170,741 deletions.
The diff is too large to display in full; only the first 3,000 changed files are shown.
5 changes: 5 additions & 0 deletions .gitmodules
@@ -1,3 +1,8 @@
[submodule "inference-engine/thirdparty/ade"]
path = inference-engine/thirdparty/ade
url = https://github.com/opencv/ade.git
ignore = dirty
[submodule "inference-engine/thirdparty/ngraph"]
path = inference-engine/thirdparty/ngraph
url = https://github.com/NervanaSystems/ngraph.git
ignore = dirty
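With `ngraph` now registered as a submodule alongside `ade`, a fresh checkout has to pull both before the Inference Engine will build. A minimal sketch using standard git commands (nothing here is specific to this commit beyond the submodule paths above):

```sh
git clone https://github.com/opencv/dldt.git
cd dldt
# fetches inference-engine/thirdparty/ade and the newly added inference-engine/thirdparty/ngraph
git submodule update --init --recursive
```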
2 changes: 1 addition & 1 deletion README.md
@@ -1,5 +1,5 @@
# [OpenVINO™ Toolkit](https://01.org/openvinotoolkit) - Deep Learning Deployment Toolkit repository
-[![Stable release](https://img.shields.io/badge/version-2019.R1-green.svg)](https://github.com/opencv/dldt/releases/tag/2019_R1)
+[![Stable release](https://img.shields.io/badge/version-2019.R2-green.svg)](https://github.com/opencv/dldt/releases/tag/2019_R2)
[![Apache License Version 2.0](https://img.shields.io/badge/license-Apache_2.0-green.svg)](LICENSE)

This toolkit allows developers to deploy pre-trained deep learning models through a high-level C++ Inference Engine API integrated with application logic.
151 changes: 27 additions & 124 deletions inference-engine/CMakeLists.txt
@@ -2,149 +2,52 @@
# SPDX-License-Identifier: Apache-2.0
#

cmake_minimum_required(VERSION 3.5 FATAL_ERROR)

project(InferenceEngine)

set(DEV_BUILD TRUE)

include(CTest)

## WA for problem with gtest submodule. It cannot detect uint32 type.
## remove Gtest submodule and this two lines together
include (CheckTypeSize)
check_type_size (uint32_t uint32_t LANGUAGE CXX)

if (UNIX AND NOT APPLE)
set(LINUX TRUE)
endif()

option (OS_FOLDER "create OS dedicated folder in output" OFF)

if(CMAKE_SYSTEM_PROCESSOR STREQUAL "armv7l")
set (ARCH_FOLDER armv7l)
elseif("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set (ARCH_FOLDER intel64)
else()
set (ARCH_FOLDER ia32)
endif()

if (OS_FOLDER)
message ("**** OS FOLDER IS: [${OS_FOLDER}]")
if ("${OS_FOLDER}" STREQUAL "ON")
message ("**** USING OS FOLDER: [${CMAKE_SYSTEM_NAME}]")
set (BIN_FOLDER bin/${CMAKE_SYSTEM_NAME}/${ARCH_FOLDER})
else()
set (BIN_FOLDER bin/${OS_FOLDER}/${ARCH_FOLDER})
endif()
if (APPLE)
# due to https://cmake.org/cmake/help/v3.12/policy/CMP0068.html
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
else()
set (BIN_FOLDER bin/${ARCH_FOLDER})
endif()

set (IE_MAIN_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
set (CMAKE_MODULE_PATH "${IE_MAIN_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})

#printing debug messages
include (debug)

if("${CMAKE_BUILD_TYPE}" STREQUAL "")
debug_message(STATUS "CMAKE_BUILD_TYPE not defined, 'Release' will be used")
set(CMAKE_BUILD_TYPE "Release")
endif()
message(STATUS "BUILD_CONFIGURATION: ${CMAKE_BUILD_TYPE}")

if(COVERAGE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage -O0")
cmake_minimum_required(VERSION 3.7.2 FATAL_ERROR)
endif()

if (UNIX)
SET(LIB_DL ${CMAKE_DL_LIBS})
endif()

set (OUTPUT_ROOT ${IE_MAIN_SOURCE_DIR})

include(os_flags)

#resolving dependencies for the project
include (dependencies)
project(InferenceEngine)

set(CMAKE_DEBUG_POSTFIX ${IE_DEBUG_POSTFIX})
set(CMAKE_RELEASE_POSTFIX ${IE_RELEASE_POSTFIX})
set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
set(IE_MAIN_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})

if (WIN32)
# Support CMake multiconfiguration for Visual Studio build
set(IE_BUILD_POSTFIX $<$<CONFIG:Debug>:${IE_DEBUG_POSTFIX}>$<$<CONFIG:Release>:${IE_RELEASE_POSTFIX}>)
set(IE_BUILD_CONFIGURATION $<CONFIG>)
else ()
if (${CMAKE_BUILD_TYPE} STREQUAL "Debug" )
set(IE_BUILD_POSTFIX ${IE_DEBUG_POSTFIX})
else()
set(IE_BUILD_POSTFIX ${IE_RELEASE_POSTFIX})
endif()
set(IE_BUILD_CONFIGURATION ${CMAKE_BUILD_TYPE})
endif()
include(CTest)
include(features)

add_definitions(-DIE_BUILD_POSTFIX=\"${IE_BUILD_POSTFIX}\")

if(NOT(UNIX))
if (WIN32)
#set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /MT")
#set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /MTd")
endif()
set (CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
set (CMAKE_LIBRARY_PATH ${OUTPUT_ROOT}/${BIN_FOLDER})
set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
set (CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
set (CMAKE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
set (LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER})
set (LIBRARY_OUTPUT_PATH ${LIBRARY_OUTPUT_DIRECTORY}) # compatibility issue: linux uses LIBRARY_OUTPUT_PATH, windows uses LIBRARY_OUTPUT_DIRECTORY
else ()
set (CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION}/lib)
set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION}/lib)
set (CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION})
set (CMAKE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION})
set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION})
set (LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION}/lib)
set (LIBRARY_OUTPUT_PATH ${LIBRARY_OUTPUT_DIRECTORY}/lib)
endif()
# include developer package
include(developer_package)

if (APPLE)
set(CMAKE_MACOSX_RPATH 1)
endif(APPLE)
# These options are shared with 3rdparty plugins
# by means of developer package
include(check_features)

#rpath fully disabled
if (NOT ENABLE_PLUGIN_RPATH)
SET (CMAKE_SKIP_RPATH TRUE)
endif()

#Use solution folders.
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
#message("=====================> ${CMAKE_BUILD_TYPE} <=====================")
# resolving dependencies for the project
include(dependencies)

message (STATUS "PROJECT ............................... " ${PROJECT_NAME})
message (STATUS "CMAKE_BINARY_DIR ...................... " ${CMAKE_BINARY_DIR})
message (STATUS "IE_MAIN_SOURCE_DIR .................... " ${IE_MAIN_SOURCE_DIR})
message (STATUS "CMAKE_GENERATOR ....................... " ${CMAKE_GENERATOR})
message (STATUS "CMAKE_C_COMPILER_ID ................... " ${CMAKE_C_COMPILER_ID})
message (STATUS "CMAKE_BUILD_TYPE ...................... " ${CMAKE_BUILD_TYPE})

include(sdl)

set (CMAKE_POSITION_INDEPENDENT_CODE ON)

include (sanitizer)

include(CheckCXXCompilerFlag)
add_subdirectory(src)

include(cpplint)
if(ENABLE_TESTS)
add_subdirectory(tests)
endif()

add_subdirectory(src)
add_subdirectory(tests)
add_subdirectory(thirdparty)
set(InferenceEngine_DIR "${CMAKE_BINARY_DIR}")

#to be able to link
set (LIB_FOLDER ${IE_MAIN_SOURCE_DIR}/${BIN_FOLDER}/${IE_BUILD_CONFIGURATION}/lib)
add_subdirectory(tools)

if (ENABLE_SAMPLES)
# hint for find_package(InferenceEngine in the samples folder)
set(InferenceEngine_DIR "${CMAKE_BINARY_DIR}")
endif()

# gflags and format_reader targets are kept inside of samples directory and
# they must be built even if samples build is disabled (required for tests and tools).
12 changes: 6 additions & 6 deletions inference-engine/README.md
@@ -63,7 +63,7 @@ The software was validated on:
git submodule update --recursive
```
2. Install build dependencies using the `install_dependencies.sh` script in the project root folder.
-3. By default, the build enables the Inference Engine GPU plugin to infer models on your Intel® Processor Graphics. This requires you to [Install Intel® Graphics Compute Runtime for OpenCL™ Driver package 19.04.12237](https://github.com/intel/compute-runtime/releases/tag/19.04.12237) before running the build. If you don't want to use the GPU plugin, use the `-DENABLE_CLDNN=ON` CMake build option and skip the installation of the Intel® Graphics Compute Runtime for OpenCL™ Driver.
+3. By default, the build enables the Inference Engine GPU plugin to infer models on your Intel® Processor Graphics. This requires you to [Install Intel® Graphics Compute Runtime for OpenCL™ Driver package 19.04.12237](https://github.com/intel/compute-runtime/releases/tag/19.04.12237) before running the build. If you don't want to use the GPU plugin, use the `-DENABLE_CLDNN=OFF` CMake build option and skip the installation of the Intel® Graphics Compute Runtime for OpenCL™ Driver.
4. Create a build folder:
```sh
mkdir build && cd build
@@ -82,7 +82,7 @@ You can use the following additional build options:
- To switch to OpenBLAS\* implementation, use the `GEMM=OPENBLAS` option and `BLAS_INCLUDE_DIRS` and `BLAS_LIBRARIES` CMake options to specify path to the OpenBLAS headers and library. For example use the following options on CentOS\*: `-DGEMM=OPENBLAS -DBLAS_INCLUDE_DIRS=/usr/include/openblas -DBLAS_LIBRARIES=/usr/lib64/libopenblas.so.0`.
-- To switch to the optimized MKL-ML\* GEMM implementation, use `-DGEMM=MKL` and `-DMKLROOT=<path_to_MKL>` CMake options to specify a path to unpacked MKL-ML with the `include` and `lib` folders. MKL-ML\* package can be downloaded from the [MKL-DNN repository](https://github.com/intel/mkl-dnn/releases/download/v0.17/mklml_lnx_2019.0.1.20180928.tgz).
+- To switch to the optimized MKL-ML\* GEMM implementation, use `-DGEMM=MKL` and `-DMKLROOT=<path_to_MKL>` CMake options to specify a path to unpacked MKL-ML with the `include` and `lib` folders. MKL-ML\* package can be downloaded from the [MKL-DNN repository](https://github.com/intel/mkl-dnn/releases/download/v0.19/mklml_lnx_2019.0.5.20190502.tgz).
- Threading Building Blocks (TBB) is used by default. To build the Inference Engine with OpenMP* threading, set the `-DTHREADING=OMP` option.
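As a hedged illustration of how these options combine, a configure line on CentOS using the OpenBLAS paths quoted above might look like this (run from the `build` folder created earlier; the job count is arbitrary):

```sh
cmake -DCMAKE_BUILD_TYPE=Release \
      -DGEMM=OPENBLAS \
      -DBLAS_INCLUDE_DIRS=/usr/include/openblas \
      -DBLAS_LIBRARIES=/usr/lib64/libopenblas.so.0 \
      -DTHREADING=OMP ..
make -j8
```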
@@ -165,7 +165,7 @@ Native compilation of the Inference Engine is the most straightforward solution.
cmake -DCMAKE_BUILD_TYPE=Release \
-DENABLE_SSE42=OFF \
-DTHREADING=SEQ \
--DENABLE_GNA=OFF .. && make -j2
+-DENABLE_GNA=OFF .. && make
```
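The R2 change drops the explicit `-j2` from the native ARM build command. If the board has enough memory for parallel compilation, a job count can still be passed manually, for example:

```sh
cmake -DCMAKE_BUILD_TYPE=Release \
      -DENABLE_SSE42=OFF \
      -DTHREADING=SEQ \
      -DENABLE_GNA=OFF .. && make -j2
```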
### Cross Compilation Using Docker*
@@ -302,7 +302,7 @@ The software was validated on:
3. Install OpenBLAS:
1. Download [OpenBLAS\*](https://sourceforge.net/projects/openblas/files/v0.2.14/OpenBLAS-v0.2.14-Win64-int64.zip/download)
2. Unzip the downloaded package to a directory on your machine. In this document, this directory is referred to as `<OPENBLAS_DIR>`.
-4. By default, the build enables the Inference Engine GPU plugin to infer models on your Intel® Processor Graphics. This requires you to [download and install the Intel® Graphics Driver for Windows* [25.20] driver package](https://downloadcenter.intel.com/download/28646/Intel-Graphics-Windows-10-DCH-Drivers?product=80939) before running the build. If you don't want to use the GPU plugin, use the `-DENABLE_CLDNN=ON` CMake build option and skip the installation of the Intel® Graphics Driver.
+4. By default, the build enables the Inference Engine GPU plugin to infer models on your Intel® Processor Graphics. This requires you to [download and install the Intel® Graphics Driver for Windows* [25.20] driver package](https://downloadcenter.intel.com/download/28646/Intel-Graphics-Windows-10-DCH-Drivers?product=80939) before running the build. If you don't want to use the GPU plugin, use the `-DENABLE_CLDNN=OFF` CMake build option and skip the installation of the Intel® Graphics Driver.
5. Create build directory:
```sh
mkdir build
@@ -323,7 +323,7 @@ cmake -G "Visual Studio 15 2017 Win64" -T "Intel C++ Compiler 18.0" ^
- Internal JIT GEMM implementation is used by default.
- To switch to OpenBLAS GEMM implementation, use the `-DGEMM=OPENBLAS` CMake option and specify path to OpenBLAS using the `-DBLAS_INCLUDE_DIRS=<OPENBLAS_DIR>\include` and `-DBLAS_LIBRARIES=<OPENBLAS_DIR>\lib\libopenblas.dll.a` options. Prebuilt OpenBLAS\* package can be downloaded [here](https://sourceforge.net/projects/openblas/files/v0.2.14/OpenBLAS-v0.2.14-Win64-int64.zip/download). mingw64* runtime dependencies can be downloaded [here](https://sourceforge.net/projects/openblas/files/v0.2.14/mingw64_dll.zip/download).
-- To switch to the optimized MKL-ML\* GEMM implementation, use the `-DGEMM=MKL` and `-DMKLROOT=<path_to_MKL>` CMake options to specify a path to unpacked MKL-ML with the `include` and `lib` folders. MKL-ML\* package can be downloaded from the [MKL-DNN repository](https://github.com/intel/mkl-dnn/releases/download/v0.17/mklml_win_2019.0.1.20180928.zip).
+- To switch to the optimized MKL-ML\* GEMM implementation, use the `-DGEMM=MKL` and `-DMKLROOT=<path_to_MKL>` CMake options to specify a path to unpacked MKL-ML with the `include` and `lib` folders. MKL-ML\* package can be downloaded from the [MKL-DNN repository](https://github.com/intel/mkl-dnn/releases/download/v0.19/mklml_win_2019.0.5.20190502.zip).
- Threading Building Blocks (TBB) is used by default. To build the Inference Engine with OpenMP* threading, set the `-DTHREADING=OMP` option.
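A sketch of a Windows configure command that combines the generator shown above with the MKL-ML option (the `C:\mklml_win_2019.0.5.20190502` path is only a placeholder for wherever the archive was unpacked):

```sh
cmake -G "Visual Studio 15 2017 Win64" ^
      -DGEMM=MKL ^
      -DMKLROOT=C:\mklml_win_2019.0.5.20190502 ^
      -DTHREADING=OMP ..
cmake --build . --config Release
```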
@@ -385,7 +385,7 @@ The software was validated on:

You can use the following additional build options:
- Internal JIT GEMM implementation is used by default.
-- To switch to the optimized MKL-ML\* GEMM implementation, use `-DGEMM=MKL` and `-DMKLROOT=<path_to_MKL>` cmake options to specify a path to unpacked MKL-ML with the `include` and `lib` folders. MKL-ML\* package can be downloaded [here](https://github.com/intel/mkl-dnn/releases/download/v0.17.1/mklml_mac_2019.0.1.20180928.tgz)
+- To switch to the optimized MKL-ML\* GEMM implementation, use `-DGEMM=MKL` and `-DMKLROOT=<path_to_MKL>` cmake options to specify a path to unpacked MKL-ML with the `include` and `lib` folders. MKL-ML\* package can be downloaded [here](https://github.com/intel/mkl-dnn/releases/download/v0.19/mklml_mac_2019.0.5.20190502.tgz)

- Threading Building Blocks (TBB) is used by default. To build the Inference Engine with OpenMP* threading, set the `-DTHREADING=OMP` option.
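Similarly for macOS, a hedged configure example using the MKL-ML archive linked above (the unpack location under `$HOME` is an assumption):

```sh
cmake -DCMAKE_BUILD_TYPE=Release \
      -DGEMM=MKL \
      -DMKLROOT=$HOME/mklml_mac_2019.0.5.20190502 \
      -DTHREADING=OMP ..
make -j$(sysctl -n hw.ncpu)
```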

57 changes: 57 additions & 0 deletions inference-engine/cmake/FindITT.cmake
@@ -0,0 +1,57 @@
# Copyright (C) 2018-2019 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#

unset(ITT_INCLUDE_DIR CACHE)
unset(ITT_LIB CACHE)

if(NOT DEFINED INTEL_VTUNE_DIR AND DEFINED ENV{INTEL_VTUNE_DIR})
    set(INTEL_VTUNE_DIR "$ENV{INTEL_VTUNE_DIR}")
endif()

if(NOT DEFINED INTEL_VTUNE_DIR)
    if(EXISTS "/opt/intel/vtune_amplifier_xe/include")
        set(INTEL_VTUNE_DIR "/opt/intel/vtune_amplifier_xe")
    elseif(EXISTS "/opt/intel/vtune_amplifier/include")
        set(INTEL_VTUNE_DIR "/opt/intel/vtune_amplifier")
    elseif(EXISTS "C:/Program Files (x86)/IntelSWTools/VTune Amplifier XE")
        set(INTEL_VTUNE_DIR "C:/Program Files (x86)/IntelSWTools/VTune Amplifier XE")
    elseif(EXISTS "C:/Program Files (x86)/IntelSWTools/VTune Amplifier")
        set(INTEL_VTUNE_DIR "C:/Program Files (x86)/IntelSWTools/VTune Amplifier")
    elseif(EXISTS "$ENV{HOME}/intel/vtune_amplifier_2019")
        set(INTEL_VTUNE_DIR "$ENV{HOME}/intel/vtune_amplifier_2019")
    endif()
endif()

if(DEFINED INTEL_VTUNE_DIR)
    message(STATUS "INTEL_VTUNE_DIR = ${INTEL_VTUNE_DIR}")

    find_path(ITT_INCLUDE_DIR
              FILES
              ittnotify.h
              PATHS "${INTEL_VTUNE_DIR}/include/")

    find_library(ITT_LIB
                 "libittnotify${CMAKE_STATIC_LIBRARY_SUFFIX}"
                 PATHS ${INTEL_VTUNE_DIR}/lib64)

    set(Located_ITT_LIBS ${ITT_LIB})
    set(Located_ITT_INCLUDE_DIRS ${ITT_INCLUDE_DIR})
else()
    message(STATUS "INTEL_VTUNE_DIR is not defined")
endif()

# Handle find_package() arguments, and set INTEL_ITT_FOUND
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(INTEL_ITT
                                  REQUIRED_VARS
                                  Located_ITT_INCLUDE_DIRS
                                  Located_ITT_LIBS)

if(INTEL_ITT_FOUND)
    add_library(ittnotify STATIC IMPORTED GLOBAL)
    set_target_properties(ittnotify PROPERTIES IMPORTED_LOCATION "${Located_ITT_LIBS}"
                                               INTERFACE_INCLUDE_DIRECTORIES ${Located_ITT_INCLUDE_DIRS}
                                               INTERFACE_COMPILE_DEFINITIONS ENABLE_PROFILING_ITT)

    set(INTEL_ITT_LIBS ittnotify ${CMAKE_DL_LIBS})
endif()
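The module only searches the default VTune locations listed above unless `INTEL_VTUNE_DIR` is supplied. A sketch of pointing it at a non-default install when configuring (the path is an example):

```sh
# either export the environment variable the module reads...
export INTEL_VTUNE_DIR=/opt/intel/vtune_amplifier
cmake ..

# ...or define it directly as a CMake cache variable
cmake -DINTEL_VTUNE_DIR=/opt/intel/vtune_amplifier ..
```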
1 change: 0 additions & 1 deletion inference-engine/cmake/FindlibGNA.cmake
@@ -1,5 +1,4 @@
# Copyright (C) 2018-2019 Intel Corporation
-#
# SPDX-License-Identifier: Apache-2.0
#

4 changes: 4 additions & 0 deletions inference-engine/cmake/arm.toolchain.cmake
@@ -1,3 +1,7 @@
+# Copyright (C) 2018-2019 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+#
+
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR armv7l)

14 changes: 14 additions & 0 deletions inference-engine/cmake/arm64.toolchain.cmake
@@ -0,0 +1,14 @@
# Copyright (C) 2018-2019 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#

set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR aarch64)

set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)

set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
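A minimal sketch of cross-compiling the Inference Engine with the new AArch64 toolchain file (it assumes the `aarch64-linux-gnu-gcc`/`aarch64-linux-gnu-g++` cross compilers it names are installed on the host):

```sh
cd dldt/inference-engine
mkdir build && cd build
cmake -DCMAKE_BUILD_TYPE=Release \
      -DCMAKE_TOOLCHAIN_FILE=../cmake/arm64.toolchain.cmake ..
make
```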