# OpenVINO on Intel Core Ultra

## Dockerfile: ov_base.dockerfile

```
# Generated by the command below:
# docker_gen.sh --name ov_base ubuntu_2204_openvino_2024.2.0.dockerfile intel-npu-driver_1.5.1_u22.04.dockerfile openvino_2024.2.0_omz.dockerfile

ARG INTEL_NPU_DRIVER_1_5_1_U22_04_BASE=ubuntu_2204_openvino_2024.2.0
ARG OPENVINO_2024_2_0_OMZ_BASE=intel-npu-driver_1.5.1_u22.04
ARG USER_MAIN=root

FROM openvino/ubuntu22_dev:2024.2.0 as ubuntu_2204_openvino_2024.2.0
USER 0
SHELL ["/bin/bash", "-cx"]
WORKDIR /
RUN apt-get update && apt-get install -y sudo
RUN apt-get install -y software-properties-common && add-apt-repository universe && add-apt-repository multiverse
RUN apt-get update && apt-get install -y lsb-release vim sudo cpio build-essential python3-pip python3-venv wget

FROM ${INTEL_NPU_DRIVER_1_5_1_U22_04_BASE} as intel-npu-driver_1.5.1_u22.04
#
# intel-npu-driver_1.5.1+u22.04
# reference: https://github.com/intel/linux-npu-driver/releases/tag/v1.5.1
#
SHELL ["/bin/bash", "-c"]
RUN apt-get update
RUN apt-get install -y git build-essential cmake libncurses5-dev libpciaccess-dev libdrm-dev dpkg libtbb12
WORKDIR /tmp
RUN mkdir /tmp/xpu
RUN dpkg --purge --force-remove-reinstreq intel-driver-compiler-npu intel-fw-npu intel-level-zero-npu level-zero
RUN wget -P /tmp/xpu https://github.com/intel/linux-npu-driver/releases/download/v1.5.1/intel-driver-compiler-npu_1.5.1.20240708-9842236399_ubuntu22.04_amd64.deb
RUN wget -P /tmp/xpu https://github.com/intel/linux-npu-driver/releases/download/v1.5.1/intel-fw-npu_1.5.1.20240708-9842236399_ubuntu22.04_amd64.deb
RUN wget -P /tmp/xpu https://github.com/intel/linux-npu-driver/releases/download/v1.5.1/intel-level-zero-npu_1.5.1.20240708-9842236399_ubuntu22.04_amd64.deb
RUN wget -P /tmp/xpu https://github.com/oneapi-src/level-zero/releases/download/v1.17.6/level-zero_1.17.6+u22.04_amd64.deb
RUN wget -P /tmp/xpu https://github.com/oneapi-src/level-zero/releases/download/v1.17.6/level-zero-devel_1.17.6+u22.04_amd64.deb

# Intel OpenCL NEO driver
RUN wget -P /tmp/xpu https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16900.23/intel-igc-core_1.0.16900.23_amd64.deb
RUN wget -P /tmp/xpu https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16900.23/intel-igc-opencl_1.0.16900.23_amd64.deb
RUN wget -P /tmp/xpu https://github.com/intel/compute-runtime/releases/download/24.22.29735.20/intel-level-zero-gpu-dbgsym_1.3.29735.20_amd64.ddeb
RUN wget -P /tmp/xpu https://github.com/intel/compute-runtime/releases/download/24.22.29735.20/intel-level-zero-gpu_1.3.29735.20_amd64.deb
RUN wget -P /tmp/xpu https://github.com/intel/compute-runtime/releases/download/24.22.29735.20/intel-opencl-icd-dbgsym_24.22.29735.20_amd64.ddeb
RUN wget -P /tmp/xpu https://github.com/intel/compute-runtime/releases/download/24.22.29735.20/intel-opencl-icd_24.22.29735.20_amd64.deb
RUN wget -P /tmp/xpu https://github.com/intel/compute-runtime/releases/download/24.22.29735.20/libigdgmm12_22.3.19_amd64.deb
RUN dpkg -i /tmp/xpu/*.deb

FROM ${OPENVINO_2024_2_0_OMZ_BASE} as openvino_2024.2.0_omz
#
# openvino_2024.2.0_omz
#
WORKDIR /opt/intel/openvino
RUN omz_downloader --name resnet-50-pytorch --precisions FP16 && omz_converter --name resnet-50-pytorch --precision FP16
RUN curl -O https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp
```

## Build the Docker image from ov_base.dockerfile

```
docker build -f ov_base.dockerfile -t ov_base:2024 .
```
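Before moving on to the prebuilt samples, it can be useful to sanity-check the freshly built image from inside a container. The following is a minimal sketch using the OpenVINO Python API that ships in the openvino/ubuntu22_dev base image; the script name check_devices.py is only a placeholder. It prints the runtime version and every device the runtime can enumerate, so GPU and NPU only show up when the matching /dev nodes are passed through, as in the docker run commands below.

```
# check_devices.py -- hypothetical sanity-check script (name is a placeholder).
# Assumes the OpenVINO Python package from openvino/ubuntu22_dev:2024.2.0.
import openvino as ov

core = ov.Core()
print("OpenVINO runtime:", ov.get_version())

# GPU/NPU appear here only if the container can see /dev/dri and /dev/accel.
for device in core.available_devices:
    print(device, "->", core.get_property(device, "FULL_DEVICE_NAME"))
```

Run it with python3 from the bash console described at the end of this page, or mount it into one of the docker run commands below.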
## Query supported devices

```
docker run -itu root:root --privileged --rm --device /dev:/dev ov_base:2024 /bin/bash -c "samples/cpp/samples_bin/samples_bin/hello_query_device"
```

## Benchmark inference on GPU

```
docker run -itu root:root --rm --device /dev/dri:/dev/dri ov_base:2024 /bin/bash -c "samples/cpp/samples_bin/samples_bin/benchmark_app -m public/resnet-50-pytorch/FP16/resnet-50-pytorch.xml -d GPU"
```

## Benchmark inference on NPU

```
docker run -itu root:root --privileged --rm --device /dev:/dev ov_base:2024 /bin/bash -c "samples/cpp/samples_bin/samples_bin/benchmark_app -m public/resnet-50-pytorch/FP16/resnet-50-pytorch.xml -d NPU"
```

## Bash console for manual testing

```
docker run -itu root:root --privileged --rm --device /dev:/dev ov_base:2024 /bin/bash
```
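From the bash console above you can also drive the converted model directly with the OpenVINO Python API instead of benchmark_app. The sketch below is a hypothetical single-inference run against the FP16 IR that omz_converter produced in the Dockerfile (the same public/resnet-50-pytorch/FP16/resnet-50-pytorch.xml path used by the benchmark commands, resolved against the /opt/intel/openvino working directory). It feeds random data rather than car_1.bmp so it does not depend on any image-decoding library, and the DEVICE string can be switched between "CPU", "GPU", and "NPU".

```
# infer_resnet50.py -- minimal sketch, not the official sample. Assumes the IR
# path created by omz_converter in ov_base.dockerfile and uses random input.
import numpy as np
import openvino as ov

DEVICE = "GPU"  # or "CPU" / "NPU", matching the benchmark runs above
MODEL = "/opt/intel/openvino/public/resnet-50-pytorch/FP16/resnet-50-pytorch.xml"

core = ov.Core()
compiled = core.compile_model(core.read_model(MODEL), DEVICE)

# Take the input shape from the compiled model (1x3x224x224 for this IR).
shape = tuple(int(d) for d in compiled.input(0).shape)
data = np.random.rand(*shape).astype(np.float32)

# Synchronous inference; results are keyed by output port.
result = compiled([data])[compiled.output(0)]
top5 = np.argsort(result.squeeze())[-5:][::-1]
print(f"{DEVICE} top-5 class ids: {top5}")
```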
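benchmark_app gets its throughput numbers by keeping several asynchronous inference requests in flight. The sketch below is a rough, hypothetical stand-in built on AsyncInferQueue: it will not reproduce benchmark_app's tuned results, but it shows the async pattern behind them. The request and iteration counts are arbitrary assumptions.

```
# throughput_sketch.py -- rough async-throughput sketch, not a benchmark_app
# replacement. Assumes the same IR path as the sketch above.
import time
import numpy as np
import openvino as ov

DEVICE = "NPU"  # or "GPU" / "CPU"
MODEL = "/opt/intel/openvino/public/resnet-50-pytorch/FP16/resnet-50-pytorch.xml"
NUM_REQUESTS = 4      # in-flight requests (benchmark_app tunes this per device)
NUM_ITERATIONS = 200  # arbitrary; enough for a rough rate

core = ov.Core()
compiled = core.compile_model(core.read_model(MODEL), DEVICE)

shape = tuple(int(d) for d in compiled.input(0).shape)
data = np.random.rand(*shape).astype(np.float32)

# AsyncInferQueue keeps NUM_REQUESTS requests running concurrently and
# hands each one back as soon as it finishes.
queue = ov.AsyncInferQueue(compiled, NUM_REQUESTS)

start = time.perf_counter()
for _ in range(NUM_ITERATIONS):
    queue.start_async([data])
queue.wait_all()
elapsed = time.perf_counter() - start

print(f"{DEVICE}: {NUM_ITERATIONS / elapsed:.1f} inferences/sec (rough)")
```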