Reputation: 9681
I have Miniconda3 on a Linux system (Ubuntu 22.04). The environment has Python 3.10 as well as a PyTorch installation (installed following the official instructions) that works fine from Python.
I would like to set up a CMake project that uses the PyTorch C++ API. The reason is not important, and I am aware that the API is beta (the official documentation states as much), so instability and major changes are not excluded.
Currently I have this very minimal CMakeLists.txt:
cmake_minimum_required(VERSION 3.19) # or whatever version you use
project(PyTorch_Cpp_HelloWorld CXX)
set(PYTORCH_ROOT "/home/$ENV{USER}/miniconda/envs/ML/lib/python3.10/site-packages/torch")
list(APPEND CMAKE_PREFIX_PATH "${PYTORCH_ROOT}/share/cmake/Torch/")
find_package(Torch REQUIRED CONFIG)
...
# Add executable
# Link against PyTorch library
When I try to configure the project, I get the following error:
CMake Error at CMakeLists.txt:21 (message):
  message called with incorrect number of arguments

-- Could NOT find Protobuf (missing: Protobuf_LIBRARIES Protobuf_INCLUDE_DIR)
-- Found Threads: TRUE
CMake Warning at /home/USER/miniconda/envs/ML/lib/python3.10/site-packages/torch/share/cmake/Caffe2/public/protobuf.cmake:88 (message):
  Protobuf cannot be found. Depending on whether you are building Caffe2 or
  a Caffe2 dependent library, the next warning / error will give you more
  info.
Call Stack (most recent call first):
  /home/USER/miniconda/envs/ML/lib/python3.10/site-packages/torch/share/cmake/Caffe2/Caffe2Config.cmake:56 (include)
  /home/USER/miniconda/envs/ML/lib/python3.10/site-packages/torch/share/cmake/Torch/TorchConfig.cmake:68 (find_package)
  CMakeLists.txt:23 (find_package)

CMake Error at /home/USER/miniconda/envs/ML/lib/python3.10/site-packages/torch/share/cmake/Caffe2/Caffe2Config.cmake:58 (message):
  Your installed Caffe2 version uses protobuf but the protobuf library
  cannot be found. Did you accidentally remove it, or have you set the
  right CMAKE_PREFIX_PATH? If you do not have protobuf, you will need to
  install protobuf and set the library path accordingly.
Call Stack (most recent call first):
  /home/USER/miniconda/envs/ML/lib/python3.10/site-packages/torch/share/cmake/Torch/TorchConfig.cmake:68 (find_package)
  CMakeLists.txt:23 (find_package)
I installed libprotobuf (again via conda) but, while I can find the library files, I can't find any *ProtobufConfig.cmake or anything remotely related to protobuf and its CMake setup.
Before I go fight against windmills, I would like to ask here what the proper setup would be. I am guessing building from source is always an option; however, that would impose a huge overhead on the people I collaborate with.
Upvotes: 1
Views: 7907
Reputation: 20462
Using this conda env:
name: pytorch_latest
channels:
- pytorch
- conda-forge
- defaults
dependencies:
- pytorch=1.11.0
- torchvision
- torchaudio
- cpuonly
I copied the small example from here and got it to run. The key was to set the correct library directories (both the torch folder in site-packages and the lib folder of the environment). The CMake file is written so that these folders are found automatically:
example-app.cpp
#include <torch/torch.h>
#include <iostream>
int main() {
  torch::Tensor tensor = torch::rand({2, 3});
  std::cout << tensor << std::endl;
}
CMakeLists.txt:
cmake_minimum_required(VERSION 3.0 FATAL_ERROR)
project(example-app)
#Add the torch library directory
list(APPEND CMAKE_PREFIX_PATH "$ENV{CONDA_PREFIX}/lib/python3.10/site-packages/torch")
#This is needed to be able to find the mkl and other dependent libraries
link_directories("$ENV{CONDA_PREFIX}/lib")
set(ENV{MKLROOT} "$ENV{CONDA_PREFIX}/lib")
find_package(Torch REQUIRED)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")
add_executable(example-app example-app.cpp)
#We need to add pthread and omp manually here
target_link_libraries(example-app "${TORCH_LIBRARIES}" pthread omp)
set_property(TARGET example-app PROPERTY CXX_STANDARD 14)
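For completeness, the usual out-of-source configure/build/run sequence would look roughly like this (a sketch, assuming the pytorch_latest environment is activated so that $CONDA_PREFIX is set, and that both files above sit in the current directory):
# Activate the environment so $CONDA_PREFIX points at it
conda activate pytorch_latest

# Configure and build in a separate build directory (cmake >= 3.13 for -S/-B)
cmake -S . -B build
cmake --build build

# Run the example; it should print a random 2x3 tensor
./build/example-app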
Exact environment (in case there are problems with reproducibility):
name: pytorch_latest
channels:
- pytorch
- conda-forge
- defaults
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_kmp_llvm
- blas=2.115=mkl
- blas-devel=3.9.0=15_linux64_mkl
- brotlipy=0.7.0=py310h5764c6d_1004
- bzip2=1.0.8=h7f98852_4
- ca-certificates=2022.5.18.1=ha878542_0
- certifi=2022.5.18.1=py310hff52083_0
- cffi=1.15.0=py310h0fdd8cc_0
- charset-normalizer=2.0.12=pyhd8ed1ab_0
- cpuonly=2.0=0
- cryptography=37.0.1=py310h9ce1e76_0
- ffmpeg=4.3=hf484d3e_0
- freetype=2.10.4=h0708190_1
- giflib=5.2.1=h36c2ea0_2
- gmp=6.2.1=h58526e2_0
- gnutls=3.6.13=h85f3911_1
- idna=3.3=pyhd8ed1ab_0
- jpeg=9e=h166bdaf_1
- lame=3.100=h7f98852_1001
- lcms2=2.12=hddcbb42_0
- ld_impl_linux-64=2.36.1=hea4e1c9_2
- lerc=3.0=h9c3ff4c_0
- libblas=3.9.0=15_linux64_mkl
- libcblas=3.9.0=15_linux64_mkl
- libdeflate=1.10=h7f98852_0
- libffi=3.4.2=h7f98852_5
- libgcc-ng=12.1.0=h8d9b700_16
- libgfortran-ng=12.1.0=h69a702a_16
- libgfortran5=12.1.0=hdcd56e2_16
- libiconv=1.17=h166bdaf_0
- liblapack=3.9.0=15_linux64_mkl
- liblapacke=3.9.0=15_linux64_mkl
- libnsl=2.0.0=h7f98852_0
- libpng=1.6.37=h21135ba_2
- libstdcxx-ng=12.1.0=ha89aaad_16
- libtiff=4.4.0=h0fcbabc_0
- libuuid=2.32.1=h7f98852_1000
- libuv=1.43.0=h7f98852_0
- libwebp=1.2.2=h3452ae3_0
- libwebp-base=1.2.2=h7f98852_1
- libxcb=1.13=h7f98852_1004
- libzlib=1.2.12=h166bdaf_0
- llvm-openmp=14.0.4=he0ac6c6_0
- lz4-c=1.9.3=h9c3ff4c_1
- mkl=2022.1.0=h84fe81f_915
- mkl-devel=2022.1.0=ha770c72_916
- mkl-include=2022.1.0=h84fe81f_915
- ncurses=6.3=h27087fc_1
- nettle=3.6=he412f7d_0
- numpy=1.22.4=py310h4ef5377_0
- openh264=2.1.1=h780b84a_0
- openjpeg=2.4.0=hb52868f_1
- openssl=3.0.3=h166bdaf_0
- pillow=9.1.1=py310he619898_1
- pip=22.1.2=pyhd8ed1ab_0
- pthread-stubs=0.4=h36c2ea0_1001
- pycparser=2.21=pyhd8ed1ab_0
- pyopenssl=22.0.0=pyhd8ed1ab_0
- pysocks=1.7.1=py310hff52083_5
- python=3.10.4=h2660328_0_cpython
- python_abi=3.10=2_cp310
- pytorch=1.11.0=py3.10_cpu_0
- pytorch-mutex=1.0=cpu
- readline=8.1=h46c0cb4_0
- requests=2.27.1=pyhd8ed1ab_0
- setuptools=62.3.2=py310hff52083_0
- sqlite=3.38.5=h4ff8645_0
- tbb=2021.5.0=h924138e_1
- tk=8.6.12=h27826a3_0
- torchaudio=0.11.0=py310_cpu
- torchvision=0.12.0=py310_cpu
- typing_extensions=4.2.0=pyha770c72_1
- tzdata=2022a=h191b570_0
- urllib3=1.26.9=pyhd8ed1ab_0
- wheel=0.37.1=pyhd8ed1ab_0
- xorg-libxau=1.0.9=h7f98852_0
- xorg-libxdmcp=1.1.3=h7f98852_0
- xz=5.2.5=h516909a_1
- zlib=1.2.12=h166bdaf_0
- zstd=1.5.2=h8a70e8d_1
Upvotes: 2
Reputation: 20462
As an alternative to using conda, I would suggest just grabbing the pre-built library from https://download.pytorch.org/libtorch/nightly/cpu/libtorch-shared-with-deps-latest.zip. Unzipping that will give you a folder libtorch, which you can put next to your CMakeLists.txt. With that, all you need in your CMake file (see here) is something like this:
cmake_minimum_required(VERSION 3.0 FATAL_ERROR)
project(example-app)

# The prefix path must be appended before find_package so TorchConfig.cmake can be found
list(APPEND CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/libtorch")

find_package(Torch REQUIRED)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")
add_executable(example-app example-app.cpp)
target_link_libraries(example-app "${TORCH_LIBRARIES}")
set_property(TARGET example-app PROPERTY CXX_STANDARD 14)
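Fetching the archive and building could then look roughly like this (a sketch, assuming wget and unzip are available; any other download/extract tool works just as well):
# Download and unpack the pre-built CPU libtorch next to CMakeLists.txt
wget https://download.pytorch.org/libtorch/nightly/cpu/libtorch-shared-with-deps-latest.zip
unzip libtorch-shared-with-deps-latest.zip

# Configure, build, and run as usual
cmake -S . -B build
cmake --build build
./build/example-app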
Upvotes: 2