Serverless C++ Integration

Overview

DeepDetect can be built as a library and linked against directly, so that a DeepDetect object can be used from your own C++ code instead of going through the server API.

Below is an example that you can adapt to your needs.

WARNING: this example builds only the Caffe backend into the deepdetect library; adapt the library compilation (and its dependencies) if you need other backends.

Compiling deepdetect as a library

  • go to www.deepdetect.com
  • select quickstart -> server
  • select ubuntu, build from source
  • select compute and target GPU
  • select caffe backend, deselect others

  • follow the instructions up to the Build section

  • in the Build section, follow the instructions, BUT:

in the cmake line, replace


-DUSE_SIMSEARCH=ON

with:


-DUSE_SIMSEARCH=OFF -DUSE_CAFFE=ON -DUSE_JSON_API=ON -DUSE_HTTP_SERVER=OFF -DUSE_COMMAND_LINE=OFF 

if a shared library is preferred for your linking pleasure, you can also add the following (a full cmake line combining these options is sketched after this list):


-DBUILD_SHARED_LIBS=ON

  • after make, the library will be at build/src/libddetect.a (static) or build/src/libddetect.so (shared)
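
Putting these substitutions together, the configure-and-build step might look like the sketch below (keep whatever compute/target flags the quickstart generated for your GPU; only the options discussed above are shown here):


mkdir build && cd build
cmake .. -DUSE_SIMSEARCH=OFF -DUSE_CAFFE=ON -DUSE_JSON_API=ON \
         -DUSE_HTTP_SERVER=OFF -DUSE_COMMAND_LINE=OFF \
         -DBUILD_SHARED_LIBS=ON   # optional, only if libddetect.so is wanted
make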

Example compilation of your code

Here is an example compilation script, with the dependencies of the Caffe backend, for a test_dd.cc that links against libddetect. It is a deliberately minimalist example; replace it with your build generator of choice (e.g. CMake or any other).


DD_ROOT="../deepdetect"
DD_SRC="$DD_ROOT/src"
DD_LIB="$DD_ROOT/build/src/libddetect.a"
DD_INCLUDES="-I$DD_SRC -I$DD_SRC/backends/caffe"
DD_FLAGS="-DUSE_CAFFE -DUSE_OPENCV -std=c++11  -fopenmp -g -ggdb"

CUDA_LIBS="/usr/local/cuda/lib64/libcudart_static.a -lpthread -ldl \
/usr/lib/x86_64-linux-gnu/librt.so /usr/local/cuda/lib64/libcublas.so \
/usr/local/cuda/lib64/libcurand.so /usr/local/cuda/lib64/libcudart.so \
/usr/local/cuda/lib64/libcusolver.so"
CUDA_INCS="-I/usr/local/cuda/include"
CAFFE_PATH="$DD_ROOT/build/caffe_dd/src/caffe_dd/"
CAFFE_INCS="-I$CAFFE_PATH/include -I$CAFFE_PATH/.build_release/src/"
CAFFE_LIB_DEPS="-lleveldb -lsnappy -llmdb -lhdf5_hl -lhdf5 -lopenblas"
CAFFE_LIBS="-L$CAFFE_PATH/.build_release/lib -lcaffe"
CAFFE_PROTO="$CAFFE_PATH/.build_release/src/caffe/proto/caffe.pb.cc"
EIGEN_INCLUDE="-I/usr/include/eigen3"
BOOST_LIBS="/usr/lib/x86_64-linux-gnu/libboost_filesystem.so \
/usr/lib/x86_64-linux-gnu/libboost_thread.so -lpthread       \
/usr/lib/x86_64-linux-gnu/libboost_system.so                 \
/usr/lib/x86_64-linux-gnu/libboost_iostreams.so              \
/usr/lib/x86_64-linux-gnu/libboost_chrono.so                 \
/usr/lib/x86_64-linux-gnu/libboost_date_time.so              \
/usr/lib/x86_64-linux-gnu/libboost_atomic.so                 \
/usr/lib/x86_64-linux-gnu/libboost_regex.so"
OPENCV_LIBS="`pkg-config --libs opencv`"
PROTOBUF_LIBS="-L/usr/lib/x86_64-linux-gnu -lprotobuf"
CURLPP_LIBS="-lcurlpp -lcurl"
HDF5_LIBS="-L/usr/lib/x86_64-linux-gnu/hdf5/serial -lhdf5_hl -lhdf5 -lhdf5_cpp"
OTHER_LIBS="-lglog -lgflags -liomp5 -larchive"

# compile the Caffe-generated protobuf sources together with the test program
SRCS="test_dd.cc $CAFFE_PROTO"
OUTPUT="test_dd"

g++ $DD_FLAGS -o $OUTPUT $SRCS $DD_LIB $DD_INCLUDES  $EIGEN_INCLUDE $CUDA_LIBS $CUDA_INCS \
$CAFFE_INCS $BOOST_LIBS $OPENCV_LIBS $PROTOBUF_LIBS $CURLPP_LIBS $HDF5_LIBS $CAFFE_LIBS   \
$CAFFE_LIB_DEPS  $OTHER_LIBS
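
If the library was built with -DBUILD_SHARED_LIBS=ON, the static archive can be swapped for the shared object; a sketch, assuming the same build-tree layout as above (the rest of the dependency list stays unchanged):


DD_LIB="-L$DD_ROOT/build/src -lddetect"
# the shared object must also be found at run time:
export LD_LIBRARY_PATH="$DD_ROOT/build/src:$LD_LIBRARY_PATH"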

Example usage with the JSON API

Here is a simple example of using a DeepDetect object through the JSON API.

Main code

#include <opencv2/opencv.hpp>
#include <iostream>
#include <sstream>
#include "jsonapi.h"
#include "deepdetect.h"

// forward declarations of the helper functions defined below
bool createServiceJSON(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name,
                       std::string model_path);
bool predictJSON(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name, std::string data);
bool deleteServiceJSON(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name);

int main(int argc, char **argv)
{
  dd::DeepDetect<dd::JsonAPI> dede;
  createServiceJSON(dede, std::string("example service"), std::string("/path/to/model"));
  predictJSON(dede, std::string("example service"), std::string("/path/to/image/image.png"));
  deleteServiceJSON(dede, std::string("example service"));
}

Service creation

Here is an example of service creation using the JSON API. (We use C++11 raw string literals of the form R"delim(...)delim" to make the JSON strings more readable.)

bool createServiceJSON(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name,
                       std::string model_path)
{
  std::string msg = std::string(
       R"JSON(
          {
             "mllib":"caffe",
             "description":"example service",
             "type":"supervised",
             "parameters":{
                 "input":{
                      "connector":"image",
                      "width": 512,
                      "height": 512  },
                  "mllib":{ "nclasses":2  }   },
           "model":{  "repository":
        )JSON")
    + std::string("\"") +  model_path +"\"  }}";

  std::string outstr;
  try
    {
      JDoc jd = dede.service_create(service_name, msg);
      outstr = dede.jrender(jd);
    }
  catch (std::exception &e)
    {
      std::cout << "service creation failed: " << e.what() << std::endl;
      std::cout << msg << std::endl;
    }

  bool ok = outstr.find("\"code\":201") != std::string::npos;
  return ok;
}
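
On success the rendered output contains a 201 status code, which is what the find() check above looks for; the response typically has the form:


{"status":{"code":201,"msg":"Created"}}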

Service deletion

The service deletion function can be written as:

bool deleteServiceJSON(dd::DeepDetect<dd::JsonAPI>& dede,  std::string service_name)
{
  std::string outstr;
  std::string msg = "{\"clear\":\"mem\"}";
  try  
    {
        outstr = dede.jrender(dede.service_delete(service_name, msg));  
    }
  catch (std::exception &e)
    {
      // on error, outstr stays empty and the status check below reports failure
    }

  bool ok = outstr.find("\"code\":200") != std::string::npos;
  return ok;
}

Prediction call

A predict call using the JSON API (and C++11 raw string literals) can have the following form:

bool predictJSON(dd::DeepDetect<dd::JsonAPI>& dede,  std::string service_name, std::string data)
{
  std::stringstream msg;
  msg << "{\"service\":\""  << service_name  << "\","  <<
    R"JSON(
       "parameters":{
              "output": {
                 "confidence_threshold":0.1,
                 "bbox":true       },
              "mllib":{
                  "gpu":true,
                  "gpuid":0,
                  "net":{  "test_batch_size":1   }  }
       },
       "data":[
     )JSON";
  msg << "\"" << data << "\"]}";

  std::string outstr;
  try
    {
      outstr = dede.jrender(dede.service_predict(msg.str()));
    }
  catch (std::exception &e)
    {
      // on error, outstr stays empty and the status check below reports failure
    }

  bool ok = outstr.find("\"code\":200") != std::string::npos;
  if (!ok)
    std::cerr << "DD prediction failed: " << outstr << std::endl;
  else
    {
      rapidjson::Document jdoc;
      jdoc.Parse(outstr.c_str());
      std::cout << "JSONAPI GOT: " << outstr << std::endl;
      // e.g. the bbox of the top class of the first prediction;
      // a fuller traversal is sketched after the example output below
      const rapidjson::Value &bbox = jdoc["body"]["predictions"][0]["classes"][0]["bbox"];
      (void)bbox; // keep this minimal example warning-free
    }
  return ok;
}

Here is an example output to parse:

{"status":{"code":200,"msg":"OK"},
   "head":{"method":"/predict","service":"example service", "time":177.0},
   "body":{
      "predictions":
          [
             {"classes":
                [
                   {"prob":0.9965794682502747,
                    "bbox":{"xmax":833.170166015625,"ymax":832.6193237304688,
                            "ymin":893.6544189453125,"xmin":701.4317626953125},
                    "cat":"1"  },
                   {"prob":0.9817548394203186,
                    "last":true,
                    "bbox":{"xmax":970.1082763671875,"ymax":493.6981506347656,
                            "ymin":536.486572265625,"xmin":887.2913208007813},
                    "cat":"1"  }
                ],
               "uri":"/path/to/image/image.png"}
             ]
           }
       }
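
For reference, a minimal rapidjson traversal of such an output might look like the sketch below (printBoxes is a hypothetical helper, not part of the DeepDetect API; the rapidjson types are already available through jsonapi.h in the examples above):


// a sketch: walk the rendered JSON string and print every detected box
void printBoxes(const std::string &outstr)
{
  rapidjson::Document jdoc;
  jdoc.Parse(outstr.c_str());
  const rapidjson::Value &classes = jdoc["body"]["predictions"][0]["classes"];
  for (rapidjson::SizeType i = 0; i < classes.Size(); ++i)
    {
      const rapidjson::Value &c = classes[i];
      std::cout << "cat:"  << c["cat"].GetString()  << std::endl;
      std::cout << "prob:" << c["prob"].GetDouble() << std::endl;
      std::cout << "bbox:" << c["bbox"]["xmin"].GetDouble() << ","
                << c["bbox"]["ymin"].GetDouble() << ","
                << c["bbox"]["xmax"].GetDouble() << ","
                << c["bbox"]["ymax"].GetDouble() << std::endl;
    }
}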

Using the raw APIData interface

Here is an example of using APIData parameters directly instead of the JSON API. This can be useful for passing an image directly as a cv::Mat (as in the example) rather than as a filename, a URL or a base64-encoded string.

Main code

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>
#include "jsonapi.h"
#include "deepdetect.h"

// forward declarations of the helper functions defined above and below
bool createServiceJSON(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name,
                       std::string model_path);
bool deleteServiceJSON(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name);
bool predictAPIData(dd::DeepDetect<dd::JsonAPI> &dede, std::string service_name,
                    std::vector<cv::Mat> imgs);

int main(int argc, char **argv)
{
  dd::DeepDetect<dd::JsonAPI> dede;
  createServiceJSON(dede, std::string("example service"), std::string("/path/to/model"));
  std::string testimg = "/path/to/image/image.png";

  std::vector<cv::Mat> imgs;
  cv::Mat img = cv::imread(testimg, CV_LOAD_IMAGE_COLOR);
  imgs.push_back(std::move(img));

  predictAPIData(dede, std::string("example service"), imgs);
  deleteServiceJSON(dede, std::string("example service"));
}

APIData-based prediction

Here is the corresponding prediction function. The APIData objects need to be filled in manually. The output is also in APIData format, following the JSON layout starting from the "body" field.

bool predictAPIData(dd::DeepDetect<dd::JsonAPI>& dede, std::string service_name, 
                    std::vector<cv::Mat> imgs)
{
  dd::APIData ad;
  dd::APIData params;
  dd::APIData input_params;
  dd::APIData output_params;
  dd::APIData mllib_params;
  dd::APIData net_mllib_params;

  output_params.add(std::string("bbox"),true);
  output_params.add(std::string("confidence_threshold"),0.1);
  params.add(std::string("output"),output_params);

  mllib_params.add(std::string("gpu"), true);
  mllib_params.add(std::string("gpuid"), 0);
  net_mllib_params.add(std::string("test_batch_size"),1);
  mllib_params.add(std::string("net"),net_mllib_params);
  params.add(std::string("mllib"),mllib_params);

  ad.add(std::string("parameters"), params);
  ad.add(std::string("data_raw_img"), imgs);

  dd::APIData out;
  try
    {
      dede.predict(ad,service_name,out);
    }
  catch (dd::InputConnectorBadParamException &e)    {    }
  catch (dd::MLLibBadParamException &e)             {    }
  catch (dd::InputConnectorInternalException &e)    {    }
  catch (dd::MLLibInternalException &e)             {    }
  catch (dd::MLServiceLockException &e)             {    }
  catch (std::exception &e)                         {    } // on error, out stays empty and preds below is empty

  std::vector<dd::APIData> preds = out.getv("predictions");
  for (dd::APIData pred : preds)  // iterate over image inputs
    {
      std::vector<dd::APIData> classes = pred.getv("classes"); // iterate over objects per image
      for (dd::APIData c : classes)
        {
          std::string cat = c.get("cat").get<std::string>();
          std::cout << "cat:" << cat << std::endl;
          double prob = c.get("prob").get<double>();
          std::cout << "prob:" << prob << std::endl;
          double xmin = c.getobj("bbox").get("xmin").get<double>();
          std::cout << "xmin:" << xmin << std::endl;
          double xmax = c.getobj("bbox").get("xmax").get<double>();
          std::cout << "xmax:" << xmax << std::endl;
          double ymin = c.getobj("bbox").get("ymin").get<double>();
          std::cout << "ymin:" << ymin << std::endl;
          double ymax = c.getobj("bbox").get("ymax").get<double>();
          std::cout << "ymax:" << ymax << std::endl;
        }
    }
  return !preds.empty();
}
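
If the input URI attached to each prediction is also needed (it appears in the example JSON output above), it can be read with the same accessors; a minimal sketch, assuming the field is present in the output:


// inside the loop over preds: read the input identifier of this prediction
std::string uri = pred.get("uri").get<std::string>();
std::cout << "uri:" << uri << std::endl;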

Related