9 #ifndef PYTORCH_INFERENCE_INFERENCE_ENGINE_HPP 10 #define PYTORCH_INFERENCE_INFERENCE_ENGINE_HPP 15 #include <arrayfire.h> 30 std::vector<std::vector<pytorch::Layer *>>
layers;
35 af::Backend backend = AF_BACKEND_CUDA,
36 bool quiet =
true) : device(device) {
37 af::setBackend(backend);
38 af::setDevice(this->device);
49 layers.push_back({l});
57 return layers[depth][width];
60 inline tensor
forward(
const std::vector<tensor> &input){
61 std::vector<tensor> out = input;
62 for (
auto &layer : layers){
63 if (layer.size() == 1) {
64 out = layer[0]->forward(out);
68 int wid = layer.size();
69 for (
int i = 0; i < wid; i++){
70 out[i] = layer[i]->forward({out[i]})[0];
85 #endif //PYTORCH_INFERENCE_INFERENCE_ENGINE_HPP
Layer * get_layer_ptr(const int &depth, const int &width=0)
Definition: inference_engine.hpp:56
Definition: inference_engine.hpp:28
inference_engine(const int &device=0, af::Backend backend=AF_BACKEND_CUDA, bool quiet=true)
Definition: inference_engine.hpp:34
void add_layer(std::vector<Layer *> l)
Definition: inference_engine.hpp:52
tensor forward(const std::vector< tensor > &input)
Definition: inference_engine.hpp:60
void check_size(const int &size1, const int &size2, const std::string &func)
Definition: include/utils.hpp:62
Definition: inference_engine.hpp:21
virtual ~inference_engine()
Definition: inference_engine.hpp:44
Equivalent to Conv2d in pytorch.
std::vector<std::vector<pytorch::Layer *>> layers
Definition: inference_engine.hpp:30
void add_layer(Layer *l)
Definition: inference_engine.hpp:48
const int device
Definition: inference_engine.hpp:31