Merged
4 changes: 3 additions & 1 deletion examples/mnist/train.cpp
@@ -45,7 +45,9 @@ static void construct_net(network<sequential>& nn) {
 #undef O
 #undef X
 
-    core::backend_t backend_type = core::backend_t::tiny_dnn;
+    // by default will use backend_t::tiny_dnn unless you compiled
+    // with -DUSE_AVX=ON and your device supports AVX intrinsics
+    core::backend_t backend_type = core::default_engine();
 
     // construct nets
     nn << convolutional_layer<tan_h>(32, 32, 5, 1, 6,  // C1, 1@32x32-in, 6@28x28-out
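To confirm which engine a given binary actually selected, you can print `core::default_engine()`; the `operator<<` overload for `backend_t` in tiny_dnn/core/backend.h (see the hunk below) handles the formatting. A minimal sketch, assuming the usual umbrella header `tiny_dnn/tiny_dnn.h`:

```cpp
#include <iostream>
#include "tiny_dnn/tiny_dnn.h"  // assumed umbrella header; pulls in core/backend.h

int main() {
    // Prints the compile-time default: avx when built with -DUSE_AVX=ON on an
    // AVX-capable toolchain, tiny_dnn otherwise.
    std::cout << tiny_dnn::core::default_engine() << std::endl;
    return 0;
}
```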
1 change: 0 additions & 1 deletion test/test_convolutional_layer.h
@@ -51,7 +51,6 @@ TEST(convolutional, setup_tiny) {
     EXPECT_EQ(l.fan_in_size(), cnn_size_t(9));      // num of incoming connections
     EXPECT_EQ(l.fan_out_size(), cnn_size_t(18));    // num of outgoing connections
     EXPECT_STREQ(l.layer_type().c_str(), "conv");   // string with layer type
-    EXPECT_TRUE(l.engine() == backend_t::tiny_dnn); // the engine type
 }
 
 inline void randomize_tensor(tensor_t& tensor)
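The assertion is removed rather than updated because the engine a freshly constructed layer reports now depends on build flags. A hypothetical replacement (not part of this PR) could compare against the new default instead:

```cpp
// Hypothetical, engine-agnostic variant of the removed check: the layer
// should report whatever engine the build selected as its default.
EXPECT_TRUE(l.engine() == core::default_engine());
```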
11 changes: 11 additions & 0 deletions tiny_dnn/core/backend.h
@@ -35,6 +35,7 @@
 namespace tiny_dnn {
 namespace core {
 
+// TODO(edgar): remove this
 class context;
 
 enum class backend_t { tiny_dnn, nnpack, libdnn, avx, opencl };
@@ -55,6 +56,16 @@ inline std::ostream& operator << (std::ostream& os, backend_t type) {

 /*enum class Engine { OpenCL };*/
 
+inline backend_t default_engine() {
+#ifdef CNN_USE_AVX
+#if defined(__AVX__) || defined(__AVX2__)
+    return backend_t::avx;
+#endif
+#endif  // CNN_USE_AVX
+    return backend_t::tiny_dnn;
+}
+
+// TODO(edgar): remove this
 struct backend_params {
     backend_params() {}
 };
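Note the two-level guard in `default_engine()`: `CNN_USE_AVX` is the library's opt-in macro (presumably defined when configuring with `-DUSE_AVX=ON`, per the comment in the mnist example), while `__AVX__`/`__AVX2__` are set by the compiler only when AVX code generation is actually enabled. The resulting behavior, sketched:

```cpp
// CNN_USE_AVX defined and __AVX__ or __AVX2__ defined -> backend_t::avx
// CNN_USE_AVX defined, no compiler AVX support        -> backend_t::tiny_dnn (silent fallback)
// CNN_USE_AVX not defined                             -> backend_t::tiny_dnn
```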
8 changes: 4 additions & 4 deletions tiny_dnn/layers/convolutional_layer.h
@@ -78,7 +78,7 @@ class convolutional_layer : public feedforward_layer<Activation> {
                         bool has_bias = true,
                         cnn_size_t w_stride = 1,
                         cnn_size_t h_stride = 1,
-                        backend_t backend_type = backend_t::tiny_dnn,
+                        backend_t backend_type = core::default_engine(),
                         backend_params b_params = backend_params())
         : convolutional_layer(in_width, in_height, window_size, window_size, in_channels, out_channels,
                               connection_table(), pad_type, has_bias, w_stride, h_stride,
@@ -111,7 +111,7 @@ class convolutional_layer : public feedforward_layer<Activation> {
                         bool has_bias = true,
                         cnn_size_t w_stride = 1,
                         cnn_size_t h_stride = 1,
-                        backend_t backend_type = backend_t::tiny_dnn,
+                        backend_t backend_type = core::default_engine(),
                         backend_params b_params = backend_params())
         : convolutional_layer(in_width, in_height, window_width, window_height, in_channels, out_channels,
                               connection_table(), pad_type, has_bias, w_stride, h_stride,
@@ -144,7 +144,7 @@ class convolutional_layer : public feedforward_layer<Activation> {
                         bool has_bias = true,
                         cnn_size_t w_stride = 1,
                         cnn_size_t h_stride = 1,
-                        backend_t backend_type = backend_t::tiny_dnn,
+                        backend_t backend_type = core::default_engine(),
                         backend_params b_params = backend_params())
         : convolutional_layer(in_width, in_height, window_size, window_size, in_channels, out_channels,
                               connection_table, pad_type, has_bias, w_stride, h_stride,
@@ -179,7 +179,7 @@ class convolutional_layer : public feedforward_layer<Activation> {
                         bool has_bias = true,
                         cnn_size_t w_stride = 1,
                         cnn_size_t h_stride = 1,
-                        backend_t backend_type = backend_t::tiny_dnn,
+                        backend_t backend_type = core::default_engine(),
                         backend_params b_params = backend_params())
         : Base(std_input_order(has_bias)) {
         conv_set_params(shape3d(in_width, in_height, in_channels),
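Since `backend_type` stays an ordinary defaulted parameter, a caller can still pin a specific engine per layer. A sketch, spelling out the intervening defaults to reach the backend argument; `padding::valid` and the `tan_h` activation are assumed from the rest of tiny_dnn, not shown in this diff:

```cpp
#include "tiny_dnn/tiny_dnn.h"  // assumed umbrella header

using namespace tiny_dnn;
using namespace tiny_dnn::activation;

int main() {
    // Force the internal engine for this one layer, regardless of build flags.
    convolutional_layer<tan_h> c1(32, 32, 5, 1, 6,             // as in the mnist example
                                  padding::valid, true, 1, 1,  // defaults, written out
                                  core::backend_t::tiny_dnn);  // explicit override
    return 0;
}
```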
2 changes: 1 addition & 1 deletion tiny_dnn/layers/fully_connected_layer.h
@@ -49,7 +49,7 @@ class fully_connected_layer : public feedforward_layer<Activation> {
     fully_connected_layer(cnn_size_t in_dim,
                           cnn_size_t out_dim,
                           bool has_bias = true,
-                          backend_t backend_type = backend_t::tiny_dnn)
+                          backend_t backend_type = core::default_engine())
         : Base(std_input_order(has_bias)) {
         set_params(in_dim, out_dim, has_bias);
         init_backend(backend_type);
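Callers that omit the backend argument now pick up `core::default_engine()` automatically, so no existing code needs to change. A minimal sketch (LeNet-style dimensions, `tan_h` assumed as above):

```cpp
#include "tiny_dnn/tiny_dnn.h"  // assumed umbrella header

using namespace tiny_dnn;
using namespace tiny_dnn::activation;

int main() {
    // 120 inputs -> 10 outputs; the engine defaults to core::default_engine().
    fully_connected_layer<tan_h> fc(120, 10);
    return 0;
}
```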
4 changes: 2 additions & 2 deletions tiny_dnn/layers/max_pooling_layer.h
@@ -62,15 +62,15 @@ class max_pooling_layer : public feedforward_layer<Activation> {
                       cnn_size_t in_height,
                       cnn_size_t in_channels,
                       cnn_size_t pooling_size,
-                      backend_t backend_type = backend_t::tiny_dnn,
+                      backend_t backend_type = core::default_engine(),
                       backend_params b_params = backend_params())
         : max_pooling_layer(in_width, in_height, in_channels, pooling_size, pooling_size, backend_type, b_params) {
     }
 
     max_pooling_layer(const shape3d& in_shape,
                       cnn_size_t pooling_size,
                       cnn_size_t stride,
-                      backend_t backend_type = backend_t::tiny_dnn,
+                      backend_t backend_type = core::default_engine(),
                       backend_params b_params = backend_params())
         : max_pooling_layer(in_shape.width_, in_shape.height_, in_shape.depth_, pooling_size, stride, backend_type, b_params) {
     }
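Both overloads end up in the same delegating constructor, so the `shape3d` form inherits the new default as well. A sketch, assuming `shape3d` takes width, height, depth in that order:

```cpp
#include "tiny_dnn/tiny_dnn.h"  // assumed umbrella header

using namespace tiny_dnn;
using namespace tiny_dnn::activation;

int main() {
    // 28x28x6 input, 2x2 pooling with stride 2; the engine again defaults
    // to core::default_engine().
    max_pooling_layer<tan_h> pool(shape3d(28, 28, 6), 2, 2);
    return 0;
}
```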