tiny_dnn 1.0.0
A header-only, dependency-free deep learning framework in C++11
layer_factory.h
/*
    Copyright (c) 2013, Taiga Nomi
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.
    * Neither the name of the <organization> nor the
    names of its contributors may be used to endorse or promote products
    derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
    EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
    DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
    ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>
#include "caffe.pb.h"

#include "tiny_dnn/network.h"
#include "tiny_dnn/lossfunctions/loss_function.h"
#include "tiny_dnn/optimizers/optimizer.h"
#include "tiny_dnn/util/util.h"
#include "tiny_dnn/io/caffe/layer_factory_impl.h"

namespace tiny_dnn {

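/**
 * Create a tiny-dnn network from a parsed caffe::NetParameter and return it
 * as a shared_ptr<network<sequential>>. When data_shape is empty, the input
 * shape is taken from the model itself (the deprecated input_shape field, or
 * the first layer's input_param); otherwise data_shape is used directly.
 */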
inline std::shared_ptr<network<sequential>>
create_net_from_caffe_net(const caffe::NetParameter& layer, const shape3d& data_shape)
{
    detail::caffe_layer_vector src_net(layer);
    shape_t shape;

    if (data_shape.size() > 0) {
        shape = data_shape;
    } else {
        if (layer.input_shape_size() > 0) {
            // input_shape is deprecated in Caffe
            // blob dimensions are ordered by number N x channel K x height H x width W
            int depth = static_cast<int>(layer.input_shape(0).dim(1));
            int height = static_cast<int>(layer.input_shape(0).dim(2));
            int width = static_cast<int>(layer.input_shape(0).dim(3));
            shape = shape3d(width, height, depth);
        }
        else if (src_net[0].has_input_param()) {
            // blob dimensions are ordered by number N x channel K x height H x width W
            int depth = static_cast<int>(src_net[0].input_param().shape(0).dim(1));
            int height = static_cast<int>(src_net[0].input_param().shape(0).dim(2));
            int width = static_cast<int>(src_net[0].input_param().shape(0).dim(3));
            shape = shape3d(width, height, depth);
        }
        else {
            throw nn_error("input_shape not found in caffemodel. must specify input shape explicitly");
        }
    }

    auto dst_net = std::make_shared<network<sequential>>(layer.name());

    for (size_t i = 0; i < src_net.size(); i++) {
        auto type = src_net[i].type();

        if (detail::layer_skipped(type)) {
            continue;
        }

        if (!detail::layer_supported(type)) {
            throw nn_error("error: tiny-dnn does not support this layer type:" + type);
        }

        shape_t shape_next = shape;
        auto layer = detail::create(src_net[i], shape, &shape_next);

        nn_info("convert " + type + " => " + typeid(*layer).name());
        nn_info("shape:" + to_string(shape_next));

        *dst_net << layer;
        shape = shape_next;
    }

    return dst_net;
}

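/**
 * Create a network from a Caffe model stored in binary protobuf format
 * (a .caffemodel file), then delegate to create_net_from_caffe_net.
 */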
inline std::shared_ptr<network<sequential>>
create_net_from_caffe_protobinary(const std::string& caffebinarymodel, const shape3d& data_shape)
{
    caffe::NetParameter np;

    detail::read_proto_from_binary(caffebinarymodel, &np);
    return create_net_from_caffe_net(np, data_shape);
}

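/**
 * Create a network from a Caffe model description in text format
 * (a .prototxt file). If shape is omitted, the input shape must be
 * declared in the prototxt itself.
 */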
inline std::shared_ptr<network<sequential>>
create_net_from_caffe_prototxt(const std::string& caffeprototxt, const shape3d& shape = shape3d())
{
    caffe::NetParameter np;

    detail::read_proto_from_text(caffeprototxt, &np);
    return create_net_from_caffe_net(np, shape);
}

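/**
 * Reload trained weights from a parsed caffe::NetParameter into an already
 * constructed network, matching Caffe layers to tiny-dnn layers by type and
 * skipping tiny-dnn layers that have no counterpart in the Caffe model.
 */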
template <typename N>
inline void reload_weight_from_caffe_net(const caffe::NetParameter& layer, network<N> *net)
{
    detail::caffe_layer_vector src_net(layer);

    size_t tiny_layer_idx = 0;

    for (size_t caffe_layer_idx = 0; caffe_layer_idx < src_net.size(); caffe_layer_idx++) {
        auto type = src_net[caffe_layer_idx].type();

        size_t next_idx = tiny_layer_idx + 1;

        while (next_idx < net->depth() && !detail::layer_match(type, (*net)[next_idx]->layer_type())) {
            next_idx++;
        }
        if (next_idx >= net->depth()) break;

        tiny_layer_idx = next_idx;

        // load weight
        detail::load(src_net[caffe_layer_idx], (*net)[tiny_layer_idx++]);
    }
}

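/**
 * Reload trained weights from a binary .caffemodel file into an already
 * constructed network.
 */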
template <typename N>
inline void reload_weight_from_caffe_protobinary(const std::string& caffebinary, network<N> *net)
{
    caffe::NetParameter np;

    detail::read_proto_from_binary(caffebinary, &np);
    reload_weight_from_caffe_net(np, net);
}

} // namespace tiny_dnn
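
Typical usage, shown as a minimal sketch rather than part of the header itself: the file names "net.prototxt" and "net.caffemodel", the 28x28x1 input shape, and the include path (taken from the sibling include tiny_dnn/io/caffe/layer_factory_impl.h above) are placeholders for whatever Caffe model is actually being converted.

#include "tiny_dnn/io/caffe/layer_factory.h"

int main() {
    using namespace tiny_dnn;

    // Build the layer graph from the text-format model description.
    // The explicit shape3d is only required when the prototxt does not
    // declare an input shape of its own.
    auto net = create_net_from_caffe_prototxt("net.prototxt", shape3d(28, 28, 1));

    // Copy the trained weights from the binary caffemodel into the
    // freshly created layers.
    reload_weight_from_caffe_protobinary("net.caffemodel", net.get());

    // net is now a network<sequential> ready for inference.
}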