Within this repository, a showcase has been developed that demonstrates the functionality of the EVM algorithm live.

deploy_upperbody.prototxt (44 KB)
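The listing below is the Caffe deploy definition of a MobileNet-SSD upper-body detector. As a rough sketch of how such a deploy file is typically consumed, the following Python snippet loads it with OpenCV's dnn module and runs a single forward pass. The weights file name, the confidence threshold and the preprocessing constants (1/127.5 scale, 127.5 mean) are assumptions based on the common MobileNet-SSD setup, not values taken from this repository; the 300x300 input size comes from the input_shape block at the top of the file.

import cv2
import numpy as np

# Hypothetical weights file name; adjust to the .caffemodel shipped with the repo.
net = cv2.dnn.readNetFromCaffe("deploy_upperbody.prototxt", "upperbody.caffemodel")

image = cv2.imread("frame.jpg")
h, w = image.shape[:2]

# The deploy network expects a single 3-channel 300x300 blob (N=1, C=3, H=300, W=300).
blob = cv2.dnn.blobFromImage(image, scalefactor=1.0 / 127.5, size=(300, 300),
                             mean=(127.5, 127.5, 127.5))
net.setInput(blob)

# Assuming the complete deploy file ends in a DetectionOutput layer, as
# MobileNet-SSD deploy files usually do, the result is a 1x1xNx7 tensor:
# [image_id, class_id, confidence, x_min, y_min, x_max, y_max] (normalized box).
detections = net.forward()
for det in detections[0, 0]:
    confidence = float(det[2])
    if confidence > 0.5:
        x1, y1, x2, y2 = (det[3:7] * np.array([w, h, w, h])).astype(int)
        cv2.rectangle(image, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)

OpenCV is only one way to run the network; the same prototxt/caffemodel pair can also be loaded with the original Caffe Python bindings.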

  1. name: "MobileNet-SSD"
  2. input: "data"
  3. input_shape {
  4. dim: 1
  5. dim: 3
  6. dim: 300
  7. dim: 300
  8. }
  9. layer {
  10. name: "conv0"
  11. type: "Convolution"
  12. bottom: "data"
  13. top: "conv0"
  14. param {
  15. lr_mult: 0.1
  16. decay_mult: 0.1
  17. }
  18. convolution_param {
  19. num_output: 32
  20. bias_term: false
  21. pad: 1
  22. kernel_size: 3
  23. stride: 2
  24. weight_filler {
  25. type: "msra"
  26. }
  27. }
  28. }
  29. layer {
  30. name: "conv0/bn"
  31. type: "BatchNorm"
  32. bottom: "conv0"
  33. top: "conv0"
  34. param {
  35. lr_mult: 0
  36. decay_mult: 0
  37. }
  38. param {
  39. lr_mult: 0
  40. decay_mult: 0
  41. }
  42. param {
  43. lr_mult: 0
  44. decay_mult: 0
  45. }
  46. }
  47. layer {
  48. name: "conv0/scale"
  49. type: "Scale"
  50. bottom: "conv0"
  51. top: "conv0"
  52. param {
  53. lr_mult: 0.1
  54. decay_mult: 0.0
  55. }
  56. param {
  57. lr_mult: 0.2
  58. decay_mult: 0.0
  59. }
  60. scale_param {
  61. filler {
  62. value: 1
  63. }
  64. bias_term: true
  65. bias_filler {
  66. value: 0
  67. }
  68. }
  69. }
  70. layer {
  71. name: "conv0/relu"
  72. type: "ReLU"
  73. bottom: "conv0"
  74. top: "conv0"
  75. }
  76. layer {
  77. name: "conv1/dw"
  78. type: "Convolution"
  79. bottom: "conv0"
  80. top: "conv1/dw"
  81. param {
  82. lr_mult: 0.1
  83. decay_mult: 0.1
  84. }
  85. convolution_param {
  86. num_output: 32
  87. bias_term: false
  88. pad: 1
  89. kernel_size: 3
  90. group: 32
  91. #engine: CAFFE
  92. weight_filler {
  93. type: "msra"
  94. }
  95. }
  96. }
  97. layer {
  98. name: "conv1/dw/bn"
  99. type: "BatchNorm"
  100. bottom: "conv1/dw"
  101. top: "conv1/dw"
  102. param {
  103. lr_mult: 0
  104. decay_mult: 0
  105. }
  106. param {
  107. lr_mult: 0
  108. decay_mult: 0
  109. }
  110. param {
  111. lr_mult: 0
  112. decay_mult: 0
  113. }
  114. }
  115. layer {
  116. name: "conv1/dw/scale"
  117. type: "Scale"
  118. bottom: "conv1/dw"
  119. top: "conv1/dw"
  120. param {
  121. lr_mult: 0.1
  122. decay_mult: 0.0
  123. }
  124. param {
  125. lr_mult: 0.2
  126. decay_mult: 0.0
  127. }
  128. scale_param {
  129. filler {
  130. value: 1
  131. }
  132. bias_term: true
  133. bias_filler {
  134. value: 0
  135. }
  136. }
  137. }
  138. layer {
  139. name: "conv1/dw/relu"
  140. type: "ReLU"
  141. bottom: "conv1/dw"
  142. top: "conv1/dw"
  143. }
  144. layer {
  145. name: "conv1"
  146. type: "Convolution"
  147. bottom: "conv1/dw"
  148. top: "conv1"
  149. param {
  150. lr_mult: 0.1
  151. decay_mult: 0.1
  152. }
  153. convolution_param {
  154. num_output: 64
  155. bias_term: false
  156. kernel_size: 1
  157. weight_filler {
  158. type: "msra"
  159. }
  160. }
  161. }
  162. layer {
  163. name: "conv1/bn"
  164. type: "BatchNorm"
  165. bottom: "conv1"
  166. top: "conv1"
  167. param {
  168. lr_mult: 0
  169. decay_mult: 0
  170. }
  171. param {
  172. lr_mult: 0
  173. decay_mult: 0
  174. }
  175. param {
  176. lr_mult: 0
  177. decay_mult: 0
  178. }
  179. }
  180. layer {
  181. name: "conv1/scale"
  182. type: "Scale"
  183. bottom: "conv1"
  184. top: "conv1"
  185. param {
  186. lr_mult: 0.1
  187. decay_mult: 0.0
  188. }
  189. param {
  190. lr_mult: 0.2
  191. decay_mult: 0.0
  192. }
  193. scale_param {
  194. filler {
  195. value: 1
  196. }
  197. bias_term: true
  198. bias_filler {
  199. value: 0
  200. }
  201. }
  202. }
  203. layer {
  204. name: "conv1/relu"
  205. type: "ReLU"
  206. bottom: "conv1"
  207. top: "conv1"
  208. }
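# Up to this point the pattern is: a 3x3 depthwise convolution (group equal to the
# channel count, e.g. conv1/dw) followed by a 1x1 pointwise convolution (conv1),
# each with BatchNorm, Scale and ReLU. The same depthwise-separable block repeats
# through conv13.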
  209. layer {
  210. name: "conv2/dw"
  211. type: "Convolution"
  212. bottom: "conv1"
  213. top: "conv2/dw"
  214. param {
  215. lr_mult: 0.1
  216. decay_mult: 0.1
  217. }
  218. convolution_param {
  219. num_output: 64
  220. bias_term: false
  221. pad: 1
  222. kernel_size: 3
  223. stride: 2
  224. group: 64
  225. #engine: CAFFE
  226. weight_filler {
  227. type: "msra"
  228. }
  229. }
  230. }
  231. layer {
  232. name: "conv2/dw/bn"
  233. type: "BatchNorm"
  234. bottom: "conv2/dw"
  235. top: "conv2/dw"
  236. param {
  237. lr_mult: 0
  238. decay_mult: 0
  239. }
  240. param {
  241. lr_mult: 0
  242. decay_mult: 0
  243. }
  244. param {
  245. lr_mult: 0
  246. decay_mult: 0
  247. }
  248. }
  249. layer {
  250. name: "conv2/dw/scale"
  251. type: "Scale"
  252. bottom: "conv2/dw"
  253. top: "conv2/dw"
  254. param {
  255. lr_mult: 0.1
  256. decay_mult: 0.0
  257. }
  258. param {
  259. lr_mult: 0.2
  260. decay_mult: 0.0
  261. }
  262. scale_param {
  263. filler {
  264. value: 1
  265. }
  266. bias_term: true
  267. bias_filler {
  268. value: 0
  269. }
  270. }
  271. }
  272. layer {
  273. name: "conv2/dw/relu"
  274. type: "ReLU"
  275. bottom: "conv2/dw"
  276. top: "conv2/dw"
  277. }
  278. layer {
  279. name: "conv2"
  280. type: "Convolution"
  281. bottom: "conv2/dw"
  282. top: "conv2"
  283. param {
  284. lr_mult: 0.1
  285. decay_mult: 0.1
  286. }
  287. convolution_param {
  288. num_output: 128
  289. bias_term: false
  290. kernel_size: 1
  291. weight_filler {
  292. type: "msra"
  293. }
  294. }
  295. }
  296. layer {
  297. name: "conv2/bn"
  298. type: "BatchNorm"
  299. bottom: "conv2"
  300. top: "conv2"
  301. param {
  302. lr_mult: 0
  303. decay_mult: 0
  304. }
  305. param {
  306. lr_mult: 0
  307. decay_mult: 0
  308. }
  309. param {
  310. lr_mult: 0
  311. decay_mult: 0
  312. }
  313. }
  314. layer {
  315. name: "conv2/scale"
  316. type: "Scale"
  317. bottom: "conv2"
  318. top: "conv2"
  319. param {
  320. lr_mult: 0.1
  321. decay_mult: 0.0
  322. }
  323. param {
  324. lr_mult: 0.2
  325. decay_mult: 0.0
  326. }
  327. scale_param {
  328. filler {
  329. value: 1
  330. }
  331. bias_term: true
  332. bias_filler {
  333. value: 0
  334. }
  335. }
  336. }
  337. layer {
  338. name: "conv2/relu"
  339. type: "ReLU"
  340. bottom: "conv2"
  341. top: "conv2"
  342. }
  343. layer {
  344. name: "conv3/dw"
  345. type: "Convolution"
  346. bottom: "conv2"
  347. top: "conv3/dw"
  348. param {
  349. lr_mult: 0.1
  350. decay_mult: 0.1
  351. }
  352. convolution_param {
  353. num_output: 128
  354. bias_term: false
  355. pad: 1
  356. kernel_size: 3
  357. group: 128
  358. #engine: CAFFE
  359. weight_filler {
  360. type: "msra"
  361. }
  362. }
  363. }
  364. layer {
  365. name: "conv3/dw/bn"
  366. type: "BatchNorm"
  367. bottom: "conv3/dw"
  368. top: "conv3/dw"
  369. param {
  370. lr_mult: 0
  371. decay_mult: 0
  372. }
  373. param {
  374. lr_mult: 0
  375. decay_mult: 0
  376. }
  377. param {
  378. lr_mult: 0
  379. decay_mult: 0
  380. }
  381. }
  382. layer {
  383. name: "conv3/dw/scale"
  384. type: "Scale"
  385. bottom: "conv3/dw"
  386. top: "conv3/dw"
  387. param {
  388. lr_mult: 0.1
  389. decay_mult: 0.0
  390. }
  391. param {
  392. lr_mult: 0.2
  393. decay_mult: 0.0
  394. }
  395. scale_param {
  396. filler {
  397. value: 1
  398. }
  399. bias_term: true
  400. bias_filler {
  401. value: 0
  402. }
  403. }
  404. }
  405. layer {
  406. name: "conv3/dw/relu"
  407. type: "ReLU"
  408. bottom: "conv3/dw"
  409. top: "conv3/dw"
  410. }
  411. layer {
  412. name: "conv3"
  413. type: "Convolution"
  414. bottom: "conv3/dw"
  415. top: "conv3"
  416. param {
  417. lr_mult: 0.1
  418. decay_mult: 0.1
  419. }
  420. convolution_param {
  421. num_output: 128
  422. bias_term: false
  423. kernel_size: 1
  424. weight_filler {
  425. type: "msra"
  426. }
  427. }
  428. }
  429. layer {
  430. name: "conv3/bn"
  431. type: "BatchNorm"
  432. bottom: "conv3"
  433. top: "conv3"
  434. param {
  435. lr_mult: 0
  436. decay_mult: 0
  437. }
  438. param {
  439. lr_mult: 0
  440. decay_mult: 0
  441. }
  442. param {
  443. lr_mult: 0
  444. decay_mult: 0
  445. }
  446. }
  447. layer {
  448. name: "conv3/scale"
  449. type: "Scale"
  450. bottom: "conv3"
  451. top: "conv3"
  452. param {
  453. lr_mult: 0.1
  454. decay_mult: 0.0
  455. }
  456. param {
  457. lr_mult: 0.2
  458. decay_mult: 0.0
  459. }
  460. scale_param {
  461. filler {
  462. value: 1
  463. }
  464. bias_term: true
  465. bias_filler {
  466. value: 0
  467. }
  468. }
  469. }
  470. layer {
  471. name: "conv3/relu"
  472. type: "ReLU"
  473. bottom: "conv3"
  474. top: "conv3"
  475. }
  476. layer {
  477. name: "conv4/dw"
  478. type: "Convolution"
  479. bottom: "conv3"
  480. top: "conv4/dw"
  481. param {
  482. lr_mult: 0.1
  483. decay_mult: 0.1
  484. }
  485. convolution_param {
  486. num_output: 128
  487. bias_term: false
  488. pad: 1
  489. kernel_size: 3
  490. stride: 2
  491. group: 128
  492. #engine: CAFFE
  493. weight_filler {
  494. type: "msra"
  495. }
  496. }
  497. }
  498. layer {
  499. name: "conv4/dw/bn"
  500. type: "BatchNorm"
  501. bottom: "conv4/dw"
  502. top: "conv4/dw"
  503. param {
  504. lr_mult: 0
  505. decay_mult: 0
  506. }
  507. param {
  508. lr_mult: 0
  509. decay_mult: 0
  510. }
  511. param {
  512. lr_mult: 0
  513. decay_mult: 0
  514. }
  515. }
  516. layer {
  517. name: "conv4/dw/scale"
  518. type: "Scale"
  519. bottom: "conv4/dw"
  520. top: "conv4/dw"
  521. param {
  522. lr_mult: 0.1
  523. decay_mult: 0.0
  524. }
  525. param {
  526. lr_mult: 0.2
  527. decay_mult: 0.0
  528. }
  529. scale_param {
  530. filler {
  531. value: 1
  532. }
  533. bias_term: true
  534. bias_filler {
  535. value: 0
  536. }
  537. }
  538. }
  539. layer {
  540. name: "conv4/dw/relu"
  541. type: "ReLU"
  542. bottom: "conv4/dw"
  543. top: "conv4/dw"
  544. }
  545. layer {
  546. name: "conv4"
  547. type: "Convolution"
  548. bottom: "conv4/dw"
  549. top: "conv4"
  550. param {
  551. lr_mult: 0.1
  552. decay_mult: 0.1
  553. }
  554. convolution_param {
  555. num_output: 256
  556. bias_term: false
  557. kernel_size: 1
  558. weight_filler {
  559. type: "msra"
  560. }
  561. }
  562. }
  563. layer {
  564. name: "conv4/bn"
  565. type: "BatchNorm"
  566. bottom: "conv4"
  567. top: "conv4"
  568. param {
  569. lr_mult: 0
  570. decay_mult: 0
  571. }
  572. param {
  573. lr_mult: 0
  574. decay_mult: 0
  575. }
  576. param {
  577. lr_mult: 0
  578. decay_mult: 0
  579. }
  580. }
  581. layer {
  582. name: "conv4/scale"
  583. type: "Scale"
  584. bottom: "conv4"
  585. top: "conv4"
  586. param {
  587. lr_mult: 0.1
  588. decay_mult: 0.0
  589. }
  590. param {
  591. lr_mult: 0.2
  592. decay_mult: 0.0
  593. }
  594. scale_param {
  595. filler {
  596. value: 1
  597. }
  598. bias_term: true
  599. bias_filler {
  600. value: 0
  601. }
  602. }
  603. }
  604. layer {
  605. name: "conv4/relu"
  606. type: "ReLU"
  607. bottom: "conv4"
  608. top: "conv4"
  609. }
  610. layer {
  611. name: "conv5/dw"
  612. type: "Convolution"
  613. bottom: "conv4"
  614. top: "conv5/dw"
  615. param {
  616. lr_mult: 0.1
  617. decay_mult: 0.1
  618. }
  619. convolution_param {
  620. num_output: 256
  621. bias_term: false
  622. pad: 1
  623. kernel_size: 3
  624. group: 256
  625. #engine: CAFFE
  626. weight_filler {
  627. type: "msra"
  628. }
  629. }
  630. }
  631. layer {
  632. name: "conv5/dw/bn"
  633. type: "BatchNorm"
  634. bottom: "conv5/dw"
  635. top: "conv5/dw"
  636. param {
  637. lr_mult: 0
  638. decay_mult: 0
  639. }
  640. param {
  641. lr_mult: 0
  642. decay_mult: 0
  643. }
  644. param {
  645. lr_mult: 0
  646. decay_mult: 0
  647. }
  648. }
  649. layer {
  650. name: "conv5/dw/scale"
  651. type: "Scale"
  652. bottom: "conv5/dw"
  653. top: "conv5/dw"
  654. param {
  655. lr_mult: 0.1
  656. decay_mult: 0.0
  657. }
  658. param {
  659. lr_mult: 0.2
  660. decay_mult: 0.0
  661. }
  662. scale_param {
  663. filler {
  664. value: 1
  665. }
  666. bias_term: true
  667. bias_filler {
  668. value: 0
  669. }
  670. }
  671. }
  672. layer {
  673. name: "conv5/dw/relu"
  674. type: "ReLU"
  675. bottom: "conv5/dw"
  676. top: "conv5/dw"
  677. }
  678. layer {
  679. name: "conv5"
  680. type: "Convolution"
  681. bottom: "conv5/dw"
  682. top: "conv5"
  683. param {
  684. lr_mult: 0.1
  685. decay_mult: 0.1
  686. }
  687. convolution_param {
  688. num_output: 256
  689. bias_term: false
  690. kernel_size: 1
  691. weight_filler {
  692. type: "msra"
  693. }
  694. }
  695. }
  696. layer {
  697. name: "conv5/bn"
  698. type: "BatchNorm"
  699. bottom: "conv5"
  700. top: "conv5"
  701. param {
  702. lr_mult: 0
  703. decay_mult: 0
  704. }
  705. param {
  706. lr_mult: 0
  707. decay_mult: 0
  708. }
  709. param {
  710. lr_mult: 0
  711. decay_mult: 0
  712. }
  713. }
  714. layer {
  715. name: "conv5/scale"
  716. type: "Scale"
  717. bottom: "conv5"
  718. top: "conv5"
  719. param {
  720. lr_mult: 0.1
  721. decay_mult: 0.0
  722. }
  723. param {
  724. lr_mult: 0.2
  725. decay_mult: 0.0
  726. }
  727. scale_param {
  728. filler {
  729. value: 1
  730. }
  731. bias_term: true
  732. bias_filler {
  733. value: 0
  734. }
  735. }
  736. }
  737. layer {
  738. name: "conv5/relu"
  739. type: "ReLU"
  740. bottom: "conv5"
  741. top: "conv5"
  742. }
  743. layer {
  744. name: "conv6/dw"
  745. type: "Convolution"
  746. bottom: "conv5"
  747. top: "conv6/dw"
  748. param {
  749. lr_mult: 0.1
  750. decay_mult: 0.1
  751. }
  752. convolution_param {
  753. num_output: 256
  754. bias_term: false
  755. pad: 1
  756. kernel_size: 3
  757. stride: 2
  758. group: 256
  759. #engine: CAFFE
  760. weight_filler {
  761. type: "msra"
  762. }
  763. }
  764. }
  765. layer {
  766. name: "conv6/dw/bn"
  767. type: "BatchNorm"
  768. bottom: "conv6/dw"
  769. top: "conv6/dw"
  770. param {
  771. lr_mult: 0
  772. decay_mult: 0
  773. }
  774. param {
  775. lr_mult: 0
  776. decay_mult: 0
  777. }
  778. param {
  779. lr_mult: 0
  780. decay_mult: 0
  781. }
  782. }
  783. layer {
  784. name: "conv6/dw/scale"
  785. type: "Scale"
  786. bottom: "conv6/dw"
  787. top: "conv6/dw"
  788. param {
  789. lr_mult: 0.1
  790. decay_mult: 0.0
  791. }
  792. param {
  793. lr_mult: 0.2
  794. decay_mult: 0.0
  795. }
  796. scale_param {
  797. filler {
  798. value: 1
  799. }
  800. bias_term: true
  801. bias_filler {
  802. value: 0
  803. }
  804. }
  805. }
  806. layer {
  807. name: "conv6/dw/relu"
  808. type: "ReLU"
  809. bottom: "conv6/dw"
  810. top: "conv6/dw"
  811. }
  812. layer {
  813. name: "conv6"
  814. type: "Convolution"
  815. bottom: "conv6/dw"
  816. top: "conv6"
  817. param {
  818. lr_mult: 0.1
  819. decay_mult: 0.1
  820. }
  821. convolution_param {
  822. num_output: 512
  823. bias_term: false
  824. kernel_size: 1
  825. weight_filler {
  826. type: "msra"
  827. }
  828. }
  829. }
  830. layer {
  831. name: "conv6/bn"
  832. type: "BatchNorm"
  833. bottom: "conv6"
  834. top: "conv6"
  835. param {
  836. lr_mult: 0
  837. decay_mult: 0
  838. }
  839. param {
  840. lr_mult: 0
  841. decay_mult: 0
  842. }
  843. param {
  844. lr_mult: 0
  845. decay_mult: 0
  846. }
  847. }
  848. layer {
  849. name: "conv6/scale"
  850. type: "Scale"
  851. bottom: "conv6"
  852. top: "conv6"
  853. param {
  854. lr_mult: 0.1
  855. decay_mult: 0.0
  856. }
  857. param {
  858. lr_mult: 0.2
  859. decay_mult: 0.0
  860. }
  861. scale_param {
  862. filler {
  863. value: 1
  864. }
  865. bias_term: true
  866. bias_filler {
  867. value: 0
  868. }
  869. }
  870. }
  871. layer {
  872. name: "conv6/relu"
  873. type: "ReLU"
  874. bottom: "conv6"
  875. top: "conv6"
  876. }
  877. layer {
  878. name: "conv7/dw"
  879. type: "Convolution"
  880. bottom: "conv6"
  881. top: "conv7/dw"
  882. param {
  883. lr_mult: 0.1
  884. decay_mult: 0.1
  885. }
  886. convolution_param {
  887. num_output: 512
  888. bias_term: false
  889. pad: 1
  890. kernel_size: 3
  891. group: 512
  892. #engine: CAFFE
  893. weight_filler {
  894. type: "msra"
  895. }
  896. }
  897. }
  898. layer {
  899. name: "conv7/dw/bn"
  900. type: "BatchNorm"
  901. bottom: "conv7/dw"
  902. top: "conv7/dw"
  903. param {
  904. lr_mult: 0
  905. decay_mult: 0
  906. }
  907. param {
  908. lr_mult: 0
  909. decay_mult: 0
  910. }
  911. param {
  912. lr_mult: 0
  913. decay_mult: 0
  914. }
  915. }
  916. layer {
  917. name: "conv7/dw/scale"
  918. type: "Scale"
  919. bottom: "conv7/dw"
  920. top: "conv7/dw"
  921. param {
  922. lr_mult: 0.1
  923. decay_mult: 0.0
  924. }
  925. param {
  926. lr_mult: 0.2
  927. decay_mult: 0.0
  928. }
  929. scale_param {
  930. filler {
  931. value: 1
  932. }
  933. bias_term: true
  934. bias_filler {
  935. value: 0
  936. }
  937. }
  938. }
  939. layer {
  940. name: "conv7/dw/relu"
  941. type: "ReLU"
  942. bottom: "conv7/dw"
  943. top: "conv7/dw"
  944. }
  945. layer {
  946. name: "conv7"
  947. type: "Convolution"
  948. bottom: "conv7/dw"
  949. top: "conv7"
  950. param {
  951. lr_mult: 0.1
  952. decay_mult: 0.1
  953. }
  954. convolution_param {
  955. num_output: 512
  956. bias_term: false
  957. kernel_size: 1
  958. weight_filler {
  959. type: "msra"
  960. }
  961. }
  962. }
  963. layer {
  964. name: "conv7/bn"
  965. type: "BatchNorm"
  966. bottom: "conv7"
  967. top: "conv7"
  968. param {
  969. lr_mult: 0
  970. decay_mult: 0
  971. }
  972. param {
  973. lr_mult: 0
  974. decay_mult: 0
  975. }
  976. param {
  977. lr_mult: 0
  978. decay_mult: 0
  979. }
  980. }
  981. layer {
  982. name: "conv7/scale"
  983. type: "Scale"
  984. bottom: "conv7"
  985. top: "conv7"
  986. param {
  987. lr_mult: 0.1
  988. decay_mult: 0.0
  989. }
  990. param {
  991. lr_mult: 0.2
  992. decay_mult: 0.0
  993. }
  994. scale_param {
  995. filler {
  996. value: 1
  997. }
  998. bias_term: true
  999. bias_filler {
  1000. value: 0
  1001. }
  1002. }
  1003. }
  1004. layer {
  1005. name: "conv7/relu"
  1006. type: "ReLU"
  1007. bottom: "conv7"
  1008. top: "conv7"
  1009. }
  1010. layer {
  1011. name: "conv8/dw"
  1012. type: "Convolution"
  1013. bottom: "conv7"
  1014. top: "conv8/dw"
  1015. param {
  1016. lr_mult: 0.1
  1017. decay_mult: 0.1
  1018. }
  1019. convolution_param {
  1020. num_output: 512
  1021. bias_term: false
  1022. pad: 1
  1023. kernel_size: 3
  1024. group: 512
  1025. #engine: CAFFE
  1026. weight_filler {
  1027. type: "msra"
  1028. }
  1029. }
  1030. }
  1031. layer {
  1032. name: "conv8/dw/bn"
  1033. type: "BatchNorm"
  1034. bottom: "conv8/dw"
  1035. top: "conv8/dw"
  1036. param {
  1037. lr_mult: 0
  1038. decay_mult: 0
  1039. }
  1040. param {
  1041. lr_mult: 0
  1042. decay_mult: 0
  1043. }
  1044. param {
  1045. lr_mult: 0
  1046. decay_mult: 0
  1047. }
  1048. }
  1049. layer {
  1050. name: "conv8/dw/scale"
  1051. type: "Scale"
  1052. bottom: "conv8/dw"
  1053. top: "conv8/dw"
  1054. param {
  1055. lr_mult: 0.1
  1056. decay_mult: 0.0
  1057. }
  1058. param {
  1059. lr_mult: 0.2
  1060. decay_mult: 0.0
  1061. }
  1062. scale_param {
  1063. filler {
  1064. value: 1
  1065. }
  1066. bias_term: true
  1067. bias_filler {
  1068. value: 0
  1069. }
  1070. }
  1071. }
  1072. layer {
  1073. name: "conv8/dw/relu"
  1074. type: "ReLU"
  1075. bottom: "conv8/dw"
  1076. top: "conv8/dw"
  1077. }
  1078. layer {
  1079. name: "conv8"
  1080. type: "Convolution"
  1081. bottom: "conv8/dw"
  1082. top: "conv8"
  1083. param {
  1084. lr_mult: 0.1
  1085. decay_mult: 0.1
  1086. }
  1087. convolution_param {
  1088. num_output: 512
  1089. bias_term: false
  1090. kernel_size: 1
  1091. weight_filler {
  1092. type: "msra"
  1093. }
  1094. }
  1095. }
  1096. layer {
  1097. name: "conv8/bn"
  1098. type: "BatchNorm"
  1099. bottom: "conv8"
  1100. top: "conv8"
  1101. param {
  1102. lr_mult: 0
  1103. decay_mult: 0
  1104. }
  1105. param {
  1106. lr_mult: 0
  1107. decay_mult: 0
  1108. }
  1109. param {
  1110. lr_mult: 0
  1111. decay_mult: 0
  1112. }
  1113. }
  1114. layer {
  1115. name: "conv8/scale"
  1116. type: "Scale"
  1117. bottom: "conv8"
  1118. top: "conv8"
  1119. param {
  1120. lr_mult: 0.1
  1121. decay_mult: 0.0
  1122. }
  1123. param {
  1124. lr_mult: 0.2
  1125. decay_mult: 0.0
  1126. }
  1127. scale_param {
  1128. filler {
  1129. value: 1
  1130. }
  1131. bias_term: true
  1132. bias_filler {
  1133. value: 0
  1134. }
  1135. }
  1136. }
  1137. layer {
  1138. name: "conv8/relu"
  1139. type: "ReLU"
  1140. bottom: "conv8"
  1141. top: "conv8"
  1142. }
  1143. layer {
  1144. name: "conv9/dw"
  1145. type: "Convolution"
  1146. bottom: "conv8"
  1147. top: "conv9/dw"
  1148. param {
  1149. lr_mult: 0.1
  1150. decay_mult: 0.1
  1151. }
  1152. convolution_param {
  1153. num_output: 512
  1154. bias_term: false
  1155. pad: 1
  1156. kernel_size: 3
  1157. group: 512
  1158. #engine: CAFFE
  1159. weight_filler {
  1160. type: "msra"
  1161. }
  1162. }
  1163. }
  1164. layer {
  1165. name: "conv9/dw/bn"
  1166. type: "BatchNorm"
  1167. bottom: "conv9/dw"
  1168. top: "conv9/dw"
  1169. param {
  1170. lr_mult: 0
  1171. decay_mult: 0
  1172. }
  1173. param {
  1174. lr_mult: 0
  1175. decay_mult: 0
  1176. }
  1177. param {
  1178. lr_mult: 0
  1179. decay_mult: 0
  1180. }
  1181. }
  1182. layer {
  1183. name: "conv9/dw/scale"
  1184. type: "Scale"
  1185. bottom: "conv9/dw"
  1186. top: "conv9/dw"
  1187. param {
  1188. lr_mult: 0.1
  1189. decay_mult: 0.0
  1190. }
  1191. param {
  1192. lr_mult: 0.2
  1193. decay_mult: 0.0
  1194. }
  1195. scale_param {
  1196. filler {
  1197. value: 1
  1198. }
  1199. bias_term: true
  1200. bias_filler {
  1201. value: 0
  1202. }
  1203. }
  1204. }
  1205. layer {
  1206. name: "conv9/dw/relu"
  1207. type: "ReLU"
  1208. bottom: "conv9/dw"
  1209. top: "conv9/dw"
  1210. }
  1211. layer {
  1212. name: "conv9"
  1213. type: "Convolution"
  1214. bottom: "conv9/dw"
  1215. top: "conv9"
  1216. param {
  1217. lr_mult: 0.1
  1218. decay_mult: 0.1
  1219. }
  1220. convolution_param {
  1221. num_output: 512
  1222. bias_term: false
  1223. kernel_size: 1
  1224. weight_filler {
  1225. type: "msra"
  1226. }
  1227. }
  1228. }
  1229. layer {
  1230. name: "conv9/bn"
  1231. type: "BatchNorm"
  1232. bottom: "conv9"
  1233. top: "conv9"
  1234. param {
  1235. lr_mult: 0
  1236. decay_mult: 0
  1237. }
  1238. param {
  1239. lr_mult: 0
  1240. decay_mult: 0
  1241. }
  1242. param {
  1243. lr_mult: 0
  1244. decay_mult: 0
  1245. }
  1246. }
  1247. layer {
  1248. name: "conv9/scale"
  1249. type: "Scale"
  1250. bottom: "conv9"
  1251. top: "conv9"
  1252. param {
  1253. lr_mult: 0.1
  1254. decay_mult: 0.0
  1255. }
  1256. param {
  1257. lr_mult: 0.2
  1258. decay_mult: 0.0
  1259. }
  1260. scale_param {
  1261. filler {
  1262. value: 1
  1263. }
  1264. bias_term: true
  1265. bias_filler {
  1266. value: 0
  1267. }
  1268. }
  1269. }
  1270. layer {
  1271. name: "conv9/relu"
  1272. type: "ReLU"
  1273. bottom: "conv9"
  1274. top: "conv9"
  1275. }
  1276. layer {
  1277. name: "conv10/dw"
  1278. type: "Convolution"
  1279. bottom: "conv9"
  1280. top: "conv10/dw"
  1281. param {
  1282. lr_mult: 0.1
  1283. decay_mult: 0.1
  1284. }
  1285. convolution_param {
  1286. num_output: 512
  1287. bias_term: false
  1288. pad: 1
  1289. kernel_size: 3
  1290. group: 512
  1291. #engine: CAFFE
  1292. weight_filler {
  1293. type: "msra"
  1294. }
  1295. }
  1296. }
  1297. layer {
  1298. name: "conv10/dw/bn"
  1299. type: "BatchNorm"
  1300. bottom: "conv10/dw"
  1301. top: "conv10/dw"
  1302. param {
  1303. lr_mult: 0
  1304. decay_mult: 0
  1305. }
  1306. param {
  1307. lr_mult: 0
  1308. decay_mult: 0
  1309. }
  1310. param {
  1311. lr_mult: 0
  1312. decay_mult: 0
  1313. }
  1314. }
  1315. layer {
  1316. name: "conv10/dw/scale"
  1317. type: "Scale"
  1318. bottom: "conv10/dw"
  1319. top: "conv10/dw"
  1320. param {
  1321. lr_mult: 0.1
  1322. decay_mult: 0.0
  1323. }
  1324. param {
  1325. lr_mult: 0.2
  1326. decay_mult: 0.0
  1327. }
  1328. scale_param {
  1329. filler {
  1330. value: 1
  1331. }
  1332. bias_term: true
  1333. bias_filler {
  1334. value: 0
  1335. }
  1336. }
  1337. }
  1338. layer {
  1339. name: "conv10/dw/relu"
  1340. type: "ReLU"
  1341. bottom: "conv10/dw"
  1342. top: "conv10/dw"
  1343. }
  1344. layer {
  1345. name: "conv10"
  1346. type: "Convolution"
  1347. bottom: "conv10/dw"
  1348. top: "conv10"
  1349. param {
  1350. lr_mult: 0.1
  1351. decay_mult: 0.1
  1352. }
  1353. convolution_param {
  1354. num_output: 512
  1355. bias_term: false
  1356. kernel_size: 1
  1357. weight_filler {
  1358. type: "msra"
  1359. }
  1360. }
  1361. }
  1362. layer {
  1363. name: "conv10/bn"
  1364. type: "BatchNorm"
  1365. bottom: "conv10"
  1366. top: "conv10"
  1367. param {
  1368. lr_mult: 0
  1369. decay_mult: 0
  1370. }
  1371. param {
  1372. lr_mult: 0
  1373. decay_mult: 0
  1374. }
  1375. param {
  1376. lr_mult: 0
  1377. decay_mult: 0
  1378. }
  1379. }
  1380. layer {
  1381. name: "conv10/scale"
  1382. type: "Scale"
  1383. bottom: "conv10"
  1384. top: "conv10"
  1385. param {
  1386. lr_mult: 0.1
  1387. decay_mult: 0.0
  1388. }
  1389. param {
  1390. lr_mult: 0.2
  1391. decay_mult: 0.0
  1392. }
  1393. scale_param {
  1394. filler {
  1395. value: 1
  1396. }
  1397. bias_term: true
  1398. bias_filler {
  1399. value: 0
  1400. }
  1401. }
  1402. }
  1403. layer {
  1404. name: "conv10/relu"
  1405. type: "ReLU"
  1406. bottom: "conv10"
  1407. top: "conv10"
  1408. }
  1409. layer {
  1410. name: "conv11/dw"
  1411. type: "Convolution"
  1412. bottom: "conv10"
  1413. top: "conv11/dw"
  1414. param {
  1415. lr_mult: 0.1
  1416. decay_mult: 0.1
  1417. }
  1418. convolution_param {
  1419. num_output: 512
  1420. bias_term: false
  1421. pad: 1
  1422. kernel_size: 3
  1423. group: 512
  1424. #engine: CAFFE
  1425. weight_filler {
  1426. type: "msra"
  1427. }
  1428. }
  1429. }
  1430. layer {
  1431. name: "conv11/dw/bn"
  1432. type: "BatchNorm"
  1433. bottom: "conv11/dw"
  1434. top: "conv11/dw"
  1435. param {
  1436. lr_mult: 0
  1437. decay_mult: 0
  1438. }
  1439. param {
  1440. lr_mult: 0
  1441. decay_mult: 0
  1442. }
  1443. param {
  1444. lr_mult: 0
  1445. decay_mult: 0
  1446. }
  1447. }
  1448. layer {
  1449. name: "conv11/dw/scale"
  1450. type: "Scale"
  1451. bottom: "conv11/dw"
  1452. top: "conv11/dw"
  1453. param {
  1454. lr_mult: 0.1
  1455. decay_mult: 0.0
  1456. }
  1457. param {
  1458. lr_mult: 0.2
  1459. decay_mult: 0.0
  1460. }
  1461. scale_param {
  1462. filler {
  1463. value: 1
  1464. }
  1465. bias_term: true
  1466. bias_filler {
  1467. value: 0
  1468. }
  1469. }
  1470. }
  1471. layer {
  1472. name: "conv11/dw/relu"
  1473. type: "ReLU"
  1474. bottom: "conv11/dw"
  1475. top: "conv11/dw"
  1476. }
  1477. layer {
  1478. name: "conv11"
  1479. type: "Convolution"
  1480. bottom: "conv11/dw"
  1481. top: "conv11"
  1482. param {
  1483. lr_mult: 0.1
  1484. decay_mult: 0.1
  1485. }
  1486. convolution_param {
  1487. num_output: 512
  1488. bias_term: false
  1489. kernel_size: 1
  1490. weight_filler {
  1491. type: "msra"
  1492. }
  1493. }
  1494. }
  1495. layer {
  1496. name: "conv11/bn"
  1497. type: "BatchNorm"
  1498. bottom: "conv11"
  1499. top: "conv11"
  1500. param {
  1501. lr_mult: 0
  1502. decay_mult: 0
  1503. }
  1504. param {
  1505. lr_mult: 0
  1506. decay_mult: 0
  1507. }
  1508. param {
  1509. lr_mult: 0
  1510. decay_mult: 0
  1511. }
  1512. }
  1513. layer {
  1514. name: "conv11/scale"
  1515. type: "Scale"
  1516. bottom: "conv11"
  1517. top: "conv11"
  1518. param {
  1519. lr_mult: 0.1
  1520. decay_mult: 0.0
  1521. }
  1522. param {
  1523. lr_mult: 0.2
  1524. decay_mult: 0.0
  1525. }
  1526. scale_param {
  1527. filler {
  1528. value: 1
  1529. }
  1530. bias_term: true
  1531. bias_filler {
  1532. value: 0
  1533. }
  1534. }
  1535. }
  1536. layer {
  1537. name: "conv11/relu"
  1538. type: "ReLU"
  1539. bottom: "conv11"
  1540. top: "conv11"
  1541. }
  1542. layer {
  1543. name: "conv12/dw"
  1544. type: "Convolution"
  1545. bottom: "conv11"
  1546. top: "conv12/dw"
  1547. param {
  1548. lr_mult: 0.1
  1549. decay_mult: 0.1
  1550. }
  1551. convolution_param {
  1552. num_output: 512
  1553. bias_term: false
  1554. pad: 1
  1555. kernel_size: 3
  1556. stride: 2
  1557. group: 512
  1558. #engine: CAFFE
  1559. weight_filler {
  1560. type: "msra"
  1561. }
  1562. }
  1563. }
  1564. layer {
  1565. name: "conv12/dw/bn"
  1566. type: "BatchNorm"
  1567. bottom: "conv12/dw"
  1568. top: "conv12/dw"
  1569. param {
  1570. lr_mult: 0
  1571. decay_mult: 0
  1572. }
  1573. param {
  1574. lr_mult: 0
  1575. decay_mult: 0
  1576. }
  1577. param {
  1578. lr_mult: 0
  1579. decay_mult: 0
  1580. }
  1581. }
  1582. layer {
  1583. name: "conv12/dw/scale"
  1584. type: "Scale"
  1585. bottom: "conv12/dw"
  1586. top: "conv12/dw"
  1587. param {
  1588. lr_mult: 0.1
  1589. decay_mult: 0.0
  1590. }
  1591. param {
  1592. lr_mult: 0.2
  1593. decay_mult: 0.0
  1594. }
  1595. scale_param {
  1596. filler {
  1597. value: 1
  1598. }
  1599. bias_term: true
  1600. bias_filler {
  1601. value: 0
  1602. }
  1603. }
  1604. }
  1605. layer {
  1606. name: "conv12/dw/relu"
  1607. type: "ReLU"
  1608. bottom: "conv12/dw"
  1609. top: "conv12/dw"
  1610. }
  1611. layer {
  1612. name: "conv12"
  1613. type: "Convolution"
  1614. bottom: "conv12/dw"
  1615. top: "conv12"
  1616. param {
  1617. lr_mult: 0.1
  1618. decay_mult: 0.1
  1619. }
  1620. convolution_param {
  1621. num_output: 1024
  1622. bias_term: false
  1623. kernel_size: 1
  1624. weight_filler {
  1625. type: "msra"
  1626. }
  1627. }
  1628. }
  1629. layer {
  1630. name: "conv12/bn"
  1631. type: "BatchNorm"
  1632. bottom: "conv12"
  1633. top: "conv12"
  1634. param {
  1635. lr_mult: 0
  1636. decay_mult: 0
  1637. }
  1638. param {
  1639. lr_mult: 0
  1640. decay_mult: 0
  1641. }
  1642. param {
  1643. lr_mult: 0
  1644. decay_mult: 0
  1645. }
  1646. }
  1647. layer {
  1648. name: "conv12/scale"
  1649. type: "Scale"
  1650. bottom: "conv12"
  1651. top: "conv12"
  1652. param {
  1653. lr_mult: 0.1
  1654. decay_mult: 0.0
  1655. }
  1656. param {
  1657. lr_mult: 0.2
  1658. decay_mult: 0.0
  1659. }
  1660. scale_param {
  1661. filler {
  1662. value: 1
  1663. }
  1664. bias_term: true
  1665. bias_filler {
  1666. value: 0
  1667. }
  1668. }
  1669. }
  1670. layer {
  1671. name: "conv12/relu"
  1672. type: "ReLU"
  1673. bottom: "conv12"
  1674. top: "conv12"
  1675. }
  1676. layer {
  1677. name: "conv13/dw"
  1678. type: "Convolution"
  1679. bottom: "conv12"
  1680. top: "conv13/dw"
  1681. param {
  1682. lr_mult: 0.1
  1683. decay_mult: 0.1
  1684. }
  1685. convolution_param {
  1686. num_output: 1024
  1687. bias_term: false
  1688. pad: 1
  1689. kernel_size: 3
  1690. group: 1024
  1691. #engine: CAFFE
  1692. weight_filler {
  1693. type: "msra"
  1694. }
  1695. }
  1696. }
  1697. layer {
  1698. name: "conv13/dw/bn"
  1699. type: "BatchNorm"
  1700. bottom: "conv13/dw"
  1701. top: "conv13/dw"
  1702. param {
  1703. lr_mult: 0
  1704. decay_mult: 0
  1705. }
  1706. param {
  1707. lr_mult: 0
  1708. decay_mult: 0
  1709. }
  1710. param {
  1711. lr_mult: 0
  1712. decay_mult: 0
  1713. }
  1714. }
  1715. layer {
  1716. name: "conv13/dw/scale"
  1717. type: "Scale"
  1718. bottom: "conv13/dw"
  1719. top: "conv13/dw"
  1720. param {
  1721. lr_mult: 0.1
  1722. decay_mult: 0.0
  1723. }
  1724. param {
  1725. lr_mult: 0.2
  1726. decay_mult: 0.0
  1727. }
  1728. scale_param {
  1729. filler {
  1730. value: 1
  1731. }
  1732. bias_term: true
  1733. bias_filler {
  1734. value: 0
  1735. }
  1736. }
  1737. }
  1738. layer {
  1739. name: "conv13/dw/relu"
  1740. type: "ReLU"
  1741. bottom: "conv13/dw"
  1742. top: "conv13/dw"
  1743. }
  1744. layer {
  1745. name: "conv13"
  1746. type: "Convolution"
  1747. bottom: "conv13/dw"
  1748. top: "conv13"
  1749. param {
  1750. lr_mult: 0.1
  1751. decay_mult: 0.1
  1752. }
  1753. convolution_param {
  1754. num_output: 1024
  1755. bias_term: false
  1756. kernel_size: 1
  1757. weight_filler {
  1758. type: "msra"
  1759. }
  1760. }
  1761. }
  1762. layer {
  1763. name: "conv13/bn"
  1764. type: "BatchNorm"
  1765. bottom: "conv13"
  1766. top: "conv13"
  1767. param {
  1768. lr_mult: 0
  1769. decay_mult: 0
  1770. }
  1771. param {
  1772. lr_mult: 0
  1773. decay_mult: 0
  1774. }
  1775. param {
  1776. lr_mult: 0
  1777. decay_mult: 0
  1778. }
  1779. }
  1780. layer {
  1781. name: "conv13/scale"
  1782. type: "Scale"
  1783. bottom: "conv13"
  1784. top: "conv13"
  1785. param {
  1786. lr_mult: 0.1
  1787. decay_mult: 0.0
  1788. }
  1789. param {
  1790. lr_mult: 0.2
  1791. decay_mult: 0.0
  1792. }
  1793. scale_param {
  1794. filler {
  1795. value: 1
  1796. }
  1797. bias_term: true
  1798. bias_filler {
  1799. value: 0
  1800. }
  1801. }
  1802. }
  1803. layer {
  1804. name: "conv13/relu"
  1805. type: "ReLU"
  1806. bottom: "conv13"
  1807. top: "conv13"
  1808. }
  1809. layer {
  1810. name: "conv14_1"
  1811. type: "Convolution"
  1812. bottom: "conv13"
  1813. top: "conv14_1"
  1814. param {
  1815. lr_mult: 0.1
  1816. decay_mult: 0.1
  1817. }
  1818. convolution_param {
  1819. num_output: 256
  1820. bias_term: false
  1821. kernel_size: 1
  1822. weight_filler {
  1823. type: "msra"
  1824. }
  1825. }
  1826. }
  1827. layer {
  1828. name: "conv14_1/bn"
  1829. type: "BatchNorm"
  1830. bottom: "conv14_1"
  1831. top: "conv14_1"
  1832. param {
  1833. lr_mult: 0
  1834. decay_mult: 0
  1835. }
  1836. param {
  1837. lr_mult: 0
  1838. decay_mult: 0
  1839. }
  1840. param {
  1841. lr_mult: 0
  1842. decay_mult: 0
  1843. }
  1844. }
  1845. layer {
  1846. name: "conv14_1/scale"
  1847. type: "Scale"
  1848. bottom: "conv14_1"
  1849. top: "conv14_1"
  1850. param {
  1851. lr_mult: 0.1
  1852. decay_mult: 0.0
  1853. }
  1854. param {
  1855. lr_mult: 0.2
  1856. decay_mult: 0.0
  1857. }
  1858. scale_param {
  1859. filler {
  1860. value: 1
  1861. }
  1862. bias_term: true
  1863. bias_filler {
  1864. value: 0
  1865. }
  1866. }
  1867. }
  1868. layer {
  1869. name: "conv14_1/relu"
  1870. type: "ReLU"
  1871. bottom: "conv14_1"
  1872. top: "conv14_1"
  1873. }
  1874. layer {
  1875. name: "conv14_2"
  1876. type: "Convolution"
  1877. bottom: "conv14_1"
  1878. top: "conv14_2"
  1879. param {
  1880. lr_mult: 0.1
  1881. decay_mult: 0.1
  1882. }
  1883. convolution_param {
  1884. num_output: 512
  1885. bias_term: false
  1886. pad: 1
  1887. kernel_size: 3
  1888. stride: 2
  1889. weight_filler {
  1890. type: "msra"
  1891. }
  1892. }
  1893. }
  1894. layer {
  1895. name: "conv14_2/bn"
  1896. type: "BatchNorm"
  1897. bottom: "conv14_2"
  1898. top: "conv14_2"
  1899. param {
  1900. lr_mult: 0
  1901. decay_mult: 0
  1902. }
  1903. param {
  1904. lr_mult: 0
  1905. decay_mult: 0
  1906. }
  1907. param {
  1908. lr_mult: 0
  1909. decay_mult: 0
  1910. }
  1911. }
  1912. layer {
  1913. name: "conv14_2/scale"
  1914. type: "Scale"
  1915. bottom: "conv14_2"
  1916. top: "conv14_2"
  1917. param {
  1918. lr_mult: 0.1
  1919. decay_mult: 0.0
  1920. }
  1921. param {
  1922. lr_mult: 0.2
  1923. decay_mult: 0.0
  1924. }
  1925. scale_param {
  1926. filler {
  1927. value: 1
  1928. }
  1929. bias_term: true
  1930. bias_filler {
  1931. value: 0
  1932. }
  1933. }
  1934. }
  1935. layer {
  1936. name: "conv14_2/relu"
  1937. type: "ReLU"
  1938. bottom: "conv14_2"
  1939. top: "conv14_2"
  1940. }
  1941. layer {
  1942. name: "conv15_1"
  1943. type: "Convolution"
  1944. bottom: "conv14_2"
  1945. top: "conv15_1"
  1946. param {
  1947. lr_mult: 0.1
  1948. decay_mult: 0.1
  1949. }
  1950. convolution_param {
  1951. num_output: 128
  1952. bias_term: false
  1953. kernel_size: 1
  1954. weight_filler {
  1955. type: "msra"
  1956. }
  1957. }
  1958. }
  1959. layer {
  1960. name: "conv15_1/bn"
  1961. type: "BatchNorm"
  1962. bottom: "conv15_1"
  1963. top: "conv15_1"
  1964. param {
  1965. lr_mult: 0
  1966. decay_mult: 0
  1967. }
  1968. param {
  1969. lr_mult: 0
  1970. decay_mult: 0
  1971. }
  1972. param {
  1973. lr_mult: 0
  1974. decay_mult: 0
  1975. }
  1976. }
  1977. layer {
  1978. name: "conv15_1/scale"
  1979. type: "Scale"
  1980. bottom: "conv15_1"
  1981. top: "conv15_1"
  1982. param {
  1983. lr_mult: 0.1
  1984. decay_mult: 0.0
  1985. }
  1986. param {
  1987. lr_mult: 0.2
  1988. decay_mult: 0.0
  1989. }
  1990. scale_param {
  1991. filler {
  1992. value: 1
  1993. }
  1994. bias_term: true
  1995. bias_filler {
  1996. value: 0
  1997. }
  1998. }
  1999. }
  2000. layer {
  2001. name: "conv15_1/relu"
  2002. type: "ReLU"
  2003. bottom: "conv15_1"
  2004. top: "conv15_1"
  2005. }
  2006. layer {
  2007. name: "conv15_2"
  2008. type: "Convolution"
  2009. bottom: "conv15_1"
  2010. top: "conv15_2"
  2011. param {
  2012. lr_mult: 0.1
  2013. decay_mult: 0.1
  2014. }
  2015. convolution_param {
  2016. num_output: 256
  2017. bias_term: false
  2018. pad: 1
  2019. kernel_size: 3
  2020. stride: 2
  2021. weight_filler {
  2022. type: "msra"
  2023. }
  2024. }
  2025. }
  2026. layer {
  2027. name: "conv15_2/bn"
  2028. type: "BatchNorm"
  2029. bottom: "conv15_2"
  2030. top: "conv15_2"
  2031. param {
  2032. lr_mult: 0
  2033. decay_mult: 0
  2034. }
  2035. param {
  2036. lr_mult: 0
  2037. decay_mult: 0
  2038. }
  2039. param {
  2040. lr_mult: 0
  2041. decay_mult: 0
  2042. }
  2043. }
  2044. layer {
  2045. name: "conv15_2/scale"
  2046. type: "Scale"
  2047. bottom: "conv15_2"
  2048. top: "conv15_2"
  2049. param {
  2050. lr_mult: 0.1
  2051. decay_mult: 0.0
  2052. }
  2053. param {
  2054. lr_mult: 0.2
  2055. decay_mult: 0.0
  2056. }
  2057. scale_param {
  2058. filler {
  2059. value: 1
  2060. }
  2061. bias_term: true
  2062. bias_filler {
  2063. value: 0
  2064. }
  2065. }
  2066. }
  2067. layer {
  2068. name: "conv15_2/relu"
  2069. type: "ReLU"
  2070. bottom: "conv15_2"
  2071. top: "conv15_2"
  2072. }
  2073. layer {
  2074. name: "conv16_1"
  2075. type: "Convolution"
  2076. bottom: "conv15_2"
  2077. top: "conv16_1"
  2078. param {
  2079. lr_mult: 0.1
  2080. decay_mult: 0.1
  2081. }
  2082. convolution_param {
  2083. num_output: 128
  2084. bias_term: false
  2085. kernel_size: 1
  2086. weight_filler {
  2087. type: "msra"
  2088. }
  2089. }
  2090. }
  2091. layer {
  2092. name: "conv16_1/bn"
  2093. type: "BatchNorm"
  2094. bottom: "conv16_1"
  2095. top: "conv16_1"
  2096. param {
  2097. lr_mult: 0
  2098. decay_mult: 0
  2099. }
  2100. param {
  2101. lr_mult: 0
  2102. decay_mult: 0
  2103. }
  2104. param {
  2105. lr_mult: 0
  2106. decay_mult: 0
  2107. }
  2108. }
  2109. layer {
  2110. name: "conv16_1/scale"
  2111. type: "Scale"
  2112. bottom: "conv16_1"
  2113. top: "conv16_1"
  2114. param {
  2115. lr_mult: 0.1
  2116. decay_mult: 0.0
  2117. }
  2118. param {
  2119. lr_mult: 0.2
  2120. decay_mult: 0.0
  2121. }
  2122. scale_param {
  2123. filler {
  2124. value: 1
  2125. }
  2126. bias_term: true
  2127. bias_filler {
  2128. value: 0
  2129. }
  2130. }
  2131. }
  2132. layer {
  2133. name: "conv16_1/relu"
  2134. type: "ReLU"
  2135. bottom: "conv16_1"
  2136. top: "conv16_1"
  2137. }
  2138. layer {
  2139. name: "conv16_2"
  2140. type: "Convolution"
  2141. bottom: "conv16_1"
  2142. top: "conv16_2"
  2143. param {
  2144. lr_mult: 0.1
  2145. decay_mult: 0.1
  2146. }
  2147. convolution_param {
  2148. num_output: 256
  2149. bias_term: false
  2150. pad: 1
  2151. kernel_size: 3
  2152. stride: 2
  2153. weight_filler {
  2154. type: "msra"
  2155. }
  2156. }
  2157. }
  2158. layer {
  2159. name: "conv16_2/bn"
  2160. type: "BatchNorm"
  2161. bottom: "conv16_2"
  2162. top: "conv16_2"
  2163. param {
  2164. lr_mult: 0
  2165. decay_mult: 0
  2166. }
  2167. param {
  2168. lr_mult: 0
  2169. decay_mult: 0
  2170. }
  2171. param {
  2172. lr_mult: 0
  2173. decay_mult: 0
  2174. }
  2175. }
  2176. layer {
  2177. name: "conv16_2/scale"
  2178. type: "Scale"
  2179. bottom: "conv16_2"
  2180. top: "conv16_2"
  2181. param {
  2182. lr_mult: 0.1
  2183. decay_mult: 0.0
  2184. }
  2185. param {
  2186. lr_mult: 0.2
  2187. decay_mult: 0.0
  2188. }
  2189. scale_param {
  2190. filler {
  2191. value: 1
  2192. }
  2193. bias_term: true
  2194. bias_filler {
  2195. value: 0
  2196. }
  2197. }
  2198. }
  2199. layer {
  2200. name: "conv16_2/relu"
  2201. type: "ReLU"
  2202. bottom: "conv16_2"
  2203. top: "conv16_2"
  2204. }
  2205. layer {
  2206. name: "conv17_1"
  2207. type: "Convolution"
  2208. bottom: "conv16_2"
  2209. top: "conv17_1"
  2210. param {
  2211. lr_mult: 0.1
  2212. decay_mult: 0.1
  2213. }
  2214. convolution_param {
  2215. num_output: 64
  2216. bias_term: false
  2217. kernel_size: 1
  2218. weight_filler {
  2219. type: "msra"
  2220. }
  2221. }
  2222. }
  2223. layer {
  2224. name: "conv17_1/bn"
  2225. type: "BatchNorm"
  2226. bottom: "conv17_1"
  2227. top: "conv17_1"
  2228. param {
  2229. lr_mult: 0
  2230. decay_mult: 0
  2231. }
  2232. param {
  2233. lr_mult: 0
  2234. decay_mult: 0
  2235. }
  2236. param {
  2237. lr_mult: 0
  2238. decay_mult: 0
  2239. }
  2240. }
  2241. layer {
  2242. name: "conv17_1/scale"
  2243. type: "Scale"
  2244. bottom: "conv17_1"
  2245. top: "conv17_1"
  2246. param {
  2247. lr_mult: 0.1
  2248. decay_mult: 0.0
  2249. }
  2250. param {
  2251. lr_mult: 0.2
  2252. decay_mult: 0.0
  2253. }
  2254. scale_param {
  2255. filler {
  2256. value: 1
  2257. }
  2258. bias_term: true
  2259. bias_filler {
  2260. value: 0
  2261. }
  2262. }
  2263. }
  2264. layer {
  2265. name: "conv17_1/relu"
  2266. type: "ReLU"
  2267. bottom: "conv17_1"
  2268. top: "conv17_1"
  2269. }
  2270. layer {
  2271. name: "conv17_2"
  2272. type: "Convolution"
  2273. bottom: "conv17_1"
  2274. top: "conv17_2"
  2275. param {
  2276. lr_mult: 0.1
  2277. decay_mult: 0.1
  2278. }
  2279. convolution_param {
  2280. num_output: 128
  2281. bias_term: false
  2282. pad: 1
  2283. kernel_size: 3
  2284. stride: 2
  2285. weight_filler {
  2286. type: "msra"
  2287. }
  2288. }
  2289. }
  2290. layer {
  2291. name: "conv17_2/bn"
  2292. type: "BatchNorm"
  2293. bottom: "conv17_2"
  2294. top: "conv17_2"
  2295. param {
  2296. lr_mult: 0
  2297. decay_mult: 0
  2298. }
  2299. param {
  2300. lr_mult: 0
  2301. decay_mult: 0
  2302. }
  2303. param {
  2304. lr_mult: 0
  2305. decay_mult: 0
  2306. }
  2307. }
  2308. layer {
  2309. name: "conv17_2/scale"
  2310. type: "Scale"
  2311. bottom: "conv17_2"
  2312. top: "conv17_2"
  2313. param {
  2314. lr_mult: 0.1
  2315. decay_mult: 0.0
  2316. }
  2317. param {
  2318. lr_mult: 0.2
  2319. decay_mult: 0.0
  2320. }
  2321. scale_param {
  2322. filler {
  2323. value: 1
  2324. }
  2325. bias_term: true
  2326. bias_filler {
  2327. value: 0
  2328. }
  2329. }
  2330. }
  2331. layer {
  2332. name: "conv17_2/relu"
  2333. type: "ReLU"
  2334. bottom: "conv17_2"
  2335. top: "conv17_2"
  2336. }
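# conv14_1 through conv17_2 are the extra SSD feature layers appended to the
# MobileNet backbone. Their outputs, together with conv11 and conv13, feed the
# multi-scale detection heads (mbox_loc / mbox_conf / mbox_priorbox) that follow.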
layer {
  name: "conv11_mbox_loc"
  type: "Convolution"
  bottom: "conv11"
  top: "conv11_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 12
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv11_mbox_loc_perm"
  type: "Permute"
  bottom: "conv11_mbox_loc"
  top: "conv11_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv11_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv11_mbox_loc_perm"
  top: "conv11_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv11_mbox_conf"
  type: "Convolution"
  bottom: "conv11"
  top: "conv11_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 63
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv11_mbox_conf_perm"
  type: "Permute"
  bottom: "conv11_mbox_conf"
  top: "conv11_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv11_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv11_mbox_conf_perm"
  top: "conv11_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv11_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv11"
  bottom: "data"
  top: "conv11_mbox_priorbox"
  prior_box_param {
    min_size: 60.0
    aspect_ratio: 2.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
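# Added annotation: the conv11 branch uses min_size only with aspect_ratio 2.0 and
# flip, i.e. 3 priors per location, which matches num_output 12 (= 3 * 4 offsets)
# for conv11_mbox_loc and 63 (= 3 * 21 classes) for conv11_mbox_conf.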
layer {
  name: "conv13_mbox_loc"
  type: "Convolution"
  bottom: "conv13"
  top: "conv13_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv13_mbox_loc_perm"
  type: "Permute"
  bottom: "conv13_mbox_loc"
  top: "conv13_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv13_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv13_mbox_loc_perm"
  top: "conv13_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv13_mbox_conf"
  type: "Convolution"
  bottom: "conv13"
  top: "conv13_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv13_mbox_conf_perm"
  type: "Permute"
  bottom: "conv13_mbox_conf"
  top: "conv13_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv13_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv13_mbox_conf_perm"
  top: "conv13_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv13_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv13"
  bottom: "data"
  top: "conv13_mbox_priorbox"
  prior_box_param {
    min_size: 105.0
    max_size: 150.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
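# Added annotation: conv13 adds a max_size prior and aspect ratios 2.0/3.0 with
# flip, i.e. 6 priors per location: num_output 24 (= 6 * 4) and 126 (= 6 * 21).
# The conv14_2, conv15_2, conv16_2 and conv17_2 branches below repeat the same
# 6-prior pattern, only with progressively larger min_size/max_size values.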
layer {
  name: "conv14_2_mbox_loc"
  type: "Convolution"
  bottom: "conv14_2"
  top: "conv14_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv14_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv14_2_mbox_loc"
  top: "conv14_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv14_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv14_2_mbox_loc_perm"
  top: "conv14_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv14_2_mbox_conf"
  type: "Convolution"
  bottom: "conv14_2"
  top: "conv14_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv14_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv14_2_mbox_conf"
  top: "conv14_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv14_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv14_2_mbox_conf_perm"
  top: "conv14_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv14_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv14_2"
  bottom: "data"
  top: "conv14_2_mbox_priorbox"
  prior_box_param {
    min_size: 150.0
    max_size: 195.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv15_2_mbox_loc"
  type: "Convolution"
  bottom: "conv15_2"
  top: "conv15_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv15_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv15_2_mbox_loc"
  top: "conv15_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv15_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv15_2_mbox_loc_perm"
  top: "conv15_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv15_2_mbox_conf"
  type: "Convolution"
  bottom: "conv15_2"
  top: "conv15_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv15_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv15_2_mbox_conf"
  top: "conv15_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv15_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv15_2_mbox_conf_perm"
  top: "conv15_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv15_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv15_2"
  bottom: "data"
  top: "conv15_2_mbox_priorbox"
  prior_box_param {
    min_size: 195.0
    max_size: 240.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv16_2_mbox_loc"
  type: "Convolution"
  bottom: "conv16_2"
  top: "conv16_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv16_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv16_2_mbox_loc"
  top: "conv16_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv16_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv16_2_mbox_loc_perm"
  top: "conv16_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv16_2_mbox_conf"
  type: "Convolution"
  bottom: "conv16_2"
  top: "conv16_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv16_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv16_2_mbox_conf"
  top: "conv16_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv16_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv16_2_mbox_conf_perm"
  top: "conv16_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv16_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv16_2"
  bottom: "data"
  top: "conv16_2_mbox_priorbox"
  prior_box_param {
    min_size: 240.0
    max_size: 285.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv17_2_mbox_loc"
  type: "Convolution"
  bottom: "conv17_2"
  top: "conv17_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv17_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv17_2_mbox_loc"
  top: "conv17_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv17_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv17_2_mbox_loc_perm"
  top: "conv17_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv17_2_mbox_conf"
  type: "Convolution"
  bottom: "conv17_2"
  top: "conv17_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv17_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv17_2_mbox_conf"
  top: "conv17_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv17_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv17_2_mbox_conf_perm"
  top: "conv17_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv17_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv17_2"
  bottom: "data"
  top: "conv17_2_mbox_priorbox"
  prior_box_param {
    min_size: 285.0
    max_size: 300.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
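# Added annotation: the per-scale loc, conf and priorbox outputs are concatenated
# below into single mbox_loc / mbox_conf / mbox_priorbox blobs before decoding.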
layer {
  name: "mbox_loc"
  type: "Concat"
  bottom: "conv11_mbox_loc_flat"
  bottom: "conv13_mbox_loc_flat"
  bottom: "conv14_2_mbox_loc_flat"
  bottom: "conv15_2_mbox_loc_flat"
  bottom: "conv16_2_mbox_loc_flat"
  bottom: "conv17_2_mbox_loc_flat"
  top: "mbox_loc"
  concat_param {
    axis: 1
  }
}
layer {
  name: "mbox_conf"
  type: "Concat"
  bottom: "conv11_mbox_conf_flat"
  bottom: "conv13_mbox_conf_flat"
  bottom: "conv14_2_mbox_conf_flat"
  bottom: "conv15_2_mbox_conf_flat"
  bottom: "conv16_2_mbox_conf_flat"
  bottom: "conv17_2_mbox_conf_flat"
  top: "mbox_conf"
  concat_param {
    axis: 1
  }
}
layer {
  name: "mbox_priorbox"
  type: "Concat"
  bottom: "conv11_mbox_priorbox"
  bottom: "conv13_mbox_priorbox"
  bottom: "conv14_2_mbox_priorbox"
  bottom: "conv15_2_mbox_priorbox"
  bottom: "conv16_2_mbox_priorbox"
  bottom: "conv17_2_mbox_priorbox"
  top: "mbox_priorbox"
  concat_param {
    axis: 2
  }
}
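# Added annotation: mbox_conf is reshaped to [batch, num_priors, 21] so that the
# Softmax over axis 2 normalizes the 21 class scores of each prior independently;
# the result is flattened again into the layout expected by DetectionOutput.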
layer {
  name: "mbox_conf_reshape"
  type: "Reshape"
  bottom: "mbox_conf"
  top: "mbox_conf_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 21
    }
  }
}
layer {
  name: "mbox_conf_softmax"
  type: "Softmax"
  bottom: "mbox_conf_reshape"
  top: "mbox_conf_softmax"
  softmax_param {
    axis: 2
  }
}
layer {
  name: "mbox_conf_flatten"
  type: "Flatten"
  bottom: "mbox_conf_softmax"
  top: "mbox_conf_flatten"
  flatten_param {
    axis: 1
  }
}
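# Added annotation: DetectionOutput decodes the CENTER_SIZE-encoded offsets against
# the priors, discards boxes below confidence_threshold 0.25, applies NMS with
# threshold 0.45 and keeps at most keep_top_k = 100 detections per image.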
layer {
  name: "detection_out"
  type: "DetectionOutput"
  bottom: "mbox_loc"
  bottom: "mbox_conf_flatten"
  bottom: "mbox_priorbox"
  top: "detection_out"
  include {
    phase: TEST
  }
  detection_output_param {
    num_classes: 21
    share_location: true
    background_label_id: 0
    nms_param {
      nms_threshold: 0.45
      top_k: 100
    }
    code_type: CENTER_SIZE
    keep_top_k: 100
    confidence_threshold: 0.25
  }
}
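Usage sketch (added, not part of the prototxt): one way to run this deploy network is through OpenCV's DNN module. The weight file name, the test image, the 300x300 input size and the preprocessing constants are assumptions for illustration only; check them against the artifacts actually shipped in this repository.

# usage_sketch.py -- minimal, hedged example of running the deploy net with OpenCV
import cv2
import numpy as np

# Assumed file names -- replace with the real ones from this repository.
PROTOTXT = "deploy_upperbody.prototxt"
CAFFEMODEL = "deploy_upperbody.caffemodel"  # hypothetical weights file

net = cv2.dnn.readNetFromCaffe(PROTOTXT, CAFFEMODEL)

image = cv2.imread("test.jpg")  # hypothetical input image
h, w = image.shape[:2]

# 300x300 input with 1/127.5 scaling and 127.5 mean is typical for MobileNet-SSD
# style deploys; verify against the training settings before relying on it.
blob = cv2.dnn.blobFromImage(image, scalefactor=0.007843, size=(300, 300),
                             mean=(127.5, 127.5, 127.5))
net.setInput(blob)
detections = net.forward()  # shape [1, 1, N, 7]: (image_id, label, conf, x1, y1, x2, y2)

for i in range(detections.shape[2]):
    confidence = float(detections[0, 0, i, 2])
    if confidence > 0.25:  # matches confidence_threshold in detection_out
        label = int(detections[0, 0, i, 1])
        x1, y1, x2, y2 = (detections[0, 0, i, 3:7] * np.array([w, h, w, h])).astype(int)
        cv2.rectangle(image, (x1, y1), (x2, y2), (0, 255, 0), 2)
        print(f"class {label}: {confidence:.2f} at ({x1}, {y1}, {x2}, {y2})")

cv2.imwrite("out.jpg", image)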