Hi, I am using Keras to implement this paper, but I am not sure whether my code has any problems.
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 5 10:26:49 2018
@author: dolphin
"""
# FIX: the future-imports must name the dunder module `__future__`;
# `from future import ...` raises ImportError (markdown ate the underscores).
from __future__ import absolute_import
from __future__ import print_function

import keras
from keras import backend as K
import tensorflow as tf
#define slice function
def slice_f(x, c1, c2):
    """Return the feature-axis slice ``x[..., c1:c2]``.

    Used by the divide-and-encode module to split the final feature
    vector into groups of channels.

    FIX: the original returned ``x[c1:c2, :]``, which slices the *batch*
    axis — with c1/c2 running up to 1200 that indexes samples, not
    features. Divide-and-encode must slice the last (channel) axis;
    ``...`` makes this work for both 4-D (batch, h, w, c) and 2-D
    (batch, c) tensors.
    """
    return x[..., c1:c2]
#define a slice layer using Lamda layer
def slice_hash_layer(input_x, arguments):
    """One divide-and-encode branch: slice a channel group, hash to 1 bit.

    Parameters
    ----------
    input_x : Keras tensor
        The pooled feature tensor to slice.
    arguments : dict
        Keyword arguments for ``slice_f`` — expects keys ``c1`` and ``c2``
        (slice bounds), forwarded via the Lambda layer's ``arguments``.

    Returns
    -------
    Keras tensor of one sigmoid unit (one hash bit in [0, 1]).

    FIX: the parameter was misspelled ``arguemnts``; the only call site
    passes it positionally, so renaming is backward-compatible.
    """
    slice_s = keras.layers.Lambda(slice_f, arguments=arguments)(input_x)
    hash_s = keras.layers.Dense(
        1,
        activation='sigmoid',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.1),
    )(slice_s)
    return hash_s
def deep_hash_model_():
    """Build the deep hashing network (24-bit divide-and-encode head).

    Input: (224, 224, 3) images. Output: (batch, 24) sigmoid hash bits.

    Returns
    -------
    keras.models.Model
    """
    inputX = keras.Input(shape=(224, 224, 3))
    # conv1
    conv1_1 = keras.layers.Conv2D(
        96, kernel_size=(11, 11), strides=(4, 4), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.01))(inputX)
    # FIX: 'striders' was a typo — Conv2D raises TypeError on unknown
    # keyword arguments; the intended keyword is 'strides'.
    conv1_2 = keras.layers.Conv2D(
        96, kernel_size=(1, 1), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.05))(conv1_1)
    pool1 = keras.layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='valid')(conv1_2)
    pool1 = keras.layers.Dropout(0.5)(pool1)
    # conv2
    conv2_1 = keras.layers.Conv2D(
        256, kernel_size=(5, 5), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.01),
        padding='same')(pool1)
    conv2_2 = keras.layers.Conv2D(
        256, kernel_size=(1, 1), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.05),
        padding='same')(conv2_1)
    pool2 = keras.layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='valid')(conv2_2)
    # conv3
    conv3_1 = keras.layers.Conv2D(
        384, kernel_size=(3, 3), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.01),
        padding='valid')(pool2)
    conv3_2 = keras.layers.Conv2D(
        384, kernel_size=(1, 1), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.05),
        padding='valid')(conv3_1)
    pool3 = keras.layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='valid')(conv3_2)
    # conv4
    conv4_1 = keras.layers.Conv2D(
        1024, kernel_size=(3, 3), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.01),
        padding='valid')(pool3)
    conv4_2 = keras.layers.Conv2D(
        1200, kernel_size=(1, 1), strides=(1, 1), activation='relu',
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.05),
        padding='valid')(conv4_1)
    # FIX: AvgPool2D has no 'kernel_size' argument (it is 'pool_size'), so the
    # original raised TypeError. The intent — average over the whole final
    # feature map — is global average pooling, which also avoids the spatial
    # mismatch: with these valid-padded layers a 224x224 input yields a map
    # smaller than 6x6, so AvgPool2D(pool_size=(6, 6)) could not be applied.
    pool4 = keras.layers.GlobalAveragePooling2D()(conv4_2)  # -> (batch, 1200)
    # divide and encode module: split the 1200 features into 24 groups of
    # 50 channels, each encoded to one sigmoid hash bit (24-bit code).
    slice_hash = []
    for i in range(24):
        arguments = {'c1': i * 50, 'c2': (i + 1) * 50}
        slice_hash.append(slice_hash_layer(pool4, arguments))
    merge_one = keras.layers.concatenate(slice_hash)
    final_model = keras.models.Model(inputs=inputX, outputs=merge_one)
    return final_model
# Build the 24-bit hashing model once at import time.
deep_hash_model = deep_hash_model_()
# Triplet loss: each line of the training text file names a triple of
# images (query_image, positive_image, negative_image), so every batch is
# laid out as consecutive (query, positive, negative) rows.
# batch_size must be a multiple of 3: 24 rows -> 8 triplets.
batch_size=24
def triplt_loss(y_true, y_pred):
    """Triplet ranking (hinge) loss over a batch of embeddings.

    Assumes ``y_pred`` rows are laid out as consecutive
    (query, positive, negative) triples — see ``batch_size`` above.
    ``y_true`` is ignored; it exists only to satisfy the Keras loss
    signature ``f(y_true, y_pred)``.

    Returns the mean over the batch's triplets of
    ``max(margin + D(q, p) - D(q, n), 0)`` with Euclidean distance D
    and margin 1.0 — identical math to the original Python loop, but
    vectorized with strided slices (one graph op per term instead of
    one sub-graph per triplet; also drops the dead ``loss = 0`` init).
    """
    # Every 3rd row starting at 0/1/2 -> all queries/positives/negatives.
    q = y_pred[0::3]
    p = y_pred[1::3]
    n = y_pred[2::3]
    # Per-triplet Euclidean distances over the embedding axis.
    # NOTE(review): K.sqrt has an unbounded gradient at 0 — if training
    # yields NaNs, add a small epsilon inside the sqrt.
    d_q_p = K.sqrt(K.sum(K.square(q - p), axis=-1))
    d_q_n = K.sqrt(K.sum(K.square(q - n), axis=-1))
    margin = 1.0  # required gap between negative and positive distances
    # Hinge: zero loss once the negative is at least `margin` farther
    # than the positive; mean over the batch_size/3 triplets.
    return K.mean(K.maximum(margin + d_q_p - d_q_n, 0.0))