from sklearn.preprocessing import OneHotEncoder, StandardScaler
from sklearn.model_selection import train_test_split
from tensorflow.keras.layers import Dense ,Concatenate
#import keras
import keras.backend as K
from tensorflow import keras
from tensorflow.keras.layers import concatenate
from tensorflow.keras import Sequential,Model
from tensorflow.keras.layers import Dense ,Concatenate,Input,BatchNormalization
# Per-branch input feature widths — presumably the column counts of the four
# feature matrices fed to build_model (dense, hero, gift-bag, recent-behavior);
# confirm against the upstream preprocessing.
dense_f, hero_f, giftbag_f, recent_f = 78, 26, 227, 1627
def build_model(dense_f, hero_f, giftbag_f, recent_f, n_classes=128):
    """Build and compile a four-input Keras model with a joint loss.

    The gift-bag and recent-behavior branches are each compressed through a
    Dense(256) layer before all four branches are concatenated and passed
    through a Dense(256)->128->64 tower with batch normalization.  The output
    layer has ``n_classes + 1`` units: the first ``n_classes`` are treated as
    class scores and the last unit as an extra regression target inside the
    custom loss.

    Args:
        dense_f: width of the dense-feature input.
        hero_f: width of the hero-feature input.
        giftbag_f: width of the gift-bag input.
        recent_f: width of the recent-behavior input.
        n_classes: number of classification outputs.  Defaults to 128, which
            the original code hard-coded in the loss slices while reading an
            (undefined-in-view) global ``n_classes`` for the layer width —
            TODO confirm 128 matches the label encoding.

    Returns:
        A compiled ``keras.Model`` taking
        ``[dense, hero, giftbag, recent]`` inputs.
    """
    dense_inputs = Input(shape=(dense_f,))
    hero_inputs = Input(shape=(hero_f,))
    gift_inputs = Input(shape=(giftbag_f,))
    recent_inputs = Input(shape=(recent_f,))

    # Compress the two wide, presumably sparse branches before merging.
    gift_dense = Dense(256, activation='relu')(gift_inputs)
    recent_dense = Dense(256, activation='relu')(recent_inputs)
    recent_dense = BatchNormalization()(recent_dense)

    merge_inputs = concatenate(
        [dense_inputs, hero_inputs, gift_dense, recent_dense], axis=1)

    flow_dense = Dense(256, activation='relu')(merge_inputs)
    flow_dense = BatchNormalization()(flow_dense)
    flow_dense = Dense(128, activation='relu')(flow_dense)
    flow_dense = BatchNormalization()(flow_dense)
    flow_dense = Dense(64, activation='relu')(flow_dense)
    flow_dense = BatchNormalization()(flow_dense)

    # NOTE(review): softmax is applied across all n_classes + 1 units, so the
    # "regression" unit is normalized together with the class scores — this
    # reproduces the original architecture; confirm it is intentional.
    outputs = Dense(n_classes + 1, activation='softmax')(flow_dense)

    def mycrossentropy(y_true, y_pred, e=0.3):
        """Weighted sum of class cross-entropy and MSE on the extra unit."""
        pred_cls = y_pred[:, :n_classes]
        pred_reg = y_pred[:, n_classes:]
        true_cls = y_true[:, :n_classes]
        true_reg = y_true[:, n_classes:n_classes + 1]
        # BUG FIX: K.categorical_crossentropy takes (target, output); the
        # original passed (output, target).
        loss1 = K.categorical_crossentropy(true_cls, pred_cls)
        loss2 = K.mean(K.square(pred_reg - true_reg), axis=-1)
        # BUG FIX: the two losses are combined by a weighted SUM; the original
        # multiplied them, which collapses the (1-e)/e weighting into a
        # constant factor.
        return (1 - e) * loss1 + e * loss2

    model = keras.Model(
        inputs=[dense_inputs, hero_inputs, gift_inputs, recent_inputs],
        outputs=outputs)
    model.compile(optimizer=keras.optimizers.Adam(), loss=mycrossentropy)
    return model
# Build the model from the actual column counts of the four feature matrices.
# dfdense, hero_zo, gift_tensor, recentt1 are defined elsewhere in the file —
# presumably the dense, hero, gift-bag, and recent-behavior feature matrices;
# verify against the preprocessing code above.
model = build_model(dfdense.shape[1], hero_zo.shape[1], gift_tensor.shape[1], recentt1.shape[1])
# Train on the four inputs.  yyp presumably packs the one-hot class targets
# plus the extra regression column expected by mycrossentropy (width 129) —
# TODO confirm its construction.
model.fit([dfdense, hero_zo, gift_tensor, recentt1],yyp,
epochs=100, verbose=1, batch_size=64)