import tensorflow as tf

def softmax_loss(vectors):
    # Unpack anchor, positive, and negative embeddings.
    anc, pos, neg = vectors
    # Anchor-positive similarity, shape (batch, 1).
    pos_sim = tf.reduce_sum(anc * pos, axis=-1, keepdims=True)
    # Anchor-negative similarities, shape (batch, num_neg).
    neg_mul = tf.matmul(anc, neg, transpose_b=True)
    # Log-sum-exp over the negative similarities, shape (batch, 1).
    neg_sim = tf.math.log(tf.reduce_sum(tf.exp(neg_mul), axis=-1, keepdims=True))
    # Hinge: penalize when the aggregated negative similarity exceeds the positive one.
    loss = tf.nn.relu(neg_sim - pos_sim)
    return loss
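
# Minimal usage sketch (illustrative assumption, not part of the original):
# dummy L2-normalized embeddings of shape (batch, dim) stand in for the
# anchor/positive/negative encoder outputs; names and shapes are hypothetical,
# and TF 2.x eager execution is assumed.
if __name__ == "__main__":
    batch, dim = 4, 8
    anchor_emb = tf.math.l2_normalize(tf.random.normal((batch, dim)), axis=-1)
    positive_emb = tf.math.l2_normalize(tf.random.normal((batch, dim)), axis=-1)
    negative_emb = tf.math.l2_normalize(tf.random.normal((batch, dim)), axis=-1)
    per_example = softmax_loss((anchor_emb, positive_emb, negative_emb))
    print(per_example.shape)  # (4, 1): one loss value per anchor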