
This post is a summary of BPR (Bayesian Personalized Ranking) together with an implementation.

Key 1.

BPR does not simply predict a score for each item; it learns the difference between observed and unobserved items, i.e., a pairwise ranking between the items a user interacted with and the ones they did not!
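For reference, the pairwise criterion from the original paper (Rendle et al., 2009) maximizes the log-likelihood of exactly these differences over the training triples $D_S$, where $\hat{x}_{ui}$ is the predicted score of user $u$ for item $i$:

$$\text{BPR-OPT} = \sum_{(u,i,j)\in D_S} \ln \sigma(\hat{x}_{uij}) - \lambda_\Theta \lVert\Theta\rVert^2, \qquad \hat{x}_{uij} = \hat{x}_{ui} - \hat{x}_{uj}$$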

Key 2.

From a Bayesian point of view, if the prior over the parameters is defined as a normal distribution, the L2 norm falls out naturally; the regularization term has a mathematical justification.
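A one-line sketch of why: under MAP estimation, a zero-mean Gaussian prior on the parameters adds exactly a squared-L2 penalty to the log-likelihood:

$$\ln p(\Theta \mid >_u) = \ln p(>_u \mid \Theta) + \ln p(\Theta) + \text{const}, \qquad p(\Theta) = \mathcal{N}(0, \sigma^2 I) \;\Rightarrow\; \ln p(\Theta) = -\frac{1}{2\sigma^2}\lVert\Theta\rVert^2 + \text{const}$$

so the regularization weight plays the role of $\lambda_\Theta = 1/(2\sigma^2)$.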

From here, the problem turns into how to obtain the latent representations (embeddings): how to initialize them and how to learn them.
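In the implementation below, the representations are the rows of two embedding matrices (matrix factorization), so the score is simply a dot product:

$$\hat{x}_{ui} = \langle w_u, h_i \rangle, \qquad \Theta = (W, H)$$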

Implementation

  • Model
import torch
import torch.nn as nn


class BPR(nn.Module):
    def __init__(self, user_num, item_num, embedding_dim, regularization):
        super(BPR, self).__init__()
        self.regularization = regularization

        self.W = nn.Embedding(user_num, embedding_dim)
        nn.init.xavier_normal_(self.W.weight)
        self.H = nn.Embedding(item_num, embedding_dim)
        nn.init.xavier_normal_(self.H.weight)

        self.log_sig = nn.LogSigmoid()

    def get_r_hat(self):
        # Full predicted score matrix \hat{R} = W H^T, used for evaluation / recommendation.
        user_vec = self.W.weight.clone()
        item_vec = self.H.weight.clone()
        r_hat = user_vec @ item_vec.T
        return r_hat

    def forward(self, data):
        # data: (batch_size, 3) LongTensor of (user, positive item, negative item) triples
        user_id = data[:, 0]
        positive_id = data[:, 1]
        negative_id = data[:, 2]

        user_vec = self.W(user_id)
        positive_vec = self.H(positive_id)
        negative_vec = self.H(negative_id)

        # \hat{x}_{ui}, \hat{x}_{uj}: dot-product scores; \hat{x}_{uij} is their difference
        x_ui_hat = torch.sum(user_vec * positive_vec, dim=1)
        x_uj_hat = torch.sum(user_vec * negative_vec, dim=1)
        x_uij_hat = x_ui_hat - x_uj_hat

        # negative log-likelihood of the pairwise preferences: -sum ln sigma(\hat{x}_{uij})
        bpr = - self.log_sig(x_uij_hat).sum()
        regularization = self.regularization['user'] * (torch.norm(user_vec, dim=1) ** 2).sum() \
                         + self.regularization['positive'] * (torch.norm(positive_vec, dim=1) ** 2).sum() \
                         + self.regularization['negative'] * (torch.norm(negative_vec, dim=1) ** 2).sum()
        loss = bpr + regularization
        return loss
  • Dataset preprocessing (a training sketch that ties the two pieces together follows the code)
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset


class CustomDataset(Dataset):
    def __init__(self, data: np.ndarray, item_num, device, negative_samples, target_transform=None, transform=None):
        # I: the full item set
        total_item = np.arange(item_num)

        # D_S: (user, positive item, negative item) training triples
        uij = []

        for user in np.unique(data[:, 0]):
            user_ui = data[data[:, 0] == user]

            # I_{u}^{+}: items the user interacted with positively
            positive_data = user_ui[user_ui[:, 2] == 1, 1]

            # I \ I_{u}^{+}: sample negatives from the items the user has not interacted with
            negative_data = np.setdiff1d(total_item, positive_data)
            negative_data = negative_data[np.random.choice(negative_data.shape[0], negative_samples, replace=False)]

            user_uij = np.zeros((len(positive_data)*len(negative_data), 3))
            user_uij[:, 0] = user
            user_uij[:, 1] = np.repeat(positive_data, len(negative_data))
            user_uij[:, 2] = np.tile(negative_data, len(positive_data))
            uij.extend(user_uij)
        uij = np.array(uij)

        self.data = torch.LongTensor(uij).to(device)
        self.targets = torch.LongTensor(uij[:, 2]).to(device)
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        return self.targets.size()[0]

    def __getitem__(self, idx):
        triple = self.data[idx]    # (user, positive item, negative item)
        label = self.targets[idx]  # negative item id

        if self.transform:
            triple = self.transform(triple)
        if self.target_transform:
            label = self.target_transform(label)
        return triple, label


class DataLoad:
    def __init__(self, user_num, item_num, is_train=True):
        super(DataLoad, self).__init__()
        self.is_train = is_train
        self.user_num = user_num
        self.item_num = item_num

    def load_data(self, fold):
        # MovieLens 100K cross-validation split: u{fold}.base for training, u{fold}.test for testing
        if self.is_train:
            data_type = 'base'
        else:
            data_type = 'test'

        raw_data = pd.read_csv(f'data/ml-100k/u{fold}.{data_type}', sep='\t',
                               names=['user_id', 'item_id', 'rating', 'timestamp'],
                               engine='python')

        # shift ids to be 0-indexed; treat 5-star ratings as positive feedback (label 1), the rest as -1
        raw_data[['user_id', 'item_id']] -= 1
        raw_data['label'] = np.where(raw_data['rating'] == 5, 1, -1)
        raw_data.drop(['timestamp', 'rating'], axis=1, inplace=True)

        return raw_data.values
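Finally, a minimal sketch of how the pieces above might be wired together. The hyperparameters (embedding_dim=32, the regularization weights, learning rate, batch size, negative_samples=5, 20 epochs) and the SGD optimizer are illustrative assumptions, not values from this post; MovieLens 100K has 943 users and 1,682 items.

import torch
from torch.utils.data import DataLoader

device = 'cuda' if torch.cuda.is_available() else 'cpu'
user_num, item_num = 943, 1682  # MovieLens 100K

# columns of train_raw: user_id, item_id, label
train_raw = DataLoad(user_num, item_num, is_train=True).load_data(fold=1)
train_set = CustomDataset(train_raw, item_num, device, negative_samples=5)
train_loader = DataLoader(train_set, batch_size=1024, shuffle=True)

model = BPR(user_num, item_num, embedding_dim=32,
            regularization={'user': 0.01, 'positive': 0.01, 'negative': 0.01}).to(device)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

for epoch in range(20):
    epoch_loss = 0.0
    for triples, _ in train_loader:
        optimizer.zero_grad()
        loss = model(triples)  # forward() already returns BPR loss + L2 penalty
        loss.backward()
        optimizer.step()
        epoch_loss += loss.item()
    print(f'epoch {epoch}: loss {epoch_loss:.2f}')

# Full user x item score matrix for top-N recommendation.
with torch.no_grad():
    r_hat = model.get_r_hat()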
