import torch
from torch import nn
from transformers import AutoModel, PreTrainedModel, PretrainedConfig


class MultiTaskGraphCodeBERTConfig(PretrainedConfig):
    """Config for the multi-task model; num_cwe_classes sizes the CWE-type head."""

    model_type = "multi_task_graphcodebert"

    def __init__(self, num_cwe_classes=12, **kwargs):
        super().__init__(**kwargs)
        self.num_cwe_classes = num_cwe_classes


class MultiTaskGraphCodeBERT(PreTrainedModel):
    """GraphCodeBERT encoder with two heads on the [CLS] representation:
    binary vulnerability detection and CWE-type classification."""

    config_class = MultiTaskGraphCodeBERTConfig
    base_model_prefix = "base"

    def __init__(self, config):
        super().__init__(config)
        # Loading backbone weights here means from_pretrained() on this class
        # fetches graphcodebert-base first, then overwrites with the checkpoint.
        self.base = AutoModel.from_pretrained("microsoft/graphcodebert-base")
        hidden_size = self.base.config.hidden_size  # 768 for graphcodebert-base
        self.vul_head = nn.Linear(hidden_size, 2)
        self.cwe_head = nn.Linear(hidden_size, config.num_cwe_classes)

    def forward(self, input_ids, attention_mask=None, labels_vul=None, labels_cwe=None):
        outputs = self.base(input_ids=input_ids, attention_mask=attention_mask)
        # Use the [CLS] token representation for both heads.
        hidden_state = outputs.last_hidden_state[:, 0, :]

        vul_logits = self.vul_head(hidden_state)
        cwe_logits = self.cwe_head(hidden_state)

        loss = None
        if labels_vul is not None and labels_cwe is not None:
            vul_loss = nn.CrossEntropyLoss()(vul_logits, labels_vul)

            # CWE labels are only meaningful for vulnerable samples, so the
            # CWE loss is computed over those rows alone.
            mask = labels_vul == 1
            if torch.any(mask):
                cwe_loss = nn.CrossEntropyLoss()(cwe_logits[mask], labels_cwe[mask])
                loss = vul_loss + 0.5 * cwe_loss  # down-weight the auxiliary CWE task
            else:
                loss = vul_loss

        output = {"vul_logits": vul_logits, "cwe_logits": cwe_logits}
        if loss is not None:
            output["loss"] = loss
        return output
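

# --- Usage sketch (illustrative, not part of the model definition) ---
# A minimal smoke test, assuming the matching "microsoft/graphcodebert-base"
# tokenizer and made-up snippets/labels; it only checks that a forward pass
# returns the expected keys and shapes.
if __name__ == "__main__":
    from transformers import AutoTokenizer

    config = MultiTaskGraphCodeBERTConfig(num_cwe_classes=12)
    model = MultiTaskGraphCodeBERT(config)
    tokenizer = AutoTokenizer.from_pretrained("microsoft/graphcodebert-base")

    batch = tokenizer(
        ["int main() { return 0; }", "strcpy(dst, src);"],  # hypothetical inputs
        padding=True,
        truncation=True,
        return_tensors="pt",
    )
    labels_vul = torch.tensor([0, 1])  # second sample marked vulnerable
    labels_cwe = torch.tensor([0, 3])  # CWE index; only rows with labels_vul == 1 count

    out = model(
        batch["input_ids"],
        attention_mask=batch["attention_mask"],
        labels_vul=labels_vul,
        labels_cwe=labels_cwe,
    )
    print(out["loss"].item(), out["vul_logits"].shape, out["cwe_logits"].shape)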