import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from ppdet.modeling import ops  # assumed: PaddleDetection's op helpers providing sigmoid_cross_entropy_with_logits


class DistillYOLOv3Loss(nn.Layer):
    def __init__(self, weight):
        super(DistillYOLOv3Loss, self).__init__()
        self.loss_weight = weight

    def obj_weighted_reg(self, sx, sy, sw, sh, tx, ty, tw, th, tobj):
        # box-regression distillation: the student's x/y logits are pulled toward
        # the teacher's sigmoid-activated x/y predictions
        loss_x = ops.sigmoid_cross_entropy_with_logits(sx, F.sigmoid(tx))
        loss_y = ops.sigmoid_cross_entropy_with_logits(sy, F.sigmoid(ty))
        # ... (rest of the method is truncated in the source)
Distill the Best, Ignore the Rest: Improving Dataset Distillation with Loss-Value-Based Pruning

Dataset distillation has gained significant interest in recent years, yet existing approaches typically distill from the entire dataset, potentially including non-beneficial samples. We introduce a novel "Prune First, Distill After" framework that systematically prunes datasets via loss-based sampling prior to distillation.
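The pruning step itself is easy to illustrate. Below is a rough sketch of loss-value-based pruning, not the authors' implementation: a pretrained scorer model assigns each training sample a loss, and only a fraction of the samples, ranked by that loss, is handed to whichever distillation method follows. The function name, the keep_fraction and keep parameters, and the toy loss values are assumptions for illustration.

    import numpy as np

    def prune_by_loss(losses, keep_fraction=0.5, keep="low"):
        # rank samples by the per-sample loss of a pretrained scorer model and
        # return the indices of the subset to pass on to dataset distillation
        order = np.argsort(losses)      # ascending loss
        if keep == "high":
            order = order[::-1]         # keep the hardest samples instead
        n_keep = max(1, int(len(losses) * keep_fraction))
        return order[:n_keep]

    # toy usage: score once, prune, then distill only the retained subset
    per_sample_loss = np.array([0.12, 2.30, 0.40, 1.70, 0.05])
    kept = prune_by_loss(per_sample_loss, keep_fraction=0.6, keep="low")
    print(kept)  # [4 0 2] -> the three lowest-loss samples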
This paper presents a case study of the crude-oil atmospheric distillation column at the Tabriz refinery to show how exergy loss profiles can be used in the thermodynamic examination of different retrofit options. The atmospheric distillation column of the Tabriz refinery has been revamped as a ...
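For readers less familiar with exergy analysis, a stage-by-stage exergy loss profile is normally built from the Gouy-Stodola relation, exergy destroyed = T0 * S_gen. The sketch below is a generic illustration under that assumption only; the dead-state temperature, the function name, and the tray entropy values are placeholders, not data from the Tabriz case study.

    T0 = 298.15  # assumed dead-state (ambient) temperature, K

    def stage_exergy_loss(s_in, s_out, q=0.0, t_q=None, t0=T0):
        # Gouy-Stodola: exergy destroyed = T0 * S_gen, with
        # S_gen = (entropy flow out) - (entropy flow in) - Q / T_q
        # for a stage exchanging heat Q [kW] at temperature T_q [K];
        # s_in and s_out are total entropy flows in kW/K
        s_gen = s_out - s_in - (q / t_q if q else 0.0)
        return t0 * s_gen

    # the profile is this quantity evaluated tray by tray (placeholder numbers)
    trays = [(12.0, 12.4), (12.4, 12.9), (12.9, 13.1)]
    profile = [stage_exergy_loss(s_in, s_out) for s_in, s_out in trays]
    print(profile)  # exergy destroyed on each tray, kW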
    def obj_loss(self, sobj, tobj):
        # objectness distillation: the binary target is 1 wherever the teacher
        # assigns positive objectness; the mask itself carries no gradient
        obj_mask = paddle.cast(tobj > 0., dtype="float32")
        obj_mask.stop_gradient = True
        loss = paddle.mean(
            ops.sigmoid_cross_entropy_with_logits(sobj, obj_mask))
        return loss

    def forward(self, teacher_model, student_model):
        ...  # body truncated in the source; it begins "teacher_distill_pa..."
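For orientation, here is a minimal sketch of how a loss module with this forward(teacher_model, student_model) signature is typically combined with the student's own detection loss during training: the teacher runs frozen to populate its distill pairs, the student runs its normal forward pass, and the two losses are summed. The train_step helper, the batch object, and the distill_weight factor are hypothetical; this is not the PaddleDetection training loop.

    import paddle

    def train_step(student, teacher, distill_loss_fn, batch, optimizer,
                   distill_weight=1.0):
        # hypothetical single training step: only the student receives gradients
        teacher.eval()
        with paddle.no_grad():
            teacher(batch)                    # fills the teacher-side distill pairs
        task_loss = student(batch)['loss']    # student forward pass + detection loss
        kd_loss = distill_loss_fn(teacher, student)   # e.g. a DistillYOLOv3Loss instance
        total_loss = task_loss + distill_weight * kd_loss
        total_loss.backward()
        optimizer.step()
        optimizer.clear_grad()
        return total_loss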