sample = utils.move_to_cuda(sample)
return sample

def apply_half(t):
    if t.dtype is torch.float32:
        return t.half()
    return t

return utils.apply(apply_half, sample) if fp16 else sample

def _set_seed(self):
    # Set seed based on args.seed and the update number so that we get ...
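The fragment above appears to be fairseq's fp16 sample preparation inside the trainer. As a self-contained illustration, not the fairseq implementation, the sketch below shows the same idea: move a possibly nested sample to the GPU, then cast float32 tensors to half precision. apply_to_sample and prepare_sample are hypothetical helpers standing in for the utils calls in the fragment.

import torch

def apply_to_sample(f, sample):
    # Recursively apply f to every tensor in a (possibly nested) sample.
    if torch.is_tensor(sample):
        return f(sample)
    if isinstance(sample, dict):
        return {k: apply_to_sample(f, v) for k, v in sample.items()}
    if isinstance(sample, (list, tuple)):
        return type(sample)(apply_to_sample(f, v) for v in sample)
    return sample

def prepare_sample(sample, fp16=True):
    if torch.cuda.is_available():
        sample = apply_to_sample(lambda t: t.cuda(), sample)

    def apply_half(t):
        # Only cast float32 tensors; integer tensors (e.g. token ids) are left alone.
        return t.half() if t.dtype is torch.float32 else t

    return apply_to_sample(apply_half, sample) if fp16 else sample

# Toy usage: a batch with integer token ids and float features.
batch = {'net_input': {'src_tokens': torch.tensor([[4, 5, 6]]),
                       'features': torch.randn(1, 3)}}
batch = prepare_sample(batch)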
Example code to fill masks for a batch of sentences using GPU:

bart.cuda()
bart.fill_mask(['The cat <mask> on the <mask>.', 'The dog <mask> on the <mask>.'], topk=3, beam=10)
# [[('The cat was on the ground.', tensor(-0.6183)), ('The cat was on the floor.', tens...
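For context, a hedged sketch of how the bart object used above is typically obtained through torch.hub; 'bart.large' is an assumed checkpoint name, and other BART checkpoints are published under different names.

import torch

bart = torch.hub.load('pytorch/fairseq', 'bart.large')  # assumed checkpoint name
bart.eval()   # disable dropout for evaluation
bart.cuda()   # move to GPU before calling fill_mask, as in the example above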
sample = utils.move_to_cuda(sample) if use_cuda else sample

gen_timer.start()
hypos = scorer.generate(models, sample)
gen_timer.stop(sample['ntokens'])

for i, hypos_i in enumerate(hypos):
    hypo = hypos_i[0]
    sample_id = sample['id'][i]

    tokens = hypo['tokens']
    tgt_len = tokens.numel()
    pos_scores = hypo['positional_scores'].float()
    ...
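The positional_scores above are per-token log-probabilities (natural log, from the model's log-softmax). As a hedged illustration, not fairseq's exact eval_lm accounting, the sketch below accumulates them into a corpus-level average negative log-likelihood and perplexity.

import math

score_sum, count = 0.0, 0
for hypos_i in hypos:                  # hypos as returned by scorer.generate(models, sample)
    pos_scores = hypos_i[0]['positional_scores'].float()
    score_sum += pos_scores.sum().item()
    count += pos_scores.numel()

avg_nll = -score_sum / count           # average negative log-likelihood per token
print('Perplexity: {:.2f}'.format(math.exp(avg_nll)))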
                       bpe='subword_nmt')
en2de.eval()  # disable dropout

# The underlying model is available under the *models* attribute
assert isinstance(en2de.models[0], fairseq.models.transformer.TransformerModel)

# Move model to GPU for faster translation
en2de.cuda()

# Translate a sentence
en2de.translate('Hello world!')
# 'Hallo Welt!'

# Batched translation
en2de.translate(['Hello world!', 'The cat sat on the mat.'])
# ['Hallo Welt!', ...
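The snippet above begins mid-call; as a hedged sketch, en2de is typically created with a torch.hub call along these lines (the exact checkpoint name is an assumption; fairseq publishes several En-De models):

import torch

# Assumed checkpoint name; tokenizer/BPE settings chosen to match the bpe='subword_nmt' argument above.
en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt16.en.de',
                       tokenizer='moses', bpe='subword_nmt')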
git clone https://github.com/NVIDIA/apex
cd apex
pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" \
  --global-option="--deprecated_fused_adam" --global-option="--xentropy" \
  --global-option="--fast_multihead_attn" ./
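These optional CUDA extensions provide the fused kernels fairseq can use for faster training. A hedged post-install sanity check might look like the sketch below; the extension module names are assumptions about what the build flags produce, not documented guarantees.

import importlib.util
import torch

print('CUDA available:', torch.cuda.is_available())

# Assumed module names corresponding to --deprecated_fused_adam and --xentropy above.
for name in ('apex', 'fused_adam_cuda', 'xentropy_cuda'):
    found = importlib.util.find_spec(name) is not None
    print('{}: {}'.format(name, 'found' if found else 'missing'))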