Fairseq's pretrained translation models can be loaded interactively through PyTorch Hub. The example below loads the WMT'14 English-French Transformer, translates with beam search, then walks through the individual preprocessing steps and sampling-based generation:

```python
import torch

# Load an En-Fr Transformer model trained on WMT'14 data:
en2fr = torch.hub.load('pytorch/fairseq', 'transformer.wmt14.en-fr',
                       tokenizer='moses', bpe='subword_nmt')

# Use the GPU (optional):
en2fr.cuda()

# Translate with beam search:
fr = en2fr.translate('Hello world!', beam=5)
assert fr == 'Bonjour à tous !'

# Manually tokenize:
en_toks = en2fr.tokenize('Hello world!')
assert en_toks == 'Hello world !'

# Manually apply BPE:
en_bpe = en2fr.apply_bpe(en_toks)
assert en_bpe == 'H@@ ello world !'

# Manually binarize:
en_bin = en2fr.binarize(en_bpe)
assert en_bin.tolist() == [329, 14044, 682, 812, 2]

# Generate five translations with top-k sampling:
fr_bin = en2fr.generate(en_bin, beam=5, sampling=True, sampling_topk=20)
assert len(fr_bin) == 5

# Convert one of the samples to a string and detokenize:
fr_sample = fr_bin[0]['tokens']
fr_bpe = en2fr.string(fr_sample)
fr_toks = en2fr.remove_bpe(fr_bpe)
fr = en2fr.detokenize(fr_toks)
assert fr == 'Bonjour à tous !'
```

## English-to-German Translation

Semi-supervised training with back-translation is an effective way of improving translation systems.
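The same hub interface covers the English-to-German models. As a minimal sketch, assuming fairseq's published WMT'18 En-De back-translation ensemble is available under the hub name `transformer.wmt18.en-de` with per-model checkpoint files (run `torch.hub.list('pytorch/fairseq')` to confirm the identifiers in your fairseq version):

```python
import torch

# Load one model of the WMT'18 En-De ensemble, which was trained on
# bitext plus synthetic pairs produced by back-translating German
# monolingual data into English. The hub name and checkpoint file
# below are assumptions; verify with torch.hub.list('pytorch/fairseq').
en2de = torch.hub.load(
    'pytorch/fairseq',
    'transformer.wmt18.en-de',
    checkpoint_file='wmt18.model1.pt',
    tokenizer='moses',
    bpe='subword_nmt',
)
en2de.cuda()  # optional, same as the En-Fr example

# Inference is unchanged: back-translation only alters the training data.
de = en2de.translate('Hello world!', beam=5)
print(de)  # e.g. 'Hallo Welt!'
```

Because back-translation only augments the training corpus with synthetic source sentences, the loaded model exposes the same `translate`, `tokenize`, `apply_bpe`, and `generate` methods demonstrated above.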