import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load model weights in bfloat16 with automatic device placement
model = AutoModelForCausalLM.from_pretrained(
    "deepseek-r1", torch_dtype=torch.bfloat16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("deepseek-r1")

# Tokenize the prompt ("Hello, DeepSeek-V3!") and move it to the model's device
inputs = tokenizer("你好,DeepSeek-V3!", return_tensors="pt").to(model.device)

# The original snippet is truncated here; the generation settings below are assumed
outputs = model.generate(**inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))