[{"data":1,"prerenderedAt":330},["ShallowReactive",2],{"content-query-dHHtRLlyO5":3},{"_path":4,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"title":8,"description":9,"date":10,"cover":11,"type":12,"category":13,"body":14,"_type":324,"_id":325,"_source":326,"_file":327,"_stem":328,"_extension":329},"/technology-blogs/zh/3635","zh",false,"","昇思MindSpore支持QwQ-32B并上线开源社区","QwQ-32B是阿里云于2025年3月6日发布的人工智能大型语言模型。","2025-03-07","https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/03/26/642cee8d2668420fa8a651bc45024dba.png","technology-blogs","实践",{"type":15,"children":16,"toc":317},"root",[17,25,31,36,44,49,62,67,74,79,84,89,99,104,109,117,122,127,132,137,142,150,155,163,168,176,180,185,190,195,201,206,211,219,224,229,234,242,247,252,257,262,270,275,280,285,293,299,312],{"type":18,"tag":19,"props":20,"children":22},"element","h1",{"id":21},"昇思mindspore支持qwq-32b并上线开源社区",[23],{"type":24,"value":8},"text",{"type":18,"tag":26,"props":27,"children":28},"p",{},[29],{"type":24,"value":30},"QwQ-32B是阿里云于2025年3月6日发布的人工智能大型语言模型。这是一款拥有 320 亿参数的模型，其性能可与具备 6710 亿参数（其中 370 亿被激活）的 DeepSeek-R1 媲美。这一成果突显了将强化学习应用于经过大规模预训练的强大基础模型的有效性。在保持强劲性能的同时，千问QwQ-32B还大幅降低了部署使用成本，此外，阿里云采用宽松的Apache2.0协议，将千问QwQ-32B模型向全球开源，所有人都可免费下载及商用。",{"type":18,"tag":26,"props":32,"children":33},{},[34],{"type":24,"value":35},"QwQ-32B 在一系列基准测试中进行了评估，测试了数学推理、编程能力和通用能力。结果显示了 QwQ-32B 与其他领先模型的性能对比，包括DeepSeek-R1、OpenAI-o1-mini、DeepSeek-R1-Distilled-Llama-70B和DeepSeek-R1-Distilled-Qwen-32B。",{"type":18,"tag":26,"props":37,"children":38},{},[39],{"type":18,"tag":40,"props":41,"children":43},"img",{"alt":7,"src":42},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/03/14/c55ca04bbb7d47ff92d2fde25faa6cd7.png",[],{"type":18,"tag":26,"props":45,"children":46},{},[47],{"type":24,"value":48},"昇思MindSpore原生支持Qwen2.5-32B, 
在此基础上0Day完成QwQ-32B的支持，并且完成性能测试。昇思MindSpore开源社区、魔乐社区已第一时间上架该模型，欢迎广大开发者下载体验！此外，MindSpore将于近期支持vLLM推理框架部署，敬请期待！",{"type":18,"tag":26,"props":50,"children":51},{},[52,54],{"type":24,"value":53},"魔乐社区：",{"type":18,"tag":55,"props":56,"children":60},"a",{"href":57,"rel":58},"https://modelers.cn/models/MindSpore-Lab/QwQ-32B",[59],"nofollow",[61],{"type":24,"value":57},{"type":18,"tag":26,"props":63,"children":64},{},[65],{"type":24,"value":66},"以下为手把手教程：",{"type":18,"tag":68,"props":69,"children":71},"h2",{"id":70},"_01快速开始",[72],{"type":24,"value":73},"# 01快速开始",{"type":18,"tag":26,"props":75,"children":76},{},[77],{"type":24,"value":78},"QwQ-32B推理验证使用了Atlas 800T A2服务器（基于BF16权重）。昇思MindSpore提供了QwQ-32B推理专用的Docker容器镜像，供开发者快速体验。",{"type":18,"tag":26,"props":80,"children":81},{},[82],{"type":24,"value":83},"1、下载昇思 MindSpore 推理容器镜像",{"type":18,"tag":26,"props":85,"children":86},{},[87],{"type":24,"value":88},"执行以下 Shell 命令，拉取昇思 MindSpore 推理容器镜像（复用DeepSeek-V3的镜像）：",{"type":18,"tag":90,"props":91,"children":93},"pre",{"code":92},"docker pull \\\nswr.cn-central-221.ovaijisuan.com/mindformers/deepseek_v3_mindspore2.5.0-infer:20250217\n",[94],{"type":18,"tag":95,"props":96,"children":97},"code",{"__ignoreMap":7},[98],{"type":24,"value":92},{"type":18,"tag":26,"props":100,"children":101},{},[102],{"type":24,"value":103},"2、启动容器",{"type":18,"tag":26,"props":105,"children":106},{},[107],{"type":24,"value":108},"执行以下命令创建并启动容器：",{"type":18,"tag":90,"props":110,"children":112},{"code":111},"docker run -it --privileged  --name=qwq-32b --net=host \\\n   --shm-size 500g \\\n   --device=/dev/davinci0 \\\n   --device=/dev/davinci1 \\\n   --device=/dev/davinci2 \\\n   --device=/dev/davinci3 \\\n   --device=/dev/davinci4 \\\n   --device=/dev/davinci5 \\\n   --device=/dev/davinci6 \\\n   --device=/dev/davinci7 \\\n   --device=/dev/davinci_manager \\\n   --device=/dev/hisi_hdc \\\n   --device /dev/devmm_svm \\\n   -v /usr/local/Ascend/driver:/usr/local/Ascend/driver \\\n   -v 
/usr/local/Ascend/firmware:/usr/local/Ascend/firmware \\\n   -v /usr/local/sbin/npu-smi:/usr/local/sbin/npu-smi \\\n   -v /usr/local/sbin:/usr/local/sbin \\\n   -v /etc/hccn.conf:/etc/hccn.conf \\\n   swr.cn-central-221.ovaijisuan.com/mindformers/deepseek_v3_mindspore2.5.0-infer:20250217 \\\n   bash\n",[113],{"type":18,"tag":95,"props":114,"children":115},{"__ignoreMap":7},[116],{"type":24,"value":111},{"type":18,"tag":26,"props":118,"children":119},{},[120],{"type":24,"value":121},"注意事项：",{"type":18,"tag":26,"props":123,"children":124},{},[125],{"type":24,"value":126},"如果部署在多机上，每台机器中容器的hostname不能重复。如果有部分宿主机的hostname是一致的，需要在起容器的时候修改容器的hostname。",{"type":18,"tag":26,"props":128,"children":129},{},[130],{"type":24,"value":131},"后续所有操作均在容器内操作。",{"type":18,"tag":26,"props":133,"children":134},{},[135],{"type":24,"value":136},"3、模型下载",{"type":18,"tag":26,"props":138,"children":139},{},[140],{"type":24,"value":141},"执行以下命令为自定义下载路径/home/work/QwQ-32B添加白名单：",{"type":18,"tag":90,"props":143,"children":145},{"code":144},"export HUB_WHITE_LIST_PATHS=/home/work/QwQ-32B\n",[146],{"type":18,"tag":95,"props":147,"children":148},{"__ignoreMap":7},[149],{"type":24,"value":144},{"type":18,"tag":26,"props":151,"children":152},{},[153],{"type":24,"value":154},"执行以下 Python 脚本从魔乐社区下载昇思 MindSpore 版本的 QwQ-32B 文件至指定路径/home/work/QwQ-32B。下载的文件包含模型代码、权重、分词模型和示例代码，占用约 62GB 的磁盘空间：",{"type":18,"tag":90,"props":156,"children":158},{"code":157},"from openmind_hub import snapshot_download\n\nsnapshot_download(\n    repo_id=\"MindSpore-Lab/QwQ-32B\",\n    local_dir=\"/home/work/QwQ-32B\",\n    local_dir_use_symlink=False\n)\n",[159],{"type":18,"tag":95,"props":160,"children":161},{"__ignoreMap":7},[162],{"type":24,"value":157},{"type":18,"tag":26,"props":164,"children":165},{},[166],{"type":24,"value":167},"下载完成的 /home/work/QwQ-32B 文件夹目录结构如下：",{"type":18,"tag":90,"props":169,"children":171},{"code":170},"QwQ-32b\n  ├── config.json                         # 模型json配置文件\n  ├── vocab.json              
            # 词表vocab文件\n  ├── merges.txt                          # 词表merges文件\n  ├── tokenizer.json                      # 词表json文件\n  ├── tokenizer_config.json               # 词表配置文件\n  ├── predict_qwq_32b.yaml                # 模型yaml配置文件\n  ├── qwen2_5_tokenizer.py                # 模型tokenizer文件\n  ├── model-xxxxx-of-xxxxx.safetensors    # 模型权重文件\n  └── param_name_map.json                 # 模型权重映射文件\n",[172],{"type":18,"tag":95,"props":173,"children":174},{"__ignoreMap":7},[175],{"type":24,"value":170},{"type":18,"tag":26,"props":177,"children":178},{},[179],{"type":24,"value":121},{"type":18,"tag":26,"props":181,"children":182},{},[183],{"type":24,"value":184},"/home/work/QwQ-32B可修改为自定义路径，确保该路径有足够的磁盘空间（约 62GB）。",{"type":18,"tag":26,"props":186,"children":187},{},[188],{"type":24,"value":189},"模型权重文件和映射文件单独存放到一个文件夹目录下。",{"type":18,"tag":26,"props":191,"children":192},{},[193],{"type":24,"value":194},"下载时间可能因网络环境而异，建议在稳定的网络环境下操作。",{"type":18,"tag":68,"props":196,"children":198},{"id":197},"_02服务化部署",[199],{"type":24,"value":200},"# 02服务化部署",{"type":18,"tag":26,"props":202,"children":203},{},[204],{"type":24,"value":205},"1、修改模型配置文件",{"type":18,"tag":26,"props":207,"children":208},{},[209],{"type":24,"value":210},"在 predict_qwq_32b.yaml 中对以下配置进行修改：",{"type":18,"tag":90,"props":212,"children":214},{"code":213},"auto_trans_ckpt: True  # 打开权重自动切分，自动将权重转换为分布式任务所需的形式\nload_checkpoint: '/home/work/QwQ-32B' # 为存放模型分布式权重文件夹路径\nprocessor:\n  tokenizer:\n    vocab_file: \"/home/work/QwQ-32B/vocab.json\"  # vocab文件绝对路径\n    merges_file: \"/home/work/QwQ-32B/merges.txt\"  # merges文件绝对路径\n",[215],{"type":18,"tag":95,"props":216,"children":217},{"__ignoreMap":7},[218],{"type":24,"value":213},{"type":18,"tag":26,"props":220,"children":221},{},[222],{"type":24,"value":223},"2、一键启动MindIE",{"type":18,"tag":26,"props":225,"children":226},{},[227],{"type":24,"value":228},"MindSpore 
Transformers提供了一键拉起MindIE脚本，脚本中已预置环境变量设置和服务化配置，仅需输入模型文件目录后即可快速拉起服务。",{"type":18,"tag":26,"props":230,"children":231},{},[232],{"type":24,"value":233},"进入mindformers/scripts目录下，执行MindIE启动脚本",{"type":18,"tag":90,"props":235,"children":237},{"code":236},"cd mindformers/scripts\nbash run_mindie.sh --model-name QwQ-32B --model-path /home/work/QwQ-32B --max-prefill-batch-size 1\n",[238],{"type":18,"tag":95,"props":239,"children":240},{"__ignoreMap":7},[241],{"type":24,"value":236},{"type":18,"tag":26,"props":243,"children":244},{},[245],{"type":24,"value":246},"参数说明：",{"type":18,"tag":26,"props":248,"children":249},{},[250],{"type":24,"value":251},"--model-name：设置模型名称",{"type":18,"tag":26,"props":253,"children":254},{},[255],{"type":24,"value":256},"--model-path：设置模型目录路径",{"type":18,"tag":26,"props":258,"children":259},{},[260],{"type":24,"value":261},"查看日志：",{"type":18,"tag":90,"props":263,"children":265},{"code":264},"tail -f output.log\n",[266],{"type":18,"tag":95,"props":267,"children":268},{"__ignoreMap":7},[269],{"type":24,"value":264},{"type":18,"tag":26,"props":271,"children":272},{},[273],{"type":24,"value":274},"当log日志中出现Daemon start success!，表示服务启动成功。",{"type":18,"tag":26,"props":276,"children":277},{},[278],{"type":24,"value":279},"3. 
执行推理请求测试",{"type":18,"tag":26,"props":281,"children":282},{},[283],{"type":24,"value":284},"执行以下命令发送流式推理请求进行测试：",{"type":18,"tag":90,"props":286,"children":288},{"code":287},"curl -w \"\\ntime_total=%{time_total}\\n\" -H \"Accept: application/json\" -H \"Content-type: application/json\" -X POST -d '{\"inputs\": \"请介绍一个北京的景点\", \"parameters\": {\"do_sample\": false, \"max_new_tokens\": 128}, \"stream\": false}' http://127.0.0.1:1025/generate_stream &\n",[289],{"type":18,"tag":95,"props":290,"children":291},{"__ignoreMap":7},[292],{"type":24,"value":287},{"type":18,"tag":68,"props":294,"children":296},{"id":295},"_03声明",[297],{"type":24,"value":298},"# 03声明",{"type":18,"tag":26,"props":300,"children":301},{},[302,304,310],{"type":24,"value":303},"本文档提供的模型代码、权重文件和部署镜像，当前仅限于基于昇思MindSpore AI框架体验QwQ-32B的部署效果，不支持生产环境部署。相关使用问题请反馈至ISSUE（链接：",{"type":18,"tag":55,"props":305,"children":308},{"href":306,"rel":307},"https://gitee.com/mindspore/mindformers/issues",[59],[309],{"type":24,"value":306},{"type":24,"value":311},"）。",{"type":18,"tag":26,"props":313,"children":314},{},[315],{"type":24,"value":316},"昇思MindSpore AI框架将持续支持相关主流模型演进，并根据开源情况面向全体开发者提供镜像与支持。",{"title":7,"searchDepth":318,"depth":318,"links":319},4,[320,322,323],{"id":70,"depth":321,"text":73},2,{"id":197,"depth":321,"text":200},{"id":295,"depth":321,"text":298},"markdown","content:technology-blogs:zh:3635.md","content","technology-blogs/zh/3635.md","technology-blogs/zh/3635","md",1776506132533]