[{"data":1,"prerenderedAt":633},["ShallowReactive",2],{"content-query-pMQpad8Kl4":3},{"_path":4,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"title":8,"description":9,"date":10,"cover":11,"type":12,"body":13,"_type":627,"_id":628,"_source":629,"_file":630,"_stem":631,"_extension":632},"/technology-blogs/zh/3854","zh",false,"","Autoformer模型论文解读，并基于MindSpore NLP推理复现","有效地提高了模型对具有明显周期性的时间序列数据的预测能力，尤其适用于处理长时间序列的依赖关系","2025-10-09","https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/645ca911ac364506a9cbfb2baf1bc024.png","technology-blogs",{"type":14,"children":15,"toc":624},"root",[16,24,30,35,40,45,53,58,75,80,90,95,100,105,114,129,136,159,176,183,211,221,228,267,275,286,291,298,333,340,375,380,392,407,415,420,429,436,441,449,457,462,469,474,482,487,494,509,514,519,524,531,536,549,564,569,574,582,587,596,601,610,615],{"type":17,"tag":18,"props":19,"children":21},"element","h1",{"id":20},"autoformer模型论文解读并基于mindspore-nlp推理复现",[22],{"type":23,"value":8},"text",{"type":17,"tag":25,"props":26,"children":27},"p",{},[28],{"type":23,"value":29},"Autoformer模型是一种处理时间序列预测任务的深度学习模型，它创新性地结合了深度分解架构（Decomposition Architecture）和自相关机制（Auto-Correlation Mechanism），通过渐进式（progressively）分解和序列级（series-wise）连接来捕捉时间序列的周期性特征，有效地提高了模型对具有明显周期性的时间序列数据的预测能力，尤其适用于处理长时间序列的依赖关系。原文《Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting》由清华大学吴海旭等于2021年发表在NeurIPS顶会。",{"type":17,"tag":25,"props":31,"children":32},{},[33],{"type":23,"value":34},"Autoformer被用于高效且准确地解决长期时间序列预测问题，该类问题极具挑战性，因为其待预测的序列长度远远大于输入的历史序列长度，即需要在有限的历史信息基础上预测更长远的未来时间序列。所谓长时间时序预测，实质上就是利用已知的一段相对较短的历史时间序列数据来预测未来一段相对更长的序列。",{"type":17,"tag":25,"props":36,"children":37},{},[38],{"type":23,"value":39},"若干类似的时间序列预测方法简要对比",{"type":17,"tag":25,"props":41,"children":42},{},[43],{"type":23,"value":44},"在时间序列预测领域涌现了许多模型，有传统统计模型（自回归模型AR、移动平均模型MA、自回归移动平均模型ARMA等）、机器学习模型（随机森林RF、支持向量机SVM等）、深度学习模型（循环神经网络RNN、门控循环单元GRU、时间卷积网络TCN等）和组合（混合）模型。不同的模型适用于不同的时间序列预测任务，选择合适的模型需要考虑数据的特性、预测任务的复杂性、计算资源等因素。Autoformer是一类基于Transformer的深度学习模型，针对长期时间序列预测问题进行设计。我列举了与之类似的若干模型，如表1所示，并对它们的特点、创新点和适用场景进行分析。",{"type":17,"tag":25,"props":46,"children":47},{},[48],{"type":17,"tag":49,"props":50,"children":52},"img",{"alt":7,"src":51},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/d39e70601d61484f95c68573b17a2066.png",[],{"type":17,"tag":25,"props":54,"children":55},{},[56],{"type":23,"value":57},"如上表所示，该类模型皆源自于Transformer架构，（除了Dlinear）都采用了注意力机制来捕捉序列中的依赖关系，本身就能够处理序列数据并捕获长距离依赖。比较常用的LSTM模型多步预测效果不尽理想，误差累积是造成该问题的一部分原因，Transformer-based模型是一步产生所有预测结果，在处理长时序问题上有一定优势。从改进策略上来看，Autoformer针对长时间序列预测问题提出了深度分解架构和自相关机制的策略，**Informer和Crossformer等模型则是提出了自注意力机制的变种，iTransformer融合了Transformer和RNN的特性，PatchTst将时间序列分割成patches来处理数据，FEDformer引入了Fourier变换。**相较于其他模型，Autoformer结构较为复杂，并未加入其他模型（RNN、线性结构），也未改动自注意力机制，而是提出了一种自相关机制和深度分解架构，适合处理长序列和复杂的依赖关系的问题。考虑到原始时序数据中各种趋势信息比较混乱，无法提供有效的时间依赖，和传统的Transformer模型在自注意力计算时的二次复杂度较高的问题。",{"type":17,"tag":18,"props":59,"children":61},{"id":60},"_01-论文创新点",[62,68,70],{"type":17,"tag":63,"props":64,"children":65},"strong",{},[66],{"type":23,"value":67},"# 01",{"type":23,"value":69}," ",{"type":17,"tag":63,"props":71,"children":72},{},[73],{"type":23,"value":74},"论文创新点",{"type":17,"tag":25,"props":76,"children":77},{},[78],{"type":23,"value":79},"Autoformer模型的创新点在原论文标题中有明确体现，“Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting”。“Decomposition”，指的是将传统时序分解可以将原始序列分解为更容易预测、更稳定的内容，由于仅在预处理阶段进行分解无法提取序列的内在依赖，因此将分解模块集成到模型中。“Auto-Correlation”提出了一种新的计算机制，将多头注意力机制的“自注意（self-attention）”对节点的计算转化为计算子序列（sub-series）间的相似度。具体工作如下：",{"type":17,"tag":25,"props":81,"children":82},{},[83,85],{"type":23,"value":84},"**1、**",{"type":17,"tag":63,"props":86,"children":87},{},[88],{"type":23,"value":89},"深度分解架构——从预测的隐藏变量中提取复杂的长期趋势信息",{"type":17,"tag":25,"props":91,"children":92},{},[93],{"type":23,"value":94},"Autoformer仍然沿用残差和编解码器结构，同时由Auto-Correlation、Series Decomp、Feed Forward三个模块组成每一层，为了对复杂的时间模式进行推理，作者尝试采用时序数据预处理中常用的分解的思想，将Transformer改造为分解预测结构，在模型的输入部分先用到Series Decomp Block。",{"type":17,"tag":25,"props":96,"children":97},{},[98],{"type":23,"value":99},"时间序列分解是指将时间序列分解为几个组分，每个组分表示一类潜在的时间模式，如周期项（seasonal），趋势项（trend-cyclical）。由于预测问题中未来的不可知性，通常先对过去序列进行分解，再对每个组分分别预测。但这会造成预测结果受限于分解效果，并且忽视了未来各个组分之间的相互作用。Autoformer突破地将作为预处理传统方法的序列分解加入深度模型，提出的深度分解架构，将序列分解作为一个内部单元，嵌入到深度模型的编-解码器中。在预测过程中，模型交替进行预测结果优化和序列分解，即从隐变量中逐步分离趋势项与周期项，实现渐进式地预测，能够从复杂时间模式中分解出可预测性更强的项。",{"type":17,"tag":25,"props":101,"children":102},{},[103],{"type":23,"value":104},"这种设计允许模型在预测过程中交替进行预测结果优化和序列分解，进而实现两者相互促进。深度分解架构如图1所示。",{"type":17,"tag":25,"props":106,"children":107},{},[108,112],{"type":17,"tag":49,"props":109,"children":111},{"alt":7,"src":110},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/dbc8c6907fba4c5fae5a4b937eb92802.png",[],{"type":23,"value":113}," 上面表示编码器（Encoder）部分，模型对输入的原始时序数据进行编码；右上角，黑色折线表示原始时序数据（Time Series），黄色线表示周期性数据部分（Seasonal Part），蓝色线表示趋势项部分（Trend-cyclical Part）；在解码器部分将周期项初始化传入自相关机制等一系列操作，将趋势项传递给分解模块处理后的结果，得到结果（如下图所示）并进行加和得到预测结果。",{"type":17,"tag":25,"props":115,"children":116},{},[117,121,123,127],{"type":17,"tag":49,"props":118,"children":120},{"alt":7,"src":119},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/5e058434d8e04d719ac27e1ac0b40c88.png",[],{"type":23,"value":122}," 序列分解单元（series decomposition block）使用传统的decomposition操作，基于滑动平均思想，将序列分解为趋势项和周期项两部分，平滑周期项（反映长期趋势）、突出趋势项（反映短期的波动）。对于长度为L的序列",{"type":17,"tag":49,"props":124,"children":126},{"alt":7,"src":125},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/3acc6818f4954672892033cd3265fe01.png",[],{"type":23,"value":128}," ，做如下处理：",{"type":17,"tag":25,"props":130,"children":131},{},[132],{"type":17,"tag":49,"props":133,"children":135},{"alt":7,"src":134},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/af7cee4b00764c8aa7bfbad70e1c0136.png",[],{"type":17,"tag":25,"props":137,"children":138},{},[139,141,145,147,151,153,157],{"type":23,"value":140},"其中， X为待分解的隐变量，",{"type":17,"tag":49,"props":142,"children":144},{"alt":7,"src":143},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/085fb3f6a7624395943780c0a97acbec.png",[],{"type":23,"value":146},"存储着每个滑动窗口的均值，即序列的短期波动，为趋势项，对原输入数据先进行填充然后再进行平均池化；",{"type":17,"tag":49,"props":148,"children":150},{"alt":7,"src":149},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/6603c74a9a9d402bad57da384448a184.png",[],{"type":23,"value":152},"为周期项，是减去短期波动后保留周期性的平滑序列，即原始时序数据减掉趋势项数据。Series Decomp Block可以记为",{"type":17,"tag":49,"props":154,"children":156},{"alt":7,"src":155},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/862ff30cb84440c7b1d488f01a7afd5f.png",[],{"type":23,"value":158},"。",{"type":17,"tag":25,"props":160,"children":161},{},[162,164,168,170,174],{"type":23,"value":163},"**编码器：**主要进行周期项建模，逐步消除趋势项，得到周期项",{"type":17,"tag":49,"props":165,"children":167},{"alt":7,"src":166},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/71ec48a53a15422a9192a1a0de5959b8.png",[],{"type":23,"value":169},"。而基于这种周期性，设计自相关机制，聚合不同周期的相似子过程，假设具有N层编码器，第l层编码层可以总结为",{"type":17,"tag":49,"props":171,"children":173},{"alt":7,"src":172},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/75c99dcb9a47455590ff1f6bd28f6fcd.png",[],{"type":23,"value":175}," ,每一层处理如下：",{"type":17,"tag":25,"props":177,"children":178},{},[179],{"type":17,"tag":49,"props":180,"children":182},{"alt":7,"src":181},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/6057d42abb904200a49690558d6af53d.png",[],{"type":17,"tag":25,"props":184,"children":185},{},[186,188,192,194,198,200,204,205,209],{"type":23,"value":187},"其中，“_”表示消除的趋势部分，",{"type":17,"tag":49,"props":189,"children":191},{"alt":7,"src":190},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/eeb591d0715b4fb9bdf43c1a9d259490.png",[],{"type":23,"value":193},"表示第l层编码器的输出，",{"type":17,"tag":49,"props":195,"children":197},{"alt":7,"src":196},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/7383fc3e15c841ef9bcb8fe3344e3fe7.png",[],{"type":23,"value":199},"表示经过嵌入的",{"type":17,"tag":49,"props":201,"children":203},{"alt":7,"src":202},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/9f02be76d5d64470a07f7bfb06de12de.png",[],{"type":23,"value":158},{"type":17,"tag":49,"props":206,"children":208},{"alt":7,"src":207},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/e36e196564ba4925b82e313d8b176d9d.png",[],{"type":23,"value":210},"分别表示第l层中经过i个序列分解模块后获得的季节性部分。Auto-Correlation表示自相关性机制，下文对其展开具体介绍。",{"type":17,"tag":25,"props":212,"children":213},{},[214,219],{"type":17,"tag":63,"props":215,"children":216},{},[217],{"type":23,"value":218},"解码器：",{"type":23,"value":220}," 采用双路处理模式，对趋势项与周期项分别建模，包含趋势周期分量的累积结构和周期性分量的叠加自相关机制成分。上分支处理seasonal part，使用自相关机制AutoCorrelation，基于序列的周期性质来挖掘未来预测状态内的时间依赖，聚合不同周期中具有相似过程的子序列；下分支处理trend-cyclical part，采用带权加法逐步从上分支每一子层预测的隐变量中提取出趋势信息加和。",{"type":17,"tag":25,"props":222,"children":223},{},[224],{"type":17,"tag":49,"props":225,"children":227},{"alt":7,"src":226},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/b214425adfbc47a5a795047f5737d9a4.png",[],{"type":17,"tag":25,"props":229,"children":230},{},[231,232,236,238,242,243,247,249,253,255,259,261,265],{"type":23,"value":187},{"type":17,"tag":49,"props":233,"children":235},{"alt":7,"src":234},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/1b445f173b6c4796a73942cf581805b5.png",[],{"type":23,"value":237},"表示第l层解码器的输出，",{"type":17,"tag":49,"props":239,"children":241},{"alt":7,"src":240},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/9cc2366eb1f24585a0280e680fcf0d7c.png",[],{"type":23,"value":199},{"type":17,"tag":49,"props":244,"children":246},{"alt":7,"src":245},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/00b0558c82c5413e88d344378ae95267.png",[],{"type":23,"value":248},"用于加速。",{"type":17,"tag":49,"props":250,"children":252},{"alt":7,"src":251},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/c3853b7e81604a6489d69bb6718d2f62.png",[],{"type":23,"value":254},"表示分别经过第l层解码层中第i个序列分解模块处理后的季节性部分和趋势循环部分。",{"type":17,"tag":49,"props":256,"children":258},{"alt":7,"src":257},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/3e5ca5de4d07449fbf33e47c0ef95ed8.png",[],{"type":23,"value":260},"表示第i个提取趋势的",{"type":17,"tag":49,"props":262,"children":264},{"alt":7,"src":263},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/58e271474e3948c292861248a5345881.png",[],{"type":23,"value":266},"权重。基于上述渐进式分解架构，模型可以在预测过程中逐步分解隐变量，并通过自相关机制、累积的方式分别得到周期、趋势组分的预测结果，实现分解、预测结果优化的交替进行、相互促进。",{"type":17,"tag":25,"props":268,"children":269},{},[270],{"type":17,"tag":63,"props":271,"children":272},{},[273],{"type":23,"value":274},"2、自相关机制——扩大信息利用效率",{"type":17,"tag":25,"props":276,"children":277},{},[278,280,284],{"type":23,"value":279},"不同周期的相似相位之间通常表现出相似的子过程，利用这种序列固有的周期性来设计自相关机制，代替点向连接的注意力机制，实现高效的序列级（series-wise）连接和",{"type":17,"tag":49,"props":281,"children":283},{"alt":7,"src":282},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/1302eb97ae734fd09fd098354f4d0135.png",[],{"type":23,"value":285},"复杂度，打破信息利用瓶颈。其中，包含基于周期的依赖发现（Period-based dependencies）和时延信息聚合（Time delay aggregation）。",{"type":17,"tag":25,"props":287,"children":288},{},[289],{"type":23,"value":290},"自相关机制的高效计算：如图所示，自相关机制依然使用query、key、value的多头形式（可以无缝替换自注意力机制），Q、K、V和Transformer一样通过映射输入得到，对Q和K分别执行快速傅里叶变换操作 (FFT)，而K还执行了共轭操作。",{"type":17,"tag":25,"props":292,"children":293},{},[294],{"type":17,"tag":49,"props":295,"children":297},{"alt":7,"src":296},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/4d888b4074874f8ba5c5d5c6ea9b9177.png",[],{"type":17,"tag":25,"props":299,"children":300},{},[301,331],{"type":17,"tag":63,"props":302,"children":303},{},[304,306,329],{"type":23,"value":305},"基于周期的依赖发现：",{"type":17,"tag":63,"props":307,"children":308},{},[309,311,315,317,321,323,327],{"type":23,"value":310},"周期之间相同的相位位置自然会产生相似的子过程，为找到相似子过程，需要估计序列的周期。Autoformer提出了一种周期依赖（Period-based dependencies）自相关系数",{"type":17,"tag":49,"props":312,"children":314},{"alt":7,"src":313},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/5b7c1e8984b2418a84f65af17b089004.png",[],{"type":23,"value":316},"，对于一个实际离散的时间过程",{"type":17,"tag":49,"props":318,"children":320},{"alt":7,"src":319},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/926c47b5dc1c4a92add3d0594fa0ce93.png",[],{"type":23,"value":322},"延迟t个周期的序列",{"type":17,"tag":49,"props":324,"children":326},{"alt":7,"src":325},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/c5e12ec8d0c3485593a3f4aa507f12c1.png",[],{"type":23,"value":328},"，基于",{"type":23,"value":330},"随机过程理论",{"type":23,"value":332},"，可以计算其自相关系数：",{"type":17,"tag":25,"props":334,"children":335},{},[336],{"type":17,"tag":49,"props":337,"children":339},{"alt":7,"src":338},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/7a43c20b52174b0d97d97316c42d5324.png",[],{"type":17,"tag":25,"props":341,"children":342},{},[343,345,349,351,355,357,361,363,367,369,373],{"type":23,"value":344},"其中，自相关系数",{"type":17,"tag":49,"props":346,"children":348},{"alt":7,"src":347},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/01772c618cdd4db094bcad4bf920bea9.png",[],{"type":23,"value":350},"表示离散时间序列",{"type":17,"tag":49,"props":352,"children":354},{"alt":7,"src":353},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/6026fb64e8a54991aefe6354ebc78046.png",[],{"type":23,"value":356},"与它的t延迟滞后序列",{"type":17,"tag":49,"props":358,"children":360},{"alt":7,"src":359},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/4988ee9b281a4329b0d0202ef381b49f.png",[],{"type":23,"value":362},"之间的时延相似性，这种时延相似性被看作估计周期长度的非归一化置信度，即周期长度为t的置信度为",{"type":17,"tag":49,"props":364,"children":366},{"alt":7,"src":365},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/4100f8d43dfb4b27b4eed89f5674a5d8.png",[],{"type":23,"value":368},"。接着，选择最可能的k个周期长度",{"type":17,"tag":49,"props":370,"children":372},{"alt":7,"src":371},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/f7f52e40e36e4aec8d851c4db9e5bd7a.png",[],{"type":23,"value":374},"。基于周期的依赖关系由上述估计的周期导出，并可以通过相应的自相关来加权。",{"type":17,"tag":25,"props":376,"children":377},{},[378],{"type":23,"value":379},"基于Wiener-Khinchin理论，可以使用快速傅立叶变换（FFT）得到，因此引出对于周期的依赖发现。计算过程如下：",{"type":17,"tag":25,"props":381,"children":382},{},[383,387,388],{"type":17,"tag":49,"props":384,"children":386},{"alt":7,"src":385},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/e3885dce9c7e431cb2aaa29719913e70.png",[],{"type":23,"value":69},{"type":17,"tag":49,"props":389,"children":391},{"alt":7,"src":390},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/99b8f30de8c5495d97d20a760813e7fd.png",[],{"type":17,"tag":18,"props":393,"children":395},{"id":394},"_02-数据集上的评价指标得分",[396,401,402],{"type":17,"tag":63,"props":397,"children":398},{},[399],{"type":23,"value":400},"# 02",{"type":23,"value":69},{"type":17,"tag":63,"props":403,"children":404},{},[405],{"type":23,"value":406},"数据集上的评价指标得分",{"type":17,"tag":25,"props":408,"children":409},{},[410],{"type":17,"tag":63,"props":411,"children":412},{},[413],{"type":23,"value":414},"1、主要实验结果",{"type":17,"tag":25,"props":416,"children":417},{},[418],{"type":23,"value":419},"Autoformer在处理长时间序列问题上表现出色，在涵盖能源、交通、经济、气象、疾病五大领域的6个数据集上进行了模型验证均取得了当时的最优（SOTA）效果。该实验部分来自Autoformer原文的实验。",{"type":17,"tag":25,"props":421,"children":422},{},[423,427],{"type":17,"tag":49,"props":424,"children":426},{"alt":7,"src":425},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/3189866dab2a4b15ab9ca148ca4e4d28.png",[],{"type":23,"value":428}," Autoformer在上述的多个领域数据集、各种输入-输出长度的设置下，取得了当时的最优（SOTA）结果。",{"type":17,"tag":25,"props":430,"children":431},{},[432],{"type":17,"tag":49,"props":433,"children":435},{"alt":7,"src":434},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/9fb3dedf54f3490ca89f53d30178f4ca.png",[],{"type":17,"tag":25,"props":437,"children":438},{},[439],{"type":23,"value":440},"其中，前五个数据集指标为input-96-predict-336设置，ILI数据集为input-24-predict-60设置。相比于之前的SOTA结果，Autoformer实现了ETT能源数据集74%的MSE提升，Electricity能源数据集MSE提升24%，Exchange经济数据集提升64%，Traffic交通数据集提升14%，Weather气象数据集提升26%，在input-24-predict-60设置下，ILI疾病数据集提升30%。在上述6个数据集，Autoformer在MSE指标上平均提升38%。",{"type":17,"tag":25,"props":442,"children":443},{},[444],{"type":17,"tag":63,"props":445,"children":446},{},[447],{"type":23,"value":448},"2、对比实验",{"type":17,"tag":25,"props":450,"children":451},{},[452],{"type":17,"tag":63,"props":453,"children":454},{},[455],{"type":23,"value":456},"深度分解架构的通用性实验部分：",{"type":17,"tag":25,"props":458,"children":459},{},[460],{"type":23,"value":461},"将Autoformer提出的深度分解架构应用于其他Transformer-based模型，均可以得到明显提升，且随着预测时效的延长，效果提升更明显，具有较好通用性。在ETT数据集上的MSE指标对比如表4所示。",{"type":17,"tag":25,"props":463,"children":464},{},[465],{"type":17,"tag":49,"props":466,"children":468},{"alt":7,"src":467},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/4748456a14464c7ca7ddb0cba08feef1.png",[],{"type":17,"tag":25,"props":470,"children":471},{},[472],{"type":23,"value":473},"Origin表示直接预测，Sep表示先分解后预测（用模块进行预处理，得到s和t序列分别预测，最后加和），Ours表示深度分解架构。",{"type":17,"tag":25,"props":475,"children":476},{},[477],{"type":17,"tag":63,"props":478,"children":479},{},[480],{"type":23,"value":481},"自相关机制与自注意力机制的对比实验部分：",{"type":17,"tag":25,"props":483,"children":484},{},[485],{"type":23,"value":486},"同样基于深度分解架构，在多种输入-输出设置下，对比了自相关机制与经典Transformer中Full Attention、Informer中PropSparse Attention等自注意力机制及其变体，Autoformer中提出的自相关机制依然取得了最优的结果。在ETT数据集上对比实验如表5所示。",{"type":17,"tag":25,"props":488,"children":489},{},[490],{"type":17,"tag":49,"props":491,"children":493},{"alt":7,"src":492},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/14086ce245564f6e866f9e4f4a0cb4b2.png",[],{"type":17,"tag":18,"props":495,"children":497},{"id":496},"_03-针对mindspore-nlp实现的评估",[498,503,504],{"type":17,"tag":63,"props":499,"children":500},{},[501],{"type":23,"value":502},"# 03",{"type":23,"value":69},{"type":17,"tag":63,"props":505,"children":506},{},[507],{"type":23,"value":508},"针对MindSpore NLP实现的评估",{"type":17,"tag":25,"props":510,"children":511},{},[512],{"type":23,"value":513},"对于MindSpore NLP的Autoformer实现，我们使用了官方相同的“Traffic”基准来评估Autoformer模型的长时间序列预测性能，输入长度为48，预测长度为24。Traffic描述了道路占用率。它包含 2015 年至 2016 年旧金山高速公路传感器记录的每小时数据。是非常标准的时间序列常用数据集。主要的评价指标包括Univariate-MASE。整个数据加载与模型测试流程均与huggingface中提供的官方样例相同。",{"type":17,"tag":25,"props":515,"children":516},{},[517],{"type":23,"value":518},"本次测试训练集的长度为862，测试集的长度为6034，模型为每个时间序列生成了 100 个不同的预测样本，每个预测样本的预测时间步长为24。根据测试结果，MindNLP实现与官方实现的误差在1%以内。",{"type":17,"tag":25,"props":520,"children":521},{},[522],{"type":23,"value":523},"测试结果如下：",{"type":17,"tag":25,"props":525,"children":526},{},[527],{"type":17,"tag":49,"props":528,"children":530},{"alt":7,"src":529},"https://obs-mindspore-file.obs.cn-north-4.myhuaweicloud.com/file/2025/10/13/9f9be4f834e849498e8f642dc679dbbd.png",[],{"type":17,"tag":25,"props":532,"children":533},{},[534],{"type":23,"value":535},"实验结果表明，Autoformer在MindNLP的实现其精度比官方实现还要高，误差均低于1%。",{"type":17,"tag":25,"props":537,"children":538},{},[539,541],{"type":23,"value":540},"完整推理代码：",{"type":17,"tag":542,"props":543,"children":547},"a",{"href":544,"rel":545},"https://github.com/4everImmortality/autoformer_mindnlp_test",[546],"nofollow",[548],{"type":23,"value":544},{"type":17,"tag":18,"props":550,"children":552},{"id":551},"_04-总结",[553,558,559],{"type":17,"tag":63,"props":554,"children":555},{},[556],{"type":23,"value":557},"# 04",{"type":23,"value":69},{"type":17,"tag":63,"props":560,"children":561},{},[562],{"type":23,"value":563},"总结",{"type":17,"tag":25,"props":565,"children":566},{},[567],{"type":23,"value":568},"Autoformer模型凭借深度分解架构和自相关机制在长时序列预测、复杂时间模式难以处理、运算效率高的问题上表现优异，通过渐进式分解和序列级连接，大幅提高了长时预测效率。",{"type":17,"tag":25,"props":570,"children":571},{},[572],{"type":23,"value":573},"建议各位开发者利用MindSpore NLP等工具来加载并复现该模型的实验成果。MindSpore NLP提供了一套与PyTorch风格一致的简洁接口，加载和评估预训练模型非常直接和高效。",{"type":17,"tag":25,"props":575,"children":576},{},[577],{"type":17,"tag":63,"props":578,"children":579},{},[580],{"type":23,"value":581},"参考链接",{"type":17,"tag":25,"props":583,"children":584},{},[585],{"type":23,"value":586},"[1]arXiv:2106.13008v5:",{"type":17,"tag":25,"props":588,"children":589},{},[590],{"type":17,"tag":542,"props":591,"children":594},{"href":592,"rel":593},"https://arxiv.org/abs/2106.13008v5",[546],[595],{"type":23,"value":592},{"type":17,"tag":25,"props":597,"children":598},{},[599],{"type":23,"value":600},"[2] Autoformer:基于深度分解架构和自相关机制的长期序列预测模型:",{"type":17,"tag":25,"props":602,"children":603},{},[604],{"type":17,"tag":542,"props":605,"children":608},{"href":606,"rel":607},"https://zhuanlan.zhihu.com/p/385066440",[546],[609],{"type":23,"value":606},{"type":17,"tag":25,"props":611,"children":612},{},[613],{"type":23,"value":614},"[3] 革新Transformer！清华大学提出全新Autoformer骨干网络，长时序预测达到SOTA:",{"type":17,"tag":25,"props":616,"children":617},{},[618],{"type":17,"tag":542,"props":619,"children":622},{"href":620,"rel":621},"https://zhuanlan.zhihu.com/p/387810819",[546],[623],{"type":23,"value":620},{"title":7,"searchDepth":625,"depth":625,"links":626},4,[],"markdown","content:technology-blogs:zh:3854.md","content","technology-blogs/zh/3854.md","technology-blogs/zh/3854","md",1776506136159]