abstract = {We introduce a new paradigm for generative modeling built on Continuous Normalizing Flows ({CNFs}), allowing us to train {CNFs} at unprecedented scale. Specifically, we present the notion of Flow Matching ({FM}), a simulation-free approach for training {CNFs} based on regressing vector fields of fixed conditional probability paths. Flow Matching is compatible with a general family of Gaussian probability paths for transforming between noise and data samples -- which subsumes existing diffusion paths as specific instances. Interestingly, we find that employing {FM} with diffusion paths results in a more robust and stable alternative for training diffusion models. Furthermore, Flow Matching opens the door to training {CNFs} with other, non-diffusion probability paths. An instance of particular interest is using Optimal Transport ({OT}) displacement interpolation to define the conditional probability paths. These paths are more efficient than diffusion paths, provide faster training and sampling, and result in better generalization. Training {CNFs} using Flow Matching on {ImageNet} leads to consistently better performance than alternative diffusion-based methods in terms of both likelihood and sample quality, and allows fast and reliable sample generation using off-the-shelf numerical {ODE} solvers.},
number = {{arXiv}:2210.02747},
publisher = {{arXiv}},
author = {Lipman, Yaron and Chen, Ricky T. Q. and Ben-Hamu, Heli and Nickel, Maximilian and Le, Matt},
urldate = {2024-07-05},
date = {2023-02-08},
eprinttype = {arxiv},
eprint = {2210.02747 [cs, stat]}
}
14
+
15
+
@article{albergo2023stochastic,
  title={Stochastic interpolants: A unifying framework for flows and diffusions},
  author={Albergo, Michael S and Boffi, Nicholas M and Vanden-Eijnden, Eric},
  journal={arXiv preprint arXiv:2303.08797},
  year={2023}
}

@article{tong2023improving,
  title={Improving and generalizing flow-based generative models with minibatch optimal transport},
  author={Tong, Alexander and Fatras, Kilian and Malkin, Nikolay and Huguet, Guillaume and Zhang, Yanlei and Rector-Brooks, Jarrid and Wolf, Guy and Bengio, Yoshua},
  journal={arXiv preprint arXiv:2302.00482},
  year={2023}
}

@article{liu2022flow,
  title={Flow straight and fast: Learning to generate and transfer data with rectified flow},
  author={Liu, Xingchao and Gong, Chengyue and Liu, Qiang},
  journal={arXiv preprint arXiv:2209.03003},
  year={2022}
}

@article{hu2021lora,
  title={{LoRA}: Low-rank adaptation of large language models},
  author={Hu, Edward J and Shen, Yelong and Wallis, Phillip and Allen-Zhu, Zeyuan and Li, Yuanzhi and Wang, Shean and Wang, Lu and Chen, Weizhu},
  journal={arXiv preprint arXiv:2106.09685},
  year={2021}
}

@article{micikevicius2017mixed,
  title={Mixed precision training},
  author={Micikevicius, Paulius and Narang, Sharan and Alben, Jonah and Diamos, Gregory and Elsen, Erich and Garcia, David and Ginsburg, Boris and Houston, Michael and Kuchaiev, Oleksii and Venkatesh, Ganesh and others},
  journal={arXiv preprint arXiv:1710.03740},
  year={2017}
}

@inproceedings{fu2025moflowonestep,
  author    = {Fu, Yuxiang and Yan, Qi and Wang, Lele and Li, Ke and Liao, Renjie},
  title     = {{MoFlow}: One-Step Flow Matching for Human Trajectory Forecasting via Implicit Maximum Likelihood Estimation based Distillation},
  booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
  year      = {2025},
}

@misc{lipman2024flowmatchingguidecode,
  title={Flow Matching Guide and Code},
  author={Yaron Lipman and Marton Havasi and Peter Holderrieth and Neta Shaul and Matt Le and Brian Karrer and Ricky T. Q. Chen and David Lopez-Paz and Heli Ben-Hamu and Itai Gat},
  year={2024},
  eprint={2412.06264},
  archivePrefix={arXiv},
  primaryClass={cs.LG},
  url={https://arxiv.org/abs/2412.06264},
}

@article{boffi2025build,
  title={How to build a consistency model: Learning flow maps via self-distillation},
  author={Boffi, Nicholas M and Albergo, Michael S and Vanden-Eijnden, Eric},
  journal={arXiv preprint arXiv:2505.18825},
  year={2025}
}

@article{geng2025mean,
  title={Mean flows for one-step generative modeling},
  author={Geng, Zhengyang and Deng, Mingyang and Bai, Xingjian and Kolter, J Zico and He, Kaiming},
  journal={arXiv preprint arXiv:2505.13447},
  year={2025}
}

@article{peng2025flow,
  title={Flow-Anchored Consistency Models},
  author={Peng, Yansong and Zhu, Kai and Liu, Yu and Wu, Pingyu and Li, Hebei and Sun, Xiaoyan and Wu, Feng},
  journal={arXiv preprint arXiv:2507.03738},
  year={2025}
}

@article{guo2025splitmeanflow,
  title={{SplitMeanFlow}: Interval Splitting Consistency in Few-Step Generative Modeling},
  author={Guo, Yi and Wang, Wei and Yuan, Zhihang and Cao, Rong and Chen, Kuan and Chen, Zhengyang and Huo, Yuanyuan and Zhang, Yang and Wang, Yuping and Liu, Shouda and others},
  journal={arXiv preprint arXiv:2507.16884},
  year={2025}
}

@article{ho2020denoising,
  title={Denoising diffusion probabilistic models},
  author={Ho, Jonathan and Jain, Ajay and Abbeel, Pieter},
  journal={Advances in Neural Information Processing Systems},
  volume={33},
  pages={6840--6851},
  year={2020}
}

@article{song2020score,
  title={Score-based generative modeling through stochastic differential equations},
  author={Song, Yang and Sohl-Dickstein, Jascha and Kingma, Diederik P and Kumar, Abhishek and Ermon, Stefano and Poole, Ben},
  journal={arXiv preprint arXiv:2011.13456},
  year={2020}
}

@article{lu2024simplifying,
  title={Simplifying, stabilizing and scaling continuous-time consistency models},
  author={Lu, Cheng and Song, Yang},
  journal={arXiv preprint arXiv:2410.11081},
  year={2024}
}

@article{kim2023consistency,
  title={Consistency trajectory models: Learning probability flow {ODE} trajectory of diffusion},
  author={Kim, Dongjun and Lai, Chieh-Hsin and Liao, Wei-Hsiang and Murata, Naoki and Takida, Yuhta and Uesaka, Toshimitsu and He, Yutong and Mitsufuji, Yuki and Ermon, Stefano},
  journal={arXiv preprint arXiv:2310.02279},
  year={2023}
}

@article{sabour2025align,
  title={Align Your Flow: Scaling Continuous-Time Flow Map Distillation},
  author={Sabour, Amirmojtaba and Fidler, Sanja and Kreis, Karsten},
  journal={arXiv preprint arXiv:2506.14603},
  year={2025}
}

@article{frans2024one,
  title={One step diffusion via shortcut models},
  author={Frans, Kevin and Hafner, Danijar and Levine, Sergey and Abbeel, Pieter},
  journal={arXiv preprint arXiv:2410.12557},
  year={2024}
}

@article{zhou2025inductive,
  title={Inductive moment matching},
  author={Zhou, Linqi and Ermon, Stefano and Song, Jiaming},
  journal={arXiv preprint arXiv:2503.07565},
  year={2025}
}

@article{yin2024improved,
  title={Improved distribution matching distillation for fast image synthesis},
  author={Yin, Tianwei and Gharbi, Micha{\"e}l and Park, Taesung and Zhang, Richard and Shechtman, Eli and Durand, Fredo and Freeman, Bill},
  journal={Advances in Neural Information Processing Systems},
  volume={37},
  pages={47455--47487},
  year={2024}
}

@article{song2020denoising,
  title={Denoising diffusion implicit models},
  author={Song, Jiaming and Meng, Chenlin and Ermon, Stefano},
  journal={arXiv preprint arXiv:2010.02502},
  year={2020}
}

@article{wang2025uni,
  title={{Uni-Instruct}: One-step Diffusion Model through Unified Diffusion Divergence Instruction},
  author={Wang, Yifei and Bai, Weimin and Zhang, Colin and Zhang, Debing and Luo, Weijian and Sun, He},
  journal={arXiv preprint arXiv:2505.20755},
  year={2025}
}

@inproceedings{zhou2024score,
  title={Score identity Distillation: Exponentially Fast Distillation of Pretrained Diffusion Models for One-Step Generation},
  author={Mingyuan Zhou and Huangjie Zheng and Zhendong Wang and Mingzhang Yin and Hai Huang},
  booktitle={International Conference on Machine Learning},
  url={https://arxiv.org/abs/2404.04057},
  year={2024}
}

@article{xu2025one,
  title={One-step Diffusion Models with $ f $-Divergence Distribution Matching},
  author={Xu, Yilun and Nie, Weili and Vahdat, Arash},