% Problem note (translated): after pasting this algorithm code into my
% document, all content in the document becomes vertically centered.
% (Commented out: LaTeX forbids non-comment text before \documentclass.)
\documentclass[a4paper]{article}
% For reducing margin (NOTE(review): no margin package is actually loaded;
% the standard choice would be \usepackage{geometry} -- confirm intent)
\usepackage[T1]{fontenc}          % proper output encoding: correct hyphenation, copyable PDF text
\usepackage[utf8]{inputenc}       % UTF-8 input (default since 2018; harmless to keep)
\usepackage[english]{babel}
\usepackage{amsmath}              % required by the equation environments in the body
\usepackage{algorithm}            % float wrapper for algorithms (loads the float package)
%\usepackage[algo2e]{algorithm2e} % keep disabled: algorithm2e must NOT be mixed with algorithm/algpseudocode
\usepackage{arevmath}             % For math symbols. NOTE(review): ships with the old
                                  % arev bundle -- confirm availability in your TeX distribution
\usepackage{algpseudocode}        % algorithmicx pseudocode commands (\Procedure, \State, ...)
\title{Algorithm template}
\author{Ajay}
\date{\today} % Today's date
\begin{document}

\maketitle % render the title/author/date declared in the preamble (was missing)

% Placement [H] (provided by the float package, which algorithm loads) pins the
% algorithm exactly here. With a floating specifier such as [t], a document whose
% only body content is this float produces a "float page", and floats on float
% pages are vertically centered by LaTeX's default \@fptop/\@fpbot glue -- that
% is the reported symptom of everything appearing vertically centered.
\begin{algorithm}[H]
\caption{RL-training Algorithm}
\label{alg:rl-training} % label after caption so \ref resolves to the algorithm
\begin{algorithmic}[1]
\Procedure{RL-Training}{Document $D$, Training step $T$, Batch size $B$}
    \State Initialize network params $\theta$
    \For{$t = 1$ to $T$}
        \State Sample input sentence $s_{i}$ from document $D$ for $i \in \{1, \ldots, B\}$
        \State Calculate reward value by comparing ROUGE-L score between ground truth summary and predicted summary
        \State Calculate gradient of loss as
        % plain equation: the former single-line aligned wrapper added nothing
        \begin{equation}
            \nabla L(\theta) \approx -r(\hat{y}) \sum_{i=1}^{B} \nabla \log p\left(\hat{y}_{i} \mid s_{i}, D, \theta\right)
        \end{equation}
        \State Normalize the gradient with batch size
        \begin{equation}
            g_\theta = \frac{1}{B} \nabla L(\theta)
        \end{equation}
        \State Update $\theta$ with the Adam optimizer
        \begin{equation}
            \theta \gets \Call{Adam}{\theta, g_{\theta}}
        \end{equation}
    \EndFor
    \State \Return $\theta$ % no \newline here: forced breaks inside algorithmic cause underfull hboxes
\EndProcedure
\end{algorithmic}
\end{algorithm}

\end{document}