from numpy import newaxis, fill_diagonal, sum, sqrt

NA = newaxis

# \grad_k V(\xx_k) = 4\epsilon \sum_{j\ne k} (6\sigma^6/r_{kj}^7 - 12\sigma^{12}/r_{kj}^{13}) u_{kj}
#                  = 4\epsilon \sum_{j=1}^N (6\sigma^6/\tilde{r}_{kj}^7 - 12\sigma^{12}/\tilde{r}_{kj}^{13}) u_{kj}
# with u_{kk} = (0,0,0) and \tilde{r}_{kk} = 1.
def LJgradient(sigma, epsilon):

    def gradV(X):
        d = X[:, NA] - X[NA, :]              # (N,N,3) displacement vectors \xx_i - \xx_j
        r = sqrt(sum(d*d, axis=-1))          # (N,N)   distances
        fill_diagonal(r, 1)                  # Don't divide by zero on the diagonal
        u = d/r[:, :, NA]                    # (N,N,3) unit vectors in direction of \xx_i - \xx_j

        T = 6*(sigma**6)*(r**-7) - 12*(sigma**12)*(r**-13)   # (N,N) matrix of r-derivatives
        # Using the chain rule, we turn the (N,N)-matrix of r-derivatives into the
        # (N,3)-array of derivatives to Cartesian coordinates, i.e. the gradient.
        return 4*epsilon*sum(T[:, :, NA]*u, axis=1)

    return gradV
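
# The block below is a minimal usage and verification sketch, not part of the
# original listing: the coordinates, the sigma/epsilon values (reduced units),
# and the helper LJpotential are illustrative assumptions. It compares the
# analytic gradient from LJgradient against a central finite-difference
# approximation of the standard Lennard-Jones pair energy
# V = \sum_{i<j} 4\epsilon ((\sigma/r_{ij})^{12} - (\sigma/r_{ij})^6).
if __name__ == "__main__":
    import numpy as np

    def LJpotential(X, sigma, epsilon):
        d = X[:, NA] - X[NA, :]
        r = np.sqrt(np.sum(d*d, axis=-1))
        fill_diagonal(r, np.inf)             # exclude self-interaction
        V = 4*epsilon*((sigma/r)**12 - (sigma/r)**6)
        return 0.5*np.sum(V)                 # each pair appears twice in the (N,N) matrix

    sigma, epsilon = 1.0, 1.0                # assumed reduced units
    X = np.array([[0.0, 0.0, 0.0],           # four particles, all pair distances > sigma
                  [1.5, 0.0, 0.0],
                  [0.0, 1.6, 0.1],
                  [1.4, 1.5, 1.5]])

    G = LJgradient(sigma, epsilon)(X)        # (4,3) array of analytic gradient vectors

    h = 1e-6                                 # central finite differences with step h
    G_fd = np.zeros_like(X)
    for k in range(X.shape[0]):
        for a in range(3):
            Xp, Xm = X.copy(), X.copy()
            Xp[k, a] += h
            Xm[k, a] -= h
            G_fd[k, a] = (LJpotential(Xp, sigma, epsilon)
                          - LJpotential(Xm, sigma, epsilon))/(2*h)

    print("max |analytic - finite difference|:", np.abs(G - G_fd).max())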