Function LOGIT(y As Range, xraw As Range, Optional constant As Byte = 1, Optional stats As Byte = 0)
   'Defaults: include a constant in the regression (constant = 1),
   'do not return additional statistics (stats = 0)
   
   'Loop counters
   Dim i As Long, j As Long, jj As Long
   'Read data dimensions
   Dim K As Long, N As Long
   N = y.Rows.Count
   K = xraw.Columns.Count + constant
   'Add a column of ones to the x matrix if constant=1;
   'the resulting regressor matrix is called x from now on
   Dim x() As Double
   ReDim x(1 To N, 1 To K)
   For i = 1 To N
      x(i, 1) = 1
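      'If constant=0, the loop below starts at j=1 and overwrites the ones with the first regressor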
      For j = 1 + constant To K
         x(i, j) = xraw(i, j - constant)
      Next j
   Next i
   'Initializing the coefficient vector (b) and the score (bx)
   Dim b() As Double, bx() As Double, ybar As Double
   ReDim b(1 To K): ReDim bx(1 To N)
   ybar = Application.WorksheetFunction.Average(y)
   If constant = 1 Then b(1) = Log(ybar / (1 - ybar))
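   'Starting values: the intercept is set to the log odds of the sample mean of y,
   'all other coefficients are zero, so the initial score is simply b(1)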
   For i = 1 To N
      bx(i) = b(1)
   Next i
   'Newton iterations: in each pass, compute prediction Lambda, gradient dlnL,
   'Hessian hesse, and log likelihood lnL
   Dim Lambda() As Double, dlnL() As Double, hesse() As Double, lnL() As Double
   Dim hinv As Variant, hinvg As Variant
   Dim Change As Double, sens As Double
   Dim iter As Long, maxiter As Long
   sens = 10 ^ -11: maxiter = 100
   ReDim lnL(1 To maxiter)
   iter = 1
   Do While iter <= maxiter
   'ReDim inside the loop resets Lambda, dlnL and hesse to zero in each pass
   ReDim Lambda(1 To N), dlnL(1 To K), hesse(1 To K, 1 To K)
   For i = 1 To N
      Lambda(i) = 1 / (1 + Exp(-bx(i)))
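      'Lambda(i) is the logistic function of the score, i.e. the fitted Prob(y=1 | x)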
      For j = 1 To K
         dlnL(j) = dlnL(j) + (y(i) - Lambda(i)) * x(i, j)
         For jj = 1 To K
            hesse(jj, j) = hesse(jj, j) - Lambda(i) * (1 - Lambda(i)) * x(i, jj) * x(i, j)
         Next jj
      Next j
      lnL(iter) = lnL(iter) + y(i) * Log(1 / (1 + Exp(-bx(i)))) + (1 - y(i)) * Log(1 - 1 / (1 + Exp(-bx(i))))
   Next i
   'Compute inverse Hessian (=hinv) and multiply hinv with gradient dlnl
   hinv = Application.WorksheetFunction.MInverse(hesse)
   hinvg = Application.WorksheetFunction.MMult(dlnL, hinv)
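   'hinvg(j) is the j-th element of the Newton step: inverse Hessian times gradient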
   
   'Convergence: Change is the improvement of the log likelihood over the previous iteration
   If iter > 1 Then Change = lnL(iter) - lnL(iter - 1) Else Change = 1
   If Abs(Change) <= sens Then Exit Do
   'Apply Newton's scheme for updating coefficients b
   For j = 1 To K
      b(j) = b(j) - hinvg(j)
   Next j
   'Recompute the score bx with the updated coefficients
   For i = 1 To N
      bx(i) = 0
      For j = 1 To K
         bx(i) = bx(i) + b(j) * x(i, j)
      Next j
   Next i
   iter = iter + 1
   Loop
   'Log likelihood of a model with just a constant (lnL0)
   Dim lnL0 As Double
   lnL0 = N * (ybar * Log(ybar) + (1 - ybar) * Log(1 - ybar))
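   'Return the estimated coefficient vector b
   '(minimal output; the stats option is not processed in this listing)
   LOGIT = b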
End Function