Skip to content

Commit

Permalink
[EXPR-2]working Sirisaac
Browse files Browse the repository at this point in the history
  • Loading branch information
sudheerad9 committed Jul 24, 2021
1 parent 140339f commit fd739ea
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions SirIsaac/fittingProblem.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ def fitAll(self,usePreviousParams=True,fitPerfectModel=False,resume=True,

if maxNumFit is None: maxNumFit = len(self.fittingModelNames)

for name in self.fittingModelNames[0:1]:
for name in self.fittingModelNames:
fittingModel = self.fittingModelDict[name]
print("fittingModel",fittingModel)
# 4.18.2012
Expand Down Expand Up @@ -293,7 +293,7 @@ def fitAll(self,usePreviousParams=True,fitPerfectModel=False,resume=True,
orderedLs = []
if not hasattr(self,'stopFittingN'):
self.stopFittingN = 3
for n in self.fittingModelNames[0:1]:
for n in self.fittingModelNames:
if self.logLikelihoodDict.has_key(n):
orderedLs.append(self.logLikelihoodDict[n])
if (len(orderedLs) > self.stopFittingN):
def logLikelihood(self, cost, singVals, priorSingVals):
    """Return the model log-likelihood used for model comparison.

    Computed as -(cost + 0.5*sum(log(singVals)) - 0.5*sum(log(priorSingVals))),
    i.e. the (negated) fit cost plus a log-determinant correction from the
    Hessian singular values relative to the prior singular values.

    Parameters
    ----------
    cost : float
        Best-fit cost (presumably a chi-squared-like quantity — confirm
        against the caller).
    singVals : sequence of float
        Positive singular values of the posterior Hessian; must be > 0
        since their logarithm is taken.
    priorSingVals : sequence of float
        Positive singular values of the prior Hessian; must be > 0.

    Notes
    -----
    - Leftover debug prints ("in loglokelig…") from the diffed commit were
      removed; they produced noise on every call and did not affect the
      returned value.
    - The pre-image of this commit used logsumexp(singVals) here; the
      committed version uses elementwise log, which this implementation keeps.
    """
    # numpy is imported locally: the scipy.log / scipy.sum numpy aliases the
    # original relied on were removed in modern SciPy releases.
    import numpy as np
    return -(cost
             + 0.5 * np.sum(np.log(singVals))
             - 0.5 * np.sum(np.log(priorSingVals)))


# 8.2.2009 updated to include 2pi
# 9.11.2013 corrected
def penalty(self, singVals, priorSingVals):
    """Return the log-determinant penalty term of the log-likelihood.

    Computed as 0.5*sum(log(singVals)) - 0.5*sum(log(priorSingVals)) —
    presumably half the log-ratio of posterior to prior Hessian
    determinants (Occam-factor style volume correction); confirm against
    the logLikelihood caller.

    Parameters
    ----------
    singVals : sequence of float
        Positive singular values; must be > 0 since log is taken.
    priorSingVals : sequence of float
        Positive prior singular values; must be > 0.

    Notes
    -----
    The diffed commit left both the pre-image (logsumexp) and post-image
    (elementwise log) return statements in place, making the second one
    unreachable; this keeps only the committed elementwise-log form.
    """
    # An earlier variant divided by 2*pi before the log (see the commit's
    # commented-out code); that normalization is intentionally absent here.
    # numpy is imported locally: the scipy.log / scipy.sum numpy aliases the
    # original relied on were removed in modern SciPy releases.
    import numpy as np
    return (0.5 * np.sum(np.log(singVals))
            - 0.5 * np.sum(np.log(priorSingVals)))

def numStiffSingVals(self, singVals, cutoff=None):
    """Count how many singular values survive the stiffness cutoff.

    Delegates the filtering to self._StiffSingVals and returns the size of
    the filtered collection.  cutoff=None defers to that helper's default.
    """
    stiff = self._StiffSingVals(singVals, cutoff)
    return len(stiff)
Expand Down

0 comments on commit fd739ea

Please sign in to comment.