zackxconti / bnmetamodel_gh

Repo for bnmetamodel lib version for Lab Mouse Grasshopper plug-in.

Find way to enter probabilities and convert them to likelihoods in `inferPD_JT_soft` #40

Open kallewesterling opened 1 year ago

kallewesterling commented 1 year ago

See `BayesianNetwork.inferPD_JT_soft`: currently you can only enter likelihoods. We need to find a way to enter probabilities and convert them to likelihoods (a possible conversion sketch follows the snippet below).

def inferPD_JT_soft(self, softEvidence):
    # Perform inference with soft (virtual) evidence using the join tree only.
    # Assumed module-level imports: pandas as pd;
    # EvidenceBuilder, EvidenceType from pybbn.graph.jointree.
    print('performing inference using junction tree algorithm ...')

    def potential_to_df(p):
        # Convert a pybbn potential into a dataframe of (bin value, probability) rows.
        data = []
        for pe in p.entries:
            v = list(pe.entries.values())[0]
            prob = pe.value  # renamed to avoid shadowing the potential argument p
            data.append((v, prob))
        return pd.DataFrame(data, columns=['val', 'p'])

    def potentials_to_dfs(join_tree):
        # Collect (variable name, posterior dataframe) pairs for every node in the join tree.
        data = []
        for node in join_tree.get_bbn_nodes():
            name = node.variable.name
            df = potential_to_df(join_tree.get_bbn_potential(node))
            data.append((name, df))
        return data

    def pybbnToLibpgm_posteriors(pybbnPosteriors):
        # Convert pybbn posteriors into a {variable: [p_bin0, p_bin1, ...]} dictionary,
        # with probabilities ordered by bin value.
        posteriors = {}
        for var, df in pybbnPosteriors:
            sorted_df = df.sort_values(by=['val'])
            posteriors[var] = sorted_df['p'].tolist()
        return posteriors  # dictionary of lists, one list of bin probabilities per variable

    evidenceList = []

    for evName in softEvidence.keys():
        ev = EvidenceBuilder().with_node(self.join_tree.get_bbn_node_by_name(evName))

        for state, likelihood in enumerate(softEvidence[evName]):
            ev.values[state] = likelihood

        ev = ev.with_type(EvidenceType.VIRTUAL).build()  # specify evidence type as virtual (soft): values are likelihoods, not probabilities
        evidenceList.append(ev)

    self.join_tree.unobserve_all()
    self.join_tree.update_evidences(evidenceList)

    posteriors = potentials_to_dfs(self.join_tree)  # contains posteriors + evidence distributions

    # join tree algorithm seems to eliminate bins whose posterior probabilities are zero
    # the following checks for missing bins and adds them back

    for posterior in posteriors:
        numbins = len(self.BinRanges[posterior[0]])

        for i in range(0, numbins):
            if float(i) not in posterior[1]['val'].tolist():
                # bin i was dropped by the join tree; add it back with zero probability
                posterior[1].loc[len(posterior[1])] = [float(i), 0.0]

    posteriorsDict = pybbnToLibpgm_posteriors(posteriors)

    print('inference is complete ... posterior distributions were generated successfully')

    return posteriorsDict  # posteriors + evidence distributions (for visualising)
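
One possible direction (a minimal sketch, not part of the existing code): if the user supplies a target probability distribution for a node instead of likelihoods, it can be converted by dividing each target probability by the node's current prior for that bin, because virtual evidence with likelihood ratios q_i / p_i drives that node's posterior marginal to q_i (assuming no other evidence is attached to the node). The helper name `probabilities_to_likelihoods` below and the way the priors are obtained are assumptions for illustration only.

def probabilities_to_likelihoods(target_probs, prior_probs, eps=1e-12):
    # Sketch: return likelihoods L_i proportional to target_probs[i] / prior_probs[i].
    if len(target_probs) != len(prior_probs):
        raise ValueError('target and prior must have the same number of bins')

    likelihoods = []
    for q, p in zip(target_probs, prior_probs):
        # guard against zero-probability bins in the prior
        likelihoods.append(q / max(p, eps))

    # normalisation is optional: virtual evidence only uses the ratios,
    # but scaling keeps the numbers readable
    total = sum(likelihoods)
    return [lk / total for lk in likelihoods] if total > 0 else likelihoods

Hypothetical usage (the prior retrieval is an assumption): the prior marginals could be read off by running the method with no evidence first, e.g. `priors = self.inferPD_JT_soft({})`, then building `softEvidence = {name: probabilities_to_likelihoods(probs, priors[name]) for name, probs in userProbabilities.items()}` before calling `inferPD_JT_soft(softEvidence)` as usual.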