"""
---------------------------------------------------------------------------
cognitive_science_learning_model_base.py

Base class to implement behavioural learning models.

Modification Logs:
: 08 July 24 : zach.wolpe@medibio.com.au : init
---------------------------------------------------------------------------
"""
from scipy.optimize import minimize
import plotly.graph_objects as go
import matplotlib.pyplot as plt
import ipywidgets as widgets
from scipy import stats
import pandas as pd
import numpy as np
# internal modules.
from src.rescorla_wagner_model import (RoscorlaWagner)
from src.rescorla_wagner_model_plots import (RescorlaWagnerPlots)
from src.rescorla_wagner_model_simulation import (RescorlaWagnerSimulate)
from src.rescorla_wagner_model_diagnostics import (RoscorlaWagerModelDiagnostics)
from abc import ABC, abstractmethod
from tqdm import tqdm
from plotly.subplots import make_subplots
" +-- Helpers ------------------------------------------------------------>> "
def add_diag_line(fig, xmin=0, xmax=1, marker_color='black', row=1, col=1):
    """Add a y=x reference line to a plotly subplot figure."""
    _x = np.linspace(xmin, xmax, 100)
    _y = _x
    fig.add_trace(
        go.Scatter(x=_x, y=_y, mode='lines', marker_color=marker_color, name='y=x', showlegend=False),
        row=row, col=col)
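# Usage sketch (illustrative only, not part of the module API): add_diag_line
# targets figures built with plotly's make_subplots, e.g. when plotting
# recovered vs. simulated parameters.
#
#   fig = make_subplots(rows=1, cols=2)
#   fig.add_trace(go.Scatter(x=[0.1, 0.5, 0.9], y=[0.2, 0.4, 0.95],
#                            mode='markers', name='alpha'), row=1, col=1)
#   add_diag_line(fig, xmin=0, xmax=1, row=1, col=1)   # y=x reference line
#   fig.show()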
" +-- Base Class ---------------------------------------------------------->> "
class MultiArmedBanditModels(ABC):
    def __init__(self):
        pass

    @abstractmethod
    def predict(self):
        pass

    @abstractmethod
    def simulate(self, **kwargs):
        self.simulated_params = kwargs

    @abstractmethod
    def neg_log_likelihood(self):
        pass

    @abstractmethod
    def perform_sensitivity_analysis(self):
        pass

    @abstractmethod
    def optimize_brute_force(self, loss_function, loss_kwargs, parameter_ranges):
        # Evaluate the loss for every candidate parameter set and return the
        # index, parameters and value of the minimum negative log-likelihood.
        neg_log_likelihoods = [loss_function(**params) for params in loss_kwargs]
        min_loss_func_idx = np.argmin(neg_log_likelihoods)
        min_loss_value = min(neg_log_likelihoods)
        return min_loss_func_idx, loss_kwargs[min_loss_func_idx], min_loss_value
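
    # Usage sketch (hypothetical subclass; parameter names alpha/theta are
    # assumptions, illustrative only): build a grid of candidate parameter
    # dicts and let optimize_brute_force pick the minimum.
    #
    #   grid = [dict(alpha=a, theta=t)
    #           for a in np.linspace(0.01, 0.99, 25)
    #           for t in np.linspace(0.1, 10.0, 25)]
    #   idx, best_params, best_nll = model.optimize_brute_force(
    #       loss_function=model.neg_log_likelihood,
    #       loss_kwargs=grid,
    #       parameter_ranges=None)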
    def optimize_scikit(self, loss_function, init_guess, args, bounds):
        # Fit parameters with scipy.optimize.minimize.
        result = minimize(
            loss_function,
            init_guess,
            args=args,
            bounds=bounds
        )
        res_nll = result.fun
        param_fits = result.x
        return result, res_nll, param_fits
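
    # Usage sketch (illustrative; the loss body is an assumption): scipy's
    # minimize expects loss(params, *args) -> float.
    #
    #   def loss(params, actions, rewards):
    #       alpha, theta = params
    #       return model.neg_log_likelihood(...)          # subclass-specific
    #
    #   result, nll, (alpha_hat, theta_hat) = model.optimize_scikit(
    #       loss_function=loss,
    #       init_guess=[0.5, 1.0],
    #       args=(actions, rewards),
    #       bounds=[(0, 1), (0.1, 10)])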
    @abstractmethod
    def plot_neg_log_likelihood(self):
        pass

    def compare_fitting_procedures(self):
        pass

    def perform_sensitivity_analysis(self):
        pass
    def compute_BIC(self, LL, T, k_params=2):
        # Bayesian Information Criterion: BIC = -2*LL + k*ln(T).
        return -2 * LL + k_params * np.log(T)
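
# Subclassing sketch (illustrative; the class name and method bodies below are
# assumptions, not part of this module):
#
#   class RescorlaWagnerBandit(MultiArmedBanditModels):
#       def simulate(self, **kwargs):
#           super().simulate(**kwargs)   # record simulated parameters
#           ...                          # generate actions/rewards
#       def predict(self): ...
#       def neg_log_likelihood(self): ...
#       def perform_sensitivity_analysis(self): ...
#       def optimize_brute_force(self, loss_function, loss_kwargs, parameter_ranges):
#           return super().optimize_brute_force(loss_function, loss_kwargs, parameter_ranges)
#       def plot_neg_log_likelihood(self): ...
#
#   model = RescorlaWagnerBandit()
#   bic = model.compute_BIC(LL=-120.0, T=250, k_params=2)   # -2*LL + k*ln(T)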