4646 'DerivedModel' ,
4747 'PoisonedModel' ,
4848 'BinomialModel' ,
49+ 'GaussianHyperparameterizedModel' ,
4950 'MultinomialModel' ,
5051 'MLEModel' ,
5152 'RandomWalkModel' ,
5960from past .builtins import basestring
6061
6162import numpy as np
62- from scipy .stats import binom , multivariate_normal
63+ from scipy .stats import binom , multivariate_normal , norm
6364from itertools import combinations_with_replacement as tri_comb
6465
6566from qinfer .utils import binomial_pdf , multinomial_pdf , sample_multinomial
6667from qinfer .abstract_model import Model , DifferentiableModel
6768from qinfer ._lib import enum # <- TODO: replace with flufl.enum!
6869from qinfer .utils import binom_est_error
69- from qinfer .domains import IntegerDomain , MultinomialDomain
70+ from qinfer .domains import IntegerDomain , MultinomialDomain , RealDomain
7071
7172## FUNCTIONS ###################################################################
7273
@@ -245,9 +246,9 @@ def __init__(self, underlying_model):
245246 else :
246247 self ._expparams_scalar = False
247248 self ._expparams_dtype = underlying_model .expparams_dtype + [('n_meas' , 'uint' )]
248-
249+
249250 ## PROPERTIES ##
250-
251+
251252 @property
252253 def decorated_model (self ):
253254 # Provided for backcompat only.
@@ -380,6 +381,131 @@ def fisher_information(self, modelparams, expparams):
380381 )
381382 return two_outcome_fi * expparams ['n_meas' ]
382383
class GaussianHyperparameterizedModel(DerivedModel):
    r"""
    Model representing a two-outcome model viewed through samples
    from one of two distinct Gaussian distributions. This model adds four new
    model parameters to its underlying model, respectively representing the
    mean outcome conditioned on an underlying 0, the mean outcome conditioned
    on an underlying 1, and the variance of outcomes conditioned in each case.

    :param qinfer.abstract_model.Model underlying_model: An instance of a two-
        outcome model to be viewed through Gaussian distributions.
    """

    def __init__(self, underlying_model):
        super(GaussianHyperparameterizedModel, self).__init__(underlying_model)

        if not (underlying_model.is_n_outcomes_constant and underlying_model.n_outcomes(None) == 2):
            raise ValueError("Decorated model must be a two-outcome model.")

        # Slices selecting, respectively, the underlying model's own
        # parameters, the two conditional means (mu_0, mu_1) and the two
        # conditional variances (sigma_0^2, sigma_1^2) out of the extended
        # model parameter vector.
        n_orig_mps = underlying_model.n_modelparams
        self._orig_mps_slice = np.s_[:n_orig_mps]
        self._mu_slice = np.s_[n_orig_mps:n_orig_mps + 2]
        self._sigma2_slice = np.s_[n_orig_mps + 2:n_orig_mps + 4]

    ## PROPERTIES ##

    @property
    def decorated_model(self):
        # Provided for backcompat only.
        return self.underlying_model

    @property
    def modelparam_names(self):
        return self.underlying_model.modelparam_names + [
            r'\mu_0', r'\mu_1',
            r'\sigma_0^2', r'\sigma_1^2'
        ]

    @property
    def n_modelparams(self):
        return len(self.modelparam_names)

    ## METHODS ##

    def domain(self, expparams):
        # Observed outcomes are real-valued Gaussian draws, regardless of
        # the binary outcomes of the underlying model.
        return [RealDomain()] * len(expparams) if expparams is not None else RealDomain()

    def are_expparam_dtypes_consistent(self, expparams):
        return True

    def are_models_valid(self, modelparams):
        # A hyperparameterized model is valid iff the underlying parameters
        # are valid and both conditional variances are strictly positive.
        orig_mps = modelparams[:, self._orig_mps_slice]
        sigma2 = modelparams[:, self._sigma2_slice]

        return np.all([
            self.underlying_model.are_models_valid(orig_mps),
            np.all(sigma2 > 0, axis=-1)
        ], axis=0)

    def underlying_likelihood(self, binary_outcomes, modelparams, expparams):
        """
        Given outcomes hypothesized for the underlying model, returns the
        likelihood with which those outcomes occur.
        """
        original_mps = modelparams[..., self._orig_mps_slice]
        return self.underlying_model.likelihood(binary_outcomes, original_mps, expparams)

    def likelihood(self, outcomes, modelparams, expparams):
        # By calling the superclass implementation, we can consolidate
        # call counting there.
        super(GaussianHyperparameterizedModel, self).likelihood(outcomes, modelparams, expparams)

        # We want these to broadcast to the shape
        # (idx_underlying_outcome, idx_outcome, idx_modelparam, idx_experiment).
        # Thus, we need shape
        # (idx_underlying_outcome, 1, idx_modelparam, 1).
        mu = (modelparams[:, self._mu_slice].T)[:, np.newaxis, :, np.newaxis]
        sigma = np.sqrt(
            (modelparams[:, self._sigma2_slice].T)[:, np.newaxis, :, np.newaxis]
        )

        assert np.all(sigma > 0)

        # Now we can rescale the outcomes to be random variates z drawn from N(0, 1).
        scaled_outcomes = (outcomes[np.newaxis, :, np.newaxis, np.newaxis] - mu) / sigma

        # Conditional density Pr(x | underlying_outcome, model)
        # = N(z; 0, 1) / sigma, where the division by sigma is the Jacobian
        # of the rescaling x -> z. (BUG FIX: the Jacobian factor was
        # previously omitted, mis-weighting models with differing variances
        # relative to one another.)
        conditional_L = norm(0, 1).pdf(scaled_outcomes) / sigma

        # To find the marginalized likelihood, we now need the underlying
        # likelihood Pr(underlying_outcome | model), so that we can sum over
        # the idx_underlying_outcome axis. Note that we need to add a new
        # axis to shift the underlying outcomes left of the real-valued
        # outcomes z.
        underlying_L = self.underlying_likelihood(
            np.array([0, 1], dtype='uint'),
            modelparams, expparams
        )[:, None, :, :]

        # Now we marginalize and return.
        return (underlying_L * conditional_L).sum(axis=0)

    def simulate_experiment(self, modelparams, expparams, repeat=1):
        super(GaussianHyperparameterizedModel, self).simulate_experiment(modelparams, expparams)

        # Start by generating a bunch of (0, 1) normalized random variates
        # that we'll randomly rescale to the right location and shape.
        # (BUG FIX: the noise previously had shape (n_models, n_exps), so
        # every repetition reused identical variates when repeat > 1; each
        # repetition now gets fresh noise.)
        zs = np.random.randn(repeat, modelparams.shape[0], expparams.shape[0])

        # Next, we sample a bunch of underlying outcomes to figure out
        # how to rescale everything. Only the underlying model's own
        # parameters are passed down.
        underlying_outcomes = self.underlying_model.simulate_experiment(
            modelparams[:, self._orig_mps_slice], expparams, repeat=repeat
        )

        # We can now rescale zs to obtain the actual outcomes, choosing the
        # mean and standard deviation conditioned on each underlying outcome.
        mu = (modelparams[:, self._mu_slice].T)[:, None, :, None]
        sigma = np.sqrt(
            (modelparams[:, self._sigma2_slice].T)[:, None, :, None]
        )
        outcomes = (
            np.where(underlying_outcomes, mu[1], mu[0]) +
            np.where(underlying_outcomes, sigma[1], sigma[0]) * zs
        )

        # Collapse to a scalar for the single-shot, single-model,
        # single-experiment case, matching the convention of other models.
        return outcomes[0, 0, 0] if outcomes.size == 1 else outcomes
508+
383509class MultinomialModel (DerivedModel ):
384510 """
385511 Model representing finite numbers of iid samples from another model with
@@ -680,7 +806,7 @@ def __init__(
680806 # therefore, we need to add modelparams
681807 self ._has_fixed_covariance = False
682808 if self ._diagonal :
683- self ._srw_names = ["\sigma_{{{}}}" .format (name ) for name in self ._rw_names ]
809+ self ._srw_names = [r "\sigma_{{{}}}" .format (name ) for name in self ._rw_names ]
684810 self ._srw_idxs = (underlying_model .n_modelparams + \
685811 np .arange (self ._n_rw )).astype (np .int )
686812 else :
@@ -692,9 +818,9 @@ def __init__(
692818 for idx1 , name1 in enumerate (self ._rw_names ):
693819 for name2 in self ._rw_names [:idx1 + 1 ]:
694820 if name1 == name2 :
695- self ._srw_names .append ("\sigma_{{{}}}" .format (name1 ))
821+ self ._srw_names .append (r "\sigma_{{{}}}" .format (name1 ))
696822 else :
697- self ._srw_names .append ("\sigma_{{{},{}}}" .format (name2 ,name1 ))
823+ self ._srw_names .append (r "\sigma_{{{},{}}}" .format (name2 ,name1 ))
698824 else :
699825 # In this case the covariance matrix is fixed and fully specified
700826 self ._has_fixed_covariance = True
0 commit comments