Module langbrainscore.metrics.metric
Source code
import typing
import numpy as np
import xarray as xr
from langbrainscore.interface import _MatrixMetric, _Metric, _VectorMetric
from scipy.stats import kendalltau, pearsonr, spearmanr
from sklearn.metrics import accuracy_score, mean_squared_error, pairwise_distances
# class Metric:
# """
# wrapper for metric classes that confirms they instantiate the proper interface
# and coordinates their execution over the contents of supplied xarrays
# """
# def __init__(self, metric: typing.Union[_Metric, str], **kwargs) -> "Metric":
# assert issubclass(metric, _Metric)
# self._metric = metric(**kwargs)
# def __call__(self, X: xr.DataArray, Y: xr.DataArray) -> np.ndarray:
# """
# args:
# xr.DataArray: X
# xr.DataArray: Y
# returns:
# score of specified metric applied to X and Y
# """
# score = self._metric(X.values, Y.values)
# if not isinstance(score, np.ndarray):
# return np.array(score).reshape(-1)
# return score
class PearsonR(_VectorMetric):
    @staticmethod
    def _score(x: np.ndarray, y: np.ndarray) -> float:
        r, p = pearsonr(x, y)
        return r
class SpearmanRho(_VectorMetric):
    @staticmethod
    def _score(x: np.ndarray, y: np.ndarray) -> float:
        rho, p = spearmanr(x, y)
        return rho
class KendallTau(_VectorMetric):
    @staticmethod
    def _score(x: np.ndarray, y: np.ndarray) -> float:
        tau, p = kendalltau(x, y)
        return tau
class FisherCorr(_VectorMetric):
    @staticmethod
    def _score(x: np.ndarray, y: np.ndarray) -> float:
        r, p = pearsonr(x, y)
        corr = np.arctanh(r)
        return corr
class RMSE(_VectorMetric):
    @staticmethod
    def _score(x: np.ndarray, y: np.ndarray) -> float:
        loss = mean_squared_error(x, y, squared=False)
        return loss
class ClassificationAccuracy(_VectorMetric):
    @staticmethod
    def _score(x: np.ndarray, y: np.ndarray) -> float:
        score = accuracy_score(x, y, normalize=True)
        return score
class RSA(_MatrixMetric):
    """
    evaluates representational similarity between two matrices for a given
    distance measure and vector comparison metric
    """

    def __init__(self, distance="correlation", comparison=PearsonR()):
        """
        args:
            string: distance (anything accepted by sklearn.metrics.pairwise_distances)
            _VectorMetric: comparison
        """
        self._distance = distance
        self._comparison = comparison
        super().__init__()

    def _score(self, X: np.ndarray, Y: np.ndarray) -> float:
        X_rdm = pairwise_distances(X, metric=self._distance)
        Y_rdm = pairwise_distances(Y, metric=self._distance)
        if any([m.shape[1] == 1 for m in (X, Y)]):  # can't calc 1D corr dists
            X_rdm[np.isnan(X_rdm)] = 0
            Y_rdm[np.isnan(Y_rdm)] = 0
        indices = np.triu_indices(X_rdm.shape[0], k=1)
        score = self._comparison(X_rdm[indices], Y_rdm[indices])
        return score
# inspired by https://github.com/yuanli2333/CKA-Centered-Kernel-Alignment/blob/master/CKA.py
class CKA(_MatrixMetric):
    """
    evaluates centered kernel alignment distance between two matrices;
    currently only implements a linear kernel
    """

    def __init__(self):
        super().__init__()

    @staticmethod
    def _center(K):
        N = K.shape[0]
        U = np.ones([N, N])
        I = np.eye(N)
        H = I - U / N
        centered = H @ K @ H
        return centered

    def _HSIC(self, A, B):
        L_A = A @ A.T
        L_B = B @ B.T
        HSIC = np.sum(self._center(L_A) * self._center(L_B))
        return HSIC

    def _score(self, X: np.ndarray, Y: np.ndarray) -> float:
        HSIC_XY = self._HSIC(X, Y)
        HSIC_XX = self._HSIC(X, X)
        HSIC_YY = self._HSIC(Y, Y)
        score = HSIC_XY / (np.sqrt(HSIC_XX) * np.sqrt(HSIC_YY))
        return score
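Each vector metric above wraps a single SciPy/scikit-learn routine in a static _score(x, y) over two 1-D arrays; the column-wise iteration and optional reduction are handled by the _VectorMetric interface, which is defined elsewhere and not shown here. A minimal sketch of the underlying library calls on made-up arrays (purely illustrative data):

import numpy as np
from scipy.stats import kendalltau, pearsonr, spearmanr
from sklearn.metrics import accuracy_score, mean_squared_error

rng = np.random.default_rng(0)
x = rng.normal(size=50)                    # e.g. one column of model predictions
y = x + 0.5 * rng.normal(size=50)          # e.g. one column of measured responses

r, _ = pearsonr(x, y)                      # what PearsonR._score returns
rho, _ = spearmanr(x, y)                   # SpearmanRho._score
tau, _ = kendalltau(x, y)                  # KendallTau._score
rmse = mean_squared_error(x, y, squared=False)        # RMSE._score
acc = accuracy_score([0, 1, 1, 0], [0, 1, 0, 0])      # ClassificationAccuracy._score (label vectors)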
Classes
class CKA
-
evaluates centered kernel alignment distance between two matrices; currently only implements a linear kernel
Ancestors
- langbrainscore.interface.metric._MatrixMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
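The score is linear CKA: HSIC(X, Y) normalized by sqrt(HSIC(X, X) * HSIC(Y, Y)) over centered linear Gram matrices. Below is a standalone sketch that mirrors the arithmetic in CKA._score on made-up arrays; it does not go through the class or its _Cacheable machinery:

import numpy as np

def linear_cka(X, Y):
    # X, Y: (samples x features) matrices with the same number of rows
    def center(K):
        n = K.shape[0]
        H = np.eye(n) - np.ones((n, n)) / n
        return H @ K @ H
    def hsic(A, B):
        return np.sum(center(A @ A.T) * center(B @ B.T))
    return hsic(X, Y) / (np.sqrt(hsic(X, X)) * np.sqrt(hsic(Y, Y)))

rng = np.random.default_rng(0)
X = rng.normal(size=(20, 8))   # e.g. model activations
Y = rng.normal(size=(20, 5))   # e.g. brain recordings
print(linear_cka(X, X))        # 1.0: a matrix is perfectly aligned with itself
print(linear_cka(X, Y))        # small value for unrelated matrices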
class ClassificationAccuracy (reduction=None)
-
subclass of _Metric that applies the relevant vector similarity metric along each column of the input arrays.
args: callable: reduction (can also be None or False)
raises: TypeError: if the reduction argument is not callable.
Ancestors
- langbrainscore.interface.metric._VectorMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
class FisherCorr (reduction=None)
-
subclass of _Metric that applies the relevant vector similarity metric along each column of the input arrays.
args: callable: reduction (can also be None or False)
raises: TypeError: if the reduction argument is not callable.
Ancestors
- langbrainscore.interface.metric._VectorMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
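FisherCorr returns the Fisher z-transform of the Pearson correlation, arctanh(r) = 0.5 * ln((1 + r) / (1 - r)), which maps r from (-1, 1) onto the whole real line and is why correlations are commonly z-transformed before averaging across cross-validation splits or subjects. A quick numeric check of the identity used in _score:

import numpy as np
r = 0.8
z = np.arctanh(r)                                        # what FisherCorr._score returns for r = 0.8
assert np.isclose(z, 0.5 * np.log((1 + r) / (1 - r)))    # Fisher z of 0.8 is roughly 1.0986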
class KendallTau (reduction=None)
-
subclass of _Metric that applies the relevant vector similarity metric along each column of the input arrays.
args: callable: reduction (can also be None or False)
raises: TypeError: if the reduction argument is not callable.
Ancestors
- langbrainscore.interface.metric._VectorMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
class PearsonR (reduction=None)
-
subclass of _Metric that applies the relevant vector similarity metric along each column of the input arrays.
args: callable: reduction (can also be None or False)
raises: TypeError: if the reduction argument is not callable.
Ancestors
- langbrainscore.interface.metric._VectorMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
class RMSE (reduction=None)
-
subclass of _Metric that applies the relevant vector similarity metric along each column of the input arrays.
args: callable: reduction (can also be None or False)
raises: TypeError: if the reduction argument is not callable.
Ancestors
- langbrainscore.interface.metric._VectorMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
class RSA (distance='correlation', comparison=PearsonR())
-
evaluates representational similarity between two matrices for a given distance measure and vector comparison metric
args: string: distance (anything accepted by sklearn.metrics.pairwise_distances); _VectorMetric: comparison
Ancestors
- langbrainscore.interface.metric._MatrixMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC
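With the default settings, RSA._score builds a correlation-distance RDM for each matrix and compares the two RDMs' upper triangles with Pearson r. The sketch below mirrors that computation directly with library calls on made-up arrays; the real class routes the final comparison through the supplied _VectorMetric instance rather than calling pearsonr itself:

import numpy as np
from scipy.stats import pearsonr
from sklearn.metrics import pairwise_distances

rng = np.random.default_rng(0)
X = rng.normal(size=(12, 6))    # 12 stimuli x 6 model dimensions
Y = rng.normal(size=(12, 4))    # 12 stimuli x 4 measured channels

X_rdm = pairwise_distances(X, metric="correlation")   # stimulus-by-stimulus dissimilarity
Y_rdm = pairwise_distances(Y, metric="correlation")
iu = np.triu_indices(X_rdm.shape[0], k=1)             # off-diagonal upper triangle only
score, _ = pearsonr(X_rdm[iu], Y_rdm[iu])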
class SpearmanRho (reduction=None)
-
subclass of _Metric that applies the relevant vector similarity metric along each column of the input arrays.
args: callable: reduction (can also be None or False)
raises: TypeError: if the reduction argument is not callable.
Ancestors
- langbrainscore.interface.metric._VectorMetric
- langbrainscore.interface.metric._Metric
- langbrainscore.interface.cacheable._Cacheable
- typing.Protocol
- typing.Generic
- abc.ABC