code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
if unit not in ('kelvin', 'celsius', 'fahrenheit'): raise ValueError("Invalid value for parameter 'unit'") minimum = min(self._purge_none_samples(self.temperature_series()), key=itemgetter(1)) if unit == 'kelvin': result = minimum if un...
def min_temperature(self, unit='kelvin')
Returns a tuple containing the min value in the temperature series preceeded by its timestamp :param unit: the unit of measure for the temperature values. May be among: '*kelvin*' (default), '*celsius*' or '*fahrenheit*' :type unit: str :returns: a tuple :rai...
2.998776
2.682125
1.11806
if unit not in ('kelvin', 'celsius', 'fahrenheit'): raise ValueError("Invalid value for parameter 'unit'") average = self._average(self._purge_none_samples( self.temperature_series())) if unit == 'kelvin': result ...
def average_temperature(self, unit='kelvin')
Returns the average value in the temperature series :param unit: the unit of measure for the temperature values. May be among: '*kelvin*' (default), '*celsius*' or '*fahrenheit*' :type unit: str :returns: a float :raises: ValueError when invalid values are provided f...
2.982603
2.959298
1.007875
return max(self._purge_none_samples(self.rain_series()), key=lambda item:item[1])
def max_rain(self)
Returns a tuple containing the max value in the rain series preceeded by its timestamp :returns: a tuple :raises: ValueError when the measurement series is empty
23.049131
16.254669
1.418001
return json.dumps({"reception_time": self._reception_time, "Location": json.loads(self._location.to_JSON()), "Weather": json.loads(self._weather.to_JSON()) })
def to_JSON(self)
Dumps object fields into a JSON formatted string :returns: the JSON string
4.321323
5.516061
0.783407
root_node = self._to_DOM() if xmlns: xmlutils.annotate_with_XMLNS(root_node, OBSERVATION_XMLNS_PREFIX, OBSERVATION_XMLNS_URL) return xmlutils.DOM_node_to_XML(root_node, xml_declaration)
def to_XML(self, xml_declaration=True, xmlns=True)
Dumps object fields to an XML-formatted string. The 'xml_declaration' switch enables printing of a leading standard XML line containing XML version and encoding. The 'xmlns' switch enables printing of qualified XMLNS prefixes. :param XML_declaration: if ``True`` (default) prints a lead...
4.193828
5.447153
0.769912
root_node = ET.Element("observation") reception_time_node = ET.SubElement(root_node, "reception_time") reception_time_node.text = str(self._reception_time) root_node.append(self._location._to_DOM()) root_node.append(self._weather._to_DOM()) return root_node
def _to_DOM(self)
Dumps object data to a fully traversable DOM representation of the object. :returns: a ``xml.etree.Element`` object
2.587985
2.922067
0.885669
lat = str(params_dict['lat']) lon = str(params_dict['lon']) params = dict(lat=lat, lon=lon) # build request URL uri = http_client.HttpClient.to_url(UV_INDEX_URL, self._API_key, None) _, json_data = self._client.cacheable_get_json(uri, params=params) retu...
def get_uvi(self, params_dict)
Invokes the UV Index endpoint :param params_dict: dict of parameters :returns: a string containing raw JSON data :raises: *ValueError*, *APICallError*
5.583093
6.19362
0.901426
lat = str(params_dict['lat']) lon = str(params_dict['lon']) start = str(params_dict['start']) end = str(params_dict['end']) params = dict(lat=lat, lon=lon, start=start, end=end) # build request URL uri = http_client.HttpClient.to_url(UV_INDEX_HISTORY_URL...
def get_uvi_history(self, params_dict)
Invokes the UV Index History endpoint :param params_dict: dict of parameters :returns: a string containing raw JSON data :raises: *ValueError*, *APICallError*
4.073776
4.277472
0.952379
return [ cls.TEMPERATURE, cls.PRESSURE, cls.HUMIDITY, cls.WIND_SPEED, cls.WIND_DIRECTION, cls.CLOUDS ]
def items(cls)
All values for this enum :return: list of str
3.368231
3.342222
1.007782
return [ cls.GREATER_THAN, cls.GREATER_THAN_EQUAL, cls.LESS_THAN, cls.LESS_THAN_EQUAL, cls.EQUAL, cls.NOT_EQUAL ]
def items(cls)
All values for this enum :return: list of str
2.615578
2.641977
0.990008
if JSON_string is None: raise parse_response_error.ParseResponseError('JSON data is None') d = json.loads(JSON_string) # Check if server returned errors: this check overcomes the lack of use # of HTTP error status codes by the OWM API 2.5. This mechanism is #...
def parse_JSON(self, JSON_string)
Parses a *Forecast* instance out of raw JSON data. Only certain properties of the data are used: if these properties are not found or cannot be parsed, an error is issued. :param JSON_string: a raw JSON string :type JSON_string: str :returns: a *Forecast* instance or ``None`` if...
3.809572
3.649621
1.043827
return evaluation.evaluate_density( dist, numpy.arcsinh(x), cache=cache)/numpy.sqrt(1+x*x)
def _pdf(self, x, dist, cache)
Probability density function.
8.854574
9.185193
0.964005
args = list(args) # expand args to match dim if len(args) < poly.dim: args = args + [np.nan]*(poly.dim-len(args)) elif len(args) > poly.dim: raise ValueError("too many arguments") # Find and perform substitutions, if any x0, x1 = [], [] for idx, arg in enumerate(args)...
def call(poly, args)
Evaluate a polynomial along specified axes. Args: poly (Poly): Input polynomial. args (numpy.ndarray): Argument to be evaluated. Masked values keeps the variable intact. Returns: (Poly, numpy.ndarray): If masked values are used the Poly is returned. ...
3.293467
3.310491
0.994858
x0,x1 = map(Poly, [x0,x1]) dim = np.max([p.dim for p in [P,x0,x1]]) dtype = chaospy.poly.typing.dtyping(P.dtype, x0.dtype, x1.dtype) P, x0, x1 = [chaospy.poly.dimension.setdim(p, dim) for p in [P,x0,x1]] if x0.shape: x0 = [x for x in x0] else: x0 = [x0] if x1.shape: ...
def substitute(P, x0, x1, V=0)
Substitute a variable in a polynomial array. Args: P (Poly) : Input data. x0 (Poly, int) : The variable to substitute. Indicated with either unit variable, e.g. `x`, `y`, `z`, etc. or through an integer matching the unit variables dimension, e.g. `x==0`, `y==1`, ...
3.113055
3.197649
0.973545
if P.shape: return min([is_decomposed(poly) for poly in P]) return len(P.keys) <= 1
def is_decomposed(P)
Check if a polynomial (array) is on component form. Args: P (Poly): Input data. Returns: (bool): True if all polynomials in ``P`` are on component form. Examples: >>> x,y = cp.variable(2) >>> print(cp.is_decomposed(cp.Poly([1,x,x*y]))) True ...
7.765771
10.465626
0.742026
P = P.copy() if not P: return P out = [Poly({key:P.A[key]}) for key in P.keys] return Poly(out, None, None, None)
def decompose(P)
Decompose a polynomial to component form. In array missing values are padded with 0 to make decomposition compatible with ``chaospy.sum(Q, 0)``. Args: P (Poly) : Input data. Returns: (Poly) : Decomposed polynomial with `P.shape==(M,)+Q.shape` where `M` is the number of...
9.090894
12.346194
0.736332
logger = logging.getLogger(__name__) assert len(k_data) == len(distribution), ( "distribution %s is not of length %d" % (distribution, len(k_data))) assert len(k_data.shape) == 1 if numpy.all(k_data == 0): return 1. def cache_key(distribution): return (tuple(k_data), d...
def evaluate_moment( distribution, k_data, parameters=None, cache=None, )
Evaluate raw statistical moments. Args: distribution (Dist): Distribution to evaluate. x_data (numpy.ndarray): Locations for where evaluate moment of. parameters (:py:data:typing.Any): Collection of parameters to override the default ones in the ...
3.187715
3.269582
0.974961
from .mv_mul import MvMul length = max(left, right) if length == 1: return Mul(left, right) return MvMul(left, right)
def mul(left, right)
Distribution multiplication. Args: left (Dist, numpy.ndarray) : left hand side. right (Dist, numpy.ndarray) : right hand side.
5.679591
6.46332
0.878742
left = evaluation.get_forward_cache(left, cache) right = evaluation.get_forward_cache(right, cache) if isinstance(left, Dist): if isinstance(right, Dist): raise evaluation.DependencyError( "under-defined distribution {} or {}".format(left...
def _cdf(self, xloc, left, right, cache)
Cumulative distribution function. Example: >>> print(chaospy.Uniform().fwd([-0.5, 0.5, 1.5, 2.5])) [0. 0.5 1. 1. ] >>> print(Mul(chaospy.Uniform(), 2).fwd([-0.5, 0.5, 1.5, 2.5])) [0. 0.25 0.75 1. ] >>> print(Mul(2, chaospy.Uniform()).fwd([-0.5, 0...
2.435273
2.481699
0.981292
left = evaluation.get_inverse_cache(left, cache) right = evaluation.get_inverse_cache(right, cache) if isinstance(left, Dist): if isinstance(right, Dist): raise evaluation.DependencyError( "under-defined distribution {} or {}".format(left...
def _ppf(self, uloc, left, right, cache)
Point percentile function. Example: >>> print(chaospy.Uniform().inv([0.1, 0.2, 0.9])) [0.1 0.2 0.9] >>> print(Mul(chaospy.Uniform(), 2).inv([0.1, 0.2, 0.9])) [0.2 0.4 1.8] >>> print(Mul(2, chaospy.Uniform()).inv([0.1, 0.2, 0.9])) [0.2 0.4 ...
2.616374
2.795278
0.935998
left = evaluation.get_forward_cache(left, cache) right = evaluation.get_forward_cache(right, cache) if isinstance(left, Dist): if isinstance(right, Dist): raise evaluation.DependencyError( "under-defined distribution {} or {}".format(left...
def _pdf(self, xloc, left, right, cache)
Probability density function. Example: >>> print(chaospy.Uniform().pdf([-0.5, 0.5, 1.5, 2.5])) [0. 1. 0. 0.] >>> print(Mul(chaospy.Uniform(), 2).pdf([-0.5, 0.5, 1.5, 2.5])) [0. 0.5 0.5 0. ] >>> print(Mul(2, chaospy.Uniform()).pdf([-0.5, 0.5, 1.5, 2.5...
2.291155
2.371123
0.966274
if evaluation.get_dependencies(left, right): raise evaluation.DependencyError( "sum of dependent distributions not feasible: " "{} and {}".format(left, right) ) if isinstance(left, Dist): left = evaluation.evaluate_moment(left...
def _mom(self, key, left, right, cache)
Statistical moments. Example: >>> print(numpy.around(chaospy.Uniform().mom([0, 1, 2, 3]), 4)) [1. 0.5 0.3333 0.25 ] >>> print(numpy.around(Mul(chaospy.Uniform(), 2).mom([0, 1, 2, 3]), 4)) [1. 1. 1.3333 2. ] >>> print(numpy.around(Mu...
3.626057
3.636152
0.997224
from .. import baseclass collection = [dist] # create DAG as list of nodes and edges: nodes = [dist] edges = [] pool = [dist] while pool: dist = pool.pop() for key in sorted(dist.prm): value = dist.prm[key] if not isinstance(value, baseclass.Dis...
def sorted_dependencies(dist, reverse=False)
Extract all underlying dependencies from a distribution sorted topologically. Uses depth-first algorithm. See more here: Args: dist (Dist): Distribution to extract dependencies from. reverse (bool): If True, place dependencies in reverse order. Returns: ...
3.677789
3.512196
1.047148
from .. import baseclass distributions = [ sorted_dependencies(dist) for dist in distributions if isinstance(dist, baseclass.Dist) ] dependencies = list() for idx, dist1 in enumerate(distributions): for dist2 in distributions[idx+1:]: dependencies.extend([di...
def get_dependencies(*distributions)
Get underlying dependencies that are shared between distributions. If more than two distributions are provided, any pair-wise dependency between any two distributions are included, implying that an empty set is returned if and only if the distributions are i.i.d. Args: distributions: ...
4.06251
4.932314
0.823652
polynomials, norms, _, _ = chaospy.quad.generate_stieltjes( dist=dist, order=numpy.max(order), retall=True, **kws) if normed: for idx, poly in enumerate(polynomials): polynomials[idx] = poly / numpy.sqrt(norms[:, idx]) norms = norms**0 dim = len(dist) if dim > ...
def orth_ttr( order, dist, normed=False, sort="GR", retall=False, cross_truncation=1., **kws)
Create orthogonal polynomial expansion from three terms recursion formula. Args: order (int): Order of polynomial expansion. dist (Dist): Distribution space where polynomials are orthogonal If dist.ttr exists, it will be used. Must be stochastically independent. ...
3.123797
3.478256
0.898093
return evaluation.evaluate_density(dist, -xloc, cache=cache)
def _pdf(self, xloc, dist, cache)
Probability density function.
12.304729
12.991741
0.947119
return -evaluation.evaluate_inverse(dist, 1-q, cache=cache)
def _ppf(self, q, dist, cache)
Point percentile function.
15.664988
15.168934
1.032702
return (-1)**numpy.sum(k)*evaluation.evaluate_moment( dist, k, cache=cache)
def _mom(self, k, dist, cache)
Statistical moments.
12.485264
13.14511
0.949803
a,b = evaluation.evaluate_recurrence_coefficients(dist, k) return -a, b
def _ttr(self, k, dist, cache)
Three terms recursion coefficients.
17.083439
12.68185
1.347078
order = sorted(GENZ_KEISTER_22.keys())[order] abscissas, weights = GENZ_KEISTER_22[order] abscissas = numpy.array(abscissas) weights = numpy.array(weights) weights /= numpy.sum(weights) abscissas *= numpy.sqrt(2) return abscissas, weights
def quad_genz_keister_22 ( order )
Hermite Genz-Keister 22 rule. Args: order (int): The quadrature order. Must be in the interval (0, 8). Returns: (:py:data:typing.Tuple[numpy.ndarray, numpy.ndarray]): Abscissas and weights Examples: >>> abscissas, weights = quad_genz_keister_22(1) >...
2.77165
3.531353
0.784869
for datatype, identifier in { int: _identify_scaler, numpy.int8: _identify_scaler, numpy.int16: _identify_scaler, numpy.int32: _identify_scaler, numpy.int64: _identify_scaler, float: _identify_scaler, numpy.float16: _identify_s...
def identify_core(core)
Identify the polynomial argument.
2.500088
2.374325
1.052968
return core.A, core.dim, core.shape, core.dtype
def _identify_poly(core)
Specification for a polynomial.
13.977036
13.301918
1.050753
if not core: return {}, 1, (), int core = core.copy() key = sorted(core.keys(), key=chaospy.poly.base.sort_key)[0] shape = numpy.array(core[key]).shape dtype = numpy.array(core[key]).dtype dim = len(key) return core, dim, shape, dtype
def _identify_dict(core)
Specification for a dictionary.
5.738251
5.676239
1.010925
if isinstance(core, numpy.ndarray) and not core.shape: return {(0,):core}, 1, (), core.dtype core = [chaospy.poly.base.Poly(a) for a in core] shape = (len(core),) + core[0].shape dtype = chaospy.poly.typing.dtyping(*[_.dtype for _ in core]) dims = numpy.array([a.dim for a in core]) ...
def _identify_iterable(core)
Specification for a list, tuple, numpy.ndarray.
3.891582
3.82898
1.016349
if isinstance(poly, distributions.Dist): poly, dist = polynomials.variable(len(poly)), poly else: poly = polynomials.Poly(poly) cov = Cov(poly, dist, **kws) var = numpy.diag(cov) vvar = numpy.sqrt(numpy.outer(var, var)) return numpy.where(vvar > 0, cov/vvar, 0)
def Corr(poly, dist=None, **kws)
Correlation matrix of a distribution or polynomial. Args: poly (Poly, Dist): Input to take correlation on. Must have ``len(poly)>=2``. dist (Dist): Defines the space the correlation is taken on. It is ignored if ``poly`` is a distribution. Returns: ...
3.934036
4.729446
0.831817
from ...quad import quad_clenshaw_curtis q1, w1 = quad_clenshaw_curtis(int(10**3*a), 0, a) q2, w2 = quad_clenshaw_curtis(int(10**3*(1-a)), a, 1) q = numpy.concatenate([q1,q2], 1) w = numpy.concatenate([w1,w2]) w = w*numpy.where(q<a, 2*q/a, 2*(1-q)/(1-a)) from chaospy.poly import variab...
def tri_ttr(k, a)
Custom TTR function. Triangle distribution does not have an analytical TTR function, but because of its non-smooth nature, a blind integration scheme will converge very slowly. However, by splitting the integration into two divided at the discontinuity in the derivative, TTR can be made operative.
3.711617
3.795666
0.977856
if isinstance(poly, distributions.Dist): x = polynomials.variable(len(poly)) poly, dist = x, poly else: poly = polynomials.Poly(poly) if poly.dim < len(dist): polynomials.setdim(poly, len(dist)) shape = poly.shape poly = polynomials.flatten(poly) m1 = E(po...
def Skew(poly, dist=None, **kws)
Skewness operator. Element by element 3rd order statistics of a distribution or polynomial. Args: poly (Poly, Dist): Input to take skewness on. dist (Dist): Defines the space the skewness is taken on. It is ignored if ``poly`` is a distribution. Returns...
3.6403
3.653351
0.996428
assert len(x_data) == len(distribution), ( "distribution %s is not of length %d" % (distribution, len(x_data))) assert hasattr(distribution, "_cdf"), ( "distribution require the `_cdf` method to function.") cache = cache if cache is not None else {} parameters = load_parameters( ...
def evaluate_forward( distribution, x_data, parameters=None, cache=None, )
Evaluate forward Rosenblatt transformation. Args: distribution (Dist): Distribution to evaluate. x_data (numpy.ndarray): Locations for where evaluate forward transformation at. parameters (:py:data:typing.Any): Collection of parameters to override the def...
4.161135
4.162122
0.999763
if isinstance(poly, distributions.Dist): x = polynomials.variable(len(poly)) poly, dist = x, poly else: poly = polynomials.Poly(poly) dim = len(dist) if poly.dim<dim: polynomials.setdim(poly, dim) shape = poly.shape poly = polynomials.flatten(poly) key...
def Var(poly, dist=None, **kws)
Element by element 2nd order statistics. Args: poly (Poly, Dist): Input to take variance on. dist (Dist): Defines the space the variance is taken on. It is ignored if ``poly`` is a distribution. Returns: (numpy.ndarray): Element for eleme...
3.160985
3.203896
0.986607
if not isinstance(poly, (distributions.Dist, polynomials.Poly)): print(type(poly)) print("Approximating expected value...") out = quadrature.quad(poly, dist, veceval=True, **kws) print("done") return out if isinstance(poly, distributions.Dist): dist, poly = ...
def E(poly, dist=None, **kws)
Expected value operator. 1st order statistics of a probability distribution or polynomial on a given probability space. Args: poly (Poly, Dist): Input to take expected value on. dist (Dist): Defines the space the expected value is taken on. It is ignored if ...
3.671103
3.671416
0.999915
dim = len(dist) if poly.dim<dim: poly = chaospy.poly.setdim(poly, len(dist)) zero = [0]*dim out = numpy.zeros((dim, dim) + poly.shape) mean = E(poly, dist) V_total = Var(poly, dist) E_cond_i = [None]*dim V_E_cond_i = [None]*dim for i in range(dim): zero[i] = 1 ...
def Sens_m2(poly, dist, **kws)
Variance-based decomposition/Sobol' indices. Second order sensitivity indices. Args: poly (Poly): Polynomial to find second order Sobol indices on. dist (Dist): The distributions of the input used in ``poly``. Returns: (numpy.ndarray): First ord...
2.615709
2.695558
0.970378
x_data = .5*numpy.cos(numpy.arange(order, 0, -1)*numpy.pi/(order+1)) + .5 x_data = chaospy.quad.combine([x_data]*dim) return x_data.T
def create_chebyshev_samples(order, dim=1)
Chebyshev sampling function. Args: order (int): The number of samples to create along each axis. dim (int): The number of dimensions to create samples for. Returns: samples following Chebyshev sampling scheme mapped to the ``[0, 1]^dim`` hyper-cube and `...
5.207179
6.62358
0.786158
dim = len(dist) basis = chaospy.poly.basis( start=1, stop=order, dim=dim, sort=sort, cross_truncation=cross_truncation, ) length = len(basis) cholmat = chaospy.chol.gill_king(chaospy.descriptives.Cov(basis, dist)) cholmat_inv = numpy.linalg.inv(cholmat.T).T if not norme...
def orth_chol(order, dist, normed=True, sort="GR", cross_truncation=1., **kws)
Create orthogonal polynomial expansion from Cholesky decomposition. Args: order (int): Order of polynomial expansion dist (Dist): Distribution space where polynomials are orthogonal normed (bool): If True orthonormal polynomials will be used instead of mo...
3.825425
4.024458
0.950544
P = P.copy() if not chaospy.poly.caller.is_decomposed(P): raise TypeError("Polynomial not on component form.") A = [] dim = P.dim coef = P(*(1,)*dim) M = coef!=0 zero = (0,)*dim ones = [1]*dim A = [{zero: coef}] if zero in P.A: del P.A[zero] P.key...
def dimsplit(P)
Segmentize a polynomial (on decomposed form) into it's dimensions. In array missing values are padded with 1 to make dimsplit compatible with ``poly.prod(Q, 0)``. Args: P (Poly): Input polynomial. Returns: (Poly): Segmentet polynomial array where ``Q.shape==P....
5.602407
5.587273
1.002709
P = P.copy() ldim = P.dim if not dim: dim = ldim+1 if dim==ldim: return P P.dim = dim if dim>ldim: key = numpy.zeros(dim, dtype=int) for lkey in P.keys: key[:ldim] = lkey P.A[tuple(key)] = P.A.pop(lkey) else: key = nu...
def setdim(P, dim=None)
Adjust the dimensions of a polynomial. Output the results into Poly object Args: P (Poly) : Input polynomial dim (int) : The dimensions of the output polynomial. If omitted, increase polynomial with one dimension. If the new dim is smaller then P's dimensions, v...
3.095418
3.649969
0.848067
return evaluation.evaluate_density( dist, base**xloc, cache=cache)*base**xloc*numpy.log(base)
def _pdf(self, xloc, dist, base, cache)
Probability density function.
9.344839
9.099339
1.02698
return evaluation.evaluate_forward(dist, base**xloc, cache=cache)
def _cdf(self, xloc, dist, base, cache)
Cumulative distribution function.
14.817529
12.1355
1.221007
mat = numpy.asfarray(mat) size = mat.shape[0] # Calculate gamma(mat) and xi_(mat). gamma = 0.0 xi_ = 0.0 for idy in range(size): gamma = max(abs(mat[idy, idy]), gamma) for idx in range(idy+1, size): xi_ = max(abs(mat[idy, idx]), xi_) # Calculate delta and b...
def gill_murray_wright(mat, eps=1e-16)
Gill-Murray-Wright algorithm for pivoting modified Cholesky decomposition. Return ``(perm, lowtri, error)`` such that `perm.T*mat*perm = lowtri*lowtri.T` is approximately correct. Args: mat (numpy.ndarray): Must be a non-singular and symmetric matrix eps (float): Er...
2.941397
3.018938
0.974315
# Temporary permutation matrix for swaping 2 rows or columns. size = mat_a.shape[0] perm_new = numpy.eye(size, dtype=int) # Modify the permutation matrix perm by swaping columns. perm_row = 1.0*perm[:, idx] perm[:, idx] = perm[:, idy] perm[:, idy] = perm_row # Modify the permutati...
def swap_across(idx, idy, mat_a, mat_r, perm)
Interchange row and column idy and idx.
3.411592
3.330359
1.024392
primes = list(primes) if not primes: prime_order = 10*dim while len(primes) < dim: primes = create_primes(prime_order) prime_order *= 2 primes = primes[:dim] assert len(primes) == dim, "not enough primes" if burnin < 0: burnin = max(primes) ...
def create_halton_samples(order, dim=1, burnin=-1, primes=())
Create Halton sequence. For ``dim == 1`` the sequence falls back to Van Der Corput sequence. Args: order (int): The order of the Halton sequence. Defines the number of samples. dim (int): The number of dimensions in the Halton sequence. burnin (int): ...
3.431532
3.134362
1.09481
if x_data is None: try: x_data = evaluation.evaluate_inverse( self, numpy.array([[0.5]]*len(self))) except StochasticallyDependentError: x_data = approximation.find_interior_point(self) shape = (len(self),) ...
def range(self, x_data=None)
Generate the upper and lower bounds of a distribution. Args: x_data (numpy.ndarray) : The bounds might vary over the sample space. By providing x_data you can specify where in the space the bound should be taken. If omitted, a (pseudo-)random sample ...
3.861584
3.912972
0.986867
x_data = numpy.asfarray(x_data) shape = x_data.shape x_data = x_data.reshape(len(self), -1) lower, upper = evaluation.evaluate_bound(self, x_data) q_data = numpy.zeros(x_data.shape) indices = x_data > upper q_data[indices] = 1 indices = ~indices ...
def fwd(self, x_data)
Forward Rosenblatt transformation. Args: x_data (numpy.ndarray): Location for the distribution function. ``x_data.shape`` must be compatible with distribution shape. Returns: (numpy.ndarray): Evaluated distribution function values...
2.994967
3.470877
0.862885
if len(self) > 1 and evaluation.get_dependencies(*self): raise StochasticallyDependentError( "Cumulative distribution does not support dependencies.") q_data = self.fwd(x_data) if len(self) > 1: q_data = numpy.prod(q_data, 0) return q_data
def cdf(self, x_data)
Cumulative distribution function. Note that chaospy only supports cumulative distribution functions for stochastically independent distributions. Args: x_data (numpy.ndarray): Location for the distribution function. Assumes that ``len(x_data) == len(...
5.813375
5.700745
1.019757
q_data = numpy.asfarray(q_data) assert numpy.all((q_data >= 0) & (q_data <= 1)), "sanitize your inputs!" shape = q_data.shape q_data = q_data.reshape(len(self), -1) x_data = evaluation.evaluate_inverse(self, q_data) lower, upper = evaluation.evaluate_bound(self, ...
def inv(self, q_data, max_iterations=100, tollerance=1e-5)
Inverse Rosenblatt transformation. If possible the transformation is done analytically. If not possible, transformation is approximated using an algorithm that alternates between Newton-Raphson and binary search. Args: q_data (numpy.ndarray): Probabilities t...
2.965937
3.340713
0.887815
x_data = numpy.asfarray(x_data) shape = x_data.shape x_data = x_data.reshape(len(self), -1) lower, upper = evaluation.evaluate_bound(self, x_data) f_data = numpy.zeros(x_data.shape) indices = (x_data <= upper) & (x_data >= lower) f_data[indices] = evalua...
def pdf(self, x_data, step=1e-7)
Probability density function. If possible the density will be calculated analytically. If not possible, it will be approximated by approximating the one-dimensional derivative of the forward Rosenblatt transformation and multiplying the component parts. Note that even if the distributio...
2.732283
3.04945
0.895992
size_ = numpy.prod(size, dtype=int) dim = len(self) if dim > 1: if isinstance(size, (tuple, list, numpy.ndarray)): shape = (dim,) + tuple(size) else: shape = (dim, size) else: shape = size from . import...
def sample(self, size=(), rule="R", antithetic=None)
Create pseudo-random generated samples. By default, the samples are created using standard (pseudo-)random samples. However, if needed, the samples can also be created by either low-discrepancy sequences, and/or variance reduction techniques. Changing the sampling scheme, use the follo...
3.270472
3.179294
1.028679
K = numpy.asarray(K, dtype=int) shape = K.shape dim = len(self) if dim > 1: shape = shape[1:] size = int(K.size/dim) K = K.reshape(dim, size) cache = {} out = [evaluation.evaluate_moment(self, kdata, cache) for kdata in K.T] ...
def mom(self, K, **kws)
Raw statistical moments. Creates non-centralized raw moments from the random variable. If analytical options can not be utilized, Monte Carlo integration will be used. Args: K (numpy.ndarray): Index of the raw moments. k.shape must be compatible with ...
4.469238
4.471303
0.999538
kloc = numpy.asarray(kloc, dtype=int) shape = kloc.shape kloc = kloc.reshape(len(self), -1) cache = {} out = [evaluation.evaluate_recurrence_coefficients(self, k) for k in kloc.T] out = numpy.array(out).T return out.reshape((2,)+shape)
def ttr(self, kloc, acc=10**3, verbose=1)
Three terms relation's coefficient generator Args: k (numpy.ndarray, int): The order of the coefficients. acc (int): Accuracy of discretized Stieltjes if analytical methods are unavailable. Returns: (Recurrence coeffic...
4.659978
3.9951
1.166423
if N is None: N = len(poly)/2 + 1 corr = Corr(poly, dist, **kws) out = numpy.empty(N) for n in range(N): out[n] = numpy.mean(corr.diagonal(n), 0) return out
def Acf(poly, dist, N=None, **kws)
Auto-correlation function. Args: poly (Poly): Polynomial of interest. Must have ``len(poly) > N``. dist (Dist): Defines the space the correlation is taken on. N (int): The number of time steps appart included. If omited set to ``len(poly)/2+1`...
3.595758
4.88451
0.736155
rc("figure", figsize=[8.,4.]) rc("figure.subplot", left=.08, top=.95, right=.98) rc("image", cmap="gray") seed(1000) Q1 = cp.Gamma(2) Q2 = cp.Normal(0, Q1) Q = cp.J(Q1, Q2) #end subplot(121) s,t = meshgrid(linspace(0,5,200), linspace(-6,6,200)) contourf(s,t,Q.pdf([s,t]...
def plot_figures()
Plot figures for multivariate distribution section.
2.660733
2.612596
1.018425
if isinstance(vari, Poly): shape = int(numpy.prod(vari.shape)) return reshape(vari, (shape,)) return numpy.array(vari).flatten()
def flatten(vari)
Flatten a shapeable quantity. Args: vari (chaospy.poly.base.Poly, numpy.ndarray): Shapeable input quantity. Returns: (chaospy.poly.base.Poly, numpy.ndarray): Same type as ``vari`` with `len(Q.shape)==1`. Examples: >>> P = chaospy.reshape(chaospy.prange(4), ...
5.530801
8.068131
0.685512
if isinstance(vari, Poly): core = vari.A.copy() for key in vari.keys: core[key] = reshape(core[key], shape) out = Poly(core, vari.dim, shape, vari.dtype) return out return numpy.asarray(vari).reshape(shape)
def reshape(vari, shape)
Reshape the shape of a shapeable quantity. Args: vari (chaospy.poly.base.Poly, numpy.ndarray): Shapeable input quantity. shape (tuple): The polynomials new shape. Must be compatible with the number of elements in ``vari``. Returns: (chaospy.poly.base...
4.360572
5.256711
0.829525
if isinstance(vari, Poly): core_old = vari.A.copy() core_new = {} for key in vari.keys: core_new[key] = rollaxis(core_old[key], axis, start) return Poly(core_new, vari.dim, None, vari.dtype) return numpy.rollaxis(vari, axis, start)
def rollaxis(vari, axis, start=0)
Roll the specified axis backwards, until it lies in a given position. Args: vari (chaospy.poly.base.Poly, numpy.ndarray): Input array or polynomial. axis (int): The axis to roll backwards. The positions of the other axes do not change relative to one another. ...
3.812619
3.609141
1.056378
if isinstance(vari, Poly): core = vari.A.copy() for key in vari.keys: core[key] = swapaxes(core[key], ax1, ax2) return Poly(core, vari.dim, None, vari.dtype) return numpy.swapaxes(vari, ax1, ax2)
def swapaxes(vari, ax1, ax2)
Interchange two axes of a polynomial.
3.792858
3.474431
1.091649
if isinstance(vari, Poly): core = vari.A.copy() for key in vari.keys: core[key] = roll(core[key], shift, axis) return Poly(core, vari.dim, None, vari.dtype) return numpy.roll(vari, shift, axis)
def roll(vari, shift, axis=None)
Roll array elements along a given axis.
3.803945
3.81055
0.998267
if isinstance(vari, Poly): core = vari.A.copy() for key in vari.keys: core[key] = transpose(core[key]) return Poly(core, vari.dim, vari.shape[::-1], vari.dtype) return numpy.transpose(vari)
def transpose(vari)
Transpose a shapeable quantety. Args: vari (chaospy.poly.base.Poly, numpy.ndarray): Quantety of interest. Returns: (chaospy.poly.base.Poly, numpy.ndarray): Same type as ``vari``. Examples: >>> P = chaospy.reshape(chaospy.prange(4), (2,2)) >>> print(...
4.6151
5.578815
0.827255
samples = numpy.asfarray(samples) assert numpy.all(samples <= 1) and numpy.all(samples >= 0), ( "all samples assumed on interval [0, 1].") if len(samples.shape) == 1: samples = samples.reshape(1, -1) inverse_samples = 1-samples dims = len(samples) if not len(axes): ...
def create_antithetic_variates(samples, axes=())
Generate antithetic variables. Args: samples (numpy.ndarray): The samples, assumed to be on the [0, 1]^D hyper-cube, to be reflected. axes (tuple): Boolean array of which axes to reflect. If This to limit the number of points created in higher dimensi...
3.442806
3.357889
1.025289
core, dim_, shape_, dtype_ = chaospy.poly.constructor.identify_core(core) core, shape = chaospy.poly.constructor.ensure_shape(core, shape, shape_) core, dtype = chaospy.poly.constructor.ensure_dtype(core, dtype, dtype_) core, dim = chaospy.poly.constructor.ensure_dim(core, dim, dim_) # Remove...
def preprocess(core, dim, shape, dtype)
Constructor function for the Poly class.
3.077038
2.960576
1.039338
args = [cleanup(arg) for arg in args] if part is not None: parts, orders = part if numpy.array(orders).size == 1: orders = [int(numpy.array(orders).item())]*len(args) parts = numpy.array(parts).flatten() for i, arg in enumerate(args): m, n = float(p...
def combine(args, part=None)
All linear combination of a list of list. Args: args (numpy.ndarray) : List of input arrays. Components to take linear combination of with `args[i].shape=(N[i], M[i])` where N is to be taken linear combination of and M is static. M[i] is set to 1 if missing. Retur...
3.29407
3.484626
0.945315
def cleanup(arg):
    """Normalize the input to a 2-dimensional numpy array.

    Scalars and 1-D input are promoted to a column of shape
    ``(size, 1)``; 2-D input passes through unchanged; anything of
    higher rank is rejected.

    Args:
        arg (array_like): Value to normalize.

    Returns:
        (numpy.ndarray): Array with exactly two dimensions.

    Raises:
        ValueError: If ``arg`` has more than two dimensions.
    """
    array = numpy.asarray(arg)
    rank_ = len(array.shape)
    if rank_ > 2:
        raise ValueError("shapes must be smaller than 3")
    if rank_ <= 1:
        # Promote scalar/vector input to a single column.
        return array.reshape(array.size, 1)
    return array
3.377416
3.308555
1.020813
def create_grid_samples(order, dim=1):
    """Create samples from a regular grid.

    Args:
        order (int): The order of the grid. Defines the number of samples.
        dim (int): The number of dimensions in the grid.

    Returns:
        (numpy.ndarray): Regular grid with ``shape == (dim, order)``.
    """
    # Equidistant interior points of (0, 1): k/(order+1) for k = 1..order.
    axis = numpy.arange(1, order + 1) / (order + 1.)
    # Full tensor-product grid over `dim` identical axes.
    grid = chaospy.quad.combine([axis] * dim)
    return grid.T
5.953828
6.854823
0.86856
def add(idxi, idxj, dim):
    """Bertran addition: add two single indices in multi-index space.

    Example
    -------
    >>> print(chaospy.bertran.add(3, 3, 1))
    6
    >>> print(chaospy.bertran.add(3, 3, 2))
    10
    """
    # Convert both operands to multi-index form, add component-wise,
    # then map the sum back to single-index notation.
    multi_sum = (
        numpy.array(multi_index(idxi, dim))
        + numpy.array(multi_index(idxj, dim))
    )
    return single_index(multi_sum)
3.458387
6.20377
0.557465
def terms(order, dim):
    """Count the number of polynomials in an expansion.

    Parameters
    ----------
    order : int
        The upper order for the expansion.
    dim : int
        The number of dimensions of the expansion.

    Returns
    -------
    N : int
        The number of terms in an expansion of upper order `order`
        and `dim` dimensions.
    """
    # Multi-indices with total degree <= order in `dim` variables:
    # the binomial coefficient C(order+dim, dim), evaluated exactly.
    return int(scipy.special.comb(order + dim, dim, exact=True))
10.498835
17.09058
0.614305
def _rec(idx, dim): idxn = idxm = 0 if not dim: return () if idx == 0: return (0, )*dim while terms(idxn, dim) <= idx: idxn += 1 idx -= terms(idxn-1, dim) if idx == 0: return (idxn,) + (0,)*(dim-1) while ...
def multi_index(idx, dim)
Single to multi-index using graded reverse lexicographical notation. Parameters ---------- idx : int Index in interger notation dim : int The number of dimensions in the multi-index notation Returns ------- out : tuple Multi-index of `idx` with `len(out)=dim` E...
3.564613
3.809173
0.935797
if stop is None: start, stop = 0, start start = numpy.array(start, dtype=int).flatten() stop = numpy.array(stop, dtype=int).flatten() sort = sort.upper() total = numpy.mgrid[(slice(numpy.max(stop), -1, -1),)*dim] total = numpy.array(total).reshape(dim, -1) if start.size > 1: ...
def bindex(start, stop=None, dim=1, sort="G", cross_truncation=1.)
Generator for creating multi-indices. Args: start (int): The lower order of the indices stop (:py:data:typing.Optional[int]): the maximum shape included. If omitted: stop <- start; start <- 0 If int is provided, set as largest total order. If array of int, ...
2.634143
2.723189
0.967301
def single_index(idxm):
    """Convert a multi-index to single integer notation.

    Uses graded reverse lexicographical notation.

    Parameters
    ----------
    idxm : numpy.ndarray
        Index in multi-index notation.

    Returns
    -------
    idx : int
        Integer index of `idxm`.
    """
    # Indices containing -1 are treated as invalid and map to 0.
    if -1 in idxm:
        return 0
    total_order = int(sum(idxm))
    if total_order == 0:
        return 0
    # Count every term of strictly lower total order, then recurse on
    # the remaining dimensions.
    return terms(total_order - 1, len(idxm)) + single_index(idxm[1:])
5.231637
6.171939
0.847649
def rank(idx, dim):
    """Calculate the index rank according to Bertran's notation.

    The rank equals the number of trailing zeros in the multi-index
    representation of ``idx``.
    """
    indices = multi_index(idx, dim)
    count = 0
    # Count trailing zeros; stop at the first non-zero component.
    for value in reversed(indices):
        if value != 0:
            break
        count += 1
    return count
4.448659
4.485933
0.991691
idxm = multi_index(idx, dim) if axis is None: axis = dim - numpy.argmin(1*(numpy.array(idxm)[::-1] == 0))-1 if not idx: return idx, axis if idxm[axis] == 0: idxi = parent(parent(idx, dim)[0], dim)[0] while child(idxi+1, dim, axis) < idx: idxi += 1 ...
def parent(idx, dim, axis=None)
Parent node according to Bertran's notation. Parameters ---------- idx : int Index of the child node. dim : int Dimensionality of the problem. axis : int Assume axis direction. Returns ------- out : int Index of parent node with `j<=i`, and `j==i` iff `i...
5.029588
5.309032
0.947364
def child(idx, dim, axis):
    """Child node according to Bertran's notation.

    Parameters
    ----------
    idx : int
        Index of the parent node.
    dim : int
        Dimensionality of the problem.
    axis : int
        Dimension direction to define a child.
        Must have `0<=axis<dim`.

    Returns
    -------
    out : int
        Index of the child node.
    """
    bumped = numpy.array(multi_index(idx, dim))
    # A child is the parent multi-index incremented by one along `axis`.
    bumped[axis] += 1
    return single_index(bumped)
7.730153
15.745575
0.490941
idxm = [0]*dim out = [] def _olindex(idx): if numpy.sum(idxm) == order: out.append(idxm[:]) return if idx == dim: return idxm_sum = numpy.sum(idxm) idx_saved = idxm[idx] for idxi in range(order - numpy.sum(idxm) + ...
def olindex(order, dim)
Create an lexiographical sorted basis for a given order. Examples -------- >>> chaospy.bertran.olindex(3, 2) array([[0, 3], [1, 2], [2, 1], [3, 0]])
3.22001
3.936307
0.818028
def olindices(order, dim):
    """Create a lexicographically sorted basis for all orders up to `order`.

    Examples:
        >>> chaospy.bertran.olindices(2, 2)
        array([[0, 0],
               [0, 1],
               [1, 0],
               [0, 2],
               [1, 1],
               [2, 0]])
    """
    # Collect one index set per total order, then stack them vertically.
    per_order = []
    for current_order in range(order + 1):
        per_order.append(olindex(current_order, dim))
    return numpy.vstack(per_order)
4.073516
7.252534
0.561668
core = core.copy() if shape is None: shape = shape_ elif isinstance(shape, int): shape = (shape,) if tuple(shape) == tuple(shape_): return core, shape ones = np.ones(shape, dtype=int) for key, val in core.items(): core[key] = val*ones return core, shap...
def ensure_shape(core, shape, shape_)
Ensure shape is correct.
2.958211
2.848765
1.038419
core = core.copy() if dtype is None: dtype = dtype_ if dtype_ == dtype: return core, dtype for key, val in { int: chaospy.poly.typing.asint, float: chaospy.poly.typing.asfloat, np.float32: chaospy.poly.typing.asfloat, np.float64: cha...
def ensure_dtype(core, dtype, dtype_)
Ensure dtype is correct.
2.844136
2.764337
1.028867
if dim is None: dim = dim_ if not dim: return core, 1 if dim_ == dim: return core, int(dim) if dim > dim_: key_convert = lambda vari: vari[:dim_] else: key_convert = lambda vari: vari + (0,)*(dim-dim_) new_core = {} for key, val in core.items():...
def ensure_dim(core, dim, dim_)
Ensure that dim is correct.
2.831138
2.789147
1.015055
def sort_key(val):
    """Sort key for sorting keys in grevlex order.

    Interprets ``val`` as the digits of a number written in base
    ``max(val) + 1``, most significant digit first.

    NOTE(review): the base depends on ``max(val)``, so keys computed
    from values with different maxima use different bases — confirm
    callers only compare keys built from a common key set.
    """
    base = max(val) + 1
    # Positional weights: base**(len-1), ..., base**1, base**0.
    weights = base ** numpy.arange(len(val) - 1, -1, -1)
    return numpy.sum(weights * val)
9.710356
9.675517
1.003601
def copy(self):
    """Return a copy of the polynomial.

    The ``A`` container is shallow-copied so the new Poly does not
    share it with the original; ``dim``, ``shape`` and ``dtype`` are
    passed through as-is.
    """
    copied_terms = self.A.copy()
    return Poly(copied_terms, self.dim, self.shape, self.dtype)
17.558527
11.360004
1.545644
def coefficients(self):
    """Polynomial coefficients.

    Stacks the coefficient array of every key in ``self.keys`` and
    brings the trailing axis to the front.
    """
    stacked = numpy.array([self.A[key] for key in self.keys])
    # Move the last axis to position 0.
    return numpy.rollaxis(stacked, -1)
5.197267
5.108103
1.017455
shape = poly.shape poly = polynomials.flatten(poly) dim = len(dist) #sample from the inumpyut dist samples = dist.sample(sample, **kws) qoi_dists = [] for i in range(0, len(poly)): #sample the polynomial solution if dim == 1: dataset = poly[i](samples) ...
def QoI_Dist(poly, dist, sample=10000, **kws)
Constructs distributions for the quantity of interests. The function constructs a kernel density estimator (KDE) for each polynomial (poly) by sampling it. With the KDEs, distributions (Dists) are constructed. The Dists can be used for e.g. plotting probability density functions (PDF), or to make a s...
3.640353
3.994255
0.911397
order = numpy.asarray(order, dtype=int).flatten() lower = numpy.asarray(lower).flatten() upper = numpy.asarray(upper).flatten() dim = max(lower.size, upper.size, order.size) order = numpy.ones(dim, dtype=int)*order lower = numpy.ones(dim)*lower upper = numpy.ones(dim)*upper if com...
def quad_gauss_legendre(order, lower=0, upper=1, composite=None)
Generate the quadrature nodes and weights in Gauss-Legendre quadrature. Example: >>> abscissas, weights = quad_gauss_legendre(3) >>> print(numpy.around(abscissas, 4)) [[0.0694 0.33 0.67 0.9306]] >>> print(numpy.around(weights, 4)) [0.1739 0.3261 0.3261 0.1739]
2.474604
2.496356
0.991286
inner = numpy.ones(order+1)*0.5 outer = numpy.arange(order+1)**2 outer = outer/(16*outer-4.) banded = numpy.diag(numpy.sqrt(outer[1:]), k=-1) + numpy.diag(inner) + \ numpy.diag(numpy.sqrt(outer[1:]), k=1) vals, vecs = numpy.linalg.eig(banded) abscis, weight = vals.real, vecs[0...
def _gauss_legendre(order, composite=1)
Backend function.
2.506039
2.494658
1.004562
if len(dist) > 1: if isinstance(order, int): values = [quad_gauss_patterson(order, d) for d in dist] else: values = [quad_gauss_patterson(order[i], dist[i]) for i in range(len(dist))] abscissas = [_[0][0] for _ in values] weights =...
def quad_gauss_patterson(order, dist)
Generate sets abscissas and weights for Gauss-Patterson quadrature. Args: order (int) : The quadrature order. Must be in the interval (0, 8). dist (Dist) : The domain to create quadrature over. Returns: (numpy.ndarray, numpy.ndarray) : Abscissas and weights. Example: >>> X...
2.665665
2.739207
0.973152
from ..distributions.baseclass import Dist isdist = isinstance(domain, Dist) if isdist: dim = len(domain) else: dim = np.array(domain[0]).size rule = rule.lower() if len(rule) == 1: rule = collection.QUAD_SHORT_NAMES[rule] quad_function = collection.get_functio...
def generate_quadrature( order, domain, accuracy=100, sparse=False, rule="C", composite=1, growth=None, part=None, normalize=False, **kws )
Numerical quadrature node and weight generator. Args: order (int): The order of the quadrature. domain (numpy.ndarray, Dist): If array is provided domain is the lower and upper bounds (lo,up). Invalid if gaussian is set. If Dist is provided, bounds and nodes ...
3.362618
3.381628
0.994378
@wraps(func) def caller(*args, **kwargs): logger = logging.getLogger(__name__) instance = func(*args, **kwargs) logger.warning( "Distribution `chaospy.{}` has been renamed to ".format(name) + "`chaospy.{}` and will be deprecated next release.".format...
def deprecation_warning(func, name)
Add a deprecation warning do each distribution.
4.686034
4.255038
1.101291
if poly.dim < len(dist): poly = polynomials.setdim(poly, len(dist)) freeze = polynomials.Poly(freeze) freeze = polynomials.setdim(freeze, len(dist)) keys = freeze.keys if len(keys) == 1 and keys[0] == (0,)*len(dist): freeze = list(freeze.A.values())[0] else: freeze ...
def E_cond(poly, freeze, dist, **kws)
Conditional expected value operator. 1st order statistics of a polynomial on a given probability space conditioned on some of the variables. Args: poly (Poly): Polynomial to find conditional expected value on. freeze (numpy.ndarray): Boolean values defining the cond...
2.856781
3.044801
0.938249
logger = logging.getLogger(__name__) logger.debug("generating random samples using rule %s", rule) rule = rule.upper() if isinstance(domain, int): dim = domain trans = lambda x_data: x_data elif isinstance(domain, (tuple, list, numpy.ndarray)): domain = numpy.asfarray...
def generate_samples(order, domain=1, rule="R", antithetic=None)
Sample generator. Args: order (int): Sample order. Determines the number of samples to create. domain (Dist, int, numpy.ndarray): Defines the space where the samples are generated. If integer is provided, the space ``[0, 1]^domain`` will be used. If array-like ...
3.110925
2.996636
1.038139
def sparse_segment(cords):
    r"""Create a segment of a sparse grid.

    Convert an ol-index to sparse grid coordinates on the
    ``[0, 1]^N`` hyper-cube. A sparse grid of order ``D`` coincides
    with the union of sparse segments where ``||cords||_1 <= D``.

    Args:
        cords (array_like): ol-index defining the segment level along
            each axis.

    Returns:
        (numpy.ndarray): Segment coordinates, one point per row.
    """
    levels = np.array(cords) + 1
    # Odd integer coordinates 1, 3, ..., 2**level - 1 along each axis.
    axis_slices = [slice(1, 2**level + 1, 2) for level in levels]
    mesh = np.mgrid[axis_slices]
    # One row per grid point, one column per axis.
    points = mesh.reshape(len(levels), -1).T
    # Scale each axis by 2**-level to land inside the unit cube.
    return points * 2.0**-levels
4.420599
5.403036
0.81817
abscissas = numpy.asfarray(abscissas) if len(abscissas.shape) == 1: abscissas = abscissas.reshape(1, abscissas.size) dim, size = abscissas.shape order = 1 while chaospy.bertran.terms(order, dim) <= size: order += 1 indices = numpy.array(chaospy.bertran.bindex(0, order-1, d...
def lagrange_polynomial(abscissas, sort="GR")
Create Lagrange polynomials. Args: abscissas (numpy.ndarray): Sample points where the Lagrange polynomials shall be defined. Example: >>> print(chaospy.around(lagrange_polynomial([-10, 10]), 4)) [-0.05q0+0.5, 0.05q0+0.5] >>> print(chaospy.around(lagrange_polynomial(...
2.844051
2.937759
0.968102
samples = numpy.asarray(samples) if lo is None: lo = samples.min() if up is None: up = samples.max() try: #construct the kernel density estimator dist = sample_dist(samples, lo, up) #raised by gaussian_kde if dataset is singular matrix except numpy.linalg.L...
def SampleDist(samples, lo=None, up=None)
Distribution based on samples. Estimates a distribution from the given samples by constructing a kernel density estimator (KDE). Args: samples: Sample values to construction of the KDE lo (float) : Location of lower threshold up (float) : Location of upper threshold ...
4.553177
5.559444
0.818999
size_ = numpy.prod(size, dtype=int) dim = len(self) if dim > 1: if isinstance(size, (tuple,list,numpy.ndarray)): shape = (dim,) + tuple(size) else: shape = (dim, size) else: shape = size out = self.kern...
def sample(self, size=(), rule="R", antithetic=None, verbose=False, **kws)
Overwrite sample() function, because the constructed Dist that is based on the KDE is only working with the random sampling that is given by the KDE itself.
3.318597
3.224782
1.029092
mat_ref = numpy.asfarray(mat) mat = mat_ref.copy() diag_max = numpy.diag(mat).max() assert len(mat.shape) == 2 size = len(mat) hitri = numpy.zeros((size, size)) piv = numpy.arange(size) for idx in range(size): idx_max = numpy.argmax(numpy.diag(mat[idx:, idx:])) + idx ...
def bastos_ohagen(mat, eps=1e-16)
Bastos-O'Hagen algorithm for modified Cholesky decomposition. Args: mat (numpy.ndarray): Input matrix to decompose. Assumed to close to positive definite. eps (float): Tolerance value for the eigenvalues. Values smaller than `tol*numpy.diag(mat).max()` are consid...
2.626387
2.568384
1.022583