sandialabs / pyapprox

Flexible and efficient tools for high-dimensional approximation, scientific machine learning and uncertainty quantification.
https://sandialabs.github.io/pyapprox/

SparseGrid - adaptive_combination_technique test fails on Windows #9

Closed. ConnectedSystems closed this issue 2 years ago.

ConnectedSystems commented 3 years ago

This happens with the latest changes on the main branch.

________________________________________________________________________________ TestAdaptiveSparseGrid.test_adaptive_combination_technique _________________________________________________________________________________

self = <pyapprox.tests.test_sparse_grid.TestAdaptiveSparseGrid testMethod=test_adaptive_combination_technique>

    def test_adaptive_combination_technique(self):
        num_vars = 2
        max_level = 5

        __, __, isotropic_data_structures = get_sparse_grid_samples_and_weights(
            num_vars, max_level, clenshaw_curtis_in_polynomial_order,
            clenshaw_curtis_rule_growth)

        poly_indices = isotropic_data_structures[1]
        #monomial_idx = np.arange(poly_indices.shape[1])
        # for variance computation to be exact form a polynomial whose
        # indices form the half set of the sparse grid polynomial indices
        monomial_idx = []
        for ii in range(poly_indices.shape[1]):
            if poly_indices[:, ii].sum() < max_level:
                monomial_idx.append(ii)
        monomial_idx = np.asarray(monomial_idx)
        monomial_indices = poly_indices[:, monomial_idx]
        monomial_coeffs = np.random.normal(0.0, 1.0, (monomial_idx.shape[0], 1))
        def function(x): return evaluate_monomial(
            monomial_indices, monomial_coeffs, x)
        #function = lambda x: np.sum(x**8,axis=0)[:,np.newaxis]

        num_validation_samples = 1000
        validation_samples = np.random.uniform(
            -1., 1., (num_vars, num_validation_samples))

        validation_values = function(validation_samples)

        max_level_1d = None
        max_num_sparse_grid_samples = None
        error_tol = None
        admissibility_function = partial(
            max_level_admissibility_function, max_level, max_level_1d,
            max_num_sparse_grid_samples, error_tol)
        refinement_indicator = isotropic_refinement_indicator
        refinement_indicator = variance_refinement_indicator

        sparse_grid = CombinationSparseGrid(num_vars)
        sparse_grid.set_refinement_functions(
            refinement_indicator, admissibility_function,
            clenshaw_curtis_rule_growth)
        sparse_grid.set_univariate_rules(
            clenshaw_curtis_in_polynomial_order)
        sparse_grid.set_function(function)
        sparse_grid.build()

        assert (
            len(isotropic_data_structures[0]) == len(
                sparse_grid.poly_indices_dict))
        # assert isotropic_data_structures[0]==data_structures[0] will not work
        # keys will be the same but not idx
        for key in isotropic_data_structures[0]:
>           assert key in sparse_grid.poly_indices_dict
E           AssertionError: assert 1356306153142069452 in {-9018202432134511665: 94, -8983111295672685772: 139, -8826426438489003053: 103, -8543597068801984890: 7, ...}
E            +  where {-9018202432134511665: 94, -8983111295672685772: 139, -8826426438489003053: 103, -8543597068801984890: 7, ...} = <pyapprox.adaptive_sparse_grid.CombinationSparseGrid object at 0x0000029239490AF0>.poly_indices_dict

pyapprox\tests\test_sparse_grid.py:1124: AssertionError
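
For what it's worth, the failing assertion is the membership loop at the end of the test. A small variation on that loop, dropped into the test right before the assertion (it only uses the test's own local variables, so it is just a debugging sketch, not standalone code), counts how many keys are missing instead of stopping at the first one:

    # Same check as the test's final loop, but report the number of missing keys
    # rather than raising on the first mismatch.
    missing = [key for key in isotropic_data_structures[0]
               if key not in sparse_grid.poly_indices_dict]
    print(len(missing), "of", len(isotropic_data_structures[0]), "keys missing")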
ConnectedSystems commented 3 years ago

Can confirm the latest changes on the main branch (b9041080b947b394c52853df6aa12556f61f9592) do not resolve this issue.

Perhaps you already knew or intuited this, but the issue seems to be deeper than any single key being different or missing:

(Pdb) sg_indices = set(sparse_grid.poly_indices_dict.keys())
(Pdb) iso_indices = set(isotropic_data_structures[0].keys())
(Pdb) len(iso_indices)              
145   
(Pdb) len(sg_indices)  
145   
(Pdb) len(sg_indices - iso_indices)
145
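
Since every key differs, a natural follow-up (from the same Pdb session) is to check whether the two grids actually contain the same multi-indices and only the hash-based keys disagree. A rough sketch, assuming the adaptive grid exposes its indices as a poly_indices array of shape (num_vars, num_indices) like the isotropic structures do (that attribute name is an assumption; adjust it if the class stores the indices elsewhere):

(Pdb) iso_cols = {tuple(col) for col in isotropic_data_structures[1].T}
(Pdb) sg_cols = {tuple(col) for col in sparse_grid.poly_indices.T}  # poly_indices attribute is assumed
(Pdb) len(iso_cols & sg_cols), len(iso_cols - sg_cols), len(sg_cols - iso_cols)

If the intersection covers all 145 indices, the adaptive construction agrees with the isotropic one and the problem is confined to how the dictionary keys are computed on Windows; if not, the two constructions genuinely diverge.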