diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index e01e241e..4f35f4a0 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -64,9 +64,9 @@ jobs:
       # https://github.com/sigstore/cosign-installer
       - name: Install cosign
         if: github.event_name != 'pull_request'
-        uses: sigstore/cosign-installer@7e0881f8fe90b25e305bbf0309761e9314607e25
+        uses: sigstore/cosign-installer@f3c664df7af409cb4873aa5068053ba9d61a57b6 #v2.6.0
         with:
-          cosign-release: "v1.9.0"
+          cosign-release: "v1.11.0"

       # Workaround: https://github.com/docker/build-push-action/issues/461
       - name: Setup Docker buildx
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3ac74aa4..9227508b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -31,7 +31,6 @@ jobs:
     # https://github.com/marketplace/actions/setup-miniconda#caching-environments
     steps:
       - uses: actions/checkout@v2
-      - uses: bazelbuild/setup-bazelisk@v2
       - name: Setup Mambaforge
         uses: conda-incubator/setup-miniconda@v2
         with:
@@ -47,42 +46,23 @@ jobs:
       - name: Cache Dependencies
         uses: actions/cache@v2
         with:
-          # Cache the Conda, Bazel, and Pre-commit files
+          # Cache the Conda and Pre-commit files
          path: |
            ${{ env.CONDA }}/envs
            ~/.cache/pre-commit
-            ~/.cache/bazel
-          key: conda-${{ runner.os }}--${{ runner.arch }}--${{ steps.get-date.outputs.today }}-${{ hashFiles('environment.yml') }}-${{ env.CACHE_NUMBER }}
+          key: conda-${{ runner.os }}--${{ runner.arch }}--${{ steps.get-date.outputs.today }}-${{ hashFiles('environment-ci.yml') }}-${{ env.CACHE_NUMBER }}
        env:
-          # Increase this value to reset cache if environment-ci.yml has not changed
+          # Increase this value to reset cache if environment-ci.yml has not changed
          CACHE_NUMBER: 1
        id: cache
      - name: Update environment
-        run: mamba env update -n anaconda-client-env -f environment.yml
+        run: mamba env update -n anaconda-client-env -f environment-ci.yml
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Run all pre-commit checks on the full repo!
        run: |
          pre-commit run --all-files
-      - name: Build and install pyimprint
-        run: |
-          cd imprint
-          ./generate_bazelrc
-          # this is to see if installation via wheel works
-          bazel build -c dbg //python:pyimprint_wheel
-          pip install --no-deps --force-reinstall bazel-bin/python/dist/*.whl
-          # this is to see if editable installation works
-          # the pip install -e is not run until ./install.sh in the next step.
-          bazel build -c dbg //python:pyimprint/core.so
-          # Unfortunately, using a symlink doesn't work here for reasons that I
-          # don't understand
-          # ln -sf ./bazel-bin/python/pyimprint/core.so python/pyimprint/core.so
-          cp ./bazel-bin/python/pyimprint/core.so python/pyimprint/core.so
      - name: Install our libraries
        run: ./install.sh
-      - name: Bazel Test
-        run: |
-          cd imprint
-          bazel test -c dbg //...
      - name: Pytest
        run: |
          pytest .
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 7268d3ed..db776301 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,5 +1,4 @@
 {
-  "bazel-cpp-tools.compileCommands.targets": ["//..."],
   "jupyter.jupyterServerType": "local",
   "files.associations": {
     "functional": "cpp",
diff --git a/confirm/confirm/mini_imprint/grid.py b/confirm/confirm/mini_imprint/grid.py
index 64317646..aac7f54e 100644
--- a/confirm/confirm/mini_imprint/grid.py
+++ b/confirm/confirm/mini_imprint/grid.py
@@ -12,7 +12,7 @@ class HyperPlane:
     """
     A plane defined by:
-    x \cdot n - c = 0
+    x.dot(n) - c = 0

     Sign convention: When used as the boundary between null hypothesis and
     alternative, the normal should point towards the null hypothesis space.

diff --git a/confirm/tests/test_berry.py b/confirm/tests/test_berry.py
index 771d0e90..accd4281 100644
--- a/confirm/tests/test_berry.py
+++ b/confirm/tests/test_berry.py
@@ -173,7 +173,7 @@ def test_inla_properties(method):
     np.testing.assert_allclose(sigma2_integral, 1.0)


-@pytest.mark.parametrize("method", ["jax", "numpy", "cpp"])
+@pytest.mark.parametrize("method", ["jax", "numpy"])
 def test_fast_inla(method, N=10, iterations=1):
     n_i = np.tile(np.array([20, 20, 35, 35]), (N, 1))
     y_i = np.tile(np.array([0, 1, 9, 10], dtype=np.float64), (N, 1))
@@ -251,87 +251,6 @@ def test_fast_inla_same_results(N=1, iterations=1_000):
     )


-def test_py_binomial(n_arms=2, n_theta_1d=16, sim_size=100):
-    """
-    Test against the Imprint accumulation and bound routines.
-    """
-    import pyimprint.grid as grid
-    from pyimprint.bound import TypeIErrorBound
-    from pyimprint.driver import accumulate_process
-    from pyimprint.model.binomial import SimpleSelection
-    from confirm.berrylib.imprint import BerryImprintModel
-
-    n_arm_samples = 35
-    seed = 10
-    # getting an exact match is only possible with n_threads = 1 because
-    # parallelism in the imprint accumulator leads to a different order of random
-    # numbers.
-    n_threads = 1
-
-    # define null hypos
-    null_hypos = []
-    for i in range(n_arms):
-        n = np.zeros(n_arms)
-        # null is:
-        # theta_i <= logit(0.1)
-        # the normal should point towards the negative direction. but that also
-        # means we need to negate the logit(0.1) offset
-        n[i] = -1
-        null_hypos.append(grid.HyperPlane(n, -logit(0.1)))
-    gr = grid.make_cartesian_grid_range(
-        n_theta_1d, np.full(n_arms, -3.5), np.full(n_arms, 1.0), sim_size
-    )
-    gr.create_tiles(null_hypos)
-    gr.prune()
-    n_tiles = gr.n_tiles()
-
-    fi = fast_inla.FastINLA(n_arms=n_arms)
-    b = BerryImprintModel(fi, n_arm_samples, [0.85])
-    acc_o = accumulate_process(b, gr, sim_size, seed, n_threads)
-
-    np.random.seed(seed)
-    samples = np.random.uniform(size=(sim_size, n_arm_samples, n_arms))
-
-    theta_tiles = grid.theta_tiles(gr)
-    nulls = grid.is_null_per_arm(gr)
-
-    accumulator = binomial.binomial_accumulator(fi.rejection_inference)
-    typeI_sum, typeI_score = accumulator(theta_tiles, nulls, samples)
-    assert np.all(typeI_sum.to_py() == acc_o.typeI_sum()[0])
-    np.testing.assert_allclose(
-        typeI_score.to_py(), acc_o.score_sum().reshape(n_tiles, n_arms), 1e-4
-    )
-
-    corners = grid.collect_corners(gr)
-    tile_radii = grid.radii_tiles(gr)
-    sim_sizes = grid.sim_sizes_tiles(gr)
-    total, d0, d0u, d1w, d1uw, d2uw = binomial.upper_bound(
-        theta_tiles,
-        tile_radii,
-        corners,
-        sim_sizes,
-        n_arm_samples,
-        typeI_sum.to_py(),
-        typeI_score.to_py(),
-    )
-
-    delta = 0.025
-    critvals = np.array([0.99])
-    simple_selection_model = SimpleSelection(fi.n_arms, n_arm_samples, 1, critvals)
-    simple_selection_model.critical_values([fi.critical_value])
-
-    ub = TypeIErrorBound()
-    kbs = simple_selection_model.make_imprint_bound_state(gr)
-    ub.create(kbs, acc_o, gr, delta)
-
-    np.testing.assert_allclose(d0, ub.delta_0()[0])
-    np.testing.assert_allclose(d0u, ub.delta_0_u()[0])
-    np.testing.assert_allclose(d1w, ub.delta_1()[0], rtol=1e-05)
-    np.testing.assert_allclose(d1uw, ub.delta_1_u()[0])
-    np.testing.assert_allclose(d2uw, ub.delta_2_u()[0])
-    np.testing.assert_allclose(total, ub.get()[0])
-
-
 def test_rejection_table():
     fi = fast_inla.FastINLA(n_arms=2)
     n = 10
diff --git a/install.sh b/install.sh
index 04508cce..b9466169 100755
--- a/install.sh
+++ b/install.sh
@@ -9,15 +9,8 @@ fi
 # Set up pre-commit so it's fast the first time it gets used
 pre-commit install --install-hooks

-pushd imprint || exit
-./generate_bazelrc
-popd || exit
-
 # List the internal packages that should be available here.
-for PKG in ./confirm ./imprint/python
-do
-    [ -d "$PKG" ] && echo -e "\nDirectory $PKG exists. Installing... \n" && pip install --no-deps -e "$PKG"
-done
+pip install --no-deps -e ./confirm

 # Set up our imprint remote so we can use subtree. On Codespaces, we need
 # https. Locally, we use ssh.