fix indices vit #70

Merged: 5 commits, merged on Aug 5, 2024
5 changes: 4 additions & 1 deletion terratorch/models/backbones/prithvi_vit.py
@@ -103,7 +103,10 @@ def checkpoint_filter_wrapper_fn(state_dict, model):
     kwargs = {k: v for k, v in kwargs.items() if k != "out_indices"}
     model.feature_info = FeatureInfo(model.feature_info, out_indices)
     model.encode_decode_forward = model.forward
-    model.forward = model.forward_features
+    def forward_filter_indices(*args, **kwargs):
+        features = model.forward_features(*args, **kwargs)
+        return [features[i] for i in out_indices]
+    model.forward = forward_filter_indices
     model.model_bands = model_bands
     model.pretrained_bands = pretrained_bands

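The gist of the backbone change: `forward` previously aliased `forward_features`, which returns the embeddings from every transformer block, so `features_only=True` callers received all blocks regardless of `out_indices`. The new `forward_filter_indices` wrapper keeps only the requested entries. Below is a minimal standalone sketch of the same monkey-patching pattern; the `ToyBackbone` class and its sizes are illustrative stand-ins, not terratorch code.

```python
import torch
from torch import nn


class ToyBackbone(nn.Module):
    """Toy stand-in for a ViT whose forward_features returns one tensor per block."""

    def __init__(self, num_blocks: int = 12, dim: int = 8):
        super().__init__()
        self.blocks = nn.ModuleList(nn.Linear(dim, dim) for _ in range(num_blocks))

    def forward_features(self, x):
        features = []
        for block in self.blocks:
            x = block(x)
            features.append(x)
        return features  # one entry per block


def patch_forward_with_out_indices(model, out_indices):
    """Replace model.forward so it returns only the features at out_indices."""

    def forward_filter_indices(*args, **kwargs):
        features = model.forward_features(*args, **kwargs)
        return [features[i] for i in out_indices]

    model.forward = forward_filter_indices
    return model


backbone = patch_forward_with_out_indices(ToyBackbone(), out_indices=[2, 4, 8, 10])
outputs = backbone(torch.randn(1, 8))
assert len(outputs) == 4  # one tensor per requested block index
```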
1 change: 0 additions & 1 deletion tests/manufactured-finetune_prithvi_vit_100.yaml
@@ -111,7 +111,6 @@ model:
         - NIR_NARROW
         - SWIR_1
         - SWIR_2
-      num_frames: 1
       head_dropout: 0.5708022831486758
       head_final_act: torch.nn.ReLU
       head_learned_upscale_layers: 2
1 change: 0 additions & 1 deletion tests/manufactured-finetune_prithvi_vit_300.yaml
@@ -111,7 +111,6 @@ model:
         - NIR_NARROW
         - SWIR_1
         - SWIR_2
-      num_frames: 1
       head_dropout: 0.5708022831486758
       head_final_act: torch.nn.ReLU
       head_learned_upscale_layers: 2
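These manufactured configs are exercised by the finetuning tests further down, which drive the Lightning CLI programmatically. A hedged sketch of that invocation, assuming the repository root as the working directory (the config path below is one of the files edited in this PR):

```python
from terratorch.cli_tools import build_lightning_cli

# Run the `fit` subcommand against the manufactured config, exactly as the tests do.
command_list = ["fit", "-c", "tests/manufactured-finetune_prithvi_vit_100.yaml"]
_ = build_lightning_cli(command_list)
```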
20 changes: 15 additions & 5 deletions tests/test_backbones.py
@@ -1,11 +1,13 @@
 # Copyright contributors to the Terratorch project
 
+import importlib
+import os
+
 import pytest
 import timm
 import torch
-import importlib
 
 import terratorch  # noqa: F401
-import os
 
 NUM_CHANNELS = 6
 NUM_FRAMES = 3
@@ -52,6 +54,14 @@ def test_vit_models_accept_multitemporal(model_name, input_224_multitemporal):
     backbone = timm.create_model(model_name, pretrained=False, num_frames=NUM_FRAMES)
     backbone(input_224_multitemporal)
 
-#def test_swin_models_accept_non_divisible_by_patch_size(input_386):
-#    backbone = timm.create_model("prithvi_swin_90_us", pretrained=False, num_frames=NUM_FRAMES)
-#    backbone(input_386)
+@pytest.mark.parametrize("model_name", ["prithvi_vit_100", "prithvi_vit_300"])
+def test_out_indices(model_name, input_224):
+    out_indices = [2, 4, 8, 10]
+    backbone = timm.create_model(model_name, pretrained=False, features_only=True, out_indices=out_indices)
+    assert backbone.feature_info.out_indices == out_indices
+
+    output = backbone(input_224)
+    full_output = backbone.forward_features(input_224)
+
+    for filtered_index, full_index in enumerate(out_indices):
+        assert torch.allclose(full_output[full_index], output[filtered_index])
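What the new test pins down, restated as a usage sketch: with `features_only=True`, the backbone output is a list with one tensor per entry in `out_indices`, each matching the corresponding entry of `forward_features`. The snippet below repeats that contract outside pytest; the input shape is an assumption based on the 6-band, 224×224 fixtures the test module uses.

```python
import timm
import torch

import terratorch  # noqa: F401  # importing terratorch registers the prithvi_* backbones with timm

out_indices = [2, 4, 8, 10]
backbone = timm.create_model(
    "prithvi_vit_100", pretrained=False, features_only=True, out_indices=out_indices
)

x = torch.ones(1, 6, 224, 224)              # assumed fixture shape: batch of one, NUM_CHANNELS = 6
filtered = backbone(x)                      # only the requested blocks
everything = backbone.forward_features(x)   # all blocks

assert len(filtered) == len(out_indices)
assert all(torch.allclose(everything[j], filtered[i]) for i, j in enumerate(out_indices))
```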
55 changes: 15 additions & 40 deletions tests/test_finetune.py
@@ -1,64 +1,39 @@
+import os
+import shutil
+
 import pytest
 import timm
 import torch
-import importlib
-import terratorch
-import subprocess
-import os
 
 from terratorch.cli_tools import build_lightning_cli
 
-@pytest.mark.parametrize("model_name", ["prithvi_swin_B", "prithvi_swin_L", "prithvi_vit_100", "prithvi_vit_300"])
-def test_finetune_multiple_backbones(model_name):
-
+
+@pytest.fixture(autouse=True)
+def setup_and_cleanup(model_name):
     model_instance = timm.create_model(model_name)
-    pretrained_bands = [0, 1, 2, 3, 4, 5]
-    model_bands = [0, 1, 2, 3, 4, 5]
 
     state_dict = model_instance.state_dict()
 
-    torch.save(state_dict, os.path.join("tests/", model_name + ".pt"))
+    torch.save(state_dict, os.path.join("tests", model_name + ".pt"))
 
-    # Running the terratorch CLI
+    yield  # everything after this runs after each test
+
+    os.remove(os.path.join("tests", model_name + ".pt"))
+    shutil.rmtree(os.path.join("tests", "all_ecos_random"))
+
+@pytest.mark.parametrize("model_name", ["prithvi_swin_B", "prithvi_swin_L", "prithvi_vit_100", "prithvi_vit_300"])
+def test_finetune_multiple_backbones(model_name):
     command_list = ["fit", "-c", f"tests/manufactured-finetune_{model_name}.yaml"]
     _ = build_lightning_cli(command_list)
 
 
 @pytest.mark.parametrize("model_name", ["prithvi_swin_B"])
 def test_finetune_bands_intervals(model_name):
-
-    model_instance = timm.create_model(model_name)
-
-    state_dict = model_instance.state_dict()
-
-    torch.save(state_dict, os.path.join("tests/", model_name + ".pt"))
-
-    # Running the terratorch CLI
     command_list = ["fit", "-c", f"tests/manufactured-finetune_{model_name}_band_interval.yaml"]
     _ = build_lightning_cli(command_list)
 
-@pytest.mark.parametrize("model_name", ["prithvi_swin_B"])
-def test_finetune_bands_str(model_name):
-
-    model_instance = timm.create_model(model_name)
-
-    state_dict = model_instance.state_dict()
-
-    torch.save(state_dict, os.path.join("tests/", model_name + ".pt"))
-
-    # Running the terratorch CLI
-    command_list = ["fit", "-c", f"tests/manufactured-finetune_{model_name}_string.yaml"]
-    _ = build_lightning_cli(command_list)
-
 @pytest.mark.parametrize("model_name", ["prithvi_swin_B"])
 def test_finetune_bands_str(model_name):
-
-    model_instance = timm.create_model(model_name)
-
-    state_dict = model_instance.state_dict()
-
-    torch.save(state_dict, os.path.join("tests/", model_name + ".pt"))
-
-    # Running the terratorch CLI
-    command_list = ["fit", "-c", f"tests/manufactured-finetune_{model_name}_metrics_from_file.yaml"]
+    command_list = ["fit", "-c", f"tests/manufactured-finetune_{model_name}_string.yaml"]
    _ = build_lightning_cli(command_list)

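A note on the test_finetune.py refactor: the autouse `setup_and_cleanup` fixture declares `model_name` as an argument, and pytest resolves it from each test's `@pytest.mark.parametrize`, so the checkpoint setup and the cleanup run once per parametrized case. A minimal self-contained sketch of that mechanism (the names here are illustrative, not terratorch code):

```python
import pytest


@pytest.fixture(autouse=True)
def setup_and_cleanup(model_name):
    # Runs before each test; model_name is resolved from the test's parametrize marker.
    artifact = f"{model_name}.pt"
    print(f"setup: would create {artifact}")
    yield  # everything after this line runs after the test finishes
    print(f"cleanup: would remove {artifact}")


@pytest.mark.parametrize("model_name", ["backbone_a", "backbone_b"])
def test_uses_artifact(model_name):
    assert model_name.startswith("backbone")
```

Because the fixture is autouse and requires `model_name`, every test collected in the module has to provide that parameter, which is the case for the tests in the refactored file.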