Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:
branches: ["**"]
schedule:
# IMPORTANT: For scheduled job we execute AWS_S3
- cron: '0 23 * * 0,1,2,3,4' # Start previous day at 23:00 to finish next day
- cron: '0 1 * * 1,2,3,4' # Mon-Thu at 1 am
workflow_dispatch:
inputs:
persistent_storage:
Expand Down Expand Up @@ -69,9 +69,9 @@ jobs:
_storage='not_scheduled'
day=$(date +'%a')
echo "Today is $day"
if [[ "$day" == "Sun" || "$day" == "Tue" || "$day" == "Thu" ]]; then
if [[ "$day" == "Mon" || "$day" == "Wed" ]]; then
_storage='AWS_S3'
elif [[ "$day" == "Mon" || "$day" == "Wed" ]]; then
elif [[ "$day" == "Tue" || "$day" == "Thu" ]]; then
_storage='GCPXML'
else
echo "UNSPECIFIED RESULT for this day (assumed LMDB)"
Expand Down
1 change: 0 additions & 1 deletion .github/workflows/build_steps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -400,7 +400,6 @@ jobs:

- name: Run test
run: |
ulimit -a
if [[ "$(echo "$ARCTICDB_PYTEST_ARGS" | xargs)" == pytest* ]]; then
python -m pip install pytest-repeat setuptools wheel
python setup.py protoc --build-lib python
Expand Down
22 changes: 0 additions & 22 deletions python/arcticdb/storage_fixtures/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,6 @@
import botocore.exceptions
from moto.server import DomainDispatcherApplication, create_backend_app

from arcticdb.util.utils import get_logger

from .api import *
from .utils import (
get_ephemeral_port,
Expand All @@ -40,7 +38,6 @@
from arcticc.pb2.storage_pb2 import EnvironmentConfigsMap
from arcticdb.version_store.helper import add_gcp_library_to_env, add_s3_library_to_env
from arcticdb_ext.storage import AWSAuthMethod, NativeVariantStorage, GCPXMLSettings as NativeGCPXMLSettings
from arcticdb_ext.tools import S3Tool

# All storage client libraries to be imported on-demand to speed up start-up of ad-hoc test runs

Expand Down Expand Up @@ -116,8 +113,6 @@ def __init__(
def __exit__(self, exc_type, exc_value, traceback):
if self.factory.clean_bucket_on_fixture_exit:
self.factory.cleanup_bucket(self)
if len(self.libs_from_factory) > 0:
get_logger().warning(f"Libraries not cleared remaining {self.libs_from_factory.keys()}")

def create_test_cfg(self, lib_name: str) -> EnvironmentConfigsMap:
cfg = EnvironmentConfigsMap()
Expand Down Expand Up @@ -177,21 +172,6 @@ def copy_underlying_objects_to(self, destination: "S3Bucket"):
for key in self.iter_underlying_object_names():
dest.copy({"Bucket": self.bucket, "Key": key}, key, SourceClient=source_client)

def check_bucket(self, assert_on_fail = True):
s3_tool = S3Tool(self.bucket, self.factory.default_key.id,
self.factory.default_key.secret, self.factory.endpoint)
content = s3_tool.list_bucket(self.bucket)

logger.warning(f"Total objects left: {len(content)}")
logger.warning(f"First 100: {content[0:100]}")
logger.warning(f"BUCKET: {self.bucket}")
left_from = set()
for key in content:
library_name = key.split("/")[1] # get the name from object
left_from.add(library_name)
logger.warning(f"Left overs from libraries: {left_from}")
if assert_on_fail:
assert len(content) < 1

class NfsS3Bucket(S3Bucket):
def create_test_cfg(self, lib_name: str) -> EnvironmentConfigsMap:
Expand Down Expand Up @@ -321,7 +301,6 @@ def cleanup_bucket(self, b: S3Bucket):
# We are not writing to buckets in this case
# and if we try to delete the bucket, it will fail
b.slow_cleanup(failure_consequence="The following delete bucket call will also fail. ")
b.check_bucket(assert_on_fail=True)


class BaseGCPStorageFixtureFactory(StorageFixtureFactory):
Expand Down Expand Up @@ -356,7 +335,6 @@ def cleanup_bucket(self, b: GcpS3Bucket):
# We are not writing to buckets in this case
# and if we try to delete the bucket, it will fail
b.slow_cleanup(failure_consequence="The following delete bucket call will also fail. ")
b.check_bucket(assert_on_fail=True)


def real_s3_from_environment_variables(
Expand Down
10 changes: 1 addition & 9 deletions python/tests/compat/arcticdb/test_lib_naming.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,17 +38,9 @@ def test_create_library_with_all_chars(arctic_client_v1, prefix, suffix):
assert all(name in result for name in created_libraries)
finally:
logger.info("Delete started")
failed_to_delete = []
for cnt, lib in enumerate(created_libraries):
logger.info(f"Deletion: {cnt}/{len(created_libraries)} lib_name [{repr(lib)}] ")
try:
ac.delete_library(lib)
except Exception as e:
try:
ac.delete_library(lib)
except Exception as e:
failed_to_delete.append(lib)
assert len(failed_to_delete) < 1, f"Following libraries failed to delete: {failed_to_delete}"
ac.delete_library(lib)
logger.info("Delete ended")

assert not failed, "There is at least one failure look at the result"
Expand Down
Loading
Loading