18 changes: 0 additions & 18 deletions docker/test/integration/cluster/ContainerStore.py
@@ -18,8 +18,6 @@
 from .containers.MinifiContainer import MinifiContainer
 from .containers.NifiContainer import NifiContainer
 from .containers.NifiContainer import NiFiOptions
-from .containers.KinesisServerContainer import KinesisServerContainer
-from .containers.S3ServerContainer import S3ServerContainer
 from .containers.AzureStorageServerContainer import AzureStorageServerContainer
 from .containers.HttpProxyContainer import HttpProxyContainer
 from .containers.PostgreSQLServerContainer import PostgreSQLServerContainer
@@ -115,22 +113,6 @@ def acquire_container(self, context, container_name: str, engine='minifi-cpp', c
                                                             network=self.network,
                                                             image_store=self.image_store,
                                                             command=command))
-        elif engine == 's3-server':
-            return self.containers.setdefault(container_name,
-                                              S3ServerContainer(feature_context=feature_context,
-                                                                name=container_name,
-                                                                vols=self.vols,
-                                                                network=self.network,
-                                                                image_store=self.image_store,
-                                                                command=command))
-        elif engine == 'kinesis-server':
-            return self.containers.setdefault(container_name,
-                                              KinesisServerContainer(feature_context=feature_context,
-                                                                     name=container_name,
-                                                                     vols=self.vols,
-                                                                     network=self.network,
-                                                                     image_store=self.image_store,
-                                                                     command=command))
         elif engine == 'azure-storage-server':
             return self.containers.setdefault(container_name,
                                               AzureStorageServerContainer(feature_context=feature_context,
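The two removed branches followed the same registry idiom as every engine that remains. A standalone sketch of that setdefault pattern (the container class and names here are illustrative, not the project's API):

class DummyContainer:
    def __init__(self, name):
        self.name = name


containers = {}


def acquire_container(name):
    # dict.setdefault stores the new container only on first acquisition and
    # returns the cached instance afterwards. Note that the default argument
    # is still constructed on every call, even when the name is already
    # registered; only the store is skipped.
    return containers.setdefault(name, DummyContainer(name))


first = acquire_container("azure-storage-server")
second = acquire_container("azure-storage-server")
assert first is second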
22 changes: 0 additions & 22 deletions docker/test/integration/cluster/DockerTestCluster.py
@@ -25,7 +25,6 @@
 from .ContainerStore import ContainerStore
 from .DockerCommunicator import DockerCommunicator
 from .MinifiControllerExecutor import MinifiControllerExecutor
-from .checkers.AwsChecker import AwsChecker
 from .checkers.AzureChecker import AzureChecker
 from .checkers.PostgresChecker import PostgresChecker
 from .checkers.PrometheusChecker import PrometheusChecker
@@ -40,7 +39,6 @@ def __init__(self, context, feature_id):
         self.vols = {}
         self.container_communicator = DockerCommunicator()
         self.container_store = ContainerStore(self.container_communicator.create_docker_network(feature_id), context.image_store, context.kubernetes_proxy, feature_id=feature_id)
-        self.aws_checker = AwsChecker(self.container_communicator)
         self.azure_checker = AzureChecker(self.container_communicator)
         self.postgres_checker = PostgresChecker(self.container_communicator)
         self.prometheus_checker = PrometheusChecker()
@@ -190,26 +188,6 @@ def check_http_proxy_access(self, container_name, url):
                 and output.count("TCP_MISS") >= output.count("TCP_DENIED"))
                 or output.count("TCP_DENIED") == 0 and "TCP_MISS" in output)
 
-    def check_kinesis_server_record_data(self, container_name, record_data):
-        container_name = self.container_store.get_container_name_with_postfix(container_name)
-        return self.aws_checker.check_kinesis_server_record_data(container_name, record_data)
-
-    def check_s3_server_object_data(self, container_name, test_data):
-        container_name = self.container_store.get_container_name_with_postfix(container_name)
-        return self.aws_checker.check_s3_server_object_data(container_name, test_data)
-
-    def check_s3_server_object_hash(self, container_name: str, expected_file_hash: str):
-        container_name = self.container_store.get_container_name_with_postfix(container_name)
-        return self.aws_checker.check_s3_server_object_hash(container_name, expected_file_hash)
-
-    def check_s3_server_object_metadata(self, container_name, content_type="application/octet-stream", metadata=dict()):
-        container_name = self.container_store.get_container_name_with_postfix(container_name)
-        return self.aws_checker.check_s3_server_object_metadata(container_name, content_type, metadata)
-
-    def is_s3_bucket_empty(self, container_name):
-        container_name = self.container_store.get_container_name_with_postfix(container_name)
-        return self.aws_checker.is_s3_bucket_empty(container_name)
-
     def check_azure_storage_server_data(self, container_name, test_data):
         container_name = self.container_store.get_container_name_with_postfix(container_name)
         return self.azure_checker.check_azure_storage_server_data(container_name, test_data)
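Each wrapper deleted here, like the Azure one that remains, has the same two-line shape: resolve the feature-specific container name, then delegate to a checker. A minimal sketch of that resolution step, assuming the postfix is a per-feature suffix (the real get_container_name_with_postfix is not shown in this diff):

class ContainerStoreSketch:
    # Stand-in for the real ContainerStore; only the name resolution used by
    # the deleted wrappers is sketched here.
    def __init__(self, feature_id):
        self.feature_id = feature_id

    def get_container_name_with_postfix(self, container_name):
        # Assumption: the postfix keys containers to one test feature so that
        # concurrently running features do not collide.
        return f"{container_name}-{self.feature_id}"


store = ContainerStoreSketch("feature-42")
assert store.get_container_name_with_postfix("s3-server") == "s3-server-feature-42"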
5 changes: 0 additions & 5 deletions docker/test/integration/cluster/ImageStore.py
@@ -65,8 +65,6 @@ def get_image(self, container_engine):
             image = self.__build_postgresql_server_image()
         elif container_engine == "mqtt-broker":
             image = self.__build_mqtt_broker_image()
-        elif container_engine == "kinesis-server":
-            image = self.__build_kinesis_image()
         else:
             raise Exception("There is no associated image for " + container_engine)
 
@@ -288,9 +286,6 @@ def __build_mqtt_broker_image(self):
 
         return self.__build_image(dockerfile)
 
-    def __build_kinesis_image(self):
-        return self.__build_image_by_path(self.test_dir + "/resources/kinesis-mock", 'kinesis-server')
-
     def __build_image(self, dockerfile, context_files=[]):
         conf_dockerfile_buffer = BytesIO()
         docker_context_buffer = BytesIO()
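The deleted __build_kinesis_image built the kinesis-mock image from a resource directory via __build_image_by_path, whose body is not shown here. A hedged sketch of what building an image from a directory looks like with the docker SDK (the path and tag come from the deleted call; the implementation itself is an assumption):

import docker


def build_image_by_path(path, tag):
    # Build an image from a directory containing a Dockerfile, roughly what
    # __build_image_by_path presumably wraps.
    client = docker.from_env()
    image, _build_logs = client.images.build(path=path, tag=tag, rm=True)
    return image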
66 changes: 0 additions & 66 deletions docker/test/integration/cluster/checkers/AwsChecker.py

This file was deleted.
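Although the 66 deleted lines are not shown in this view, the checker's public surface is still visible from the wrappers removed from DockerTestCluster.py above. A reconstruction of that interface, with signatures taken from the call sites and bodies stubbed:

class AwsChecker:
    def __init__(self, container_communicator):
        # Constructed with the DockerCommunicator, per the removed line in
        # DockerTestCluster.__init__.
        self.container_communicator = container_communicator

    def check_kinesis_server_record_data(self, container_name, record_data):
        raise NotImplementedError

    def check_s3_server_object_data(self, container_name, test_data):
        raise NotImplementedError

    def check_s3_server_object_hash(self, container_name, expected_file_hash):
        raise NotImplementedError

    # content_type and metadata defaults mirror the removed wrapper's
    # signature in DockerTestCluster.py.
    def check_s3_server_object_metadata(self, container_name, content_type="application/octet-stream", metadata=dict()):
        raise NotImplementedError

    def is_s3_bucket_empty(self, container_name):
        raise NotImplementedError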

docker/test/integration/cluster/containers/KinesisServerContainer.py

This file was deleted.

40 changes: 0 additions & 40 deletions docker/test/integration/cluster/containers/S3ServerContainer.py

This file was deleted.
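Only the constructor keywords of this class survive in the diff, in the removed acquire_container branch of ContainerStore.py. A sketch of that visible shape; the base class here is a stand-in stub, not the project's real container base:

class ContainerStub:
    # Stand-in base so the sketch is self-contained.
    def __init__(self, feature_context, name, vols, network, image_store, command):
        self.feature_context = feature_context
        self.name = name
        self.vols = vols
        self.network = network
        self.image_store = image_store
        self.command = command


class S3ServerContainer(ContainerStub):
    # The real 40 deleted lines presumably configured and launched the mock
    # S3 service; none of that body is visible in this view.
    pass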

15 changes: 0 additions & 15 deletions docker/test/integration/features/MiNiFi_integration_test_driver.py
@@ -275,21 +275,6 @@ def __validate(self, validator):
         assert not self.cluster.segfault_happened() or self.cluster.log_app_output()
         assert validator.validate() or self.cluster.log_app_output()
 
-    def check_kinesis_server_record_data(self, kinesis_container_name, record_data):
-        assert self.cluster.check_kinesis_server_record_data(kinesis_container_name, record_data) or self.cluster.log_app_output()
-
-    def check_s3_server_object_data(self, s3_container_name, object_data):
-        assert self.cluster.check_s3_server_object_data(s3_container_name, object_data) or self.cluster.log_app_output()
-
-    def check_s3_server_large_object_data(self, s3_container_name: str):
-        assert self.cluster.check_s3_server_object_hash(s3_container_name, self.test_file_hash) or self.cluster.log_app_output()
-
-    def check_s3_server_object_metadata(self, s3_container_name, content_type):
-        assert self.cluster.check_s3_server_object_metadata(s3_container_name, content_type) or self.cluster.log_app_output()
-
-    def check_empty_s3_bucket(self, s3_container_name):
-        assert self.cluster.is_s3_bucket_empty(s3_container_name) or self.cluster.log_app_output()
-
     def check_http_proxy_access(self, http_proxy_container_name, url):
         assert self.cluster.check_http_proxy_access(http_proxy_container_name, url) or self.cluster.log_app_output()
 
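Every deleted check reused the assert-or-log idiom of the surviving methods: assert check(...) or self.cluster.log_app_output(). A standalone illustration of why that works, assuming log_app_output returns a falsy value (it appears to be used purely for its logging side effect):

def failing_check():
    return False


def log_app_output():
    # Stands in for cluster.log_app_output(); it logs and implicitly returns
    # None, which is falsy, so the assertion below still fails after logging.
    print("dumping application output for diagnosis...")


try:
    # Short-circuit evaluation: log_app_output() only runs when the check is
    # falsy, i.e. exactly when the diagnostics are needed.
    assert failing_check() or log_app_output()
except AssertionError:
    print("check failed; logs were emitted before the failure")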
40 changes: 0 additions & 40 deletions docker/test/integration/features/steps/steps.py
@@ -120,11 +120,8 @@ def step_impl(context, processor_type, minifi_container_name):
 
 
 @given("a {processor_type} processor")
-@given("a {processor_type} processor set up to communicate with an s3 server")
-@given("a {processor_type} processor set up to communicate with the same s3 server")
 @given("a {processor_type} processor set up to communicate with an Azure blob storage")
 @given("a {processor_type} processor set up to communicate with an MQTT broker instance")
-@given("a {processor_type} processor set up to communicate with the kinesis server")
 def step_impl(context, processor_type):
     __create_processor(context, processor_type, processor_type, None, None, "minifi-cpp-flow")
 
@@ -499,18 +496,6 @@ def step_impl(context):
     context.test.start('mqtt-broker')
 
 
-# s3 setup
-@given("a s3 server is set up in correspondence with the PutS3Object")
-@given("a s3 server is set up in correspondence with the DeleteS3Object")
-def step_impl(context):
-    context.test.acquire_container(context=context, name="s3-server", engine="s3-server")
-
-
-@given("a kinesis server is set up in correspondence with the PutKinesisStream")
-def step_impl(context):
-    context.test.acquire_container(context=context, name="kinesis-server", engine="kinesis-server")
-
-
 # azure storage setup
 @given("an Azure storage server is set up")
 def step_impl(context):
@@ -730,31 +715,6 @@ def step_impl(context, url):
     context.test.check_http_proxy_access('http-proxy', url)
 
 
-@then("there is a record on the kinesis server with \"{record_data}\"")
-def step_impl(context, record_data):
-    context.test.check_kinesis_server_record_data("kinesis-server", record_data)
-
-
-@then("the object on the s3 server is \"{object_data}\"")
-def step_impl(context, object_data):
-    context.test.check_s3_server_object_data("s3-server", object_data)
-
-
-@then("the object on the s3 server is present and matches the original hash")
-def step_impl(context):
-    context.test.check_s3_server_large_object_data("s3-server")
-
-
-@then("the object content type on the s3 server is \"{content_type}\" and the object metadata matches use metadata")
-def step_impl(context, content_type):
-    context.test.check_s3_server_object_metadata("s3-server", content_type)
-
-
-@then("the object bucket on the s3 server is empty")
-def step_impl(context):
-    context.test.check_empty_s3_bucket("s3-server")
-
-
 # Azure
 @when("test blob \"{blob_name}\" with the content \"{content}\" is created on Azure blob storage")
 def step_impl(context, blob_name, content):
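The step aliases removed in the first hunk above were stacked decorators on a single implementation; behave permits this, so deleting one alias leaves the function and its other step sentences intact. A minimal runnable sketch of the pattern (step text illustrative):

from behave import given


@given("a {processor_type} processor")
@given("a {processor_type} processor set up to communicate with an MQTT broker instance")
def step_impl(context, processor_type):
    # Both step sentences bind to this one implementation; this PR removes
    # the s3 and kinesis aliases the same way, without touching the body.
    context.processor_type = processor_type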
42 changes: 0 additions & 42 deletions docker/test/integration/minifi/processors/DeleteS3Object.py

This file was deleted.
