Skip to content

Draft: Fix py3 tests #634

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 18 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion base/redhat-8/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ microdnf remove -y make gcc openssl-devel bzip2-devel findutils glib2-devel glib
microdnf clean all

# Install busybox direct from the multiarch since EPEL isn't available yet for redhat8
BUSYBOX_URL=${BUSYBOX_URL:=https://busybox.net/downloads/binaries/1.35.0-`arch`-linux-musl/busybox}
BUSYBOX_URL=${BUSYBOX_URL:=https://busybox.net/downloads/binaries/1.35.0-x86_64-linux-musl/busybox}
wget -O /bin/busybox ${BUSYBOX_URL}
chmod +x /bin/busybox

Expand Down
19 changes: 19 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,24 @@
import pytest


#def pytest_configure(config):
# # register your new marker to avoid warnings
# config.addinivalue_line(
# "markers",
# "product: specify a test key"
# )

#def pytest_collection_modifyitems(config, items):
# filter = config.getoption("--product")
# if filter:
# new_items = []
# for item in items:
# mark = item.get_closest_marker("key")
# if mark and mark.args and mark.args[0] == filter:
# # collect all items that have a key marker with that value
# new_items.append(item)
# items[:] = new_items

def pytest_addoption(parser):
    """Register custom command-line options for this test suite.

    Adds ``--platform``, which selects which platform of images the
    tests run against (default: ``debian-9``).

    :param parser: the pytest option parser passed in by the
        ``pytest_addoption`` hook.
    """
    # Fix: help text previously read "run tests again" instead of "against".
    parser.addoption("--platform", default="debian-9", action="store", help="Define which platform of images to run tests against (default: debian-9)")
    # parser.addoption("--product", default="all", action="store", help="Define which tests to run. Values can be splunk, uf, or all (default: all - Splunk and UF)")
46 changes: 34 additions & 12 deletions tests/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,7 @@
from string import ascii_lowercase
# Code to suppress insecure https warnings
import urllib3
from urllib3.exceptions import InsecureRequestWarning, SubjectAltNameWarning
urllib3.disable_warnings(InsecureRequestWarning)
urllib3.disable_warnings(SubjectAltNameWarning)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


# Define variables
Expand All @@ -30,7 +28,7 @@
# Setup logging
LOGGER = logging.getLogger("docker-splunk")
LOGGER.setLevel(logging.INFO)
file_handler = logging.handlers.RotatingFileHandler(os.path.join(FILE_DIR, "..", "test-results", "docker_splunk_test_python{}.log".format(sys.version_info[0])), maxBytes=25000000)
file_handler = logging.handlers.RotatingFileHandler("./docker_splunk_tests.log", maxBytes=25000000)
formatter = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] [%(process)d] %(message)s')
file_handler.setFormatter(formatter)
LOGGER.addHandler(file_handler)
Expand Down Expand Up @@ -101,9 +99,9 @@ def get_container_logs(self, container_id):
stream = self.client.logs(container_id, stream=True)
output = ""
for char in stream:
if "Ansible playbook complete" in char:
if "Ansible playbook complete" in char.decode():
break
output += char
output += char.decode()
return output

def cleanup_files(self, files):
Expand All @@ -116,7 +114,7 @@ def cleanup_files(self, files):
raise e

def _clean_docker_env(self):
# Remove anything spun up by docker-compose
# Remove anything spun up by docker compose
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
for container in containers:
self.client.remove_container(container["Id"], v=True, force=True)
Expand Down Expand Up @@ -147,9 +145,12 @@ def wait_for_containers(self, count, label=None, name=None, timeout=500):
for container in containers:
# The healthcheck on our Splunk image is not reliable - resorting to checking logs
if container.get("Labels", {}).get("maintainer") == "[email protected]":
output = self.client.logs(container["Id"], tail=5)
output = self.client.logs(container["Id"], tail=5).decode()
self.logger.info("DEBUG: Check the tupe of output - {}".format(type(output)))
if "unable to" in output or "denied" in output or "splunkd.pid file is unreadable" in output:
self.logger.error("Container {} did not start properly, last log line: {}".format(container["Names"][0], output))
print("SCRIPT FAILS TO CREATE CONTAINER")
sys.exit(1)
elif "Ansible playbook complete" in output:
self.logger.info("Container {} is ready".format(container["Names"][0]))
healthy_count += 1
Expand Down Expand Up @@ -223,15 +224,24 @@ def _run_splunk_query(self, container_id, query, username="admin", password="pas

def compose_up(self, defaults_url=None, apps_url=None):
container_count = self.get_number_of_containers(os.path.join(self.SCENARIOS_DIR, self.compose_file_name))
command = "docker-compose -p {} -f test_scenarios/{} up -d".format(self.project_name, self.compose_file_name)
command = "docker compose -p {} -f test_scenarios/{} up -d".format(self.project_name, self.compose_file_name)
out, err, rc = self._run_command(command, defaults_url, apps_url)
return container_count, rc

def extract_json(self, container_name):
retries = 15
for i in range(retries):
print("DEBUG: EXTRACT JSON")
import time
print("sleeping now for 10; check if docker container exists")
self.logger.info("sleeping now for 10; check if docker container exists")
os.system("echo '----------- START -----------'")
os.system("docker ps -a")
os.system("docker logs {}".format(container_name))
os.system("echo '----------- END -----------'")
exec_command = self.client.exec_create(container_name, "cat /opt/container_artifact/ansible_inventory.json")
json_data = self.client.exec_start(exec_command)
print("collect exec command: {}".format(exec_command))
json_data = self.client.exec_start(exec_command).decode()
if "No such file or directory" in json_data:
time.sleep(5)
else:
Expand Down Expand Up @@ -270,16 +280,28 @@ def _run_command(self, command, defaults_url=None, apps_url=None):
env["SPLUNK_DEFAULTS_URL"] = defaults_url
if apps_url:
env["SPLUNK_APPS_URL"] = apps_url
proc = subprocess.Popen(sh, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
self.logger.info("os.system attempt - {}; SKIPPED".format(command))
#os.system(command)
self.logger.info("execute command vis subprocess;")
proc = subprocess.Popen(sh, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, text=True)
self.logger.info("PROC created")
lines = []
err_lines = []
self.logger.info("START RECORDING STDOUT")
for line in iter(proc.stdout.readline, ''):
lines.append(line)
self.logger.info(line)
lines.append("out: {}".format(line))
self.logger.info("START RECORDING STDERR")
for line in iter(proc.stderr.readline, ''):
self.logger.info("err: {}".format(line))
err_lines.append(line)
self.logger.info("PROC close stdout")
proc.stdout.close()
self.logger.info("PROC close stdout")
proc.stderr.close()
self.logger.info("PROC WAIT")
proc.wait()
self.logger.info("Done with proc")
out = "".join(lines)
self.logger.info("STDOUT: %s" % out)
err = "".join(err_lines)
Expand Down
12 changes: 6 additions & 6 deletions tests/test_distributed_splunk_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@ def setup_method(self, method):
def teardown_method(self, method):
if self.compose_file_name and self.project_name:
if self.DIR:
command = "docker-compose -p {} -f {} down --volumes --remove-orphans".format(self.project_name, os.path.join(self.DIR, self.compose_file_name))
command = "docker compose -p {} -f {} down --volumes --remove-orphans".format(self.project_name, os.path.join(self.DIR, self.compose_file_name))
else:
command = "docker-compose -p {} -f test_scenarios/{} down --volumes --remove-orphans".format(self.project_name, self.compose_file_name)
command = "docker compose -p {} -f test_scenarios/{} down --volumes --remove-orphans".format(self.project_name, self.compose_file_name)
out, err, rc = self._run_command(command)
self._clean_docker_env()
if self.DIR:
Expand Down Expand Up @@ -241,7 +241,7 @@ def test_compose_1uf1so(self):
raise e
# Search results won't return the correct results immediately :(
time.sleep(30)
search_providers, distinct_hosts = self.search_internal_distinct_hosts("{}_so1_1".format(self.project_name), password=self.password)
search_providers, distinct_hosts = self.search_internal_distinct_hosts("{}-so1-1".format(self.project_name), password=self.password)
assert len(search_providers) == 1
assert search_providers[0] == "so1"
assert distinct_hosts == 2
Expand Down Expand Up @@ -842,7 +842,7 @@ def test_compose_1deployment1so(self):
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"{}_so1_1".format(self.project_name): "so", "{}_depserver1_1".format(self.project_name): "deployment_server"}
container_mapping = {"{}-so1-1".format(self.project_name): "so", "{}_depserver1_1".format(self.project_name): "deployment_server"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs(container)
Expand Down Expand Up @@ -929,7 +929,7 @@ def test_compose_1deployment1uf(self):
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"{}_uf1_1".format(self.project_name): "uf", "{}_depserver1_1".format(self.project_name): "deployment_server"}
container_mapping = {"{}-uf1-1".format(self.project_name): "uf", "{}_depserver1_1".format(self.project_name): "deployment_server"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs(container)
Expand Down Expand Up @@ -1104,4 +1104,4 @@ def test_compose_3idx1cm_splunktcp_ssl(self):
os.path.join(self.DEFAULTS_DIR, "cert.pem"),
os.path.join(self.DEFAULTS_DIR, "{}.yml".format(self.project_name))
]
self.cleanup_files(files)
self.cleanup_files(files)
Loading