
(IN PROGRESS) Adding the capability to launch additional data-collection tools #117


Draft · wants to merge 14 commits into base: main
7 changes: 7 additions & 0 deletions redis_benchmarks_specification/__runner__/args.py
@@ -1,6 +1,7 @@
import argparse

from redis_benchmarks_specification.__common__.env import (
SPECS_PATH_SETUPS,
SPECS_PATH_TEST_SUITES,
DATASINK_RTS_HOST,
DATASINK_RTS_PORT,
@@ -40,6 +41,12 @@ def create_client_runner_args(project_name):
default=SPECS_PATH_TEST_SUITES,
help="Test suites folder, containing the different test variations",
)
parser.add_argument(
"--setups-folder",
type=str,
default=SPECS_PATH_SETUPS,
help="Setups folder, containing the build environment variations sub-folder that we use to trigger different build artifacts",
)
parser.add_argument(
"--test",
type=str,
24 changes: 23 additions & 1 deletion redis_benchmarks_specification/__runner__/runner.py
@@ -55,6 +55,11 @@
)
from redis_benchmarks_specification.__runner__.args import create_client_runner_args

from redis_benchmarks_specification.__setups__.data_collection_tools import (
get_tools,
start_tools_if_required,
)


def main():
_, _, project_version = populate_with_poetry_data()
@@ -91,6 +96,15 @@ def main():
)
)

tools_folder = os.path.abspath(os.path.join(args.setups_folder, "data-collection-tools"))
logging.info("Using data-collection tools folder {}".format(tools_folder))
tools_files = get_tools(tools_folder)
logging.info(
"There are a total of {} tools in folder {}".format(
len(tools_files), tools_folder
)
)

datasink_conn = None
if args.datasink_push_results_redistimeseries:
logging.info(
@@ -157,8 +171,9 @@ def main():
testsuite_spec_files,
{},
running_platform,
tools_files,
profilers_enabled,
profilers_list,
tls_enabled,
tls_skip_verify,
tls_cert,
@@ -221,6 +236,7 @@ def process_self_contained_coordinator_stream(
testsuite_spec_files,
topologies_map,
running_platform,
tools_files,
profilers_enabled=False,
profilers_list=[],
tls_enabled=False,
@@ -439,6 +455,12 @@ def process_self_contained_coordinator_stream(
profiler_frequency,
profiler_call_graph_mode,
)

# start data collection tools
start_tools_if_required(tools_files)

logging.info(
"Using docker image {} as benchmark client image (cpuset={}) with the following args: {}".format(
client_container_image,
@@ -66,6 +66,10 @@
restore_build_artifacts_from_test_details,
)
from redis_benchmarks_specification.__setups__.topologies import get_topologies
from redis_benchmarks_specification.__setups__.data_collection_tools import (
get_tools,
start_tools_if_required,
)


def main():
@@ -112,6 +116,15 @@ def main():
)
)

tools_folder = os.path.abspath(os.path.join(args.setups_folder, "data-collection-tools"))
logging.info("Using data-collection tools folder {}".format(tools_folder))
tools_files = get_tools(tools_folder)
logging.info(
"There are a total of {} tools in folder {}".format(
len(tools_files), tools_folder
)
)

logging.info(
"Reading event streams from: {}:{} with user {}".format(
args.event_stream_host, args.event_stream_port, args.event_stream_user
@@ -213,6 +226,7 @@ def main():
testsuite_spec_files,
topologies_map,
running_platform,
tools_files,
profilers_enabled,
profilers_list,
grafana_profile_dashboard,
@@ -260,6 +274,7 @@ def self_contained_coordinator_blocking_read(
testsuite_spec_files,
topologies_map,
platform_name,
tools_files,
profilers_enabled,
profilers_list,
grafana_profile_dashboard="",
@@ -302,6 +317,7 @@ def self_contained_coordinator_blocking_read(
testsuite_spec_files,
topologies_map,
platform_name,
tools_files,
profilers_enabled,
profilers_list,
grafana_profile_dashboard,
@@ -369,6 +385,7 @@ def process_self_contained_coordinator_stream(
testsuite_spec_files,
topologies_map,
running_platform,
tools_files,
profilers_enabled=False,
profilers_list=[],
grafana_profile_dashboard="",
@@ -614,6 +631,11 @@ def process_self_contained_coordinator_stream(
profiler_call_graph_mode,
)

# start data collection tools
start_tools_if_required(tools_files)

logging.info(
"Using docker image {} as benchmark client image (cpuset={}) with the following args: {}".format(
client_container_image,
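
Both the runner and the self-contained coordinator wire the new helpers in the same way: resolve the data-collection-tools folder under the setups folder at startup, list the tool definitions it contains, and start them just before the benchmark client container is launched. The sketch below condenses that flow into a hypothetical standalone helper (launch_configured_tools is not part of this PR; it only restates the calls shown in the diffs above):

import os

from redis_benchmarks_specification.__setups__.data_collection_tools import (
    get_tools,
    start_tools_if_required,
)


def launch_configured_tools(setups_folder):
    # Resolve the tools folder under --setups-folder (SPECS_PATH_SETUPS by default)
    tools_folder = os.path.abspath(os.path.join(setups_folder, "data-collection-tools"))
    tools_files = get_tools(tools_folder)
    # The runner/coordinator invoke this just before starting the benchmark client container
    return start_tools_if_required(tools_files)
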
34 changes: 34 additions & 0 deletions redis_benchmarks_specification/__setups__/data_collection_tools.py
@@ -0,0 +1,34 @@
import logging
import os
import pathlib
import shlex
import subprocess

import yaml


def get_tools(tools_folder):
    # Discover every tool definition (*.yml) available in the tools folder
    files = [str(x) for x in pathlib.Path(tools_folder).glob("*.yml")]
    logging.info("Detected tool definitions: {}".format(" ".join(files)))
    return files


def start_tools_if_required(tools_files):
    logging.info("Starting data-collection tools: {}".format(" ".join(tools_files)))
    processes = []
    for tool_file in tools_files:
        with open(tool_file) as stream:
            tool_config = yaml.safe_load(stream)
        command = tool_config["command"]
        # Merge any tool-specific environment variables on top of the current environment
        tool_env = dict(os.environ)
        tool_env.update({k: str(v) for k, v in (tool_config.get("env") or {}).items()})

        # Launch the tool in a background process
        process = subprocess.Popen(shlex.split(command), env=tool_env)
        processes.append(process)
        logging.info(
            "Started tool {} (pid {}) with command: {}".format(
                tool_config.get("name", tool_file), process.pid, command
            )
        )
    return processes
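
The module can be exercised on its own. The sketch below (not part of the PR) creates a temporary tools folder with a single definition that mimics the schema used in this PR, starts it, and tears it down afterwards; it assumes start_tools_if_required returns the launched Popen handles, as in the version above, and that a POSIX sleep binary is available:

import pathlib
import tempfile

from redis_benchmarks_specification.__setups__.data_collection_tools import (
    get_tools,
    start_tools_if_required,
)

# Throwaway tools folder with one tool definition using a harmless stand-in command
tools_dir = tempfile.mkdtemp()
pathlib.Path(tools_dir, "sleep-tool.yml").write_text(
    'version: 0.1\nname: sleep-tool\ndescription: "stand-in tool"\ncommand: "sleep 30"\n'
)

tools_files = get_tools(tools_dir)
processes = start_tools_if_required(tools_files)

# ... run the benchmark ...

# Stop the background tools once the run is over
for process in processes:
    process.terminate()
    process.wait()
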
@@ -0,0 +1,6 @@
version: 0.1
name: pcm-memory
description: "Memory latency and bandwidth monitoring"
command: "pcm-memory"
env:
  PCM_IGNORE_ARCH_PERFMON: "1"
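
For reference, yaml.safe_load turns the definition above into a plain dictionary, and start_tools_if_required consumes only the command and env keys (name is used for logging; version and description are informational). A minimal sketch of that mapping, assuming the file is named pcm-memory.yml (the PR page does not show its path) and that the pcm-memory binary is on PATH:

import os
import shlex
import subprocess

import yaml

with open("pcm-memory.yml") as stream:
    parsed = yaml.safe_load(stream)
# parsed == {
#     "version": 0.1,
#     "name": "pcm-memory",
#     "description": "Memory latency and bandwidth monitoring",
#     "command": "pcm-memory",
#     "env": {"PCM_IGNORE_ARCH_PERFMON": "1"},
# }

# Equivalent to what start_tools_if_required (as written above) does for this definition
env = dict(os.environ, **{k: str(v) for k, v in parsed["env"].items()})
process = subprocess.Popen(shlex.split(parsed["command"]), env=env)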