Create dedicate file to node utils
saratomaz committed Feb 6, 2025
1 parent fb9aca7 commit e5543ff
Showing 7 changed files with 1,044 additions and 1,238 deletions.
217 changes: 113 additions & 104 deletions sync_tests/tests/full_sync_from_clean_state.py

Large diffs are not rendered by default.

182 changes: 96 additions & 86 deletions sync_tests/tests/iohk_snapshot_restoration.py
@@ -2,23 +2,26 @@
 import datetime
 import json
 import logging
+import pathlib as pl
 import os
 import sys
 import typing as tp
 from collections import OrderedDict

 sys.path.append(os.getcwd())

-import sync_tests.utils.aws_db as aws_db_utils
-import sync_tests.utils.db_sync as utils_db_sync
-import sync_tests.utils.gitpython as git_utils
-import sync_tests.utils.helpers as utils
+from sync_tests.utils import aws_db
+from sync_tests.utils import db_sync
+from sync_tests.utils import gitpython
+from sync_tests.utils import helpers
+from sync_tests.utils import node

 # Configure logging
 logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

 TEST_RESULTS = "db_sync_iohk_snapshot_restoration_test_results.json"
+NODE = pl.Path.cwd() / "cardano-node"


 def upload_snapshot_restoration_results_to_aws(env: str) -> None:
@@ -27,7 +30,7 @@ def upload_snapshot_restoration_results_to_aws(env: str) -> None:
         sync_test_results_dict = json.load(json_file)

     test_summary_table = env + "_db_sync_snapshot_restoration"
-    last_identifier = aws_db_utils.get_last_identifier(test_summary_table)
+    last_identifier = aws_db.get_last_identifier(test_summary_table)
     assert last_identifier is not None  # TODO: refactor
     test_id = str(int(last_identifier.split("_")[-1]) + 1)
     identifier = env + "_restoration_" + test_id
@@ -37,86 +40,93 @@ def upload_snapshot_restoration_results_to_aws(env: str) -> None:
     col_to_insert = list(sync_test_results_dict.keys())
     val_to_insert = list(sync_test_results_dict.values())

-    if not aws_db_utils.insert_values_into_db(test_summary_table, col_to_insert, val_to_insert):
+    if not aws_db.insert_values_into_db(test_summary_table, col_to_insert, val_to_insert):
         print(f"col_to_insert: {col_to_insert}")
         print(f"val_to_insert: {val_to_insert}")
         sys.exit(1)


 def main() -> None:
     print("--- Db-sync restoration from IOHK official snapshot")
-    platform_system, platform_release, platform_version = utils.get_os_type()
+    platform_system, platform_release, platform_version = helpers.get_os_type()
     print(f"Platform: {platform_system, platform_release, platform_version}")

     start_test_time = datetime.datetime.now(tz=datetime.timezone.utc).strftime("%d/%m/%Y %H:%M:%S")
     print(f"Test start time: {start_test_time}")

-    env = utils.get_arg_value(args=args, key="environment")
+    env = helpers.get_arg_value(args=args, key="environment")
     print(f"Environment: {env}")

-    node_pr = utils.get_arg_value(args=args, key="node_pr", default="")
+    node_pr = helpers.get_arg_value(args=args, key="node_pr", default="")
     print(f"Node PR number: {node_pr}")

-    node_branch = utils.get_arg_value(args=args, key="node_branch", default="")
+    node_branch = helpers.get_arg_value(args=args, key="node_branch", default="")
     print(f"Node branch: {node_branch}")

-    node_version_from_gh_action = utils.get_arg_value(
+    node_version_from_gh_action = helpers.get_arg_value(
         args=args, key="node_version_gh_action", default=""
     )
     print(f"Node version: {node_version_from_gh_action}")

-    db_branch = utils.get_arg_value(args=args, key="db_sync_branch", default="")
+    db_branch = helpers.get_arg_value(args=args, key="db_sync_branch", default="")
     print(f"DB sync branch: {db_branch}")

-    db_sync_version_from_gh_action = utils.get_arg_value(
+    db_sync_version_from_gh_action = helpers.get_arg_value(
         args=args, key="db_sync_version_gh_action", default=""
     )
     print(f"DB sync version: {db_sync_version_from_gh_action}")

-    snapshot_url = utils_db_sync.get_latest_snapshot_url(env, args)
+    snapshot_url = db_sync.get_latest_snapshot_url(env, args)
     print(f"Snapshot url: {snapshot_url}")

-    # cardano-node setup
-    node_dir = git_utils.clone_repo("cardano-node", node_branch)
-    os.chdir(node_dir)
-    utils.execute_command("nix build -v .#cardano-node -o cardano-node-bin")
-    utils.execute_command("nix build -v .#cardano-cli -o cardano-cli-bin")
     print("--- Node setup")
-    utils_db_sync.copy_node_executables(build_method="nix")
-    utils_db_sync.get_node_config_files(env)
-    utils_db_sync.set_node_socket_path_env_var_in_cwd()
-    cli_version, cli_git_rev = utils_db_sync.get_node_version()
-    utils_db_sync.download_and_extract_node_snapshot(env)
-    utils_db_sync.start_node_in_cwd(env)
+    # cardano-node setup
+    conf_dir = pl.Path.cwd()
+    base_dir = pl.Path.cwd()
+
+    node.set_node_socket_path_env_var(base_dir=base_dir)
+    node.get_node_files(node_rev=node_version_from_gh_action)
+    cli_version, cli_git_rev = node.get_node_version()
+    node.rm_node_config_files(conf_dir=conf_dir)
+    # TO DO: change the default to P2P when full P2P will be supported on Mainnet
+    node.get_node_config_files(
+        env=env,
+        conf_dir=conf_dir,
+    )
+    node.configure_node(config_file=conf_dir / "config.json")
+    node.start_node(
+        cardano_node=NODE, base_dir=base_dir
+    )
+    node.wait_node_start(env=env, timeout_minutes=10)
     print("--- Node startup", flush=True)
-    utils_db_sync.print_file(utils_db_sync.NODE_LOG_FILE, 80)
-    node_sync_time_in_secs = utils_db_sync.wait_for_node_to_sync(env)
+    db_sync.print_file(db_sync.NODE_LOG_FILE, 80)
+    node_sync_time_in_secs = node.wait_for_node_to_sync(env, base_dir)

     # cardano-db sync setup
     print("--- Db sync setup")
-    os.chdir(utils_db_sync.ROOT_TEST_PATH)
-    db_sync_dir = git_utils.clone_repo("cardano-db-sync", db_branch)
+    os.chdir(db_sync.ROOT_TEST_PATH)
+    db_sync_dir = gitpython.clone_repo("cardano-db-sync", db_branch)
     os.chdir(db_sync_dir)
-    utils_db_sync.setup_postgres()
-    utils_db_sync.create_pgpass_file(env)
-    utils_db_sync.create_database()
-    utils_db_sync.list_databases()
-    utils.execute_command("nix build .#cardano-db-sync -o db-sync-node")
-    utils.execute_command("nix build .#cardano-db-tool -o db-sync-tool")
+    db_sync.setup_postgres()
+    db_sync.create_pgpass_file(env)
+    db_sync.create_database()
+    db_sync.list_databases()
+    helpers.execute_command("nix build .#cardano-db-sync -o db-sync-node")
+    helpers.execute_command("nix build .#cardano-db-tool -o db-sync-tool")
     print("--- Download and check db-sync snapshot", flush=True)
-    utils_db_sync.copy_db_sync_executables(build_method="nix")
-    snapshot_name = utils_db_sync.download_db_sync_snapshot(snapshot_url)
-    expected_snapshot_sha_256_sum = utils_db_sync.get_snapshot_sha_256_sum(snapshot_url)
-    actual_snapshot_sha_256_sum = utils_db_sync.get_file_sha_256_sum(snapshot_name)
+    db_sync.copy_db_sync_executables(build_method="nix")
+    snapshot_name = db_sync.download_db_sync_snapshot(snapshot_url)
+    expected_snapshot_sha_256_sum = db_sync.get_snapshot_sha_256_sum(snapshot_url)
+    actual_snapshot_sha_256_sum = db_sync.get_file_sha_256_sum(snapshot_name)
     assert expected_snapshot_sha_256_sum == actual_snapshot_sha_256_sum, "Incorrect sha 256 sum"

     # restore snapshot
     print("--- Snapshot restoration")
-    restoration_time = utils_db_sync.restore_db_sync_from_snapshot(
+    restoration_time = db_sync.restore_db_sync_from_snapshot(
         env, snapshot_name, remove_ledger_dir="no"
     )
     print(f"Restoration time [sec]: {restoration_time}")
-    db_sync_tip = utils_db_sync.get_db_sync_tip(env)
+    db_sync_tip = db_sync.get_db_sync_tip(env)
     assert db_sync_tip is not None  # TODO: refactor
     snapshot_epoch_no, snapshot_block_no, snapshot_slot_no = db_sync_tip
     print(
Expand All @@ -126,23 +136,23 @@ def main() -> None:

     # start db-sync
     print("--- Db sync start")
-    utils_db_sync.start_db_sync(env, start_args="", first_start="True")
-    utils_db_sync.print_file(utils_db_sync.DB_SYNC_LOG_FILE, 30)
-    db_sync_version, db_sync_git_rev = utils_db_sync.get_db_sync_version()
-    db_full_sync_time_in_secs = utils_db_sync.wait_for_db_to_sync(env)
+    db_sync.start_db_sync(env, start_args="", first_start="True")
+    db_sync.print_file(db_sync.DB_SYNC_LOG_FILE, 30)
+    db_sync_version, db_sync_git_rev = db_sync.get_db_sync_version()
+    db_full_sync_time_in_secs = db_sync.wait_for_db_to_sync(env)
     end_test_time = datetime.datetime.now(tz=datetime.timezone.utc).strftime("%d/%m/%Y %H:%M:%S")
     wait_time = 30
     print(f"Waiting for additional {wait_time} minutes to continue syncying...")
-    utils_db_sync.wait(wait_time * utils_db_sync.ONE_MINUTE)
-    utils_db_sync.print_file(utils_db_sync.DB_SYNC_LOG_FILE, 60)
-    db_sync_tip = utils_db_sync.get_db_sync_tip(env)
+    db_sync.wait(wait_time * db_sync.ONE_MINUTE)
+    db_sync.print_file(db_sync.DB_SYNC_LOG_FILE, 60)
+    db_sync_tip = db_sync.get_db_sync_tip(env)
     assert db_sync_tip is not None  # TODO: refactor
     epoch_no, block_no, slot_no = db_sync_tip

     # shut down services
     print("--- Stop cardano services")
-    utils_db_sync.manage_process(proc_name="cardano-db-sync", action="terminate")
-    utils_db_sync.manage_process(proc_name="cardano-node", action="terminate")
+    helpers.manage_process(proc_name="cardano-db-sync", action="terminate")
+    helpers.manage_process(proc_name="cardano-node", action="terminate")

     # export test data as a json file
     print("--- Gathering end results")
@@ -151,7 +161,7 @@ def main() -> None:
test_data["platform_release"] = platform_release
test_data["platform_version"] = platform_version
test_data["no_of_cpu_cores"] = os.cpu_count()
test_data["total_ram_in_GB"] = utils.get_total_ram_in_gb()
test_data["total_ram_in_GB"] = helpers.get_total_ram_in_gb()
test_data["env"] = env
test_data["node_pr"] = node_pr
test_data["node_branch"] = node_branch
@@ -180,69 +190,69 @@ def main() -> None:
test_data["last_synced_epoch_no"] = epoch_no
test_data["last_synced_block_no"] = block_no
test_data["last_synced_slot_no"] = slot_no
last_perf_stats_data_point = utils_db_sync.get_last_perf_stats_point()
last_perf_stats_data_point = db_sync.get_last_perf_stats_point()
test_data["cpu_percent_usage"] = last_perf_stats_data_point["cpu_percent_usage"]
test_data["total_rss_memory_usage_in_B"] = last_perf_stats_data_point["rss_mem_usage"]
test_data["total_database_size"] = utils_db_sync.get_total_db_size(env)
test_data["rollbacks"] = utils_db_sync.are_rollbacks_present_in_db_sync_logs(
utils_db_sync.DB_SYNC_LOG_FILE
test_data["total_database_size"] = db_sync.get_total_db_size(env)
test_data["rollbacks"] = db_sync.are_rollbacks_present_in_db_sync_logs(
db_sync.DB_SYNC_LOG_FILE
)
test_data["errors"] = utils_db_sync.are_errors_present_in_db_sync_logs(
utils_db_sync.DB_SYNC_LOG_FILE
test_data["errors"] = db_sync.are_errors_present_in_db_sync_logs(
db_sync.DB_SYNC_LOG_FILE
)

utils_db_sync.write_data_as_json_to_file(TEST_RESULTS, test_data)
utils_db_sync.write_data_as_json_to_file(
utils_db_sync.DB_SYNC_PERF_STATS_FILE, utils_db_sync.db_sync_perf_stats
db_sync.write_data_as_json_to_file(TEST_RESULTS, test_data)
db_sync.write_data_as_json_to_file(
db_sync.DB_SYNC_PERF_STATS_FILE, db_sync.db_sync_perf_stats
)
utils_db_sync.export_epoch_sync_times_from_db(
env, utils_db_sync.EPOCH_SYNC_TIMES_FILE, snapshot_epoch_no
db_sync.export_epoch_sync_times_from_db(
env, db_sync.EPOCH_SYNC_TIMES_FILE, snapshot_epoch_no
)

utils_db_sync.print_file(TEST_RESULTS)
db_sync.print_file(TEST_RESULTS)

# compress artifacts
utils.zip_file(utils_db_sync.NODE_ARCHIVE_NAME, utils_db_sync.NODE_LOG_FILE)
utils.zip_file(utils_db_sync.DB_SYNC_ARCHIVE_NAME, utils_db_sync.DB_SYNC_LOG_FILE)
utils.zip_file(utils_db_sync.SYNC_DATA_ARCHIVE_NAME, utils_db_sync.EPOCH_SYNC_TIMES_FILE)
utils.zip_file(utils_db_sync.PERF_STATS_ARCHIVE_NAME, utils_db_sync.DB_SYNC_PERF_STATS_FILE)
helpers.zip_file(db_sync.NODE_ARCHIVE_NAME, db_sync.NODE_LOG_FILE)
helpers.zip_file(db_sync.DB_SYNC_ARCHIVE_NAME, db_sync.DB_SYNC_LOG_FILE)
helpers.zip_file(db_sync.SYNC_DATA_ARCHIVE_NAME, db_sync.EPOCH_SYNC_TIMES_FILE)
helpers.zip_file(db_sync.PERF_STATS_ARCHIVE_NAME, db_sync.DB_SYNC_PERF_STATS_FILE)

# upload artifacts
utils_db_sync.upload_artifact(utils_db_sync.NODE_ARCHIVE_NAME)
utils_db_sync.upload_artifact(utils_db_sync.DB_SYNC_ARCHIVE_NAME)
utils_db_sync.upload_artifact(utils_db_sync.SYNC_DATA_ARCHIVE_NAME)
utils_db_sync.upload_artifact(utils_db_sync.PERF_STATS_ARCHIVE_NAME)
utils_db_sync.upload_artifact(TEST_RESULTS)
db_sync.upload_artifact(db_sync.NODE_ARCHIVE_NAME)
db_sync.upload_artifact(db_sync.DB_SYNC_ARCHIVE_NAME)
db_sync.upload_artifact(db_sync.SYNC_DATA_ARCHIVE_NAME)
db_sync.upload_artifact(db_sync.PERF_STATS_ARCHIVE_NAME)
db_sync.upload_artifact(TEST_RESULTS)

# send data to aws database
upload_snapshot_restoration_results_to_aws(env)

# search db-sync log for issues
print("--- Summary: Rollbacks, errors and other isssues")

log_errors = utils_db_sync.are_errors_present_in_db_sync_logs(utils_db_sync.DB_SYNC_LOG_FILE)
utils_db_sync.print_color_log(
utils_db_sync.sh_colors.WARNING, f"Are errors present: {log_errors}"
log_errors = db_sync.are_errors_present_in_db_sync_logs(db_sync.DB_SYNC_LOG_FILE)
db_sync.print_color_log(
db_sync.sh_colors.WARNING, f"Are errors present: {log_errors}"
)

rollbacks = utils_db_sync.are_rollbacks_present_in_db_sync_logs(utils_db_sync.DB_SYNC_LOG_FILE)
utils_db_sync.print_color_log(
utils_db_sync.sh_colors.WARNING, f"Are rollbacks present: {rollbacks}"
rollbacks = db_sync.are_rollbacks_present_in_db_sync_logs(db_sync.DB_SYNC_LOG_FILE)
db_sync.print_color_log(
db_sync.sh_colors.WARNING, f"Are rollbacks present: {rollbacks}"
)

failed_rollbacks = utils_db_sync.is_string_present_in_file(
utils_db_sync.DB_SYNC_LOG_FILE, "Rollback failed"
failed_rollbacks = db_sync.is_string_present_in_file(
db_sync.DB_SYNC_LOG_FILE, "Rollback failed"
)
utils_db_sync.print_color_log(
utils_db_sync.sh_colors.WARNING,
db_sync.print_color_log(
db_sync.sh_colors.WARNING,
f"Are failed rollbacks present: {failed_rollbacks}",
)

corrupted_ledger_files = utils_db_sync.is_string_present_in_file(
utils_db_sync.DB_SYNC_LOG_FILE, "Failed to parse ledger state"
corrupted_ledger_files = db_sync.is_string_present_in_file(
db_sync.DB_SYNC_LOG_FILE, "Failed to parse ledger state"
)
utils_db_sync.print_color_log(
utils_db_sync.sh_colors.WARNING,
db_sync.print_color_log(
db_sync.sh_colors.WARNING,
f"Are corrupted ledger files present: {corrupted_ledger_files}",
)
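
Judging purely from the call sites in this diff, the new sync_tests/utils/node.py module groups the node-lifecycle helpers that this test previously pulled from utils_db_sync: set_node_socket_path_env_var, get_node_files, get_node_version, rm_node_config_files, get_node_config_files, configure_node, start_node, wait_node_start, and wait_for_node_to_sync. The visible effect is that the test no longer clones and nix-builds cardano-node itself. A minimal sketch of a few of these helpers follows; the signatures are inferred from the calls above, while the bodies, the binary layout under NODE, and the version-string parsing are illustrative assumptions, not the repository's actual implementation.

# Hypothetical sketch of part of sync_tests/utils/node.py, inferred from the
# call sites in the diff above. Bodies are simplified placeholder guesses.
import json
import os
import pathlib as pl
import subprocess
import time
import typing as tp


def set_node_socket_path_env_var(base_dir: pl.Path) -> None:
    # The tests address the node through CARDANO_NODE_SOCKET_PATH.
    os.environ["CARDANO_NODE_SOCKET_PATH"] = str(base_dir / "db" / "node.socket")


def get_node_version() -> tp.Tuple[str, str]:
    # Assumes the usual `cardano-cli --version` output:
    #   cardano-cli <version> - <platform> - <ghc>
    #   git rev <sha>
    out = subprocess.check_output(["cardano-cli", "--version"], text=True)
    return out.split()[1], out.splitlines()[-1].split()[-1]


def configure_node(config_file: pl.Path) -> None:
    # Edit the downloaded config.json in place; the tweak below is only an
    # example of the kind of change such a helper might apply.
    config = json.loads(config_file.read_text())
    config["TraceBlockFetchDecisions"] = True
    config_file.write_text(json.dumps(config, indent=2))


def start_node(cardano_node: pl.Path, base_dir: pl.Path) -> subprocess.Popen:
    # Launch `cardano-node run` in the background, logging to node.log.
    # The binary location inside `cardano_node` is a guess.
    logfile = (base_dir / "node.log").open("w")
    cmd = [
        str(cardano_node / "bin" / "cardano-node"),
        "run",
        "--topology", str(base_dir / "topology.json"),
        "--database-path", str(base_dir / "db"),
        "--socket-path", os.environ["CARDANO_NODE_SOCKET_PATH"],
        "--config", str(base_dir / "config.json"),
    ]
    return subprocess.Popen(cmd, stdout=logfile, stderr=subprocess.STDOUT)


def wait_node_start(env: str, timeout_minutes: int = 10) -> None:
    # Block until the node socket appears or the timeout expires.
    socket_path = pl.Path(os.environ["CARDANO_NODE_SOCKET_PATH"])
    for _ in range(timeout_minutes * 60):
        if socket_path.exists():
            return
        time.sleep(1)
    raise TimeoutError(f"node did not start within {timeout_minutes} min on {env}")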

Diffs for the remaining changed files are not shown on this page.
