Fix buildkite pipeline for db-sync sync tests
saratomaz committed Nov 19, 2024
1 parent 8d5921f commit 54c077a
Showing 10 changed files with 2,571 additions and 72 deletions.
6 changes: 3 additions & 3 deletions .buildkite/db_sync_full_sync.yml
@@ -1,9 +1,9 @@
steps:
- label: ':drum_with_drumsticks: Full sync test :drum_with_drumsticks:'
commands:
- nix develop --accept-flake-config ..#python --command python ./db_sync_tests/tests/full_sync_from_clean_state.py -npr "${node_pr}" -nbr "${node_branch}" -nv "${node_version}" -dbr "${db_sync_branch}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}"
- nix develop --accept-flake-config ..#python --command python ./db_sync_tests/tests/snapshot_creation.py -dpr "${db_sync_pr}" -dbr "${db_sync_branch}" -dv "${db_sync_version}" -e "${environment}" -rosc "${run_only_sync_test}"
- nix develop --accept-flake-config ..#python --command python ./db_sync_tests/tests/local_snapshot_restoration.py -npr "${node_pr}" -nbr "${node_branch}" -nv "${node_version}" -dbr "${db_sync_branch}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}" -rosc "${run_only_sync_test}"
- nix develop --accept-flake-config .#python --command python ./db_sync_tests/tests/full_sync_from_clean_state.py -nv "${node_version}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}"
- nix develop --accept-flake-config .#python --command python ./db_sync_tests/tests/snapshot_creation.py -dv "${db_sync_version}" -e "${environment}" -rosc "${run_only_sync_test}"
- nix develop --accept-flake-config .#python --command python ./db_sync_tests/tests/local_snapshot_restoration.py -nv "${node_version}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}" -rosc "${run_only_sync_test}"
timeout_in_minutes: 43200
agents:
system: x86_64-linux
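The commands above now resolve the dev shell from the repository's own flake (`.#python` instead of `..#python`) and drop the PR/branch flags that the workflow no longer provides. As a rough illustration only, a sketch of the reduced argument interface these invocations assume (flag names taken from the commands above; descriptions are assumptions and the real scripts may define additional options):

import argparse

def build_parser():
    # Flags as they appear in the pipeline commands above.
    parser = argparse.ArgumentParser(description="db-sync full sync test (sketch)")
    parser.add_argument("-nv", "--node_version", help="cardano-node version")
    parser.add_argument("-dv", "--db_sync_version", help="cardano-db-sync version")
    parser.add_argument("-dsa", "--db_sync_start_arguments", default="none")
    parser.add_argument("-e", "--environment", help="mainnet, preprod, preview or shelley-qa")
    parser.add_argument("-rosc", "--run_only_sync_test", default="true")
    return parser

if __name__ == "__main__":
    print(vars(build_parser().parse_args()))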
43 changes: 14 additions & 29 deletions .github/workflows/db_sync_full_sync.yaml
@@ -3,38 +3,27 @@ name: db-sync - full cycle tests
on:
workflow_dispatch:
inputs:
node_pr:
description: node pull request number
required: true
default: "4269"
node_branch:
description: node branch or tag
required: true
default: "1.35.3"
node_version:
description: node version - 1.33.0-rc2 (tag number) or 1.33.0 (release number - for released versions) or 1.33.0_PR2124 (for not released and not tagged runs with a specific node PR/version)
required: true
default: "1.35.3"
db_sync_branch:
description: db-sync branch or tag
description: "cardano-node version"
required: true
default: "tags/13.0.5"
db_sync_pr:
description: db-sync pr
required: true
default: "1208"
default: "tags/10.1.2"
db_sync_version:
description: db-sync version - 12.0.0-rc2 (tag number) or 12.0.2 (release number - for released versions) or 12.0.2_PR2124 (for not released and not tagged runs with a specific db_sync PR/version)
description: "db-sync version"
required: true
default: "13.0.5"
default: "tags/13.6.0.1"
db_sync_start_arguments:
description: argument to be passed when starting the db-sync - none, disable-ledger, disable-epoch, disable-cache
description: "db-sync start arguments"
required: false
default: "none"
environment:
description: environment on which to run the tests - shelley-qa, preview, preprod or mainnet
required: true
default: "preprod"
description: "Environment on which Buildkite agent will run tests"
type: choice
options:
- mainnet
- preprod
- preview
- shelley-qa
default: preprod
run_only_sync_test:
type: boolean
default: true
@@ -48,19 +37,15 @@ jobs:
uses: 'buildkite/trigger-pipeline-action@v1.5.0'
env:
BUILDKITE_API_ACCESS_TOKEN: ${{ secrets.BUILDKITE_API_ACCESS_TOKEN }}
PIPELINE: 'input-output-hk/db-sync_sync_tests'
PIPELINE: 'input-output-hk/db-sync-sync-tests'
BRANCH: ${{ github.ref_name || 'main' }}
MESSAGE: ':github: Triggered by GitHub Action'
AWS_DB_USERNAME: ${{ secrets.AWS_DB_USERNAME }}
AWS_DB_PASS: ${{ secrets.AWS_DB_PASS }}
AWS_DB_NAME: ${{ secrets.AWS_DB_NAME }}
AWS_DB_HOSTNAME: ${{ secrets.AWS_DB_HOSTNAME }}
BUILD_ENV_VARS: '{
"node_pr":"${{ github.event.inputs.node_pr }}",
"node_branch":"${{ github.event.inputs.node_branch }}",
"node_version":"${{ github.event.inputs.node_version }}",
"db_sync_pr":"${{ github.event.inputs.db_sync_pr }}",
"db_sync_branch":"${{ github.event.inputs.db_sync_branch }}",
"db_sync_version":"${{ github.event.inputs.db_sync_version }}",
"db_sync_start_arguments":"${{ github.event.inputs.db_sync_start_arguments }}",
"environment":"${{ github.event.inputs.environment }}",
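With the node_pr, node_branch, db_sync_pr, and db_sync_branch inputs removed, the BUILD_ENV_VARS payload shrinks accordingly. A minimal sketch of the remaining JSON as an illustration only (placeholder values; the tail of the payload is truncated in the diff above):

import json

# Placeholder values - in the workflow these come from github.event.inputs.*
build_env_vars = {
    "node_version": "tags/10.1.2",
    "db_sync_version": "tags/13.6.0.1",
    "db_sync_start_arguments": "none",
    "environment": "preprod",
}
print(json.dumps(build_env_vars, indent=2))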
2 changes: 1 addition & 1 deletion db_sync_tests/scripts/db-sync-start.sh
@@ -7,7 +7,7 @@ export PGPASSFILE=config/pgpass-$ENVIRONMENT

if [[ $FIRST_START == "True" ]]; then
cd config
wget -O $ENVIRONMENT-db-config.json https://book.world.dev.cardano.org/environments/$ENVIRONMENT/db-sync-config.json
wget -O $ENVIRONMENT-db-config.json https://book.play.dev.cardano.org/environments/$ENVIRONMENT/db-sync-config.json
sed -i "s/NodeConfigFile.*/NodeConfigFile\": \"..\/..\/cardano-node\/$ENVIRONMENT-config.json\",/g" "$ENVIRONMENT-db-config.json"
cd ..
fi
Empty file added db_sync_tests/tests/__init__.py
Empty file.
12 changes: 7 additions & 5 deletions db_sync_tests/tests/full_sync_from_clean_state.py
@@ -1,14 +1,15 @@
import argparse
import json
import os
import subprocess
from collections import OrderedDict
from pathlib import Path
import sys
import matplotlib.pyplot as plt

sys.path.append(os.getcwd())

from utils.utils import seconds_to_time, get_no_of_cpu_cores, get_current_date_time, \
from db_sync_tests.utils.utils import seconds_to_time, get_no_of_cpu_cores, get_current_date_time, \
get_os_type, get_total_ram_in_GB, upload_artifact, clone_repo, zip_file, execute_command, \
print_file, stop_process, write_data_as_json_to_file, get_node_config_files, \
get_node_version, get_db_sync_version, start_node_in_cwd, wait_for_db_to_sync, \
@@ -24,7 +25,7 @@
NODE_ARCHIVE, DB_SYNC_ARCHIVE, SYNC_DATA_ARCHIVE, EXPECTED_DB_SCHEMA, EXPECTED_DB_INDEXES, \
ENVIRONMENT \

from utils.aws_db_utils import get_identifier_last_run_from_table, \
from db_sync_tests.utils.aws_db_utils import get_identifier_last_run_from_table, \
add_bulk_rows_into_db, add_single_row_into_db


@@ -168,10 +169,11 @@ def main():
print(f"DB sync version: {db_sync_version_from_gh_action}")

# cardano-node setup
NODE_DIR=clone_repo('cardano-node', node_branch)
NODE_DIR=clone_repo('cardano-node', node_version_from_gh_action)
os.chdir(NODE_DIR)
execute_command("nix build -v .#cardano-node -o cardano-node-bin")
execute_command("nix build -v .#cardano-cli -o cardano-cli-bin")
execute_command("nix build -v --debug .#cardano-cli -o cardano-cli-bin")

print("--- Node setup")
copy_node_executables(build_method="nix")
get_node_config_files(env)
@@ -184,7 +186,7 @@ def main():

# cardano-db sync setup
os.chdir(ROOT_TEST_PATH)
DB_SYNC_DIR = clone_repo('cardano-db-sync', db_branch)
DB_SYNC_DIR = clone_repo('cardano-db-sync', db_sync_version_from_gh_action.rstrip())
os.chdir(DB_SYNC_DIR)
print("--- Db sync setup")
setup_postgres() # To login use: psql -h /path/to/postgres -p 5432 -e postgres
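The new empty __init__.py files turn db_sync_tests/tests and db_sync_tests/utils into packages, so the package-qualified imports above resolve when the scripts are launched from the repository root, as the updated Buildkite commands now do. A small sketch of that pattern, written to tolerate running outside the repository:

import os
import sys

# When invoked from the repository root, os.getcwd() is the root, so adding it
# to sys.path lets 'db_sync_tests' be imported as a package.
sys.path.append(os.getcwd())

try:
    from db_sync_tests.utils.utils import get_current_date_time
except ImportError:
    get_current_date_time = None  # package not available outside the repository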
2 changes: 1 addition & 1 deletion db_sync_tests/tests/snapshot_creation.py
@@ -80,7 +80,7 @@ def main():
print(f"Stage 2 result: {stage_2_result}")
end_snapshot_creation = time.perf_counter()

snapshot_file = stage_2_result.split(" ")[1]
snapshot_file = stage_2_result
set_buildkite_meta_data("snapshot_file", snapshot_file)
print(f"Snapshot file name: {snapshot_file}")

Empty file added db_sync_tests/utils/__init__.py
Empty file.
96 changes: 63 additions & 33 deletions db_sync_tests/utils/utils.py
@@ -27,14 +27,14 @@

ONE_MINUTE = 60
ROOT_TEST_PATH = Path.cwd()
ENVIRONMENT = os.environ['environment']
ENVIRONMENT = os.environ.get('environment', None)

NODE_PR = os.environ['node_pr']
NODE_BRANCH = os.environ['node_branch']
NODE_VERSION = os.environ['node_version']
NODE_PR = os.environ.get('node_pr', None)
NODE_BRANCH = os.environ.get('node_branch', None)
NODE_VERSION = os.environ.get('node_version', None)

DB_SYNC_BRANCH = os.environ['db_sync_branch']
DB_SYNC_VERSION = os.environ['db_sync_version']
DB_SYNC_BRANCH = os.environ.get('db_sync_branch', None)
DB_SYNC_VERSION = os.environ.get('db_sync_version', None)

POSTGRES_DIR = ROOT_TEST_PATH.parents[0]
POSTGRES_USER = subprocess.run(['whoami'], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
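Using os.environ.get(..., None) instead of os.environ[...] keeps the module importable when the trimmed workflow no longer exports variables such as node_pr or node_branch. A minimal illustration of the difference:

import os

# os.environ["node_pr"] raises KeyError when the variable is absent;
# .get() returns None so the module can still be imported.
node_pr = os.environ.get("node_pr", None)
if node_pr is None:
    print("node_pr not set - continuing without it")
else:
    print(f"node_pr: {node_pr}")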
@@ -234,7 +234,9 @@ def print_file(file, number_of_lines = 0):
if index < number_of_lines + 1:
print(line, flush=True)
else: break
else: print(contents, flush=True)
else:
print(contents, flush=True)
return contents


def get_process_info(proc_name):
@@ -480,20 +482,36 @@ def get_node_archive_url(node_pr):


def get_node_config_files(env):
base_url = "https://book.world.dev.cardano.org/environments/"
urllib.request.urlretrieve(base_url + env + "/config.json", env + "-config.json",)
urllib.request.urlretrieve(base_url + env + "/byron-genesis.json", "byron-genesis.json",)
urllib.request.urlretrieve(base_url + env + "/shelley-genesis.json", "shelley-genesis.json",)
urllib.request.urlretrieve(base_url + env + "/alonzo-genesis.json", "alonzo-genesis.json",)
urllib.request.urlretrieve(base_url + env + "/conway-genesis.json", "conway-genesis.json",)
urllib.request.urlretrieve(base_url + env + "/topology.json", env + "-topology.json",)
base_url = "https://book.play.dev.cardano.org/environments/"
filenames = [
(base_url + env + "/config.json", f"{env}-config.json"),
(base_url + env + "/byron-genesis.json", "byron-genesis.json"),
(base_url + env + "/shelley-genesis.json", "shelley-genesis.json"),
(base_url + env + "/alonzo-genesis.json", "alonzo-genesis.json"),
(base_url + env + "/conway-genesis.json", "conway-genesis.json"),
(base_url + env + "/topology.json", f"{env}-topology.json")
]
for url, filename in filenames:
try:
urllib.request.urlretrieve(url, filename)
# Check if the file exists after download
if not os.path.isfile(filename):
raise FileNotFoundError(f"Downloaded file '{filename}' does not exist.")
except Exception as e:
print(f"Error downloading {url}: {e}")
exit(1)


def copy_node_executables(build_method="nix"):
current_directory = os.getcwd()
os.chdir(ROOT_TEST_PATH)
node_dir = Path.cwd() / 'cardano-node'
node_bin_dir = node_dir / "cardano-node-bin/"
os.chdir(node_dir)
print(f"current_directory: {os.getcwd()}")

result = subprocess.run(['nix', '--version'], stdout=subprocess.PIPE, text=True, check=True)
print(f"Nix version: {result.stdout.strip()}")

if build_method == "nix":
node_binary_location = "cardano-node-bin/bin/cardano-node"
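Each config file is now fetched from book.play.dev.cardano.org and verified to exist on disk, failing fast instead of continuing with missing genesis files. A standalone sketch of the same download-and-verify step for a single file (the environment name is an assumption for illustration):

import os
import urllib.request

base_url = "https://book.play.dev.cardano.org/environments/"
env = "preprod"  # assumed environment, for illustration only
url = base_url + env + "/config.json"
filename = f"{env}-config.json"
try:
    urllib.request.urlretrieve(url, filename)
    if not os.path.isfile(filename):
        raise FileNotFoundError(f"Downloaded file '{filename}' does not exist.")
except Exception as e:
    print(f"Error downloading {url}: {e}")
    raise SystemExit(1)
print(f"Fetched {filename}")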
@@ -595,7 +613,7 @@ def set_node_socket_path_env_var_in_cwd():
def get_node_tip(env, timeout_minutes=20):
current_directory = os.getcwd()
os.chdir(ROOT_TEST_PATH / "cardano-node")
cmd = "./_cardano-cli query tip " + get_testnet_value(env)
cmd = "./_cardano-cli latest query tip " + get_testnet_value(env)

for i in range(timeout_minutes):
try:
@@ -690,14 +708,16 @@ def start_node_in_cwd(env):
p = subprocess.Popen(cmd.split(" "), stdout=logfile, stderr=logfile)
print("waiting for db folder to be created")
counter = 0
timeout_counter = 25 * ONE_MINUTE
timeout_counter = 1 * ONE_MINUTE
node_db_dir = current_directory + "/db"
while not os.path.isdir(node_db_dir):
time.sleep(1)
counter += 1
if counter > timeout_counter:
print(
f"ERROR: waited {timeout_counter} seconds and the DB folder was not created yet")
node_startup_error = print_file(NODE_LOG)
print_color_log(sh_colors.FAIL, f"Error: {node_startup_error}")
exit(1)

print(f"DB folder was created after {counter} seconds")
@@ -952,25 +972,35 @@ def create_db_sync_snapshot_stage_1(env):
)

def create_db_sync_snapshot_stage_2(stage_2_cmd, env):
os.chdir(ROOT_TEST_PATH)
os.chdir(Path.cwd() / 'cardano-db-sync')
os.chdir(ROOT_TEST_PATH / 'cardano-db-sync')
export_env_var("PGPASSFILE", f"config/pgpass-{env}")

cmd = f"{stage_2_cmd}"
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')

try:
outs, errs = p.communicate(timeout=43200) # 12 hours
print(f"Snapshot Creation - Stage 2 result: {outs}")
if errs:
print(f"Warnings or Errors: {errs}")
return outs.split("\n")[3].lstrip()
except subprocess.CalledProcessError as e:
raise RuntimeError(
"command '{}' return with error (code {}): {}".format(
e.cmd, e.returncode, " ".join(str(e.output).split())
)
# Running the command and capturing output and error
result = subprocess.run(
stage_2_cmd,
shell=True,
capture_output=True,
text=True,
timeout=43200 # 12 hours
)

print(f"Snapshot Creation - Stage 2 Output:\n{result.stdout}")
if result.stderr:
print(f"Warnings or Errors:\n{result.stderr}")
# Extracting the snapshot path from the last line mentioning 'Created'
snapshot_line = next(
(line for line in result.stdout.splitlines() if line.startswith("Created")),
"Snapshot creation output not found."
)
snapshot_path = snapshot_line.split()[
1] if "Created" in snapshot_line else "Snapshot path unknown"

return snapshot_path
except subprocess.TimeoutExpired:
raise RuntimeError("Snapshot creation timed out.")
except subprocess.CalledProcessError as e:
raise RuntimeError(f"Command '{e.cmd}' failed with error: {e.stderr}")


def get_db_sync_tip(env):
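Stage 2 now runs via subprocess.run with a 12-hour timeout and extracts the snapshot path from the first stdout line starting with "Created", instead of hard-coding a line number of the output. A sketch of that extraction on an assumed sample of the snapshot tool's output (the real format may differ):

sample_stdout = (
    "Creating the snapshot...\n"
    "Created db-sync-snapshot-schema-13-block-123456-x86_64.tgz in 42s\n"
)
snapshot_line = next(
    (line for line in sample_stdout.splitlines() if line.startswith("Created")),
    "Snapshot creation output not found.",
)
snapshot_path = snapshot_line.split()[1] if "Created" in snapshot_line else "Snapshot path unknown"
print(snapshot_path)  # -> db-sync-snapshot-schema-13-block-123456-x86_64.tgz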
@@ -1114,7 +1144,7 @@ def start_db_sync(env, start_args="", first_start="True"):
export_env_var("LOG_FILEPATH", DB_SYNC_LOG)

try:
cmd = "./scripts/db-sync-start.sh"
cmd = "./db_sync_tests/scripts/db-sync-start.sh"
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
os.chdir(current_directory)
except subprocess.CalledProcessError as e:
@@ -1185,7 +1215,7 @@ def setup_postgres(pg_dir=POSTGRES_DIR, pg_user=POSTGRES_USER, pg_port='5432'):
export_env_var("PGPORT", pg_port)

try:
cmd = ["./scripts/postgres-start.sh", f"{pg_dir}", "-k"]
cmd = ["./db_sync_tests/scripts/postgres-start.sh", f"{pg_dir}", "-k"]
output = (
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
.decode("utf-8")