
Commit

Initial commit
maestroi committed Jan 13, 2024
1 parent 6ec76b7 commit 13dd985
Showing 6 changed files with 344 additions and 1 deletion.
96 changes: 96 additions & 0 deletions .github/workflows/docker-image.yml
@@ -0,0 +1,96 @@
name: Docker

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

on:
  push:
    branches: [ "main" ]
    # Publish semver tags as releases.
    tags: [ 'v*.*.*' ]
  pull_request:
    branches: [ "main" ]

env:
  # Use docker.io for Docker Hub if empty
  REGISTRY: ghcr.io
  # github.repository as <account>/<repo>
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build:

    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      # This is used to complete the identity challenge
      # with sigstore/fulcio when running outside of PRs.
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Install the cosign tool except on PR
      # https://github.com/sigstore/cosign-installer
      - name: Install cosign
        if: github.event_name != 'pull_request'
        uses: sigstore/cosign-installer@6e04d228eb30da1757ee4e1dd75a0ec73a653e06 # v3.1.1
        with:
          cosign-release: 'v2.1.1'

      # Set up BuildKit Docker container builder to be able to build
      # multi-platform images and export cache
      # https://github.com/docker/setup-buildx-action
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0

      # Login against a Docker registry except on PR
      # https://github.com/docker/login-action
      - name: Log into registry ${{ env.REGISTRY }}
        if: github.event_name != 'pull_request'
        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Extract metadata (tags, labels) for Docker
      # https://github.com/docker/metadata-action
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      # Build and push Docker image with Buildx (don't push on PR)
      # https://github.com/docker/build-push-action
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      # Sign the resulting Docker image digest except on PRs.
      # This will only write to the public Rekor transparency log when the Docker
      # repository is public to avoid leaking data. If you would like to publish
      # transparency data even for private images, pass --force to cosign below.
      # https://github.com/sigstore/cosign
      - name: Sign the published Docker image
        if: ${{ github.event_name != 'pull_request' }}
        env:
          # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
          TAGS: ${{ steps.meta.outputs.tags }}
          DIGEST: ${{ steps.build-and-push.outputs.digest }}
        # This step uses the identity token to provision an ephemeral certificate
        # against the sigstore community Fulcio instance.
        run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
17 changes: 17 additions & 0 deletions Dockerfile
@@ -0,0 +1,17 @@
# Use an Alpine image with Python 3
FROM python:3-alpine

# Set the working directory in the container
WORKDIR /usr/src/app

# Copy the requirements file into the container
COPY requirements.txt ./

# Install dependencies from the requirements file
RUN pip install --no-cache-dir -r requirements.txt

# Copy the Python script into the container
COPY main.py .

# Command to run when the container starts
CMD ["python", "./main.py"]
18 changes: 17 additions & 1 deletion README.md
@@ -1 +1,17 @@
# nimiq-validator-activator

A validator activator for Nimiq, intended to be used with Nimiq Albatross validators.

# Requirements

- Docker
- Docker Compose
- Python 3

# Expected

The following files are expected in the `./keys` folder, containing the account keys and the BLS key used to activate the validator (see the illustrative check below):

- A validator key file
- A validator address file
- A validator wallet file
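
As an illustration, the sketch below checks that the `./keys` folder contains what the activation script will look for. It mirrors the parsing in `main.py` (an `address.txt` with a `Private Key:` line, and a `vote_key.txt` whose `Secret Key:` header is followed by the key two lines down); whether your key-generation tooling produces exactly this layout is an assumption, so treat it as a sanity check rather than a definitive validation of the files.

```python
#!/usr/bin/env python3
"""Illustrative pre-flight check for the ./keys folder.

File names and field labels ("Private Key:", "Secret Key:") are taken from
main.py; everything else here is an assumption for demonstration purposes.
"""
from pathlib import Path

KEYS_DIR = Path("./keys")  # mounted to /keys by docker-compose.yml


def has_private_key(path: Path) -> bool:
    # main.py's get_private_key() scans for a line containing "Private Key:"
    return path.is_file() and any(
        "Private Key:" in line for line in path.read_text().splitlines()
    )


def has_vote_key(path: Path) -> bool:
    # main.py's get_vote_key() looks for "Secret Key:" and takes the value two lines below it
    if not path.is_file():
        return False
    lines = path.read_text().splitlines()
    return any(
        "Secret Key:" in line and i + 2 < len(lines) and lines[i + 2].strip()
        for i, line in enumerate(lines)
    )


if __name__ == "__main__":
    print("address.txt ok: ", has_private_key(KEYS_DIR / "address.txt"))
    print("vote_key.txt ok:", has_vote_key(KEYS_DIR / "vote_key.txt"))
```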
19 changes: 19 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,19 @@
version: '3'

services:
  activate_validator:
    container_name: activate-validator
    build:
      context: .
      dockerfile: Dockerfile
    environment:
      - NIMIQ_NODE_URL=http://node:8648
      - NIMIQ_NETWORK=testnet
      - FACUET_URL=https://faucet.pos.nimiq-testnet.com/tapit
      - PROMETHEUS_PORT=8000
    volumes:
      - "/opt/nimiq/validator/secrets:/keys" # mount your validator keys here
      - "epoch-data:/usr/src/app" # named volume where epoch data will be stored
    ports:
      - "8000:8000"
    restart: unless-stopped

# Top-level declaration for the named volume used above (required by Compose)
volumes:
  epoch-data:
193 changes: 193 additions & 0 deletions main.py
@@ -0,0 +1,193 @@
#!/usr/bin/env python3
import os
import requests
import json
import time
import logging
from prometheus_client import start_http_server, Gauge

NIMIQ_NODE_URL = os.getenv('NIMIQ_NODE_URL', 'http://node:8648')
NIMIQ_NETWORK = os.getenv('NIMIQ_NETWORK', 'testnet')
FACUET_URL = os.getenv('FACUET_URL','https://faucet.pos.nimiq-testnet.com/tapit')
PROMETHEUS_PORT = os.getenv('PROMETHEUS_PORT', 8000)

# Prometheus Metrics
ACTIVATED_AMOUNT = Gauge('nimiq_activated_amount', 'Amount activated', ['address'])
ACTIVATE_EPOCH = Gauge('nimiq_activate_epoch', 'Epoch tried to activate validator')
EPOCH_NUMBER = Gauge('nimiq_epoch_number', 'Epoch number')

logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s — %(message)s',
datefmt='%Y-%m-%d_%H:%M:%S',
handlers=[logging.StreamHandler()])

# Store activation epoch in a file to not activate the validator multiple times in the same epoch.
def store_activation_epoch(epoch):
    with open("activation_epoch.txt", "w") as file:
        file.write(str(epoch))
    ACTIVATE_EPOCH.set(epoch)

def read_activation_epoch():
    if os.path.exists("activation_epoch.txt"):
        with open("activation_epoch.txt", "r") as file:
            return int(file.read().strip())
    return None

def nimiq_request(method, params=None, retries=3, delay=5):
    while retries > 0:
        try:
            logging.debug(method)
            logging.debug(params)
            response = requests.post(NIMIQ_NODE_URL, json={
                "jsonrpc": "2.0",
                "id": 1,
                "method": method,
                "params": params or [],
            })
            response.raise_for_status()  # Raises an HTTPError if the HTTP request returned an unsuccessful status code
            logging.debug(response.json())
            result = response.json().get('result', {})
            time.sleep(0.5)  # Wait 0.5 seconds to not overload the node.
            if result is None:
                raise ValueError("No result in response")
            return result

        except (requests.exceptions.RequestException, ValueError) as err:
            retries -= 1
            logging.error(f"Error: {err}. Retrying in {delay} seconds. Retries left: {retries}")
            time.sleep(delay)
    logging.error("Request failed after multiple retries.")
    return None

def get_private_key(file_path):
    with open(file_path, 'r') as f:
        lines = f.readlines()
    for line in lines:
        if 'Private Key:' in line:
            return line.split('Private Key:')[1].strip()
    return None

def get_vote_key(file_path):
    with open(file_path, 'r') as file:
        lines = file.readlines()

    for i in range(len(lines)):
        if "Secret Key:" in lines[i]:
            return lines[i + 2].strip()  # The secret key is two lines down
    return None

def needs_funds(address):
    res = nimiq_request("getAccountByAddress", [address])
    if res is None:
        return False
    data = res.get('data', {})
    if data is None or data.get('balance', 0) == 0:
        return True
    else:
        return False

def get_address():
    res = nimiq_request("getAddress")
    if res is None:
        return None
    return res['data']

def get_tx(tx_hash):
    res = nimiq_request("getTransactionByHash", [tx_hash])
    if res is None:
        return None
    if 'error' in res:
        logging.error(f"Error getting transaction: {res['error']['message']}")
        return None
    logging.info(f"Transaction: {res}")

def get_epoch_number():
    res = nimiq_request("getEpochNumber")
    if res is None:
        return None
    EPOCH_NUMBER.set(res['data'])

def activate_validator():
    ADDRESS = get_address()
    logging.info(f"Address: {ADDRESS}")

    SIGKEY = get_private_key('/keys/address.txt')

    VOTEKEY = get_vote_key('/keys/vote_key.txt')

    ADDRESS_PRIVATE = get_private_key('/keys/address.txt')

    logging.info("Funding Nimiq address.")
    if needs_funds(ADDRESS):
        requests.post(FACUET_URL, data={'address': ADDRESS})
    else:
        logging.info("Address already funded.")

    current_epoch = nimiq_request("getEpochNumber")['data']
    store_activation_epoch(current_epoch)

    logging.info("Importing private key.")
    nimiq_request("importRawKey", [ADDRESS_PRIVATE, ''])

    logging.info("Unlock Account.")
    nimiq_request("unlockAccount", [ADDRESS, '', 0])

    logging.info("Activate Validator")
    result = nimiq_request("sendNewValidatorTransaction", [ADDRESS, ADDRESS, SIGKEY, VOTEKEY, ADDRESS, "", 500, "+0"])

    time.sleep(30)  # Wait before checking the transaction
    logging.info("Check Activate TX")
    if result is not None:
        get_tx(result.get('data'))

    ACTIVATED_AMOUNT.labels(address=ADDRESS).inc()
    return ADDRESS

def is_validator_active(address):
    res = nimiq_request("getActiveValidators")
    if res is None:
        return False
    active_validators = res.get('data', [])
    logging.info(json.dumps({"active_validators": active_validators}))
    return address in active_validators

def check_and_activate_validator(address):
    res = nimiq_request("getEpochNumber")
    if res is None:
        return
    current_epoch = res['data']
    activation_epoch = read_activation_epoch()
    if activation_epoch is None or current_epoch > activation_epoch:
        if not is_validator_active(address):
            logging.info("Activating validator.")
            activate_validator()
        else:
            logging.info("Validator already active.")
    else:
        next_epoch = activation_epoch + 1
        logging.info(f"Next epoch to activate validator: {next_epoch}")
        logging.info("Waiting for next epoch to activate validator.")

# Check that consensus is established and stays established; if not, wait until it is.
def check_block_height():
    logging.info("Waiting for consensus to be established, this may take a while...")
    consensus_count = 0
    while consensus_count < 3:
        res = nimiq_request("isConsensusEstablished")
        if res is not None and res.get('data') == True:
            consensus_count += 1
            logging.info(f"Consensus established {consensus_count} time(s).")
        else:
            consensus_count = 0
            logging.info("Consensus not established yet.")
        time.sleep(5)
    logging.info("Consensus confirmed 3 times.")

if __name__ == '__main__':
    logging.info("Starting validator activation script...")
    logging.info("Version: 0.1.0")
    start_http_server(int(PROMETHEUS_PORT))  # Start the Prometheus metrics server
    # Run indefinitely
    while True:
        check_block_height()
        get_epoch_number()
        address = get_address()
        check_and_activate_validator(address)
        time.sleep(600)  # Wait 10 minutes before checking again.
2 changes: 2 additions & 0 deletions requirements.txt
@@ -0,0 +1,2 @@
requests
prometheus_client
