commit e1439840903d6e3d70efd33426dd8ccd2381950a Author: Pierre-Olivier Mercier Date: Tue Nov 26 20:43:48 2019 +0100 Initial commit diff --git a/chronograf/chronograf-v1.db b/chronograf/chronograf-v1.db new file mode 100644 index 0000000..ef3ca16 Binary files /dev/null and b/chronograf/chronograf-v1.db differ diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..96e1404 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,36 @@ +version: "2" + +services: + rng: + build: rng + ports: + - "8001:80" + + hasher: + build: hasher + ports: + - "8002:80" + + worker: + build: worker + environment: + - INFLUXDB_DB=chocominer + - INFLUXDB_USER=chocominer + - INFLUXDB_USER_PASSWORD=Ru5icohx6ic8eiVaejeequ6aiMav1Oa + - USER + + influxdb: + image: influxdb:alpine + environment: + - INFLUXDB_DB=chocominer + - INFLUXDB_READ_USER=chronograf + - INFLUXDB_READ_USER_PASSWORD=eBoo8geingie8ziejeeg8bein6Yai1a + - INFLUXDB_WRITE_USER=chocominer + - INFLUXDB_WRITE_USER_PASSWORD=Ru5icohx6ic8eiVaejeequ6aiMav1Oa + + chronograf: + image: chronograf:alpine + volumes: + - ./chronograf:/var/lib/chronograf + ports: + - "8888:8888" diff --git a/hasher/Dockerfile b/hasher/Dockerfile new file mode 100644 index 0000000..62be928 --- /dev/null +++ b/hasher/Dockerfile @@ -0,0 +1,7 @@ +FROM ruby:alpine +RUN apk add --update build-base curl +RUN gem install sinatra +RUN gem install thin +COPY hasher.rb / +CMD ["ruby", "hasher.rb"] +EXPOSE 80 diff --git a/hasher/hasher.rb b/hasher/hasher.rb new file mode 100644 index 0000000..28a929f --- /dev/null +++ b/hasher/hasher.rb @@ -0,0 +1,18 @@ +require 'digest' +require 'sinatra' +require 'socket' + +set :bind, '0.0.0.0' +set :port, 80 + +post '/' do + # Simulate a bit of delay + sleep 0.1 + content_type 'text/plain' + "#{Digest::SHA2.new().update(request.body.read)}" +end + +get '/' do + "HASHER running on #{Socket.gethostname}\n" +end + diff --git a/rng/Dockerfile b/rng/Dockerfile new file mode 100644 index 0000000..34cf43b --- 
/dev/null +++ b/rng/Dockerfile @@ -0,0 +1,5 @@ +FROM python:alpine +RUN pip install Flask +COPY rng.py / +CMD ["python", "rng.py"] +EXPOSE 80 diff --git a/rng/rng.py b/rng/rng.py new file mode 100644 index 0000000..6809ce6 --- /dev/null +++ b/rng/rng.py @@ -0,0 +1,31 @@ +from flask import Flask, Response +import os +import socket +import time + +app = Flask(__name__) + +# Enable debugging if the DEBUG environment variable is set and starts with Y +app.debug = os.environ.get("DEBUG", "").lower().startswith('y') + +hostname = socket.gethostname() + +urandom = os.open("/dev/urandom", os.O_RDONLY) + + +@app.route("/") +def index(): + return "RNG running on {}\n".format(hostname) + + +@app.route("/<int:how_many_bytes>") +def rng(how_many_bytes): + # Simulate a little bit of delay + time.sleep(0.1) + return Response( + os.read(urandom, how_many_bytes), + content_type="application/octet-stream") + + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=80, threaded=False) diff --git a/worker/Dockerfile b/worker/Dockerfile new file mode 100644 index 0000000..8b9a74b --- /dev/null +++ b/worker/Dockerfile @@ -0,0 +1,5 @@ +FROM python:alpine +RUN pip install influxdb +RUN pip install requests +COPY worker.py / +CMD ["python", "worker.py"] diff --git a/worker/worker.py b/worker/worker.py new file mode 100644 index 0000000..aa20e4c --- /dev/null +++ b/worker/worker.py @@ -0,0 +1,103 @@ +import logging +import os +import requests +import threading +import time +from influxdb import InfluxDBClient + +DEBUG = os.environ.get("DEBUG", "").lower().startswith("y") + +log = logging.getLogger(__name__) +if DEBUG: + logging.basicConfig(level=logging.DEBUG) +else: + logging.basicConfig(level=logging.INFO) + logging.getLogger("requests").setLevel(logging.WARNING) + +client = InfluxDBClient('influxdb', 8086, os.environ.get("INFLUXDB_USER", "chocominer"), os.environ.get("INFLUXDB_USER_PASSWORD", "chocominer"), os.environ.get("INFLUXDB_DB", "chocominer")) + +def get_random_bytes(): + r = 
requests.get("http://rng/32") + return r.content + + +def hash_bytes(data): + r = requests.post("http://hasher/", + data=data, + headers={"Content-Type": "application/octet-stream"}) + hex_hash = r.text + return hex_hash + +def current_chunk(): + r = requests.get("https://virli.nemunai.re/chunk") + return r.content.decode() + +def claim_chunk(random_bytes): + r = requests.post("https://virli.nemunai.re/chunk", + data='{"proof": "' + random_bytes + '", "login": "' + os.environ.get("USER", "nemunaire") + '"}', + headers={"Content-Type": "application/json"}) + return r.content + + +chunk = "12" +def update_chunk(): + global chunk + while True: + try: + chunk = current_chunk() + time.sleep(1) + except Exception: + # Best-effort refresh: keep the previous chunk on any fetch error. + pass + +def work_loop(interval=1): + deadline = 0 + loops_done = 0 + while True: + if time.time() > deadline: + log.info("{} units of work done, updating hash counter" + .format(loops_done)) + client.write_points([{ + "measurement": "hashes", + "fields": { + "value": loops_done + } + }]) + loops_done = 0 + deadline = time.time() + interval + work_once() + loops_done += 1 + + +def work_once(): + log.debug("Doing one unit of work") + time.sleep(0.1) + random_bytes = get_random_bytes() + hex_hash = hash_bytes(random_bytes + chunk[0].encode()) + log.info("Nugget found: {}...{} (looking for {})".format(hex_hash[:8], hex_hash[-2:], chunk[1:])) + client.write_points([{ + "measurement": "nuggets", + "fields": { + "value": str(hex_hash), + "proof": random_bytes.hex(), + } + }]) + + if hex_hash.startswith(chunk[1:]) and claim_chunk(random_bytes.hex()) == b"true": + log.debug("CHUNK FOUND \\o/") + client.write_points([{ + "measurement": "chunks", + "fields": { + "value": str(hex_hash), + } + }]) + + +if __name__ == "__main__": + threading.Thread(target=update_chunk).start() + while True: + try: + work_loop() + except Exception: + log.exception("In work loop:") + log.error("Waiting 10s and restarting.") + time.sleep(10)