Initial commit

nemunaire 2019-11-26 20:43:48 +01:00
commit e143984090
8 changed files with 205 additions and 0 deletions

BIN
chronograf/chronograf-v1.db Normal file

Binary file not shown.

36
docker-compose.yml Normal file

@@ -0,0 +1,36 @@
version: "2"
services:
rng:
build: rng
ports:
- "8001:80"
hasher:
build: hasher
ports:
- "8002:80"
worker:
build: worker
environment:
- INFLUXDB_DB=chocominer
- INFLUXDB_USER=chocominer
- INFLUXDB_USER_PASSWORD=Ru5icohx6ic8eiVaejeequ6aiMav1Oa
- USER
influxdb:
image: influxdb:alpine
environment:
- INFLUXDB_DB=chocominer
- INFLUXDB_READ_USER=chronograf
- INFLUXDB_READ_USER_PASSWORD=eBoo8geingie8ziejeeg8bein6Yai1a
- INFLUXDB_WRITE_USER=chocominer
- INFLUXDB_WRITE_USER_PASSWORD=Ru5icohx6ic8eiVaejeequ6aiMav1Oa
chronograf:
image: chronograf:alpine
volumes:
- ./chronograf:/var/lib/chronograf
ports:
- "8888:8888"

7
hasher/Dockerfile Normal file

@@ -0,0 +1,7 @@
FROM ruby:alpine
RUN apk add --update build-base curl
RUN gem install sinatra
RUN gem install thin
ADD hasher.rb /
CMD ["ruby", "hasher.rb"]
EXPOSE 80

18
hasher/hasher.rb Normal file

@@ -0,0 +1,18 @@
require 'digest'
require 'sinatra'
require 'socket'

set :bind, '0.0.0.0'
set :port, 80

post '/' do
  # Simulate a bit of delay
  sleep 0.1
  content_type 'text/plain'
  "#{Digest::SHA2.new().update(request.body.read)}"
end

get '/' do
  "HASHER running on #{Socket.gethostname}\n"
end
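The hasher answers a POST with the SHA-256 hex digest of the raw request body (Ruby's Digest::SHA2 defaults to 256 bits). A minimal smoke-test sketch in Python, assuming the stack is up and the hasher is reachable on the port published in docker-compose.yml (8002):

import hashlib
import requests

payload = b"hello"
# POST raw bytes, exactly as the worker does
r = requests.post("http://localhost:8002/",
                  data=payload,
                  headers={"Content-Type": "application/octet-stream"})
# The service's answer should match a local SHA-256 of the same bytes
assert r.text == hashlib.sha256(payload).hexdigest()
print(r.text)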

5
rng/Dockerfile Normal file

@@ -0,0 +1,5 @@
FROM python:alpine
RUN pip install Flask
COPY rng.py /
CMD ["python", "rng.py"]
EXPOSE 80

31
rng/rng.py Normal file

@@ -0,0 +1,31 @@
from flask import Flask, Response
import os
import socket
import time

app = Flask(__name__)

# Enable debugging if the DEBUG environment variable is set and starts with Y
app.debug = os.environ.get("DEBUG", "").lower().startswith('y')

hostname = socket.gethostname()

urandom = os.open("/dev/urandom", os.O_RDONLY)


@app.route("/")
def index():
    return "RNG running on {}\n".format(hostname)


@app.route("/<int:how_many_bytes>")
def rng(how_many_bytes):
    # Simulate a little bit of delay
    time.sleep(0.1)
    return Response(
        os.read(urandom, how_many_bytes),
        content_type="application/octet-stream")


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=80, threaded=False)
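The rng service streams the requested number of bytes from /dev/urandom. A quick check against the port published in docker-compose.yml (8001), assuming the stack is running; the /32 route mirrors what the worker requests:

import requests

# Ask for 32 random bytes, like the worker's get_random_bytes()
r = requests.get("http://localhost:8001/32")
assert r.status_code == 200
assert len(r.content) == 32
print(r.content.hex())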

5
worker/Dockerfile Normal file

@@ -0,0 +1,5 @@
FROM python:alpine
RUN pip install influxdb
RUN pip install requests
COPY worker.py /
CMD ["python", "worker.py"]

103
worker/worker.py Normal file

@@ -0,0 +1,103 @@
import logging
import os
import requests
import threading
import time

from influxdb import InfluxDBClient

DEBUG = os.environ.get("DEBUG", "").lower().startswith("y")

log = logging.getLogger(__name__)

if DEBUG:
    logging.basicConfig(level=logging.DEBUG)
else:
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("requests").setLevel(logging.WARNING)

client = InfluxDBClient('influxdb', 8086,
                        os.environ.get("INFLUXDB_USER", "chocominer"),
                        os.environ.get("INFLUXDB_USER_PASSWORD", "chocominer"),
                        os.environ.get("INFLUXDB_DB", "chocominer"))


def get_random_bytes():
    r = requests.get("http://rng/32")
    return r.content


def hash_bytes(data):
    r = requests.post("http://hasher/",
                      data=data,
                      headers={"Content-Type": "application/octet-stream"})
    hex_hash = r.text
    return hex_hash


def current_chunk():
    r = requests.get("https://virli.nemunai.re/chunk")
    return r.content.decode()


def claim_chunk(random_bytes):
    r = requests.post("https://virli.nemunai.re/chunk",
                      data='{"proof": "' + random_bytes + '", "login": "' + os.environ.get("USER", "nemunaire") + '"}',
                      headers={"Content-Type": "application/json"})
    return r.content


chunk = "12"


def update_chunk():
    global chunk
    while True:
        try:
            chunk = current_chunk()
            time.sleep(1)
        except:
            pass


def work_loop(interval=1):
    deadline = 0
    loops_done = 0
    while True:
        if time.time() > deadline:
            log.info("{} units of work done, updating hash counter"
                     .format(loops_done))
            client.write_points([{
                "measurement": "hashes",
                "fields": {
                    "value": loops_done
                }
            }])
            loops_done = 0
            deadline = time.time() + interval
        work_once()
        loops_done += 1


def work_once():
    log.debug("Doing one unit of work")
    time.sleep(0.1)
    random_bytes = get_random_bytes()
    hex_hash = hash_bytes(random_bytes + chunk[0].encode())
    log.info("Nugget found: {}...{} (looking for {})"
             .format(hex_hash[:8], hex_hash[-2:], chunk[1:]))
    client.write_points([{
        "measurement": "nuggets",
        "fields": {
            "value": str(hex_hash),
            "proof": random_bytes.hex(),
        }
    }])
    if hex_hash.startswith(chunk[1:]) and claim_chunk(random_bytes.hex()) == b"true":
        log.debug("CHUNK FOUND \o/")
        client.write_points([{
            "measurement": "chunks",
            "fields": {
                "value": str(hex_hash),
            }
        }])


if __name__ == "__main__":
    threading.Thread(target=update_chunk).start()
    while True:
        try:
            work_loop()
        except:
            log.exception("In work loop:")
            log.error("Waiting 10s and restarting.")
            time.sleep(10)
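The worker writes three measurements into InfluxDB: hashes (units of work per interval), nuggets (every hash computed) and chunks (successful claims). A sketch of reading the hashing rate back with the same influxdb client library, assuming it runs inside the compose network with the read-only user declared in docker-compose.yml:

import os

from influxdb import InfluxDBClient

# Read-only credentials come from docker-compose.yml; the hostname
# 'influxdb' only resolves inside the compose network.
client = InfluxDBClient('influxdb', 8086,
                        'chronograf',
                        os.environ.get("INFLUXDB_READ_USER_PASSWORD", ""),
                        'chocominer')

# Hashes per 10-second bucket over the last five minutes
result = client.query('SELECT sum("value") FROM "hashes" '
                      'WHERE time > now() - 5m GROUP BY time(10s)')
for point in result.get_points():
    print(point['time'], point['sum'])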