Compare commits

...

173 Commits

Author SHA1 Message Date
UpstreamData
ce5e1cad40 added the option to append the last octet of the IP address to the username when configuring 2022-04-29 15:37:07 -06:00
UpstreamData
d877ba01a0 fix spelling issue 2022-04-29 15:02:54 -06:00
UpstreamData
b0ed990d5a update requirements.txt 2022-04-29 14:38:33 -06:00
UpstreamData
89c8a16900 fix light functionality to work as intended 2022-04-29 13:25:08 -06:00
UpstreamData
247cf0ccc2 added fault light option to the board utility 2022-04-29 10:18:16 -06:00
UpstreamData
d0aa219a7a add first page and pie chart to board report 2022-04-28 11:12:33 -06:00
UpstreamData
87291e2a89 change some formatting with the board report and fix some bugs 2022-04-27 16:58:47 -06:00
UpstreamData
9c88d21db6 add basic board report to board util 2022-04-27 16:35:11 -06:00
UpstreamData
8b7415042f fixed a bug with the webserver 2022-04-25 14:40:32 -06:00
UpstreamData
59ab6e6c8a reformatted and clarified some code 2022-04-21 10:09:30 -06:00
UpstreamData
0724a376ea refactored some code in board util 2022-04-21 09:43:22 -06:00
UpstreamData
f9f26a5587 added better logging and process to testbench 2022-04-20 11:36:09 -06:00
UpstreamData
ed4122fb21 added better logging to testbench 2022-04-19 10:29:13 -06:00
UpstreamData
0739a7f689 added a try except block for logging errors per miner in the testbench 2022-04-19 10:15:12 -06:00
UpstreamData
c7b7a6e7c5 made sure there will always be board 6, 7, and 8 in tunerstatus 2022-04-18 16:12:12 -06:00
UpstreamData
2a132c8325 added basic tuner status data on testbench 2022-04-18 16:02:21 -06:00
UpstreamData
154882a668 fixed an issue with pinging when done. 2022-04-18 14:29:54 -06:00
UpstreamData
3f64c9dd67 Merge remote-tracking branch 'origin/testbench-webserver' into testbench-webserver
# Conflicts:
#	miners/bosminer.py
#	tools/bad_board_util/func/decorators.py
#	tools/bad_board_util/layout.py
#	tools/bad_board_util/ui.py
2022-04-18 14:17:29 -06:00
UpstreamData
d8d66e4244 fixed a bug with not hiding the light button 2022-04-18 14:17:04 -06:00
UpstreamData
a9cdefcd43 finished adding timer 2022-04-18 14:17:04 -06:00
UpstreamData
029d3ef596 added online timer for testing 2022-04-18 14:17:04 -06:00
UpstreamData
0e474402c0 reformatted files 2022-04-18 14:17:04 -06:00
UpstreamData
b6560cdedb added fixing file exists bug 2022-04-18 14:17:04 -06:00
UpstreamData
767575703e fixed some bugs with finishing the install 2022-04-18 14:17:04 -06:00
UpstreamData
4b4d9060ed changed some printing to logging logs 2022-04-18 14:17:04 -06:00
UpstreamData
ad75b1d25c added web testbench to main apps 2022-04-18 14:17:03 -06:00
UpstreamData
4b767c5427 fixed more bugs 2022-04-18 14:17:03 -06:00
UpstreamData
a6df7a83d6 fixed many remaining bugs in testbench webserver, should be ready for use. 2022-04-18 14:17:03 -06:00
UpstreamData
93f2990399 finished miner install to be tested 2022-04-18 14:17:03 -06:00
UpstreamData
e74f67089e finished light functionality 2022-04-18 14:17:03 -06:00
UpstreamData
41a6078790 added partial fault light functionality and fixed stdout output direction 2022-04-18 14:17:03 -06:00
UpstreamData
4d93926fee added output when running install process 2022-04-18 14:17:03 -06:00
UpstreamData
03f5cafe76 added sending output from miners 2022-04-18 14:17:03 -06:00
UpstreamData
4f6ebff880 set graphs to show and hide when getting data 2022-04-18 14:17:03 -06:00
UpstreamData
af27cbbe2c set graphs to update when receiving data 2022-04-18 14:17:03 -06:00
UpstreamData
3604957c83 added auto port finding to both web apps 2022-04-18 14:17:03 -06:00
UpstreamData
3670a02aec add feeds updater to startup process 2022-04-18 14:17:03 -06:00
UpstreamData
7ebfdb3f33 added feeds auto-updater for web testbench 2022-04-18 14:17:03 -06:00
UpstreamData
b9b7da8746 add base files for web interface 2022-04-18 14:17:03 -06:00
UpstreamData
eaaf137b9b added temp fake data to the app for it to send to the JS side. 2022-04-18 14:15:46 -06:00
UpstreamData
a0311e3ce3 add base files for web interface 2022-04-18 14:15:44 -06:00
UpstreamData
8864aa7b4b added install file to do the basic install 2022-04-18 14:15:24 -06:00
UpstreamData
4d58129eee fixed a bug with not hiding the light button 2022-04-18 13:12:08 -06:00
UpstreamData
4468fe9fbb finished adding timer 2022-04-18 12:29:55 -06:00
UpstreamData
3b716a044b added online timer for testing 2022-04-18 12:13:41 -06:00
UpstreamData
25e657729c reformatted files 2022-04-18 10:24:53 -06:00
UpstreamData
cace399ed2 added fixing file exists bug 2022-04-18 10:13:48 -06:00
UpstreamData
045e1ca6ba fixed some bugs with finishing the install 2022-04-18 09:52:45 -06:00
UpstreamData
4f86dec560 changed some printing to logging logs 2022-04-18 08:49:21 -06:00
UpstreamData
13f033440d added web testbench to main apps 2022-04-14 18:43:36 -06:00
UpstreamData
b5c455ffa4 fixed more bugs 2022-04-14 18:38:29 -06:00
UpstreamData
eb5a00b706 fixed many remaining bugs in testbench webserver, should be ready for use. 2022-04-14 18:17:23 -06:00
UpstreamData
3a560472e6 finished miner install to be tested 2022-04-14 14:40:31 -06:00
UpstreamData
4776dce038 finished light functionality 2022-04-14 13:16:16 -06:00
UpstreamData
2d6891c6d2 added partial fault light functionality and fixed stdout output direction 2022-04-14 11:34:21 -06:00
UpstreamData
f5a41f7b13 added output when running install process 2022-04-14 11:08:52 -06:00
UpstreamData
4a2926df94 added sending output from miners 2022-04-14 10:57:32 -06:00
UpstreamData
8736f33a56 set graphs to show and hide when getting data 2022-04-14 10:43:26 -06:00
UpstreamData
89eb77588f set graphs to update when receiving data 2022-04-14 10:34:51 -06:00
UpstreamData
c930510226 added auto port finding to both web apps 2022-04-14 09:43:43 -06:00
UpstreamData
b7c58e5d34 add feeds updater to startup process 2022-04-14 09:37:06 -06:00
UpstreamData
ce48ae020b added feeds auto-updater for web testbench 2022-04-11 16:13:04 -06:00
UpstreamData
7809bfc0d1 added exporting a report from bad board utility 2022-04-01 15:19:12 -06:00
UpstreamData
d84fcaafdf added bos get version 2022-04-01 13:33:05 -06:00
UpstreamData
a9f600b797 add base files for web interface 2022-03-31 11:32:42 -06:00
UpstreamData
f0a8e7ba9f reformatted all files to use the Black formatting style 2022-03-31 11:30:34 -06:00
UpstreamData
c57a523553 reformatted all files to use the Black formatting style 2022-03-31 11:27:57 -06:00
UpstreamData
d905f6f414 added temp fake data to the app for it to send to the JS side. 2022-03-30 08:42:21 -06:00
UpstreamData
22f78ac405 add base files for web interface 2022-03-25 16:02:50 -06:00
UpstreamData
7a098b1c7e added install file to do the basic install 2022-03-25 15:29:30 -06:00
UpstreamData
e1383f2002 Added support for X19 models with BraiinsOS 2022-03-25 09:06:25 -06:00
UpstreamData
c3b23313ba added changing model when configuring for BOS S9s 2022-03-25 08:58:02 -06:00
UpstreamData
02581e917d add temperature graph to miner page 2022-03-21 10:02:11 -06:00
UpstreamData
e267073f76 add the start of a temperature graph to miner page 2022-03-21 09:39:54 -06:00
UpstreamData
4038dae446 fixed some bugs on linux with pipes 2022-03-18 12:02:42 -06:00
UpstreamData
134b5fe0ff added CTRL+A select all binding to cfg util and board util tables 2022-03-17 16:10:12 -06:00
UpstreamData
d452ca36b7 fixed copying from the board util table 2022-03-17 16:05:48 -06:00
UpstreamData
fdec35cd2e added disable button decorator to board util 2022-03-17 16:01:02 -06:00
UpstreamData
d488c8458c added the ability to scan a range of IPs as part of the miner network by passing a string formatted as {ip_range_1_start}-{ip_range_1_end}, {ip_range_2_start}-{ip_range_2_end} to the miner network 2022-03-17 12:05:58 -06:00
UpstreamData
6d2e40c81d added support for avalon10xx miners 2022-03-16 15:21:09 -06:00
UpstreamData
594b5d0448 improved logging format and sent output to a file 2022-03-16 14:03:32 -06:00
UpstreamData
1be12e5d4c moved _get_ssh_connection to the base miner class 2022-03-16 13:34:18 -06:00
UpstreamData
bae2ee4245 changed MinerFactory to a singleton class to ensure clearing its cache is easier and removed creation of independent miner factories for each utility 2022-03-16 12:05:44 -06:00
UpstreamData
57bd606f21 add logging to base miner API 2022-03-16 10:56:33 -06:00
UpstreamData
eb8cefa461 add logging to btminer and fix some bugs 2022-03-16 08:40:41 -06:00
UpstreamData
9edcd866bb added more logging for bosminer models. 2022-03-15 09:07:07 -06:00
UpstreamData
07a8b00a93 added logging to bmminer and X19 models 2022-03-14 16:07:47 -06:00
UpstreamData
c22be7ded8 started adding some basic logging functionality 2022-03-14 15:52:46 -06:00
UpstreamData
2380b94db1 update unknown API docstring 2022-03-14 14:12:31 -06:00
UpstreamData
d8e59afee0 Updated bosminer API docstrings, and fixed some errors in CGMiner API docstrings 2022-03-14 14:07:17 -06:00
UpstreamData
05e14baa68 added some todos 2022-03-14 11:26:53 -06:00
UpstreamData
ff56148732 fixed some bugs with cgminer, and included VC redistributables in CXFreeze build for CFG util 2022-03-14 10:18:28 -06:00
UpstreamData
bfc5668d24 fixed some bugs with running the web app from docker 2022-03-09 10:53:26 -07:00
UpstreamData
b3103ae700 fixed fan formatting on smaller devices 2022-03-08 12:23:38 -07:00
UpstreamData
43834203a8 reformatted file structure and reformatted for phones, as well as fixed web sockets for remote devices 2022-03-08 11:39:10 -07:00
UpstreamData
7ba8044564 added dockerfile and removed cxfreeze from web_monitor requirements due to it breaking the docker setup 2022-03-08 09:09:28 -07:00
UpstreamData
7e91fe12e7 updated some ports and fixed a bug with summary keys when getting data 2022-03-07 14:54:36 -07:00
UpstreamData
02114aac65 Merge pull request #6 from UpstreamData/web_monitor
Web monitor
2022-03-07 12:40:11 -07:00
UpstreamData
244dac76af finished adding settings page 2022-03-07 12:38:56 -07:00
UpstreamData
2bd25c3f35 started adding settings page 2022-03-07 11:17:41 -07:00
UpstreamData
23350ea4b6 updated requirements, and fixed some formatting issues 2022-03-07 10:36:38 -07:00
UpstreamData
8a6917878e Merge remote-tracking branch 'origin/web_monitor' into web_monitor 2022-03-07 09:40:42 -07:00
UpstreamData
7dd00954e4 fixed some issues with the rounding on floats in the JS 2022-03-07 09:39:56 -07:00
UpstreamData
f3710f618e added miner model and hashrate as a table in the per miner stuff 2022-03-07 09:39:56 -07:00
UpstreamData
8ecdb6f5e8 fixed a bug with scanning and adding miner which didnt append to the navbar 2022-03-07 09:39:56 -07:00
UpstreamData
309b4d44fc updated some formatting on charts 2022-03-07 09:39:56 -07:00
UpstreamData
80f941d912 added remove miner functionality 2022-03-07 09:39:56 -07:00
UpstreamData
4534b09532 added custom TH/s formatting to graphs 2022-03-07 09:39:56 -07:00
UpstreamData
97a9b59acc added dashboard hashrate info 2022-03-07 09:39:56 -07:00
UpstreamData
87b8de9029 started on basic framework for dashboard in web_monitor 2022-03-07 09:39:56 -07:00
UpstreamData
42f5146632 added different select gradient 2022-03-07 09:39:56 -07:00
UpstreamData
f613cc039f added spinner to scan 2022-03-07 09:39:56 -07:00
UpstreamData
e974c77359 added fan and hashrate data for S19s and Whatsminers 2022-03-07 09:39:56 -07:00
UpstreamData
0f324177cb added fan data for braiins OS 2022-03-07 09:39:56 -07:00
UpstreamData
46a4508cd7 updated more gradient formatting and added gradients to navbar 2022-03-07 09:39:56 -07:00
UpstreamData
d4d9b1ad3c added gradients to fan data 2022-03-07 09:39:56 -07:00
UpstreamData
322ee05fdf added bounding box to the chart 2022-03-07 09:39:56 -07:00
UpstreamData
85569366a2 sorted current miners for the navbar 2022-03-07 09:39:56 -07:00
UpstreamData
dea6ff2a96 improved chart functionality in the web monitor and added handlers for errors such as no response from the miner 2022-03-07 09:39:56 -07:00
UpstreamData
3fcd2edf6f charts on miner pages work now, they gather data from miners and put it into the graph, with a max size of 49 entries per graph 2022-03-07 09:39:56 -07:00
UpstreamData
16b84310ec added graph with fake data on each miner page, and added basic formatting to it. 2022-03-07 09:39:56 -07:00
UpstreamData
f8899521bc improved navbar formatting, added active formats for all miners, moved add miners to a miner subtab 2022-03-07 09:39:56 -07:00
UpstreamData
3558a1a6b1 finished up scan page, added the ability to add miners and them get listed in the miner list, and started adding the individual miner pages 2022-03-07 09:39:56 -07:00
UpstreamData
385943755d further improved formatting of scan page, added disabled checkboxes on scan, updated miner count on add 2022-03-07 09:39:56 -07:00
UpstreamData
3002cb4e97 added basic addition of miners to the list and improved some functionality of the web tool 2022-03-07 09:39:56 -07:00
UpstreamData
6d711520fc added add selected miners button 2022-03-07 09:39:56 -07:00
UpstreamData
584de40983 improved formatting on scan page and made the scan a bit more robust 2022-03-07 09:39:56 -07:00
UpstreamData
81911ba549 fixed some formatting on the scan page 2022-03-07 09:39:55 -07:00
UpstreamData
e37e9e2251 added the scan page to scan for miners on a subnet 2022-03-07 09:39:55 -07:00
UpstreamData
92a65c8977 switched to fastAPi and jinja 2 for templates and html 2022-03-07 09:39:55 -07:00
UpstreamData
ae8b2cbd07 added the required directories for settings and scanning 2022-03-07 09:39:55 -07:00
UpstreamData
cda13edf85 improved formatting of index.html 2022-03-07 09:39:55 -07:00
UpstreamData
610ee57963 started adding HTML files for the web monitor program 2022-03-07 09:39:55 -07:00
UpstreamData
2ef809db54 fixed some issues with the rounding on floats in the JS 2022-03-07 09:32:06 -07:00
UpstreamData
f315c0c051 added miner model and hashrate as a table in the per miner stuff 2022-03-04 16:10:27 -07:00
UpstreamData
936c230aa3 fixed a bug with scanning and adding miner which didnt append to the navbar 2022-03-04 15:48:17 -07:00
UpstreamData
2c93f1f395 updated some formatting on charts 2022-03-04 14:36:43 -07:00
UpstreamData
727ebd9c42 added remove miner functionality 2022-03-04 14:08:27 -07:00
UpstreamData
1e4fc897e3 added custom TH/s formatting to graphs 2022-03-04 13:39:23 -07:00
UpstreamData
3945a86004 added dashboard hashrate info 2022-03-04 11:53:31 -07:00
UpstreamData
58cc64d17b started on basic framework for dashboard in web_monitor 2022-03-04 11:24:06 -07:00
UpstreamData
b66cf6f0ba added different select gradient 2022-03-02 15:54:49 -07:00
UpstreamData
1db15a741e added spinner to scan 2022-03-02 15:47:17 -07:00
UpstreamData
5f355c833b added fan and hashrate data for S19s and Whatsminers 2022-03-02 15:38:29 -07:00
UpstreamData
a76b32e3ff added fan data for braiins OS 2022-03-02 15:15:20 -07:00
UpstreamData
f2c01dca25 updated more gradient formatting and added gradients to navbar 2022-03-02 14:36:34 -07:00
UpstreamData
abc542a0ca added gradients to fan data 2022-03-02 13:12:20 -07:00
UpstreamData
9e598ebd8c added bounding box to the chart 2022-03-02 12:15:46 -07:00
UpstreamData
7801ca5819 sorted current miners for the navbar 2022-03-02 11:16:02 -07:00
UpstreamData
482edabd27 improved chart functionality in the web monitor and added handlers for errors such as no response from the miner 2022-03-02 11:11:34 -07:00
UpstreamData
3e5998de6e charts on miner pages work now, they gather data from miners and put it into the graph, with a max size of 49 entries per graph 2022-03-01 16:17:28 -07:00
UpstreamData
c3d19607f6 added graph with fake data on each miner page, and added basic formatting to it. 2022-03-01 16:01:39 -07:00
UpstreamData
2c2648cbe7 improved navbar formatting, added active formats for all miners, moved add miners to a miner subtab 2022-03-01 12:51:49 -07:00
UpstreamData
a72c4f7797 finished up scan page, added the ability to add miners and them get listed in the miner list, and started adding the individual miner pages 2022-03-01 12:28:36 -07:00
UpstreamData
19ee9eb18f further improved formatting of scan page, added disabled checkboxes on scan, updated miner count on add 2022-03-01 11:30:48 -07:00
UpstreamData
3ae29c3883 added basic addition of miners to the list and improved some functionality of the web tool 2022-02-28 16:28:40 -07:00
UpstreamData
d9f8f53a10 added add selected miners button 2022-02-28 15:15:57 -07:00
UpstreamData
6b3e525f45 improved formatting on scan page and made the scan a bit more robust 2022-02-28 14:10:43 -07:00
UpstreamData
c8824f86af fixed some formatting on the scan page 2022-02-25 16:11:06 -07:00
UpstreamData
cf3163dccf added the scan page to scan for miners on a subnet 2022-02-25 15:58:01 -07:00
UpstreamData
da5a784214 switched to fastAPi and jinja 2 for templates and html 2022-02-24 15:59:48 -07:00
UpstreamData
30b3315084 added the required directories for settings and scanning 2022-02-24 15:25:49 -07:00
UpstreamData
5a7dcc7fcf fixed some bugs in getting ssh connections 2022-02-24 14:42:34 -07:00
UpstreamData
c6305c57cf improved formatting of index.html 2022-02-24 09:13:07 -07:00
UpstreamData
d330e2e978 started adding HTML files for the web monitor program 2022-02-24 08:57:23 -07:00
UpstreamData
1ec2a2a4a6 update CFG-Util-README.md 2022-02-23 14:39:29 -07:00
UpstreamData
c97d384cf4 updated red row color on fault light to work with tkinter tags and be sortable. 2022-02-23 14:35:29 -07:00
UpstreamData
ca52e40a6a fixed a bug with fault lighting bugging the tool 2022-02-23 11:56:21 -07:00
UpstreamData
4a10efd7a4 added send command option in the window 2022-02-22 13:53:07 -07:00
UpstreamData
128aab1b88 switched to a monospace font in the board util. 2022-02-22 11:01:00 -07:00
UpstreamData
bb89be64f4 switched to a monospace font in the cfg tool, padded the hashrates to appear as decimal centered, and left justified hostnames for better readability. 2022-02-22 10:49:23 -07:00
UpstreamData
ef0a507306 changed the disabling buttons to use a decorator as it looks much cleaner 2022-02-18 11:10:44 -07:00
UpstreamData
908594970e disabled the buttons that can break each other when another coroutine is running 2022-02-18 10:59:10 -07:00
91 changed files with 5697 additions and 1140 deletions

.dockerignore (new file, 8 additions)

@@ -0,0 +1,8 @@
# Ignore VENV
venv
# Ignore builds
build
# Ignore github files
.github


@@ -2,6 +2,7 @@ import asyncio
import json
import ipaddress
import warnings
import logging
class APIError(Exception):
@@ -41,36 +42,44 @@ class BaseMinerAPI:
def get_commands(self) -> list:
"""Get a list of command accessible to a specific type of API on the miner."""
return [func for func in
# each function in self
dir(self) if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func not in
[func for func in
dir(BaseMinerAPI) if callable(getattr(BaseMinerAPI, func))
]
]
return [
func
for func in
# each function in self
dir(self)
if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func
not in [
func
for func in dir(BaseMinerAPI)
if callable(getattr(BaseMinerAPI, func))
]
]
async def multicommand(self, *commands: str) -> dict:
"""Creates and sends multiple commands as one command to the miner."""
logging.debug(f"{self.ip}: Sending multicommand: {[*commands]}")
# split the commands into a proper list
user_commands = [*commands]
allowed_commands = self.get_commands()
# make sure we can actually run the command, otherwise it will fail
commands = [command for command in user_commands if command in allowed_commands]
for item in list(set(user_commands) - set(commands)):
warnings.warn(f"""Removing incorrect command: {item}
warnings.warn(
f"""Removing incorrect command: {item}
If you are sure you want to use this command please use API.send_command("{item}", ignore_errors=True) instead.""",
APIWarning)
APIWarning,
)
# standard multicommand format is "command1+command2"
# doesnt work for S19 which is dealt with in the send command function
command = "+".join(commands)
data = None
try:
data = await self.send_command(command)
except APIError:
except APIError as e:
try:
data = {}
# S19 handler, try again
@@ -80,11 +89,17 @@ If you are sure you want to use this command please use API.send_command("{item}
except APIError as e:
raise APIError(e)
except Exception as e:
print(e)
logging.warning(f"{self.ip}: API Multicommand Error: {e}")
if data:
logging.debug(f"{self.ip}: Received multicommand data.")
return data
async def send_command(self, command: str, parameters: str or int or bool = None, ignore_errors: bool = False) -> dict:
async def send_command(
self,
command: str,
parameters: str or int or bool = None,
ignore_errors: bool = False,
) -> dict:
"""Send an API command to the miner and return the result."""
try:
# get reader and writer streams
@@ -92,7 +107,7 @@ If you are sure you want to use this command please use API.send_command("{item}
# handle OSError 121
except OSError as e:
if e.winerror == "121":
print("Semaphore Timeout has Expired.")
logging.warning("Semaphore Timeout has Expired.")
return {}
# create the command
@@ -101,7 +116,7 @@ If you are sure you want to use this command please use API.send_command("{item}
cmd["parameter"] = parameters
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
@@ -115,7 +130,7 @@ If you are sure you want to use this command please use API.send_command("{item}
break
data += d
except Exception as e:
print(e)
logging.warning(f"{self.ip}: API Command Error: {e}")
data = self.load_api_data(data)
@@ -128,12 +143,13 @@ If you are sure you want to use this command please use API.send_command("{item}
# validate the command succeeded
validation = self.validate_command_output(data)
if not validation[0]:
logging.warning(f"{self.ip}: API Command Error: {validation[1]}")
raise APIError(validation[1])
return data
@staticmethod
def validate_command_output(data: dict) -> tuple[bool, str | None]:
def validate_command_output(data: dict) -> tuple:
"""Check if the returned command output is correctly formatted."""
# check if the data returned is correct or an error
# if status isn't a key, it is a multicommand
@@ -160,14 +176,15 @@ If you are sure you want to use this command please use API.send_command("{item}
@staticmethod
def load_api_data(data: bytes) -> dict:
"""Convert API data from JSON to dict"""
str_data = None
try:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
@@ -180,6 +197,5 @@ If you are sure you want to use this command please use API.send_command("{item}
parsed_data = json.loads(str_data)
# handle bad json
except json.decoder.JSONDecodeError as e:
print(e)
raise APIError(f"Decode Error: {str_data}")
raise APIError(f"Decode Error {e}: {str_data}")
return parsed_data
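
For orientation, a minimal usage sketch of the multicommand helper reworked above, assuming a concrete subclass such as BOSMinerAPI imported from API.bosminer (module path inferred from other hunks in this compare; the IP address is a placeholder):

import asyncio

from API.bosminer import BOSMinerAPI  # module path assumed from the other hunks


async def main():
    api = BOSMinerAPI("192.168.1.10")  # placeholder IP
    # multicommand() drops unknown commands, joins the rest as "summary+fans+temps",
    # and logs the request via the new logging.debug() call above
    data = await api.multicommand("summary", "fans", "temps")
    print(data)


asyncio.run(main())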


@@ -18,6 +18,7 @@ class BMMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, port: int = 4028) -> None:
super().__init__(ip, port)
@@ -115,11 +116,7 @@ class BMMinerAPI(BaseMinerAPI):
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
@@ -128,11 +125,9 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
return await self.send_command(
"addpool", parameters=f"{url}, " f"{username}, " f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
@@ -142,8 +137,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
@@ -153,10 +147,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
return await self.send_command("poolquota", parameters=f"{n}, " f"{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
@@ -292,9 +283,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
@@ -337,10 +326,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
return await self.send_command("setconfig", parameters=f"{name}, " f"{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
@@ -368,15 +354,11 @@ class BMMinerAPI(BaseMinerAPI):
:return: Confirmation of setting PGA n with opt[,val].
"""
if val:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}, "
f"{val}"
)
return await self.send_command(
"pgaset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}")
return await self.send_command("pgaset", parameters=f"{n}, " f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.


@@ -2,64 +2,64 @@ from API import BaseMinerAPI
class BOSMinerAPI(BaseMinerAPI):
"""
A class that abstracts the BOSMiner API in the miners.
"""An abstraction of the BOSMiner API.
Each method corresponds to an API command in BOSMiner.
BOSMiner API documentation:
https://docs.braiins.com/os/plus-en/Development/1_api.html
Parameters:
ip: the IP address of the miner.
port (optional): the port of the API on the miner (standard is 4028)
This class abstracts use of the BOSMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
async def asccount(self) -> dict:
"""
API 'asccount' command.
"""Get data on the number of ASC devices and their info.
Returns a dict containing the number of ASC devices.
:return: Data on all ASC devices.
"""
return await self.send_command("asccount")
async def asc(self, n: int) -> dict:
"""
API 'asc' command.
"""Get data for ASC device n.
Returns a dict containing the details of a single ASC of number N.
:param n: The device to get data for.
n: the ASC device to get details of.
:return: The data for ASC device n.
"""
return await self.send_command("asc", parameters=n)
async def devdetails(self) -> dict:
"""
API 'devdetails' command.
"""Get data on all devices with their static details.
Returns a dict containing all devices with their static details.
:return: Data on all devices with their static details.
"""
return await self.send_command("devdetails")
async def devs(self) -> dict:
"""
API 'devs' command.
"""Get data on each PGA/ASC with their details.
Returns a dict containing each PGA/ASC with their details.
:return: Data on each PGA/ASC with their details.
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""
API 'edevs' command.
"""Get data on each PGA/ASC with their details, ignoring
blacklisted and zombie devices.
Returns a dict containing each PGA/ASC with their details,
ignoring blacklisted devices and zombie devices.
:param old: Include zombie devices that became zombies less
than 'old' seconds ago
Parameters:
old (optional): include zombie devices that became zombies less than 'old' seconds ago
:return: Data on each PGA/ASC with their details.
"""
if old:
return await self.send_command("edevs", parameters="old")
@@ -67,77 +67,76 @@ class BOSMinerAPI(BaseMinerAPI):
return await self.send_command("edevs")
async def pools(self) -> dict:
"""
API 'pools' command.
"""Get pool information.
Returns a dict containing the status of each pool.
:return: Miner pool information.
"""
return await self.send_command("pools")
async def summary(self) -> dict:
"""
API 'summary' command.
"""Get the status summary of the miner.
Returns a dict containing the status summary of the miner.
:return: The status summary of the miner.
"""
return await self.send_command("summary")
async def stats(self) -> dict:
"""
API 'stats' command.
"""Get stats of each device/pool with more than 1 getwork.
Returns a dict containing stats for all device/pool with more than 1 getwork.
:return: Stats of each device/pool with more than 1 getwork.
"""
return await self.send_command("stats")
async def version(self) -> dict:
"""
API 'version' command.
"""Get miner version info.
Returns a dict containing version information.
:return: Miner version information.
"""
return await self.send_command("version")
async def estats(self) -> dict:
"""
API 'estats' command.
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork,
ignoring zombie devices.
Returns a dict containing stats for all device/pool with more than 1 getwork,
:param old: Include zombie devices that became zombies less
than 'old' seconds ago.
:return: Stats of each device/pool with more than 1 getwork,
ignoring zombie devices.
"""
return await self.send_command("estats")
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""
API 'check' command.
"""Check if the command command exists in BOSMiner.
Returns information about a command:
:param command: The command to check.
:return: Information about a command:
Exists (Y/N) <- the command exists in this version
Access (Y/N) <- you have access to use the command
Parameters:
command: the command to get information about.
"""
return await self.send_command("check", parameters=command)
async def coin(self) -> dict:
"""
API 'coin' command.
"""Get information on the current coin.
Returns information about the current coin being mined:
:return: Information about the current coin being mined:
Hash Method <- the hashing algorithm
Current Block Time <- blocktime as a float, 0 means none
Current Block Hash <- the hash of the current block, blank means none
Current Block Hash <- the hash of the current block, blank
means none
LP <- whether LP is in use on at least 1 pool
Network Difficulty: the current network difficulty
"""
return await self.send_command("coin")
async def lcd(self) -> dict:
"""
API 'lcd' command.
"""Get a general all-in-one status summary of the miner.
Returns a dict containing an all in one status summary of the miner.
:return: An all-in-one status summary of the miner.
"""
return await self.send_command("lcd")
@@ -167,53 +166,43 @@ class BOSMinerAPI(BaseMinerAPI):
# return await self.send_command("removepool", parameters=n)
async def fans(self) -> dict:
"""
API 'fans' command.
"""Get fan data.
Returns a dict containing information on fans and fan speeds.
:return: Data on the fans of the miner.
"""
return await self.send_command("fans")
async def tempctrl(self) -> dict:
"""
API 'tempctrl' command.
"""Get temperature control data.
Returns a dict containing temp control configuration.
:return: Data about the temp control settings of the miner.
"""
return await self.send_command("tempctrl")
async def temps(self) -> dict:
"""
API 'temps' command.
"""Get temperature data.
Returns a dict containing temperature information.
:return: Data on the temps of the miner.
"""
return await self.send_command("temps")
async def tunerstatus(self) -> dict:
"""
API 'tunerstatus' command.
"""Get tuner status data
Returns a dict containing tuning stats.
:return: Data on the status of autotuning.
"""
return await self.send_command("tunerstatus")
async def pause(self) -> dict:
"""
API 'pause' command.
"""Pause mining.
Pauses mining and stops power consumption and waits for resume command.
Returns a dict stating that the miner paused mining.
:return: Confirmation of pausing mining.
"""
return await self.send_command("pause")
async def resume(self) -> dict:
"""
API 'pause' command.
"""Resume mining.
Resumes mining on the miner.
Returns a dict stating that the miner resumed mining.
:return: Confirmation of resuming mining.
"""
return await self.send_command("resume")


@@ -4,16 +4,15 @@ import json
import hashlib
import binascii
import base64
import logging
from passlib.handlers.md5_crypt import md5_crypt
from cryptography.hazmat.primitives.ciphers import \
Cipher, algorithms, modes
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from API import BaseMinerAPI, APIError
from settings import WHATSMINER_PWD
### IMPORTANT ###
# you need to change the password of the miners using the Whatsminer
# tool, then you can set them back to admin with this tool, but they
@@ -35,7 +34,7 @@ def _crypt(word: str, salt: str) -> str:
:return: An MD5 hash of the word with the salt.
"""
# compile a standard format for the salt
standard_salt = re.compile('\s*\$(\d+)\$([\w\./]*)\$')
standard_salt = re.compile("\s*\$(\d+)\$([\w\./]*)\$")
# check if the salt matches
match = standard_salt.match(salt)
# if the matching fails, the salt is incorrect
@@ -58,7 +57,7 @@ def _add_to_16(string: str) -> bytes:
length.
"""
while len(string) % 16 != 0:
string += '\0'
string += "\0"
return str.encode(string) # return bytes
@@ -74,20 +73,20 @@ def parse_btminer_priviledge_data(token_data: dict, data: dict):
:return: A decoded dict version of the privileged command output.
"""
# get the encoded data from the dict
enc_data = data['enc']
enc_data = data["enc"]
# get the aes key from the token data
aeskey = hashlib.sha256(
token_data['host_passwd_md5'].encode()
).hexdigest()
aeskey = hashlib.sha256(token_data["host_passwd_md5"].encode()).hexdigest()
# unhexlify the aes key
aeskey = binascii.unhexlify(aeskey.encode())
# create the required decryptor
aes = Cipher(algorithms.AES(aeskey), modes.ECB())
decryptor = aes.decryptor()
# decode the message with the decryptor
ret_msg = json.loads(decryptor.update(
base64.decodebytes(bytes(enc_data, encoding='utf8'))
).rstrip(b'\0').decode("utf8"))
ret_msg = json.loads(
decryptor.update(base64.decodebytes(bytes(enc_data, encoding="utf8")))
.rstrip(b"\0")
.decode("utf8")
)
return ret_msg
@@ -104,11 +103,9 @@ def create_privileged_cmd(token_data: dict, command: dict) -> bytes:
:return: The encrypted privileged command to be sent to the miner.
"""
# add token to command
command['token'] = token_data['host_sign']
command["token"] = token_data["host_sign"]
# encode host_passwd data and get hexdigest
aeskey = hashlib.sha256(
token_data['host_passwd_md5'].encode()
).hexdigest()
aeskey = hashlib.sha256(token_data["host_passwd_md5"].encode()).hexdigest()
# unhexlify the encoded host_passwd
aeskey = binascii.unhexlify(aeskey.encode())
# create a new AES key
@@ -117,18 +114,16 @@ def create_privileged_cmd(token_data: dict, command: dict) -> bytes:
# dump the command to json
api_json_str = json.dumps(command)
# encode the json command with the aes key
api_json_str_enc = base64.encodebytes(
encryptor.update(
_add_to_16(
api_json_str
)
)
).decode("utf-8").replace("\n", "")
api_json_str_enc = (
base64.encodebytes(encryptor.update(_add_to_16(api_json_str)))
.decode("utf-8")
.replace("\n", "")
)
# label the data as being encoded
data_enc = {'enc': 1, 'data': api_json_str_enc}
data_enc = {"enc": 1, "data": api_json_str_enc}
# dump the labeled data to json
api_packet_str = json.dumps(data_enc)
return api_packet_str.encode('utf-8')
return api_packet_str.encode("utf-8")
class BTMinerAPI(BaseMinerAPI):
@@ -157,16 +152,18 @@ class BTMinerAPI(BaseMinerAPI):
:param port: The port to reference the API on. Default is 4028.
:param pwd: The admin password of the miner. Default is admin.
"""
def __init__(self, ip, port=4028, pwd: str = WHATSMINER_PWD):
super().__init__(ip, port)
self.admin_pwd = pwd
self.current_token = None
async def send_command(self,
command: str | bytes,
parameters: str or int or bool = None,
ignore_errors: bool = False
) -> dict:
async def send_command(
self,
command: str or bytes,
parameters: str or int or bool = None,
ignore_errors: bool = False,
) -> dict:
"""Send a command to the miner API.
Send a command using an asynchronous connection, load the data,
@@ -187,10 +184,7 @@ class BTMinerAPI(BaseMinerAPI):
command = json.dumps({"command": command}).encode("utf-8")
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(
str(self.ip),
self.port
)
reader, writer = await asyncio.open_connection(str(self.ip), self.port)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
@@ -212,7 +206,7 @@ class BTMinerAPI(BaseMinerAPI):
break
data += d
except Exception as e:
print(e)
logging.info(f"{str(self.ip)}: {e}")
data = self.load_api_data(data)
@@ -221,15 +215,12 @@ class BTMinerAPI(BaseMinerAPI):
await writer.wait_closed()
# check if the returned data is encoded
if 'enc' in data.keys():
if "enc" in data.keys():
# try to parse the encoded data
try:
data = parse_btminer_priviledge_data(
self.current_token,
data
)
data = parse_btminer_priviledge_data(self.current_token, data)
except Exception as e:
print(e)
logging.info(f"{str(self.ip)}: {e}")
if not ignore_errors:
# if it fails to validate, it is likely an error
@@ -250,25 +241,24 @@ class BTMinerAPI(BaseMinerAPI):
data = await self.send_command("get_token")
# encrypt the admin password with the salt
pwd = _crypt(self.admin_pwd, "$1$" + data["Msg"]["salt"] + '$')
pwd = pwd.split('$')
pwd = _crypt(self.admin_pwd, "$1$" + data["Msg"]["salt"] + "$")
pwd = pwd.split("$")
# take the 4th item from the pwd split
host_passwd_md5 = pwd[3]
# encrypt the pwd with the time and new salt
tmp = _crypt(pwd[3] + data["Msg"]["time"],
"$1$" + data["Msg"]["newsalt"] + '$'
)
tmp = tmp.split('$')
tmp = _crypt(pwd[3] + data["Msg"]["time"], "$1$" + data["Msg"]["newsalt"] + "$")
tmp = tmp.split("$")
# take the 4th item from the encrypted pwd split
host_sign = tmp[3]
# set the current token
self.current_token = {'host_sign': host_sign,
'host_passwd_md5': host_passwd_md5
}
self.current_token = {
"host_sign": host_sign,
"host_passwd_md5": host_passwd_md5,
}
return self.current_token
#### PRIVILEGED COMMANDS ####
@@ -276,19 +266,18 @@ class BTMinerAPI(BaseMinerAPI):
# how to configure the Whatsminer API to
# use these commands.
async def update_pools(self,
pool_1: str,
worker_1: str,
passwd_1: str,
pool_2: str = None,
worker_2: str = None,
passwd_2: str = None,
pool_3: str = None,
worker_3: str = None,
passwd_3: str = None
):
async def update_pools(
self,
pool_1: str,
worker_1: str,
passwd_1: str,
pool_2: str = None,
worker_2: str = None,
passwd_2: str = None,
pool_3: str = None,
worker_3: str = None,
passwd_3: str = None,
):
"""Update the pools of the miner using the API.
Update the pools of the miner using the API, only works after
@@ -314,15 +303,12 @@ class BTMinerAPI(BaseMinerAPI):
elif pool_2 and pool_3:
command = {
"cmd": "update_pools",
"pool1": pool_1,
"worker1": worker_1,
"passwd1": passwd_1,
"pool2": pool_2,
"worker2": worker_2,
"passwd2": passwd_2,
"pool3": pool_3,
"worker3": worker_3,
"passwd3": passwd_3,
@@ -333,10 +319,9 @@ class BTMinerAPI(BaseMinerAPI):
"pool1": pool_1,
"worker1": worker_1,
"passwd1": passwd_1,
"pool2": pool_2,
"worker2": worker_2,
"passwd2": passwd_2
"passwd2": passwd_2,
}
else:
command = {
@@ -406,12 +391,13 @@ class BTMinerAPI(BaseMinerAPI):
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
async def set_led(self,
color: str = "red",
period: int = 2000,
duration: int = 1000,
start: int = 0
):
async def set_led(
self,
color: str = "red",
period: int = 2000,
duration: int = 1000,
start: int = 0,
):
"""Set the LED on the miner using the API.
Set the LED on the miner using the API, only works after
@@ -423,12 +409,13 @@ class BTMinerAPI(BaseMinerAPI):
:param start: LED on time offset in the cycle in ms.
:return: A reply informing of the status of setting the LED.
"""
command = {"cmd": "set_led",
"color": color,
"period": period,
"duration": duration,
"start": start
}
command = {
"cmd": "set_led",
"color": color,
"period": period,
"duration": duration,
"start": start,
}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
@@ -486,10 +473,11 @@ class BTMinerAPI(BaseMinerAPI):
password.
"""
# check if password length is greater than 8 bytes
if len(new_pwd.encode('utf-8')) > 8:
if len(new_pwd.encode("utf-8")) > 8:
return APIError(
f"New password too long, the max length is 8. "
f"Password size: {len(new_pwd.encode('utf-8'))}")
f"Password size: {len(new_pwd.encode('utf-8'))}"
)
command = {"cmd": "update_pwd", "old": old_pwd, "new": new_pwd}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -507,9 +495,11 @@ class BTMinerAPI(BaseMinerAPI):
frequency.
"""
if not -10 < percent < 100:
return APIError(f"Frequency % is outside of the allowed "
f"range. Please set a % between -10 and "
f"100")
return APIError(
f"Frequency % is outside of the allowed "
f"range. Please set a % between -10 and "
f"100"
)
command = {"cmd": "set_target_freq", "percent": str(percent)}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -596,9 +586,11 @@ class BTMinerAPI(BaseMinerAPI):
"""
if not 0 < percent < 100:
return APIError(f"Power PCT % is outside of the allowed "
f"range. Please set a % between 0 and "
f"100")
return APIError(
f"Power PCT % is outside of the allowed "
f"range. Please set a % between 0 and "
f"100"
)
command = {"cmd": "set_power_pct", "percent": str(percent)}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -618,12 +610,9 @@ class BTMinerAPI(BaseMinerAPI):
:return: A reply informing of the status of pre power on.
"""
if not msg == \
"wait for adjust temp" or \
"adjust complete" or \
"adjust continue":
if not msg == "wait for adjust temp" or "adjust complete" or "adjust continue":
return APIError(
'Message is incorrect, please choose one of '
"Message is incorrect, please choose one of "
'["wait for adjust temp", '
'"adjust complete", '
'"adjust continue"]'
@@ -632,10 +621,7 @@ class BTMinerAPI(BaseMinerAPI):
complete = "true"
else:
complete = "false"
command = {"cmd": "pre_power_on",
"complete": complete,
"msg": msg
}
command = {"cmd": "pre_power_on", "complete": complete, "msg": msg}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
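
A hedged sketch of calling the reformatted set_led() above to flash a Whatsminer's fault light; the IP and password here are placeholders, the import path is the one shown in a later hunk of this compare, and per the IMPORTANT note earlier in this file the admin password must first be changed with the Whatsminer tool:

import asyncio

from API.btminer import BTMinerAPI  # import path shown in a later hunk


async def flash_fault_light(ip: str) -> dict:
    api = BTMinerAPI(ip, pwd="admin")  # pwd otherwise defaults to settings.WHATSMINER_PWD
    # set_led() fetches a token and builds the privileged command internally
    return await api.set_led(color="red", period=2000, duration=1000, start=0)


print(asyncio.run(flash_fault_light("192.168.1.20")))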


@@ -2,14 +2,14 @@ from API import BaseMinerAPI
class CGMinerAPI(BaseMinerAPI):
"""An abstraction of the BMMiner API.
"""An abstraction of the CGMiner API.
Each method corresponds to an API command in BMMiner.
Each method corresponds to an API command in GGMiner.
CGMiner API documentation:
https://github.com/ckolivas/cgminer/blob/master/API-README
This class abstracts use of the BMMiner API, as well as the
This class abstracts use of the CGMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
@@ -18,6 +18,7 @@ class CGMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
@@ -111,11 +112,7 @@ class CGMinerAPI(BaseMinerAPI):
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
@@ -124,11 +121,9 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
return await self.send_command(
"addpool", parameters=f"{url}, " f"{username}, " f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
@@ -138,8 +133,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
@@ -149,10 +143,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
return await self.send_command("poolquota", parameters=f"{n}, " f"{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
@@ -185,7 +176,7 @@ class CGMinerAPI(BaseMinerAPI):
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit BMMiner.
"""Quit CGMiner.
:return: A single "BYE" before CGMiner quits.
"""
@@ -270,7 +261,7 @@ class CGMinerAPI(BaseMinerAPI):
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in BMMiner.
"""Check if the command command exists in CGMiner.
:param command: The command to check.
@@ -288,9 +279,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
@@ -333,10 +322,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
return await self.send_command("setconfig", parameters=f"{name}, " f"{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
@@ -364,9 +350,11 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting PGA n with opt[,val].
"""
if val:
return await self.send_command("pgaset", parameters=f"{n}, {opt}, {val}")
return await self.send_command(
"pgaset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("pgaset", parameters=f"{n}, {opt}")
return await self.send_command("pgaset", parameters=f"{n}, " f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
@@ -381,7 +369,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: the STATUS section with info on the zero and optional
summary.
"""
return await self.send_command("zero", parameters=f"{which}, {summary}")
return await self.send_command("zero", parameters=f"{which}, " f"{summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
@@ -482,9 +470,11 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting option opt to value val.
"""
if val:
return await self.send_command("ascset", parameters=f"{n}, {opt}, {val}")
return await self.send_command(
"ascset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("ascset", parameters=f"{n}, {opt}")
return await self.send_command("ascset", parameters=f"{n}, " f"{opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.


@@ -2,6 +2,13 @@ from API import BaseMinerAPI
class UnknownAPI(BaseMinerAPI):
"""An abstraction of an API for a miner which is unknown.
This class is designed to try to be a intersection of as many miner APIs
and API commands as possible (API ⋂ API), to ensure that it can be used
with as many APIs as possible.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)

Dockerfile (new file, 13 additions)

@@ -0,0 +1,13 @@
FROM python:3.10-slim-buster
EXPOSE 80
WORKDIR /minerInterface-web_monitor
COPY tools/web_monitor/requirements.txt .
RUN pip install --no-cache-dir --upgrade -r requirements.txt
COPY . .
CMD ["uvicorn", "tools.web_monitor.app:app", "--host", "0.0.0.0", "--port", "80"]


@@ -1,4 +1,4 @@
from tools.bad_board_util import main
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()


@@ -75,4 +75,4 @@ SAMPLE CONFIG
"shutdown_duration": 3.0, # -> (default = 3.0, float, (bos: power_scaling.shutdown_duration))
}
}
"""
"""


@@ -8,7 +8,7 @@ async def bos_config_convert(config: dict):
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["generator"] = "upstream_config_util"
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temp_control":
out_config["temperature"] = {}
@@ -47,20 +47,28 @@ async def bos_config_convert(config: dict):
out_config["pool_groups"][idx]["pools"] = []
out_config["pool_groups"][idx] = {}
if "name" in config[opt][idx].keys():
out_config["pool_groups"][idx]["group_name"] = config[opt][idx]["name"]
out_config["pool_groups"][idx]["group_name"] = config[opt][idx][
"name"
]
else:
out_config["pool_groups"][idx]["group_name"] = f"group_{idx}"
if "quota" in config[opt][idx].keys():
out_config["pool_groups"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["pool_groups"][idx]["quota"] = 1
out_config["pool_groups"][idx]["pools"] = [{} for _item in range(len(config[opt][idx]["pool"]))]
out_config["pool_groups"][idx]["pools"] = [
{} for _item in range(len(config[opt][idx]["pool"]))
]
for pool_idx in range(len(config[opt][idx]["pool"])):
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[opt][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx]["username"] = config[opt][idx]["pool"][pool_idx][
"user"]
out_config["pool_groups"][idx]["pools"][pool_idx]["password"] = config[opt][idx]["pool"][pool_idx][
"password"]
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[
opt
][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx][
"username"
] = config[opt][idx]["pool"][pool_idx]["user"]
out_config["pool_groups"][idx]["pools"][pool_idx][
"password"
] = config[opt][idx]["pool"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
@@ -82,27 +90,33 @@ async def bos_config_convert(config: dict):
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
out_config["power_scaling"]["min_psu_power_limit"] = config[opt][
"min_psu_power_limit"
]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
out_config["power_scaling"]["shutdown_enabled"] = config[opt][
"shutdown_enabled"
]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
out_config["power_scaling"]["shutdown_duration"] = config[opt][
"shutdown_duration"
]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return yaml.dump(out_config, sort_keys=False)
async def general_config_convert_bos(yaml_config):
async def general_config_convert_bos(yaml_config, user_suffix: str = None):
config = yaml.load(yaml_config, Loader=yaml.SafeLoader)
out_config = {}
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["generator"] = "upstream_config_util"
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temperature":
out_config["temp_control"] = {}
@@ -148,11 +162,24 @@ async def general_config_convert_bos(yaml_config):
out_config["group"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["group"][idx]["quota"] = 1
out_config["group"][idx]["pool"] = [{} for _item in range(len(config[opt][idx]["pools"]))]
out_config["group"][idx]["pool"] = [
{} for _item in range(len(config[opt][idx]["pools"]))
]
for pool_idx in range(len(config[opt][idx]["pools"])):
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][idx]["pools"][pool_idx]["url"]
out_config["group"][idx]["pool"][pool_idx]["user"] = config[opt][idx]["pools"][pool_idx]["username"]
out_config["group"][idx]["pool"][pool_idx]["password"] = config[opt][idx]["pools"][pool_idx]["password"]
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][
idx
]["pools"][pool_idx]["url"]
username = config[opt][idx]["pools"][pool_idx]["username"]
if user_suffix:
if "." in username:
username = f"{username}x{user_suffix}"
else:
username = f"{username}.{user_suffix}"
out_config["group"][idx]["pool"][pool_idx]["user"] = username
out_config["group"][idx]["pool"][pool_idx]["password"] = config[
opt
][idx]["pools"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
@@ -174,15 +201,21 @@ async def general_config_convert_bos(yaml_config):
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
out_config["power_scaling"]["min_psu_power_limit"] = config[opt][
"min_psu_power_limit"
]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
out_config["power_scaling"]["shutdown_enabled"] = config[opt][
"shutdown_enabled"
]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
out_config["power_scaling"]["shutdown_duration"] = config[opt][
"shutdown_duration"
]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return toml.dumps(out_config)
return out_config
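
A minimal sketch of the new user_suffix parameter on general_config_convert_bos() above, feeding it the last octet of a miner's IP as the 2022-04-29 commit describes; the wrapper function itself is illustrative and not part of this diff:

import ipaddress

from config.bos import general_config_convert_bos  # import path shown in a later hunk


async def convert_with_octet_suffix(ip: str, yaml_config: str) -> dict:
    # e.g. 192.168.1.57 -> suffix "57", giving "worker.57"
    # (or "workerx57" when the username already contains a ".")
    last_octet = str(ipaddress.ip_address(ip)).split(".")[-1]
    return await general_config_convert_bos(yaml_config, user_suffix=last_octet)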


@@ -1,4 +1,4 @@
from tools.cfg_util import main
if __name__ == '__main__':
if __name__ == "__main__":
main()

logger/__init__.py (new file, 18 additions)

@@ -0,0 +1,18 @@
import logging
from settings import DEBUG
logging.basicConfig(
# filename="logfile.txt",
# filemode="a",
format="[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
logger = logging.getLogger()
if DEBUG:
logger.setLevel(logging.DEBUG)
logging.getLogger("asyncssh").setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logging.getLogger("asyncssh").setLevel(logging.WARNING)


@@ -19,11 +19,26 @@ version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\"
},
},
executables=[Executable("board_util.py", base=base, icon="icon.ico", target_name="UpstreamBoardUtil.exe")]
)
setup(
name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\",
"include_files": [
os.path.join(os.getcwd(), "settings/settings.toml"),
],
"include_msvcr": True,
"add_to_path": True,
},
},
executables=[
Executable(
"board_util.py",
base=base,
icon="icon.ico",
target_name="UpstreamBoardUtil.exe",
)
],
)


@@ -19,13 +19,25 @@ version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md")],
},
},
executables=[Executable("config_tool.py", base=base, icon="icon.ico", target_name="UpstreamCFGUtil.exe")]
)
setup(
name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [
os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md"),
],
},
},
executables=[
Executable(
"config_tool.py",
base=base,
icon="icon.ico",
target_name="UpstreamCFGUtil.exe",
)
],
)

View File

@@ -4,14 +4,62 @@ from API.cgminer import CGMinerAPI
from API.btminer import BTMinerAPI
from API.unknown import UnknownAPI
import ipaddress
import asyncssh
import logging
class BaseMiner:
def __init__(self, ip: str, api: BMMinerAPI | BOSMinerAPI | CGMinerAPI | BTMinerAPI | UnknownAPI) -> None:
def __init__(
self,
ip: str,
api: BMMinerAPI or BOSMinerAPI or CGMinerAPI or BTMinerAPI or UnknownAPI,
) -> None:
self.ip = ipaddress.ip_address(ip)
self.uname = None
self.pwd = None
self.api = api
self.api_type = None
self.model = None
self.light = None
self.nominal_chips = 1
async def _get_ssh_connection(self) -> asyncssh.connect:
"""Create a new asyncssh connection"""
try:
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=["ssh-rsa"],
)
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=["ssh-rsa"],
)
return conn
except Exception as e:
logging.warning(f"{self} raised an exception: {e}")
raise e
except OSError:
logging.warning(f"Connection refused: {self}")
return None
except Exception as e:
logging.warning(f"{self} raised an exception: {e}")
raise e
async def send_file(self, src, dest):
async with (await self._get_ssh_connection()) as conn:
await asyncssh.scp(src, (conn, dest))
async def check_light(self):
return self.light
async def get_board_info(self):
return None
@@ -33,7 +81,3 @@ class BaseMiner:
async def send_config(self, yaml_config):
return None

View File

@@ -1,4 +1,8 @@
import logging
import toml
from miners.bosminer import BOSMiner
from config.bos import general_config_convert_bos
class BOSMinerS9(BOSMiner):

View File

@@ -13,7 +13,7 @@ class HiveonT9(BMMiner):
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
board_stats = await self.api.stats()
stats = board_stats['STATS'][1]
stats = board_stats["STATS"][1]
boards = {}
board_chains = {0: [2, 9, 10], 1: [3, 11, 12], 2: [4, 13, 14]}
for idx, board in enumerate(board_chains):
@@ -25,12 +25,14 @@ class HiveonT9(BMMiner):
nominal = False
else:
nominal = True
boards[board].append({
"chain": chain,
"chip_count": count,
"chip_status": chips,
"nominal": nominal
})
boards[board].append(
{
"chain": chain,
"chip_count": count,
"chip_status": chips,
"nominal": nominal,
}
)
return boards
async def get_bad_boards(self) -> dict:
@@ -43,4 +45,4 @@ class HiveonT9(BMMiner):
if board not in bad_boards.keys():
bad_boards[board] = []
bad_boards[board].append(chain)
return bad_boards
return bad_boards

View File

@@ -1,4 +1,5 @@
from miners.bmminer import BMMiner
import logging
class BMMinerX19(BMMiner):
@@ -10,9 +11,12 @@ class BMMinerX19(BMMiner):
async def get_model(self):
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.version()
if version_data:
self.model = version_data["VERSION"][0]["Type"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None

View File

@@ -0,0 +1,11 @@
from miners.bosminer import BOSMiner
class BOSMinerX19(BOSMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.api_type = "BOSMiner"
self.nominal_chips = 114
def __repr__(self) -> str:
return f"BOSminerX19: {str(self.ip)}"

View File

@@ -1,4 +1,5 @@
from miners.cgminer import CGMiner
import logging
class CGMinerX19(CGMiner):
@@ -11,9 +12,12 @@ class CGMinerX19(CGMiner):
async def get_model(self):
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.version()
if version_data:
self.model = version_data["VERSION"][0]["Type"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None

View File

@@ -0,0 +1,24 @@
from miners.cgminer import CGMiner
import logging
class CGMinerAvalon10(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "Avalon 10"
self.api_type = "CGMiner"
async def get_hostname(self):
try:
devdetails = await self.api.devdetails()
if devdetails:
if len(devdetails.get("DEVDETAILS")) > 0:
if "Name" in devdetails["DEVDETAILS"][0]:
host = devdetails["DEVDETAILS"][0]["Name"]
logging.debug(f"Found hostname for {self.ip}: {host}")
return host
except Exception as e:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"

View File

@@ -2,168 +2,170 @@ from miners.cgminer import CGMiner
import re
class CGMinerAvalon(CGMiner):
class CGMinerAvalon8(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "Avalon"
self.model = "Avalon 8"
self.api_type = "CGMiner"
self.pattern = re.compile(r'Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s'
'DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s'
'Elapsed\[(?P<Elapsed>[-0-9]+)\]\s'
'MW\[(?P<MW>[-\s0-9]+)\]\s'
'LW\[(?P<LW>[-0-9]+)\]\s'
'MH\[(?P<MH>[-\s0-9]+)\]\s'
'HW\[(?P<HW>[-0-9]+)\]\s'
'Temp\[(?P<Temp>[0-9]+)\]\s'
'TMax\[(?P<TMax>[0-9]+)\]\s'
'Fan\[(?P<Fan>[0-9]+)\]\s'
'FanR\[(?P<FanR>[0-9]+)%\]\s'
'Vi\[(?P<Vi>[-\s0-9]+)\]\s'
'Vo\[(?P<Vo>[-\s0-9]+)\]\s'
'('
'PLL0\[(?P<PLL0>[-\s0-9]+)\]\s'
'PLL1\[(?P<PLL1>[-\s0-9]+)\]\s'
'PLL2\[(?P<PLL2>[-\s0-9]+)\]\s'
'PLL3\[(?P<PLL3>[-\s0-9]+)\]\s'
')?'
'GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s'
'WU\[(?P<WU>[-.0-9]+)\]\s'
'Freq\[(?P<Freq>[.0-9]+)\]\s'
'PG\[(?P<PG>[0-9]+)\]\s'
'Led\[(?P<LED>0|1)\]\s'
'MW0\[(?P<MW0>[0-9\s]+)\]\s'
'MW1\[(?P<MW1>[0-9\s]+)\]\s'
'MW2\[(?P<MW2>[0-9\s]+)\]\s'
'MW3\[(?P<MW3>[0-9\s]+)\]\s'
'TA\[(?P<TA>[0-9]+)\]\s'
'ECHU\[(?P<ECHU>[0-9\s]+)\]\s'
'ECMM\[(?P<ECMM>[0-9]+)\]\s.*'
'FAC0\[(?P<FAC0>[-0-9]+)\]\s'
'OC\[(?P<OC>[0-9]+)\]\s'
'SF0\[(?P<SF0>[-\s0-9]+)\]\s'
'SF1\[(?P<SF1>[-\s0-9]+)\]\s'
'SF2\[(?P<SF2>[-\s0-9]+)\]\s'
'SF3\[(?P<SF3>[-\s0-9]+)\]\s'
'PMUV\[(?P<PMUV>[-\s\S*]+)\]\s'
'PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s'
'PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s'
'PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s'
'PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s'
'PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s'
'PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s'
'PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s'
'PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s'
'PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s'
'PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s'
'PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s'
'PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s'
'PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s'
'PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s'
'PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s'
'PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s'
'PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s'
'PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s'
'PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s'
'PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s'
'PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s'
'PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s'
'PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s'
'PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s'
'PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s'
'PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s'
'PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s'
'PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s'
'PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s'
'PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s'
'PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s'
'PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s'
'PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s'
'PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s'
'PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s'
'PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s'
'PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s'
'PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s'
'PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s'
'PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s'
'PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s'
'PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s'
'PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s'
'PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s'
'PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s'
'PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s'
'PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s'
'PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s'
'PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s'
'PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s'
'PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s'
'PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s'
'PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s'
'PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s'
'PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s'
'PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s'
'PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s'
'PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s'
'PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s'
'PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s'
'PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s'
'PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s'
'PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s'
'PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s'
'PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s'
'PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s'
'PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s'
'PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s'
'PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s'
'PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s'
'PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s'
'PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s'
'PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s'
'PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s'
'PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s'
'PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s'
'PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s'
'PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s'
'PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s'
'PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s'
'PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s'
'PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s'
'PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s'
'PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s'
'PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s'
'PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s'
'PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s'
'PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s'
'PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s'
'PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s'
'PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s'
'PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s'
'PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s'
'PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s'
'PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s'
'PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s'
'PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s'
'PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s'
'PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s'
'PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s'
'PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s'
'PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s'
'PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s'
'PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s'
'PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s'
'PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s'
'PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s'
'PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s'
'FM\[(?P<FM>[0-9]+)\]\s'
'CRC\[(?P<CRC>[0-9\s]+)\]', re.X
)
self.pattern = re.compile(
r"Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s"
"DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s"
"Elapsed\[(?P<Elapsed>[-0-9]+)\]\s"
"MW\[(?P<MW>[-\s0-9]+)\]\s"
"LW\[(?P<LW>[-0-9]+)\]\s"
"MH\[(?P<MH>[-\s0-9]+)\]\s"
"HW\[(?P<HW>[-0-9]+)\]\s"
"Temp\[(?P<Temp>[0-9]+)\]\s"
"TMax\[(?P<TMax>[0-9]+)\]\s"
"Fan\[(?P<Fan>[0-9]+)\]\s"
"FanR\[(?P<FanR>[0-9]+)%\]\s"
"Vi\[(?P<Vi>[-\s0-9]+)\]\s"
"Vo\[(?P<Vo>[-\s0-9]+)\]\s"
"("
"PLL0\[(?P<PLL0>[-\s0-9]+)\]\s"
"PLL1\[(?P<PLL1>[-\s0-9]+)\]\s"
"PLL2\[(?P<PLL2>[-\s0-9]+)\]\s"
"PLL3\[(?P<PLL3>[-\s0-9]+)\]\s"
")?"
"GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s"
"WU\[(?P<WU>[-.0-9]+)\]\s"
"Freq\[(?P<Freq>[.0-9]+)\]\s"
"PG\[(?P<PG>[0-9]+)\]\s"
"Led\[(?P<LED>0|1)\]\s"
"MW0\[(?P<MW0>[0-9\s]+)\]\s"
"MW1\[(?P<MW1>[0-9\s]+)\]\s"
"MW2\[(?P<MW2>[0-9\s]+)\]\s"
"MW3\[(?P<MW3>[0-9\s]+)\]\s"
"TA\[(?P<TA>[0-9]+)\]\s"
"ECHU\[(?P<ECHU>[0-9\s]+)\]\s"
"ECMM\[(?P<ECMM>[0-9]+)\]\s.*"
"FAC0\[(?P<FAC0>[-0-9]+)\]\s"
"OC\[(?P<OC>[0-9]+)\]\s"
"SF0\[(?P<SF0>[-\s0-9]+)\]\s"
"SF1\[(?P<SF1>[-\s0-9]+)\]\s"
"SF2\[(?P<SF2>[-\s0-9]+)\]\s"
"SF3\[(?P<SF3>[-\s0-9]+)\]\s"
"PMUV\[(?P<PMUV>[-\s\S*]+)\]\s"
"PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s"
"PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s"
"PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s"
"PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s"
"PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s"
"PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s"
"PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s"
"PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s"
"PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s"
"PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s"
"PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s"
"PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s"
"PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s"
"PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s"
"PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s"
"PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s"
"PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s"
"PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s"
"PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s"
"PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s"
"PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s"
"PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s"
"PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s"
"PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s"
"PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s"
"PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s"
"PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s"
"PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s"
"PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s"
"PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s"
"PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s"
"PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s"
"PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s"
"PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s"
"PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s"
"PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s"
"PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s"
"PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s"
"PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s"
"PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s"
"PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s"
"PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s"
"PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s"
"PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s"
"PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s"
"PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s"
"PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s"
"PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s"
"PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s"
"PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s"
"PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s"
"PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s"
"PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s"
"PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s"
"PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s"
"PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s"
"PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s"
"PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s"
"PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s"
"PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s"
"PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s"
"PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s"
"PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s"
"PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s"
"PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s"
"PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s"
"PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s"
"PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s"
"PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s"
"PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s"
"PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s"
"PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s"
"PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s"
"PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s"
"PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s"
"PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s"
"PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s"
"PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s"
"PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s"
"PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s"
"PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s"
"PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s"
"PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s"
"PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s"
"PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s"
"PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s"
"PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s"
"PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s"
"PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s"
"PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s"
"PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s"
"PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s"
"PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s"
"PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s"
"PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s"
"PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s"
"PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s"
"PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s"
"PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s"
"PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s"
"PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s"
"PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s"
"PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s"
"PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s"
"PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s"
"PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s"
"PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s"
"PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s"
"FM\[(?P<FM>[0-9]+)\]\s"
"CRC\[(?P<CRC>[0-9\s]+)\]",
re.X,
)
def __repr__(self) -> str:
return f"CGMinerAvalon: {str(self.ip)}"
return f"CGMinerAvalon8: {str(self.ip)}"
def parse_estats(self, estats):
for estat in estats:
for key in estat:
if key[:5] == 'MM ID':
if key[:5] == "MM ID":
self._parse_estat(estat, key)
def _parse_estat(self, estat, key):

View File

@@ -1,6 +1,6 @@
from API.bmminer import BMMinerAPI
from miners import BaseMiner
import asyncssh
import logging
class BMMiner(BaseMiner):
@@ -9,66 +9,108 @@ class BMMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
def __repr__(self) -> str:
return f"BMMiner: {str(self.ip)}"
async def get_model(self):
async def get_model(self) -> str or None:
"""Get miner model.
:return: Miner model or None.
"""
# check if model is cached
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
# get devdetails data
version_data = await self.api.devdetails()
# if we get data back, parse it for model
if version_data:
# handle Antminer BMMiner as a base
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
# if we don't get devdetails, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_hostname(self) -> str:
"""Get miner hostname.
:return: The hostname of the miner as a string or "?"
"""
try:
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# if we get the connection, check hostname
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
# get output of the hostname file
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
# return hostname data
logging.debug(f"Found hostname for {self.ip}: {host}")
return host
else:
# return ? if we fail to get hostname with no ssh connection
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception:
except Exception as e:
# return ? if we fail to get hostname with an exception
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def _get_ssh_connection(self) -> asyncssh.connect:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
return conn
except Exception as e:
print(e)
except OSError:
print(str(self.ip) + ": Connection refused.")
return None
async def send_ssh_command(self, cmd: str) -> str or None:
"""Send a command to the miner over ssh.
async def send_ssh_command(self, cmd):
:param cmd: The command to run.
:return: Result of the command or None.
"""
result = None
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
except Exception as e:
print(f"{cmd} error: {e}")
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# return the result, either command output or None
return result
async def get_config(self) -> list or None:
"""Get the pool configuration of the miner.
:return: Pool config data or None.
"""
# get pool data
pools = await self.api.pools()
pool_data = []
# ensure we got pool data
if not pools:
return
# parse all the pools
for pool in pools["POOLS"]:
pool_data.append({"url": pool["URL"], "user": pool["User"], "pwd": "123"})
return pool_data
async def reboot(self) -> None:
logging.debug(f"{self}: Sending reboot command.")
await self.send_ssh_command("reboot")
logging.debug(f"{self}: Reboot command completed.")

View File

@@ -1,8 +1,8 @@
from miners import BaseMiner
from API.bosminer import BOSMinerAPI
import asyncssh
import toml
from config.bos import bos_config_convert, general_config_convert_bos
import logging
class BOSMiner(BaseMiner):
@@ -11,126 +11,193 @@ class BOSMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.version = None
self.uname = "root"
self.pwd = "admin"
self.nominal_chips = 63
def __repr__(self) -> str:
return f"BOSminer: {str(self.ip)}"
async def _get_ssh_connection(self) -> asyncssh.connect:
"""Create a new asyncssh connection"""
conn = await asyncssh.connect(str(self.ip), known_hosts=None, username=self.uname, password=self.pwd,
server_host_key_algs=['ssh-rsa'])
# return created connection
return conn
async def send_ssh_command(self, cmd: str) -> str or None:
"""Send a command to the miner over ssh.
async def send_ssh_command(self, cmd: str) -> None:
"""Sends SSH command to miner."""
# creates result variable
:return: Result of the command or None.
"""
result = None
# runs the command on the miner
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# attempt to run command up to 3 times
# 3 retries
for i in range(3):
try:
# save result of the command
# run the command and get the result
result = await conn.run(cmd)
except Exception as e:
print(f"{cmd} error: {e}")
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# let the user know the result of the command
if result is not None:
if result.stdout != "":
print(result.stdout)
if result.stderr != "":
print("ERROR: " + result.stderr)
elif result.stderr != "":
print("ERROR: " + result.stderr)
else:
print(cmd)
# return the result, either command output or None
return result
async def fault_light_on(self) -> None:
"""Sends command to turn on fault light on the miner."""
await self.send_ssh_command('miner fault_light on')
logging.debug(f"{self}: Sending fault_light on command.")
self.light = True
await self.send_ssh_command("miner fault_light on")
logging.debug(f"{self}: fault_light on command completed.")
async def fault_light_off(self) -> None:
"""Sends command to turn off fault light on the miner."""
await self.send_ssh_command('miner fault_light off')
logging.debug(f"{self}: Sending fault_light off command.")
self.light = False
await self.send_ssh_command("miner fault_light off")
logging.debug(f"{self}: fault_light off command completed.")
async def restart_backend(self):
async def restart_backend(self) -> None:
await self.restart_bosminer()
async def restart_bosminer(self) -> None:
"""Restart bosminer hashing process."""
await self.send_ssh_command('/etc/init.d/bosminer restart')
logging.debug(f"{self}: Sending bosminer restart command.")
await self.send_ssh_command("/etc/init.d/bosminer restart")
logging.debug(f"{self}: bosminer restart command completed.")
async def reboot(self) -> None:
"""Reboots power to the physical miner."""
await self.send_ssh_command('/sbin/reboot')
logging.debug(f"{self}: Sending reboot command.")
await self.send_ssh_command("/sbin/reboot")
logging.debug(f"{self}: Reboot command completed.")
async def get_config(self) -> None:
logging.debug(f"{self}: Getting config.")
async with (await self._get_ssh_connection()) as conn:
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
async with sftp.open('/etc/bosminer.toml') as file:
logging.debug(f"{self}: Reading config file.")
async with sftp.open("/etc/bosminer.toml") as file:
toml_data = toml.loads(await file.read())
logging.debug(f"{self}: Converting config file.")
cfg = await bos_config_convert(toml_data)
self.config = cfg
async def get_hostname(self) -> str:
"""Attempts to get hostname from miner."""
"""Get miner hostname.
:return: The hostname of the miner as a string or "?"
"""
try:
async with (await self._get_ssh_connection()) as conn:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
if conn is not None:
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
logging.debug(f"Found hostname for {self.ip}: {host}")
return host
else:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception as e:
print(self.ip, e)
return "BOSMiner Unknown"
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def get_model(self):
async def get_model(self) -> str or None:
"""Get miner model.
:return: Miner model or None.
"""
# check if model is cached
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
# get devdetails data
version_data = await self.api.devdetails()
# if we get data back, parse it for model
if version_data:
if not version_data["DEVDETAILS"] == []:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
# handle Antminer BOSMiner as a base
self.model = version_data["DEVDETAILS"][0]["Model"].replace(
"Antminer ", ""
)
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
# if we don't get devdetails, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def send_config(self, yaml_config) -> None:
async def get_version(self):
"""Get miner firmware version.
:return: Miner firmware version or None.
"""
# check if version is cached
if self.version:
logging.debug(f"Found version for {self.ip}: {self.version}")
return self.version
# get output of bos version file
version_data = await self.send_ssh_command("cat /etc/bos_version")
# if we get the version data, parse it
if version_data:
self.version = version_data.stdout.split("-")[5]
logging.debug(f"Found version for {self.ip}: {self.version}")
return self.version
# if we fail to get version, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def send_config(self, yaml_config, ip_user: bool = False) -> None:
"""Configures miner with yaml config."""
toml_conf = await general_config_convert_bos(yaml_config)
logging.debug(f"{self}: Sending config.")
if ip_user:
suffix = str(self.ip).split(".")[-1]
toml_conf = toml.dumps(
await general_config_convert_bos(yaml_config, user_suffix=suffix)
)
else:
toml_conf = toml.dumps(await general_config_convert_bos(yaml_config))
async with (await self._get_ssh_connection()) as conn:
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
async with sftp.open('/etc/bosminer.toml', 'w+') as file:
logging.debug(f"{self}: Opening config file.")
async with sftp.open("/etc/bosminer.toml", "w+") as file:
await file.write(toml_conf)
logging.debug(f"{self}: Restarting BOSMiner")
await conn.run("/etc/init.d/bosminer restart")
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
logging.debug(f"{self}: Getting board info.")
devdetails = await self.api.devdetails()
if not devdetails.get("DEVDETAILS"):
print("devdetails error", devdetails)
return {0: [], 1: [], 2: []}
devs = devdetails['DEVDETAILS']
devs = devdetails["DEVDETAILS"]
boards = {}
offset = devs[0]["ID"]
for board in devs:
boards[board["ID"] - offset] = []
if not board['Chips'] == self.nominal_chips:
if not board["Chips"] == self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Chips'],
"chip_status": "o" * board['Chips'],
"nominal": nominal
})
boards[board["ID"] - offset].append(
{
"chain": board["ID"] - offset,
"chip_count": board["Chips"],
"chip_status": "o" * board["Chips"],
"nominal": nominal,
}
)
logging.debug(f"Found board data for {self}: {boards}")
return boards
async def get_bad_boards(self) -> dict:
@@ -145,14 +212,13 @@ class BOSMiner(BaseMiner):
bad_boards[board].append(chain)
return bad_boards
async def check_good_boards(self) -> str:
"""Checks for and provides list for working boards."""
devs = await self.api.devdetails()
bad = 0
chains = devs['DEVDETAILS']
chains = devs["DEVDETAILS"]
for chain in chains:
if chain['Chips'] == 0:
if chain["Chips"] == 0:
bad += 1
if not bad > 0:
return str(self.ip)
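A brief usage sketch of the new ip_user flag on send_config, using the X19 subclass shown elsewhere in this diff (the address and config contents are placeholders):

import asyncio

from miners.antminer.X19.bosminer import BOSMinerX19


async def push_config(yaml_config: str):
    miner = BOSMinerX19("192.168.1.42")  # placeholder address
    # With ip_user=True, the last octet ("42") is appended to each pool username
    # before the converted TOML is written to /etc/bosminer.toml.
    await miner.send_config(yaml_config, ip_user=True)


# asyncio.run(push_config(open("config.yaml").read()))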

View File

@@ -1,6 +1,7 @@
from API.btminer import BTMinerAPI
from miners import BaseMiner
from API import APIError
import logging
class BTMiner(BaseMiner):
@@ -15,24 +16,33 @@ class BTMiner(BaseMiner):
async def get_model(self):
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.devdetails()
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].split("V")[0]
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_hostname(self) -> str:
try:
host_data = await self.api.get_miner_info()
if host_data:
return host_data["Msg"]["hostname"]
host = host_data["Msg"]["hostname"]
logging.debug(f"Found hostname for {self.ip}: {host}")
return host
except APIError:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception as e:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
logging.debug(f"{self}: Getting board info.")
devs = await self.api.devs()
if not devs.get("DEVS"):
print("devs error", devs)
@@ -43,17 +53,20 @@ class BTMiner(BaseMiner):
for board in devs:
boards[board["ID"] - offset] = []
if "Effective Chips" in board.keys():
if not board['Effective Chips'] in self.nominal_chips:
if not board["Effective Chips"] in self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Effective Chips'],
"chip_status": "o" * board['Effective Chips'],
"nominal": nominal
})
boards[board["ID"] - offset].append(
{
"chain": board["ID"] - offset,
"chip_count": board["Effective Chips"],
"chip_status": "o" * board["Effective Chips"],
"nominal": nominal,
}
)
else:
logging.warning(f"Incorrect board data from {self}: {board}")
print(board)
logging.debug(f"Found board data for {self}: {boards}")
return boards

View File

@@ -1,7 +1,6 @@
from miners import BaseMiner
from API.cgminer import CGMinerAPI
from API import APIError
import asyncssh
class CGMiner(BaseMiner):
@@ -10,8 +9,8 @@ class CGMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
def __repr__(self) -> str:
return f"CGMiner: {str(self.ip)}"
@@ -32,35 +31,13 @@ class CGMiner(BaseMiner):
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
data = await conn.run("cat /proc/sys/kernel/hostname")
return data.stdout.strip()
else:
return "?"
except Exception:
return "?"
async def _get_ssh_connection(self) -> asyncssh.connect:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
return conn
except Exception as e:
print(e)
except OSError:
print(str(self.ip) + " Connection refused.")
return None
async def send_ssh_command(self, cmd):
result = None
async with (await self._get_ssh_connection()) as conn:
@@ -72,65 +49,42 @@ class CGMiner(BaseMiner):
if i == 3:
return
continue
# handle result
self._result_handler(result)
@staticmethod
def _result_handler(result: asyncssh.process.SSHCompletedProcess) -> None:
if result is not None:
# noinspection PyUnresolvedReferences
if len(result.stdout) > 0:
# noinspection PyUnresolvedReferences
print("ssh stdout: \n" + result.stdout)
# noinspection PyUnresolvedReferences
if len(result.stderr) > 0:
# noinspection PyUnresolvedReferences
print("ssh stderr: \n" + result.stderrr)
# noinspection PyUnresolvedReferences
if len(result.stdout) <= 0 and len(result.stderr) <= 0:
print("ssh stdout stderr empty")
# if result.stdout != "":
# print(result.stdout)
# if result.stderr != "":
# print("ERROR: " + result.stderr)
# elif result.stderr != "":
# print("ERROR: " + result.stderr)
# else:
# print(cmd)
return result
async def restart_backend(self) -> None:
await self.restart_cgminer()
async def restart_cgminer(self) -> None:
commands = ['cgminer-api restart',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
commands = ["cgminer-api restart", "/usr/bin/cgminer-monitor >/dev/null 2>&1"]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def reboot(self) -> None:
await self.send_ssh_command("reboot")
async def start_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"*/3 * * * * /usr/bin/cgminer-monitor\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
commands = [
"mkdir -p /etc/tmp/",
'echo "*/3 * * * * /usr/bin/cgminer-monitor" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"/usr/bin/cgminer-monitor >/dev/null 2>&1",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def stop_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'killall cgminer']
commands = ';'.join(commands)
commands = [
"mkdir -p /etc/tmp/",
'echo "" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"killall cgminer",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def get_config(self) -> None:
async with (await self._get_ssh_connection()) as conn:
command = 'cat /etc/config/cgminer'
command = "cat /etc/config/cgminer"
result = await conn.run(command, check=True)
self._result_handler(result)
self.config = result.stdout
print(str(self.config))

View File

@@ -12,6 +12,7 @@ from miners.antminer.X17.cgminer import CGMinerX17
from miners.antminer.X19.bmminer import BMMinerX19
from miners.antminer.X19.cgminer import CGMinerX19
from miners.antminer.X19.bosminer import BOSMinerX19
from miners.whatsminer.M20 import BTMinerM20
from miners.whatsminer.M21 import BTMinerM21
@@ -19,7 +20,8 @@ from miners.whatsminer.M30 import BTMinerM30
from miners.whatsminer.M31 import BTMinerM31
from miners.whatsminer.M32 import BTMinerM32
from miners.avalonminer import CGMinerAvalon
from miners.avalonminer.Avalon8 import CGMinerAvalon8
from miners.avalonminer.Avalon10 import CGMinerAvalon10
from miners.cgminer import CGMiner
from miners.bmminer import BMMiner
@@ -32,14 +34,22 @@ from API import APIError
import asyncio
import ipaddress
import json
import logging
from settings import MINER_FACTORY_GET_VERSION_RETRIES as GET_VERSION_RETRIES
class MinerFactory:
_instance = None
def __init__(self):
self.miners = {}
def __new__(cls):
if not cls._instance:
cls._instance = super(MinerFactory, cls).__new__(cls)
return cls._instance
async def get_miner_generator(self, ips: list):
"""
Get Miner objects from ip addresses using an async generator.
@@ -98,7 +108,7 @@ class MinerFactory:
# handle the different API types
if not api:
print(ip)
logging.warning(f"{str(ip)}: No API data found, using BraiinsOS.")
miner = BOSMinerS9(str(ip))
elif "BOSMiner" in api:
miner = BOSMinerS9(str(ip))
@@ -106,7 +116,7 @@ class MinerFactory:
miner = CGMinerS9(str(ip))
elif "BMMiner" in api:
miner = BMMinerS9(str(ip))
elif "Antminer T9" in model:
if "BMMiner" in api:
if "Hiveon" in model:
@@ -130,16 +140,20 @@ class MinerFactory:
# X19 logic
elif "19" in model:
# handle the different API types
if "BOSMiner" in api:
miner = BOSMinerX19(str(ip))
if "CGMiner" in api:
miner = CGMinerX19(str(ip))
elif "BMMiner" in api:
miner = BMMinerX19(str(ip))
# Avalonminer V8
# Avalonminers
elif "avalon" in model:
miner = CGMinerAvalon(str(ip))
if model == "avalon10":
miner = CGMinerAvalon10(str(ip))
else:
miner = CGMinerAvalon8(str(ip))
# Whatsminers
elif "M20" in model:
@@ -205,7 +219,10 @@ class MinerFactory:
model = data["VERSION"][0]["Type"]
else:
# make sure devdetails actually contains data, if its empty, there are no devices
if "DEVDETAILS" in data.keys() and not data["DEVDETAILS"] == []:
if (
"DEVDETAILS" in data.keys()
and not data["DEVDETAILS"] == []
):
# check for model, for most miners
if not data["DEVDETAILS"][0]["Model"] == "":
@@ -227,31 +244,25 @@ class MinerFactory:
return model
# if there are errors, we just return None
except APIError:
return model
except APIError as e:
logging.debug(f"{str(ip)}: {e}")
except OSError as e:
if e.winerror == 121:
print(e)
return model
else:
print(ip, e)
logging.debug(f"{str(ip)}: {e}")
return model
async def _send_api_command(self, ip: ipaddress.ip_address or str, command: str):
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(str(ip), 4028)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
print("Semaphore Timeout has Expired.")
logging.warning(f"{str(ip)} - Command {command}: {e}")
return {}
# create the command
cmd = {"command": command}
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
@@ -265,16 +276,16 @@ class MinerFactory:
break
data += d
except Exception as e:
print(e)
logging.debug(f"{str(ip)}: {e}")
try:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
@@ -311,19 +322,27 @@ class MinerFactory:
if data["STATUS"][0].get("STATUS") in ["I", "S"]:
# check if there are any BMMiner strings in any of the dict keys
if any("BMMiner" in string for string in data["VERSION"][0].keys()):
if any(
"BMMiner" in string for string in data["VERSION"][0].keys()
):
api = "BMMiner"
# check if there are any CGMiner strings in any of the dict keys
elif any("CGMiner" in string for string in data["VERSION"][0].keys()):
elif any(
"CGMiner" in string for string in data["VERSION"][0].keys()
):
api = "CGMiner"
# check if there are any BOSMiner strings in any of the dict keys
elif any("BOSminer" in string for string in data["VERSION"][0].keys()):
elif any(
"BOSminer" in string for string in data["VERSION"][0].keys()
):
api = "BOSMiner"
# if all that fails, check the Description to see if it is a whatsminer
elif data.get("Description") and "whatsminer" in data.get("Description"):
elif data.get("Description") and "whatsminer" in data.get(
"Description"
):
api = "BTMiner"
# return the API if we found it
@@ -335,5 +354,5 @@ class MinerFactory:
if e.winerror == 121:
return None
else:
print(ip, e)
logging.debug(f"{str(ip)}: {e}")
return None
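A minimal sketch of using the factory singleton directly (the IP below is a placeholder; get_miner and get_model are the methods shown in this diff):

import asyncio

from miners.miner_factory import MinerFactory


async def identify(ip: str):
    # MinerFactory is a singleton, so repeated calls share the same miner cache.
    miner = await MinerFactory().get_miner(ip)
    print(repr(miner), await miner.get_model())


# asyncio.run(identify("192.168.1.42"))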

View File

@@ -22,7 +22,7 @@ async def get_bos_bad_tuners(ip: str = "192.168.1.0", mask: int = 24):
# run all the tuner status commands
tuner_status = await asyncio.gather(*tuner_tasks)
# create a list of all miners with bad board tuner status'
# create a list of all miners with bad board tuner status
bad_tuner_miners = []
for item in tuner_status:
# loop through and get each miners' bad board count

View File

@@ -1,14 +1,21 @@
import ipaddress
import asyncio
import logging
from network.net_range import MinerNetworkRange
from miners.miner_factory import MinerFactory
from settings import NETWORK_PING_RETRIES as PING_RETRIES, NETWORK_PING_TIMEOUT as PING_TIMEOUT, \
NETWORK_SCAN_THREADS as SCAN_THREADS
from settings import (
NETWORK_PING_RETRIES as PING_RETRIES,
NETWORK_PING_TIMEOUT as PING_TIMEOUT,
NETWORK_SCAN_THREADS as SCAN_THREADS,
)
class MinerNetwork:
def __init__(self, ip_addr: str or None = None, mask: str or int or None = None) -> None:
def __init__(
self, ip_addr: str or None = None, mask: str or int or None = None
) -> None:
self.network = None
self.miner_factory = MinerFactory()
self.ip_addr = ip_addr
self.connected_miners = {}
self.mask = mask
@@ -16,28 +23,36 @@ class MinerNetwork:
def __len__(self):
return len([item for item in self.get_network().hosts()])
def __repr__(self):
return str(self.network)
def get_network(self) -> ipaddress.ip_network:
"""Get the network using the information passed to the MinerNetwork or from cache."""
# if we have a network cached already, use that
if self.network:
return self.network
# if there is no IP address passed, default to 192.168.1.0
if not self.ip_addr:
default_gateway = "192.168.1.0"
# if we do have an IP address passed, use that
if "-" in self.ip_addr:
self.network = MinerNetworkRange(self.ip_addr)
else:
default_gateway = self.ip_addr
# if there is no IP address passed, default to 192.168.1.0
if not self.ip_addr:
default_gateway = "192.168.1.0"
# if we do have an IP address passed, use that
else:
default_gateway = self.ip_addr
# if there is no subnet mask passed, default to /24
if not self.mask:
subnet_mask = "24"
# if we do have a mask passed, use that
else:
subnet_mask = str(self.mask)
# if there is no subnet mask passed, default to /24
if not self.mask:
subnet_mask = "24"
# if we do have a mask passed, use that
else:
subnet_mask = str(self.mask)
# save the network and return it
self.network = ipaddress.ip_network(f"{default_gateway}/{subnet_mask}", strict=False)
# save the network and return it
self.network = ipaddress.ip_network(
f"{default_gateway}/{subnet_mask}", strict=False
)
return self.network
async def scan_network_for_miners(self) -> None or list:
@@ -46,6 +61,9 @@ class MinerNetwork:
local_network = self.get_network()
print(f"Scanning {local_network} for miners...")
# clear cached miners
MinerFactory().clear_cached_miners()
# create a list of tasks and miner IPs
scan_tasks = []
miner_ips = []
@@ -75,13 +93,10 @@ class MinerNetwork:
# create a list of tasks to get miners
create_miners_tasks = []
# clear cached miners
self.miner_factory.clear_cached_miners()
# try to get each miner found
for miner_ip in miner_ips:
# append to the list of tasks
create_miners_tasks.append(self.miner_factory.get_miner(miner_ip))
create_miners_tasks.append(MinerFactory().get_miner(miner_ip))
# get all miners in the list
miners = await asyncio.gather(*create_miners_tasks)
@@ -127,25 +142,33 @@ class MinerNetwork:
@staticmethod
async def ping_miner(ip: ipaddress.ip_address) -> None or ipaddress.ip_address:
for i in range(PING_RETRIES):
connection_fut = asyncio.open_connection(str(ip), 4028)
try:
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(connection_fut, timeout=PING_TIMEOUT)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
await writer.wait_closed()
# ping was successful
return ip
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
except ConnectionRefusedError:
# handle for other connection errors
print(f"{str(ip)}: Connection Refused.")
# ping failed, likely with an exception
except Exception as e:
print(e)
return await ping_miner(ip)
async def ping_miner(
ip: ipaddress.ip_address, port=4028
) -> None or ipaddress.ip_address:
for i in range(PING_RETRIES):
connection_fut = asyncio.open_connection(str(ip), port)
try:
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(
connection_fut, timeout=PING_TIMEOUT
)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
await writer.wait_closed()
# ping was successful
return ip
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
return
except ConnectionRefusedError:
# handle for other connection errors
logging.debug(f"{str(ip)}: Connection Refused.")
# ping failed, likely with an exception
except Exception as e:
logging.warning(f"{str(ip)}: {e}")
continue
return
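A short usage sketch of MinerNetwork with the new range support, assuming scan_network_for_miners returns the list of identified miners as its signature suggests (addresses are placeholders):

import asyncio

from network import MinerNetwork


async def scan():
    # A plain gateway/mask pair; a string such as "192.168.1.20-192.168.1.40"
    # would build a MinerNetworkRange internally instead.
    network = MinerNetwork("192.168.1.0", mask=24)
    miners = await network.scan_network_for_miners()
    print(f"Found {len(miners)} miners")


# asyncio.run(scan())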

31
network/net_range.py Normal file
View File

@@ -0,0 +1,31 @@
import ipaddress
class MinerNetworkRange:
"""A MinerNetwork that takes a range of IP addresses.
:param ip_range: A range of IP addresses to put in the network.
Takes a string formatted as
{ip_range_1_start}-{ip_range_1_end}, {ip_range_2_start}-{ip_range_2_end}
"""
def __init__(self, ip_range: str):
ip_ranges = ip_range.replace(" ", "").split(",")
self.host_ips = []
for item in ip_ranges:
start, end = item.split("-")
start_ip = ipaddress.ip_address(start)
end_ip = ipaddress.ip_address(end)
networks = ipaddress.summarize_address_range(start_ip, end_ip)
for network in networks:
self.host_ips.append(network.network_address)
for host in network.hosts():
if host not in self.host_ips:
self.host_ips.append(host)
if network.broadcast_address not in self.host_ips:
self.host_ips.append(network.broadcast_address)
def hosts(self):
for x in self.host_ips:
yield x
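A quick sketch of the range format the new MinerNetworkRange accepts, per its docstring (addresses are placeholders):

from network.net_range import MinerNetworkRange

# Two comma-separated ranges collapse into a single host list.
net = MinerNetworkRange("192.168.1.10-192.168.1.15, 192.168.1.50-192.168.1.52")
for host in net.hosts():
    print(host)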

Binary file not shown.

View File

@@ -1,27 +1,48 @@
import toml
import os
NETWORK_PING_RETRIES: int = 3
NETWORK_PING_TIMEOUT: int = 5
NETWORK_SCAN_THREADS: int = 300
CFG_UTIL_REBOOT_THREADS: int = 300
CFG_UTIL_CONFIG_THREADS: int = 300
MINER_FACTORY_GET_VERSION_RETRIES: int = 3
WHATSMINER_PWD = "admin"
DEBUG = False
try:
with open(os.path.join(os.getcwd(), "settings.toml"), "r") as settings_file:
with open(
os.path.join(os.path.dirname(__file__), "settings.toml"), "r"
) as settings_file:
settings = toml.loads(settings_file.read())
except:
pass
settings_keys = settings.keys()
if "ping_retries" in settings_keys:
NETWORK_PING_RETRIES: int = settings["ping_retries"]
if "ping_timeout" in settings_keys:
NETWORK_PING_TIMEOUT: int = settings["ping_timeout"]
if "scan_threads" in settings_keys:
NETWORK_SCAN_THREADS: int = settings["scan_threads"]
if "reboot_threads" in settings_keys:
CFG_UTIL_REBOOT_THREADS: int = settings["reboot_threads"]
if "config_threads" in settings_keys:
CFG_UTIL_CONFIG_THREADS: int = settings["config_threads"]
if "get_version_retries" in settings_keys:
MINER_FACTORY_GET_VERSION_RETRIES: int = settings["get_version_retries"]
if "whatsminer_pwd" in settings_keys:
WHATSMINER_PWD: str = settings["whatsminer_pwd"]
except:
NETWORK_PING_RETRIES: int = 3
NETWORK_PING_TIMEOUT: int = 5
NETWORK_SCAN_THREADS: int = 300
CFG_UTIL_REBOOT_THREADS: int = 300
CFG_UTIL_CONFIG_THREADS: int = 300
MINER_FACTORY_GET_VERSION_RETRIES: int = 3
WHATSMINER_PWD = "admin"
if "debug" in settings_keys:
DEBUG: int = settings["debug"]

View File

@@ -1,6 +1,6 @@
get_version_retries = 3
ping_retries = 3
ping_timeout = 5
ping_timeout = 5 # Seconds
scan_threads = 300
config_threads = 300
reboot_threads = 300
@@ -11,4 +11,10 @@ reboot_threads = 300
# tool or the privileged API will not work using admin as the password.
# If you change the password, you can pass that password here.
whatsminer_pwd = "admin"
whatsminer_pwd = "admin"
### DEBUG MODE ###
# change this to debug = true
# to enable debug mode.
debug = false
# debug = true

View File

@@ -36,6 +36,9 @@
* ALL: Selects all miners in the table, or deselects all if they are already all selected.
* REFRESH DATA: Refreshes data for the currently selected miners, or all miners if none are selected.
* OPEN IN WEB: Opens all currently selected miners web interfaces in your default browser.
* REBOOT: Reboots all selected miners.
* RESTART BACKEND: Restarts the mining process on the miner (bosminer daemon, bmminer daemon, cgminer daemon, etc).
* SEND SSH COMMAND: Open a new window to send a SSH command to all selected miners (or all miners if none are selected).
### Table:
* Click any header in the table to sort the table by that column.

View File

@@ -1,18 +1,29 @@
from tools.bad_board_util.miner_factory import miner_factory
from tools.bad_board_util.ui import ui
import asyncio
import sys
import logging
from logger import logger
logger.info("Initializing logger for Board Util.")
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def main():
logging.info("Starting Board Util.")
loop = asyncio.new_event_loop()
loop.run_until_complete(ui())
logging.info("Closing Board Util.")
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,29 @@
from tools.bad_board_util.layout import window
def disable_buttons(func):
button_list = [
"scan",
"import_iplist",
"export_iplist",
"select_all_ips",
"refresh_data",
"open_in_web",
"save_report_button",
"light",
]
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):
# disable the buttons
for button in button_list:
window[button].Update(disabled=True)
# call the original wrapped function
await func(*args, **kwargs)
# re-enable the buttons after the wrapped function completes
for button in button_list:
window[button].Update(disabled=False)
return inner

View File

@@ -1,11 +1,67 @@
import ipaddress
import os
import re
import xlsxwriter
import aiofiles
from tools.bad_board_util.func.ui import update_ui_with_data
from tools.bad_board_util.layout import window
from tools.bad_board_util.func.decorators import disable_buttons
from miners.miner_factory import MinerFactory
@disable_buttons
async def save_report(file_location):
data = {}
workbook = xlsxwriter.Workbook(file_location)
sheet = workbook.add_worksheet()
for line in window["ip_table"].Values:
data[line[0]] = {
"Model": line[1],
"Total Chips": line[2],
"Left Chips": line[3],
"Center Chips": line[5],
"Right Chips": line[7],
"Nominal": 1,
}
async for miner in MinerFactory().get_miner_generator([key for key in data.keys()]):
if miner:
data[miner.ip]["Nominal"] = miner.nominal
list_data = []
for ip in data.keys():
new_data = data[ip]
new_data["IP"] = ip
list_data.append(new_data)
list_data = sorted(list_data, reverse=True, key=lambda x: x["Total Chips"])
headers = [
"IP",
"Miner Model",
"Total Chip Count",
"Left Board Chips",
"Center Board Chips",
"Right Board Chips",
]
print(data)
row = 0
col = 0
for item in headers:
sheet.write(row, col, item)
col += 1
row = 1
for line in list_data:
col = 0
# write values in the same order as the headers above
for point in [line[k] for k in ("IP", "Model", "Total Chips", "Left Chips", "Center Chips", "Right Chips")]:
sheet.write(row, col, point)
col += 1
row += 1
workbook.close()
async def import_iplist(file_location):
@@ -14,10 +70,15 @@ async def import_iplist(file_location):
return
else:
ip_list = []
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
async for line in file:
ips = [x.group() for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
ips = [
x.group()
for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
line,
)
]
for ip in ips:
if ip not in ip_list:
ip_list.append(ipaddress.ip_address(ip))
@@ -33,11 +94,11 @@ async def export_iplist(file_location, ip_list_selected):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(item) + "\n")
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(str(item[0]) + "\n")
await update_ui_with_data("status", "")

View File

@@ -2,11 +2,45 @@ import asyncio
import ipaddress
import warnings
from tools.bad_board_util.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
from tools.bad_board_util.func.ui import (
update_ui_with_data,
update_prog_bar,
set_progress_bar_len,
)
from tools.bad_board_util.layout import window
from tools.bad_board_util.miner_factory import miner_factory
from miners.miner_factory import MinerFactory
from tools.bad_board_util.func.decorators import disable_buttons
@disable_buttons
async def miner_light(ips: list):
await asyncio.gather(*[flip_light(ip) for ip in ips])
async def flip_light(ip):
ip_list = window["ip_table"].Widget
miner = await MinerFactory().get_miner(ip)
index = [item[0] for item in window["ip_table"].Values].index(ip)
index_tags = ip_list.item(index + 1)["tags"]
if "light" not in index_tags and "light+bad" not in index_tags:
tag = "light"
if "bad" in index_tags:
index_tags.remove("bad")
tag = "light+bad"
index_tags.append(tag)
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_on()
else:
if "light+bad" in index_tags:
index_tags.remove("light+bad")
index_tags.append("bad")
if "light" in index_tags:
index_tags.remove("light")
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_off()
@disable_buttons
async def scan_network(network):
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("ip_count", "")
@@ -24,7 +58,7 @@ async def scan_network(network):
asyncio.create_task(update_prog_bar(progress_bar_len))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
get_miner_genenerator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
all_miners.append(found_miner)
@@ -36,11 +70,15 @@ async def scan_network(network):
await update_ui_with_data("status", "")
@disable_buttons
async def refresh_data(ip_list: list):
await update_ui_with_data("status", "Getting Data")
ips = [ipaddress.ip_address(ip) for ip in ip_list]
if len(ips) == 0:
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
ips = [
ipaddress.ip_address(ip)
for ip in [item[0] for item in window["ip_table"].Values]
]
await set_progress_bar_len(len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -65,22 +103,17 @@ async def refresh_data(ip_list: list):
board_right = ""
if data_point["data"]:
if 0 in data_point["data"].keys():
board_left = " ".join([chain["chip_status"] for chain in data_point["data"][0]]).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
board_left = " ".join(
[chain["chip_status"] for chain in data_point["data"][0]]
).replace("o", "")
if 1 in data_point["data"].keys():
board_center = " ".join([chain["chip_status"] for chain in data_point["data"][1]]).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
board_center = " ".join(
[chain["chip_status"] for chain in data_point["data"][1]]
).replace("o", "")
if 2 in data_point["data"].keys():
board_right = " ".join([chain["chip_status"] for chain in data_point["data"][2]]).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
if False in [chain["nominal"] for chain in [data_point["data"][key] for key in data_point["data"].keys()][0]]:
row_colors.append((ip_table_index, "white", "red"))
else:
row_colors.append((ip_table_index, "white", "red"))
board_right = " ".join(
[chain["chip_status"] for chain in data_point["data"][2]]
).replace("o", "")
data = [
data_point["IP"],
data_point["model"],
@@ -89,109 +122,187 @@ async def refresh_data(ip_list: list):
len(board_center),
board_center,
len(board_right),
board_right
board_right,
]
ip_table_data[ip_table_index] = data
window["ip_table"].update(ip_table_data, row_colors=row_colors)
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "")
@disable_buttons
async def scan_and_get_data(network):
# update status and reset the table
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("ip_count", "")
await update_ui_with_data("ip_table", [])
# set progress bar length to network size
network_size = len(network)
miner_generator = network.scan_network_generator()
await set_progress_bar_len(3 * network_size)
progress_bar_len = 0
miners = []
async for miner in miner_generator:
# scan the network for miners using a generator
async for miner in network.scan_network_generator():
# the generator will either return None or an IP address
if miner:
miners.append(miner)
# could output "Identifying" for each found item, but it gets a bit cluttered
# and may confuse the end user because of the timing of adding the IPs
# window["ip_table"].update([["Identifying..."] for miner in miners])
# add to the progress bar length after scanning an address
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
# add progress for the miners that we aren't going to identify
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
# identify different miner instances using the miner factory generator
async for found_miner in MinerFactory().get_miner_generator(miners):
# miner factory generator will always return a miner
all_miners.append(found_miner)
# sort the list of miners by IP address
all_miners.sort(key=lambda x: x.ip)
# add the new miner to the table
window["ip_table"].update([[str(miner.ip)] for miner in all_miners])
# update progress bar
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
# update the count of found miners
await update_ui_with_data("ip_count", str(len(all_miners)))
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in miners])
# update progress bar for miners we won't get data for
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
# get the list of IP addresses from the table
ip_table_data = window["ip_table"].Values
ordered_all_ips = [item[0] for item in ip_table_data]
progress_bar_len += (network_size - len(miners))
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "Getting Data")
row_colors = []
for all_data in data_gen:
# create an in place generator for getting data
for all_data in asyncio.as_completed(
[get_formatted_data(miner) for miner in miners]
):
# wait for a generator item to return
data_point = await all_data
# make sure the IP is one we have
# this will likely never fail, but a good failsafe
if data_point["IP"] in ordered_all_ips:
# get the index of the IP in the table
ip_table_index = ordered_all_ips.index(data_point["IP"])
board_left = ""
board_center = ""
board_right = ""
# make sure we have data, some miners don't allow getting board data
if data_point["data"]:
# check if the 0th board (L board) is in the data
if 0 in data_point["data"].keys():
board_left = " ".join([chain["chip_status"] for chain in data_point["data"][0]]).replace("o", "")
board_left = " ".join(
[chain["chip_status"] for chain in data_point["data"][0]]
).replace("o", "")
else:
# if the board isn't in data, highlight it red
row_colors.append((ip_table_index, "bad"))
# check if the 1st board (C board) is in the data
if 1 in data_point["data"].keys():
board_center = " ".join([chain["chip_status"] for chain in data_point["data"][1]]).replace("o", "")
board_center = " ".join(
[chain["chip_status"] for chain in data_point["data"][1]]
).replace("o", "")
else:
# if the board isn't in data, highlight it red
row_colors.append((ip_table_index, "bad"))
# check if the 2nd board (R board) is in the data
if 2 in data_point["data"].keys():
board_right = " ".join([chain["chip_status"] for chain in data_point["data"][2]]).replace("o", "")
board_right = " ".join(
[chain["chip_status"] for chain in data_point["data"][2]]
).replace("o", "")
else:
# if the board isn't in data, highlight it red
row_colors.append((ip_table_index, "bad"))
if False in [chain["nominal"] for board in [data_point["data"][key] for key in data_point["data"].keys()] for chain in board]:
# check if the miner has all nominal chips
if False in [
# True/False for whether each chain is nominal
chain["nominal"]
# for each board in the miner
for board in [
data_point["data"][key] for key in data_point["data"].keys()
]
# for each chain in each board in the miner
for chain in board
]:
# if the miner doesn't have all chips, highlight it red
row_colors.append((ip_table_index, "bad"))
else:
# the row is bad if we have no data
row_colors.append((ip_table_index, "bad"))
# split the chip data into thirds
board_left_chips = "\n".join(split_chips(board_left, 3))
board_center_chips = "\n".join(split_chips(board_center, 3))
board_right_chips = "\n".join(split_chips(board_right, 3))
# create data for the table
data = [
data_point["IP"],
data_point["model"],
(len(board_left) + len(board_center) + len(board_right)),
len(board_left),
board_left_chips,
len(board_center),
board_center_chips,
len(board_right),
board_right_chips
board_right_chips,
]
# put the data at the index of the IP address
ip_table_data[ip_table_index] = data
window["ip_table"].update(ip_table_data)
# configure "bad" tag to highlight red
table = window["ip_table"].Widget
table.tag_configure("bad", foreground="white", background="red")
# set tags on the row if they have been set
for row in row_colors:
table.item(row[0] + 1, tags=row[1])
# add to the progress bar
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
# reset status
await update_ui_with_data("status", "")
def split_chips(string, number_of_splits):
k, m = divmod(len(string), number_of_splits)
return (string[i*k+min(i, m):(i+1)*k+min(i+1, m)] for i in range(number_of_splits))
return (
string[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)]
for i in range(number_of_splits)
)
async def get_formatted_data(ip: ipaddress.ip_address):
miner = await miner_factory.get_miner(ip)
miner = await MinerFactory().get_miner(ip)
model = await miner.get_model()
warnings.filterwarnings('ignore')
warnings.filterwarnings("ignore")
board_data = await miner.get_board_info()
data = {"IP": str(ip), "model": str(model), "data": board_data}
return data
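
The split_chips helper above spreads a chip-status string across a given number of near-equal chunks using divmod, so any remainder characters land in the first chunks. A standalone sketch of the same split, with an assumed 8-character status string, behaves like this:

def split_chips(string, number_of_splits):
    # divmod gives the base chunk length (k) and the remainder (m);
    # the first m chunks each take one extra character
    k, m = divmod(len(string), number_of_splits)
    return (
        string[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)]
        for i in range(number_of_splits)
    )

print(list(split_chips("xxooxxoo", 3)))  # ['xxo', 'oxx', 'oo']
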

View File

@@ -0,0 +1,392 @@
import datetime
from base64 import b64decode
from io import BytesIO
from reportlab.lib.pagesizes import letter, inch
from reportlab.lib.utils import ImageReader
from reportlab.lib.styles import ParagraphStyle, TA_CENTER
from reportlab.platypus import (
SimpleDocTemplate,
KeepInFrame,
Table,
Image,
Paragraph,
TableStyle,
PageBreak,
Spacer,
)
from reportlab.lib import colors
import ipaddress
import numpy as np
import matplotlib.dates
import matplotlib.pyplot as plt
from svglib.svglib import svg2rlg
from matplotlib import cm
from matplotlib.ticker import FormatStrFormatter
from miners.miner_factory import MinerFactory
from tools.bad_board_util.func.decorators import disable_buttons
from tools.bad_board_util.img import IMAGE_SELECTION_MATRIX, LOGO
from tools.bad_board_util.layout import window
IP_STYLE = ParagraphStyle(
"IP Style",
alignment=TA_CENTER,
fontSize=7,
fontName="Helvetica-Bold",
)
TITLE_STYLE = ParagraphStyle(
"Title",
alignment=TA_CENTER,
fontSize=20,
spaceAfter=40,
fontName="Helvetica-Bold",
)
def add_first_page_number(canvas, doc):
canvas.saveState()
canvas.drawString(letter[0] - 60, 20, "Page " + str(doc.page))
canvas.restoreState()
def add_page_header(canvas, doc):
canvas.saveState()
canvas.drawCentredString(
(letter[0] / 16) * 14,
letter[1] - 57,
datetime.datetime.now().strftime("%Y-%b-%d"),
)
img_dec = b64decode(LOGO)
img = BytesIO(img_dec)
img.seek(0)
canvas.drawImage(
ImageReader(img),
30,
letter[1] - 65,
150,
35,
)
canvas.drawString(letter[0] - 60, 20, "Page " + str(doc.page))
canvas.restoreState()
@disable_buttons
async def save_report(file_location):
p1_logo, p1_title = create_first_page()
data = {}
for line in window["ip_table"].Values:
data[line[0]] = {
"Model": line[1],
"Total Chips": line[2],
"Left Chips": line[3],
"Center Chips": line[5],
"Right Chips": line[7],
"Nominal": 1,
}
async for miner in MinerFactory().get_miner_generator([key for key in data.keys()]):
if miner:
data[str(miner.ip)]["Nominal"] = miner.nominal_chips
list_data = []
for ip in data.keys():
new_data = data[ip]
new_data["IP"] = ip
list_data.append(new_data)
list_data = sorted(
list_data, reverse=False, key=lambda x: ipaddress.ip_address(x["IP"])
)
image_selection_data = {}
for miner in list_data:
miner_bad_boards = ""
if miner["Left Chips"] < miner["Nominal"]:
miner_bad_boards += "l"
if miner["Center Chips"] < miner["Nominal"]:
miner_bad_boards += "c"
if miner["Right Chips"] < miner["Nominal"]:
miner_bad_boards += "r"
image_selection_data[miner["IP"]] = miner_bad_boards
doc = SimpleDocTemplate(
file_location,
pagesize=letter,
topMargin=1 * inch,
leftMargin=1 * inch,
rightMargin=1 * inch,
bottomMargin=1 * inch,
title=f"Board Report {datetime.datetime.now().strftime('%Y/%b/%d')}",
)
pie_chart, board_table = create_boards_pie_chart(image_selection_data)
table_data = get_table_data(image_selection_data)
miner_img_table = Table(
table_data,
colWidths=0.8 * inch,
# repeatRows=1,
# rowHeights=[4 * inch],
)
miner_img_table.setStyle(
TableStyle(
[
("SPAN", (0, 0), (-1, 0)),
("LEFTPADDING", (0, 0), (-1, -1), 0),
("RIGHTPADDING", (0, 0), (-1, -1), 0),
("BOTTOMPADDING", (0, 1), (-1, -1), 0),
("TOPPADDING", (0, 1), (-1, -1), 0),
("BOTTOMPADDING", (0, 0), (-1, 0), 20),
("TOPPADDING", (0, 0), (-1, 0), 20),
]
)
)
elements = []
elements.append(p1_logo)
elements.append(p1_title)
elements.append(PageBreak())
elements.append(pie_chart)
elements.append(Spacer(0, 60))
elements.append(board_table)
elements.append(PageBreak())
elements.append(miner_img_table)
elements.append(PageBreak())
elements.append(
Paragraph(
"Board Data",
style=TITLE_STYLE,
)
)
elements.append(create_data_table(list_data))
elements.append(PageBreak())
doc.build(
elements,
onFirstPage=add_first_page_number,
onLaterPages=add_page_header,
)
def create_boards_pie_chart(data):
labels = ["All Working", "1 Bad Board", "2 Bad Boards", "3 Bad Boards"]
num_bad_boards = [0, 0, 0, 0]
for item in data.keys():
num_bad_boards[len(data[item])] += 1
cmap = plt.get_cmap("Blues")
cs = cmap(np.linspace(0.2, 0.8, num=len(num_bad_boards)))
fig1, ax = plt.subplots()
ax.pie(
num_bad_boards,
labels=labels,
autopct="%1.2f%%",
shadow=True,
startangle=180,
colors=cs,
pctdistance=0.8,
)
ax.axis("equal")
ax.set_title("Broken Boards", fontsize=24, pad=20)
imgdata = BytesIO()
fig1.savefig(imgdata, format="svg")
imgdata.seek(0) # rewind the data
drawing = svg2rlg(imgdata)
imgdata.close()
plt.close("all")
pie_chart = KeepInFrame(375, 375, [Image(drawing)], hAlign="CENTER")
table_data = [labels, num_bad_boards]
t = Table(table_data)
table_style = TableStyle(
[
# ("FONTSIZE", (0, 0), (-1, -1), 13),
# line for below titles
("LINEBELOW", (0, 0), (-1, 0), 2, colors.black),
("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
# line for above totals
("LINEABOVE", (0, -1), (-1, -1), 2, colors.black),
# line for beside unit #
("LINEAFTER", (0, 0), (0, -1), 2, colors.black),
# gridlines and outline of table
("INNERGRID", (0, 0), (-1, -1), 0.25, colors.black),
("BOX", (0, 0), (-1, -1), 2, colors.black),
("LEFTPADDING", (0, 0), (-1, -1), 3),
("RIGHTPADDING", (0, 0), (-1, -1), 3),
("BOTTOMPADDING", (0, 0), (-1, -1), 3),
("TOPPADDING", (0, 0), (-1, -1), 3),
]
)
t.setStyle(table_style)
# zebra stripes on table
for each in range(len(table_data)):
if each % 2 == 0:
bg_color = colors.whitesmoke
else:
bg_color = colors.lightgrey
t.setStyle(TableStyle([("BACKGROUND", (0, each), (-1, each), bg_color)]))
return pie_chart, t
def create_first_page():
title_style = ParagraphStyle(
"Title",
alignment=TA_CENTER,
fontSize=50,
spaceAfter=40,
spaceBefore=150,
fontName="Helvetica-Bold",
)
img_dec = b64decode(LOGO)
img = BytesIO(img_dec)
img.seek(0)
logo = KeepInFrame(450, 105, [Image(img)])
title = Paragraph("Board Report", style=title_style)
return logo, title
def create_data_table(data):
left_bad_boards = 0
right_bad_boards = 0
center_bad_boards = 0
table_data = []
for miner in data:
miner_bad_boards = 0
if miner["Left Chips"] < miner["Nominal"]:
miner_bad_boards += 1
left_bad_boards += 1
if miner["Center Chips"] < miner["Nominal"]:
miner_bad_boards += 1
center_bad_boards += 1
if miner["Right Chips"] < miner["Nominal"]:
miner_bad_boards += 1
right_bad_boards += 1
table_data.append(
[
miner["IP"],
miner["Total Chips"],
miner["Left Chips"],
miner["Center Chips"],
miner["Right Chips"],
miner_bad_boards,
]
)
table_data.append(
[
"Total",
sum([miner[1] for miner in table_data]),
sum([miner[2] for miner in table_data]),
sum([miner[3] for miner in table_data]),
sum([miner[4] for miner in table_data]),
sum([miner[5] for miner in table_data]),
]
)
table_data[:0] = (
[
"IP",
"Total Chips",
"Left Board Chips",
"Center Board Chips",
"Right Board Chips",
"Failed Boards",
],
)
# create the table
t = Table(table_data, repeatRows=1)
# generate a basic table style
table_style = TableStyle(
[
("FONTSIZE", (0, 0), (-1, -1), 8),
# line for below titles
("LINEBELOW", (0, 0), (-1, 0), 2, colors.black),
("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
# line for above totals
("LINEABOVE", (0, -1), (-1, -1), 2, colors.black),
("FONTNAME", (0, -1), (-1, -1), "Helvetica-Bold"),
# line for beside unit #
("LINEAFTER", (0, 0), (0, -1), 2, colors.black),
("FONTNAME", (0, 0), (0, -1), "Helvetica-Bold"),
# gridlines and outline of table
("INNERGRID", (0, 0), (-1, -1), 0.25, colors.black),
("BOX", (0, 0), (-1, -1), 2, colors.black),
]
)
for (
row,
values,
) in enumerate(table_data):
if not row == 0 and not row == (len(table_data) - 1):
failed_boards = values[5]
if not failed_boards == 0:
table_style.add("TEXTCOLOR", (5, row), (5, row), colors.red)
# set the styles to the table
t.setStyle(table_style)
# zebra stripes on table
for each in range(len(table_data)):
if each % 2 == 0:
bg_color = colors.whitesmoke
else:
bg_color = colors.lightgrey
t.setStyle(TableStyle([("BACKGROUND", (0, each), (-1, each), bg_color)]))
return t
def get_table_data(data):
table_elems = [[Paragraph("Hashboard Visual Representation", style=TITLE_STYLE)]]
table_row = []
table_style = TableStyle(
[
("LEFTPADDING", (0, 0), (-1, -1), 0),
("RIGHTPADDING", (0, 0), (-1, -1), 0),
("BOTTOMPADDING", (0, 0), (-1, -1), 0),
("BOX", (0, 0), (-1, -1), 2, colors.black),
]
)
table_width = 0.8 * inch
for ip in data.keys():
img_dec = b64decode(IMAGE_SELECTION_MATRIX[data[ip]])
img = BytesIO(img_dec)
img.seek(0)
image = KeepInFrame(table_width, table_width, [Image(img)])
ip_para = Paragraph(ip, style=IP_STYLE)
table_row.append(
Table([[ip_para], [image]], colWidths=table_width, style=table_style)
)
# table_row.append(image)
# table_row_txt.append(ip_para)
if len(table_row) > 7:
# table_elems.append(table_row_txt)
# table_elems.append(table_row)
table_elems.append(table_row)
# table_row_txt = []
table_row = []
if not table_row == []:
table_elems.append(table_row)
return table_elems
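
create_boards_pie_chart above hands the matplotlib figure to ReportLab by rendering it to an in-memory SVG and converting it with svglib, so the chart stays vector-based and scales cleanly in the PDF. A minimal sketch of that round trip, with assumed board counts, looks like this:

from io import BytesIO

import matplotlib.pyplot as plt
from svglib.svglib import svg2rlg

# assumed sample counts: 10 fully working, 3 with one bad board, 1 with two
counts = [10, 3, 1, 0]
labels = ["All Working", "1 Bad Board", "2 Bad Boards", "3 Bad Boards"]

fig, ax = plt.subplots()
ax.pie(counts, labels=labels, autopct="%1.2f%%")
ax.set_title("Broken Boards")

buffer = BytesIO()
fig.savefig(buffer, format="svg")  # render the figure to in-memory SVG
buffer.seek(0)
drawing = svg2rlg(buffer)          # convert the SVG to a ReportLab Drawing
plt.close(fig)

# a Drawing is itself a flowable, so it can go straight into the story
# (the utility wraps it in KeepInFrame to constrain its size)
story = [drawing]
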

View File

@@ -6,6 +6,12 @@ from tools.bad_board_util.layout import window
import pyperclip
def table_select_all():
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
def copy_from_table(table):
selection = table.selection()
copy_values = []
@@ -15,10 +21,11 @@ def copy_from_table(table):
table_values = table.item(each)["values"]
ip = table_values[0]
model = table_values[1]
l_brd_chips = str(table_values[2])
c_brd_chips = str(table_values[4])
r_brd_chips = str(table_values[6])
all_values = [ip, model, l_brd_chips, c_brd_chips, r_brd_chips]
total = str(table_values[2])
l_brd_chips = str(table_values[3])
c_brd_chips = str(table_values[5])
r_brd_chips = str(table_values[7])
all_values = [ip, model, total, l_brd_chips, c_brd_chips, r_brd_chips]
value = ", ".join(all_values)
copy_values.append(str(value))
@@ -36,7 +43,7 @@ async def update_ui_with_data(key, message, append=False):
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window['progress'].maxlen)
percent_done = 100 * (amount / window["progress"].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
@@ -49,18 +56,28 @@ async def set_progress_bar_len(amount):
async def sort_data(index: int or str):
if window["scan"].Disabled:
return
await update_ui_with_data("status", "Sorting Data")
data_list = window['ip_table'].Values
data_list = window["ip_table"].Values
table = window["ip_table"].Widget
all_data = []
for idx, item in enumerate(data_list):
all_data.append({"data": item, "tags": table.item(int(idx) + 1)["tags"]})
# ip addresses
if re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index])):
new_list = sorted(all_data, key=lambda x: ipaddress.ip_address(x["data"][index]))
if re.match(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index]),
):
new_list = sorted(
all_data, key=lambda x: ipaddress.ip_address(x["data"][index])
)
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: ipaddress.ip_address(x["data"][index]))
new_list = sorted(
all_data,
reverse=True,
key=lambda x: ipaddress.ip_address(x["data"][index]),
)
# everything else, model, chips
else:

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +0,0 @@
"""
This file stores the MinerFactory instance used by the BadBoardUtility for use in other files.
"""
from miners.miner_factory import MinerFactory
miner_factory = MinerFactory()

View File

@@ -3,9 +3,14 @@ import sys
import PySimpleGUI as sg
from tools.bad_board_util.layout import window
from tools.bad_board_util.func.miners import refresh_data, scan_and_get_data
from tools.bad_board_util.func.miners import (
refresh_data,
scan_and_get_data,
miner_light,
)
from tools.bad_board_util.func.files import import_iplist, export_iplist
from tools.bad_board_util.func.ui import sort_data, copy_from_table
from tools.bad_board_util.func.pdf import save_report
from tools.bad_board_util.func.ui import sort_data, copy_from_table, table_select_all
from network import MinerNetwork
@@ -16,35 +21,66 @@ async def ui():
window.read(timeout=0)
table = window["ip_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_table(table))
table.bind("<Control-Key-a>", lambda x: table_select_all())
# light tag shows red row for fault lights
table.tag_configure("bad", foreground="white", background="orange")
table.tag_configure("light", foreground="white", background="red")
table.tag_configure("light+bad", foreground="white", background="red")
while True:
event, value = window.read(timeout=0)
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == 'ip_table':
if event[0] == "ip_table":
if event[2][0] == -1:
await sort_data(event[2][1])
if event == 'open_in_web':
if event == "open_in_web":
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == 'scan':
if len(value['miner_network'].split("/")) > 1:
network = value['miner_network'].split("/")
if event == "scan":
if len(value["miner_network"].split("/")) > 1:
network = value["miner_network"].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value['miner_network'])
miner_network = MinerNetwork(value["miner_network"])
asyncio.create_task(scan_and_get_data(miner_network))
if event == 'select_all_ips':
if event == "save_report":
if not value["save_report"] == "":
asyncio.create_task(save_report(value["save_report"]))
window["save_report"].update("")
if event == "select_all_ips":
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(select_rows=([row for row in range(len(window["ip_table"].Values))]))
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
if event == "light":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
miner_light(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(export_iplist(value["file_iplist"], [window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
export_iplist(
value["file_iplist"],
[window["ip_table"].Values[item][0] for item in value["ip_table"]],
)
)
if event == "refresh_data":
asyncio.create_task(refresh_data([window["ip_table"].Values[item][0] for item in value["ip_table"]]))
asyncio.create_task(
refresh_data(
[window["ip_table"].Values[item][0] for item in value["ip_table"]]
)
)
if event == "__TIMEOUT__":
await asyncio.sleep(0)

View File

@@ -1,13 +1,32 @@
from tools.cfg_util.cfg_util_sg.ui import ui
# TODO: Add Logging
# TODO: Add an option to append the last octet of the IP
# address to the workername when configuring
import asyncio
import sys
import logging
from tools.cfg_util.cfg_util_sg.ui import ui
# initialize logger and get settings
from logger import logger
logger.info("Initializing logger for CFG Util.")
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def main():
logging.info("Starting CFG Util.")
loop = asyncio.new_event_loop()
loop.run_until_complete(ui())
logging.info("Closing CFG Util.")

View File

@@ -0,0 +1,37 @@
from tools.cfg_util.cfg_util_sg.layout import window
def disable_buttons(func):
button_list = [
"scan",
"import_file_config",
"export_file_config",
"import_iplist",
"export_iplist",
"export_csv",
"select_all_ips",
"refresh_data",
"open_in_web",
"reboot_miners",
"restart_miner_backend",
"import_config",
"send_config",
"light",
"generate_config",
"send_miner_ssh_command_window",
]
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):
# disable the buttons
for button in button_list:
window[button].Update(disabled=True)
# call the original wrapped function
await func(*args, **kwargs)
# re-enable the buttons after the wrapped function completes
for button in button_list:
window[button].Update(disabled=False)
return inner
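
disable_buttons greys out every listed button before the wrapped coroutine runs and re-enables them when it returns. A self-contained sketch of the same pattern, using a hypothetical FakeButton in place of the PySimpleGUI elements, adds a try/finally so the buttons also come back if the wrapped call raises (the version above re-enables only after a normal return):

import asyncio

class FakeButton:
    """Hypothetical stand-in for a PySimpleGUI element."""
    def Update(self, disabled):
        print(f"disabled={disabled}")

window = {"scan": FakeButton(), "refresh_data": FakeButton()}
BUTTONS = ["scan", "refresh_data"]

def disable_buttons(func):
    async def inner(*args, **kwargs):
        # disable the buttons before running the wrapped coroutine
        for button in BUTTONS:
            window[button].Update(disabled=True)
        try:
            return await func(*args, **kwargs)
        finally:
            # re-enable even if the wrapped coroutine raises
            for button in BUTTONS:
                window[button].Update(disabled=False)
    return inner

@disable_buttons
async def refresh_data(ips):
    await asyncio.sleep(0)  # placeholder for the real work

asyncio.run(refresh_data([]))
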

View File

@@ -17,10 +17,15 @@ async def import_iplist(file_location):
return
else:
ip_list = []
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
async for line in file:
ips = [x.group() for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
ips = [
x.group()
for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
line,
)
]
for ip in ips:
if ip not in ip_list:
ip_list.append(ipaddress.ip_address(ip))
@@ -36,33 +41,34 @@ async def export_csv(file_location, ip_list_selected):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(
", ".join([str(part) for part in item])
) + "\n")
await file.write(
str(", ".join([str(part).rstrip().lstrip() for part in item]))
+ "\n"
)
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
await file.write(str(
", ".join([str(part) for part in item])
) + "\n")
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(
str(", ".join([str(part).rstrip().lstrip() for part in item]))
+ "\n"
)
await update_ui_with_data("status", "")
async def export_iplist(file_location, ip_list_selected):
await update_ui_with_data("status", "Exporting")
if not os.path.exists(file_location):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(item) + "\n")
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(str(item[0]) + "\n")
await update_ui_with_data("status", "")
@@ -72,7 +78,7 @@ async def import_config_file(file_location):
if not os.path.exists(file_location):
return
else:
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
config = await file.read()
await update_ui_with_data("config", await bos_config_convert(toml.loads(config)))
await update_ui_with_data("status", "")
@@ -80,11 +86,11 @@ async def import_config_file(file_location):
async def export_config_file(file_location, config):
await update_ui_with_data("status", "Exporting")
config = await general_config_convert_bos(config)
config = toml.dumps(await general_config_convert_bos(config))
config = toml.loads(config)
config['format']['generator'] = 'upstream_config_util'
config['format']['timestamp'] = int(time.time())
config["format"]["generator"] = "upstream_config_util"
config["format"]["timestamp"] = int(time.time())
config = toml.dumps(config)
async with aiofiles.open(file_location, mode='w+') as file:
async with aiofiles.open(file_location, mode="w+") as file:
await file.write(config)
await update_ui_with_data("status", "")
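
import_iplist above relies on a single anchored regex to pull a leading IPv4 address out of each line of the file. A standalone sketch of that extraction over a few assumed sample lines, deduplicating on the parsed address:

import ipaddress
import re

IP_REGEX = (
    r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
)

# assumed sample lines from an exported IP list / CSV
lines = [
    "192.168.1.10, Antminer S9, 63",
    "no address on this line",
    "192.168.1.10 appears again",
]

ip_list = []
for line in lines:
    for match in re.finditer(IP_REGEX, line):
        addr = ipaddress.ip_address(match.group())
        if addr not in ip_list:
            ip_list.append(addr)

print(ip_list)  # [IPv4Address('192.168.1.10')]
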

View File

@@ -2,25 +2,38 @@ import asyncio
import ipaddress
import time
import warnings
import logging
from API import APIError
from tools.cfg_util.cfg_util_sg.func.parse_data import safe_parse_api_data
from tools.cfg_util.cfg_util_sg.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
from tools.cfg_util.cfg_util_sg.func.ui import (
update_ui_with_data,
update_prog_bar,
set_progress_bar_len,
)
from tools.cfg_util.cfg_util_sg.layout import window
from tools.cfg_util.cfg_util_sg.miner_factory import miner_factory
from miners.miner_factory import MinerFactory
from config.bos import bos_config_convert
from settings import CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS, CFG_UTIL_REBOOT_THREADS as REBOOT_THREADS
from tools.cfg_util.cfg_util_sg.func.decorators import disable_buttons
from settings import (
CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS,
CFG_UTIL_REBOOT_THREADS as REBOOT_THREADS,
)
async def import_config(idx):
await update_ui_with_data("status", "Importing")
miner = await miner_factory.get_miner(ipaddress.ip_address(window["ip_table"].Values[idx[0]][0]))
miner_ip = window["ip_table"].Values[idx[0]][0]
logging.debug(f"{miner_ip}: Importing config.")
miner = await MinerFactory().get_miner(ipaddress.ip_address(miner_ip))
await miner.get_config()
config = miner.config
await update_ui_with_data("config", str(config))
logging.debug(f"{miner_ip}: Config import completed.")
await update_ui_with_data("status", "")
@disable_buttons
async def scan_network(network):
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("ip_count", "")
@@ -43,7 +56,7 @@ async def scan_network(network):
asyncio.create_task(update_prog_bar(progress_bar_len))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
get_miner_genenerator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
all_miners.append(found_miner)
@@ -55,26 +68,73 @@ async def scan_network(network):
await update_ui_with_data("status", "")
@disable_buttons
async def miner_light(ips: list):
await asyncio.gather(*[flip_light(ip) for ip in ips])
async def flip_light(ip):
ip_list = window['ip_table'].Widget
miner = await miner_factory.get_miner(ip)
ip_list = window["ip_table"].Widget
miner = await MinerFactory().get_miner(ip)
index = [item[0] for item in window["ip_table"].Values].index(ip)
index_tags = ip_list.item(index)['tags']
index_tags = ip_list.item(index + 1)["tags"]
if "light" not in index_tags:
ip_list.item(index, tags=([*index_tags, "light"]))
window['ip_table'].update(row_colors=[(index, "white", "red")])
index_tags.append("light")
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_on()
else:
index_tags.remove("light")
ip_list.item(index, tags=index_tags)
window['ip_table'].update(row_colors=[(index, "black", "white")])
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_off()
async def send_command_generator(miners: list, command: str):
loop = asyncio.get_event_loop()
command_tasks = []
for miner in miners:
if len(command_tasks) >= CONFIG_THREADS:
cmd_sent = asyncio.as_completed(command_tasks)
command_tasks = []
for done in cmd_sent:
yield await done
command_tasks.append(loop.create_task(send_ssh_command(miner, command)))
cmd_sent = asyncio.as_completed(command_tasks)
for done in cmd_sent:
yield await done
async def send_ssh_command(miner, command: str):
proc = await miner.send_ssh_command(command)
return {"IP": miner.ip, "proc": proc}
async def send_miners_ssh_commands(ips: list, command: str, ssh_cmd_window):
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
data = []
send_cmd_generator = send_command_generator(all_miners, command)
async for command_sent in send_cmd_generator:
data.append(command_sent)
proc_table_data = [[ip, ""] for ip in ips]
for item in data:
if item["proc"].returncode == 0:
return_data = item["proc"].stdout
else:
return_data = item["proc"].stderr
if str(item["IP"]) in ips:
proc_table_index = ips.index(str(item["IP"]))
proc_table_data[proc_table_index] = [
str(item["IP"]),
return_data.replace("\n", " "),
]
ssh_cmd_window["ssh_cmd_table"].update(proc_table_data)
async def reboot_generator(miners: list):
loop = asyncio.get_event_loop()
reboot_tasks = []
@@ -90,11 +150,12 @@ async def reboot_generator(miners: list):
yield await done
@disable_buttons
async def reboot_miners(ips: list):
await update_ui_with_data("status", "Rebooting")
await set_progress_bar_len(2 * len(ips))
progress_bar_len = 0
get_miner_genenerator = miner_factory.get_miner_generator(ips)
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
@@ -123,25 +184,26 @@ async def restart_backend_generator(miners: list):
yield await done
@disable_buttons
async def restart_miners_backend(ips: list):
await update_ui_with_data("status", "Restarting Backends")
await set_progress_bar_len(2 * len(ips))
progress_bar_len = 0
get_miner_genenerator = miner_factory.get_miner_generator(ips)
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
reboot_miners_generator = reboot_generator(all_miners)
async for _rebooter in reboot_miners_generator:
restart_backend_gen = restart_backend_generator(all_miners)
async for _rebooter in restart_backend_gen:
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "")
async def send_config_generator(miners: list, config):
async def send_config_generator(miners: list, config, last_octet_ip_user: bool = False):
loop = asyncio.get_event_loop()
config_tasks = []
for miner in miners:
@@ -150,25 +212,30 @@ async def send_config_generator(miners: list, config):
config_tasks = []
for sent_config in configured:
yield await sent_config
config_tasks.append(loop.create_task(miner.send_config(config)))
config_tasks.append(
loop.create_task(miner.send_config(config, ip_user=last_octet_ip_user))
)
configured = asyncio.as_completed(config_tasks)
for sent_config in configured:
yield await sent_config
async def send_config(ips: list, config):
@disable_buttons
async def send_config(ips: list, config, last_octet_ip: bool = False):
await update_ui_with_data("status", "Configuring")
await set_progress_bar_len(2 * len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(ips)
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
config_sender_generator = send_config_generator(all_miners, config)
config_sender_generator = send_config_generator(
all_miners, config, last_octet_ip_user=last_octet_ip
)
async for _config_sender in config_sender_generator:
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -177,12 +244,16 @@ async def send_config(ips: list, config):
await refresh_data(ips)
@disable_buttons
async def refresh_data(ip_list: list):
await update_ui_with_data("status", "Getting Data")
await update_ui_with_data("hr_total", "")
ips = [ipaddress.ip_address(ip) for ip in ip_list]
if len(ips) == 0:
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
ips = [
ipaddress.ip_address(ip)
for ip in [item[0] for item in window["ip_table"].Values]
]
await set_progress_bar_len(len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -202,9 +273,13 @@ async def refresh_data(ip_list: list):
if data_point["IP"] in ordered_all_ips:
ip_table_index = ordered_all_ips.index(data_point["IP"])
ip_table_data[ip_table_index] = [
data_point["IP"], data_point["model"], data_point["host"], str(data_point['TH/s']) + " TH/s",
data_point["IP"],
data_point["model"],
data_point["host"],
str(data_point["TH/s"]) + " TH/s ",
data_point["temp"],
data_point['user'], str(data_point['wattage']) + " W"
data_point["user"],
str(data_point["wattage"]) + " W",
]
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
@@ -214,8 +289,10 @@ async def refresh_data(ip_list: list):
hr_idx = 3
for item, _ in enumerate(window["ip_table"].Values):
if len(window["ip_table"].Values[item]) > hr_idx:
if not window["ip_table"].Values[item][hr_idx] == '':
hashrate_list.append(float(window["ip_table"].Values[item][hr_idx].replace(" TH/s", "")))
if not window["ip_table"].Values[item][hr_idx] == "":
hashrate_list.append(
float(window["ip_table"].Values[item][hr_idx].replace(" TH/s ", ""))
)
else:
hashrate_list.append(0)
else:
@@ -227,6 +304,7 @@ async def refresh_data(ip_list: list):
await update_ui_with_data("status", "")
@disable_buttons
async def scan_and_get_data(network):
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("hr_total", "")
@@ -234,6 +312,10 @@ async def scan_and_get_data(network):
await update_ui_with_data("ip_table", [])
network_size = len(network)
miner_generator = network.scan_network_generator()
MinerFactory().clear_cached_miners()
logging.info(f"Scanning network: {str(network)}")
await set_progress_bar_len(3 * network_size)
progress_bar_len = 0
miners = []
@@ -246,9 +328,11 @@ async def scan_and_get_data(network):
# window["ip_table"].update([["Identifying..."] for miner in miners])
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
logging.info(f"Found {len(miners)} Miners")
logging.debug(f"Found miners: {miners}")
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
get_miner_genenerator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
all_miners.append(found_miner)
@@ -256,34 +340,46 @@ async def scan_and_get_data(network):
window["ip_table"].update([[str(miner.ip)] for miner in all_miners])
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
logging.info(f"Resolved {len(all_miners)} Miners")
logging.debug(f"Resolved to miner types: {all_miners}")
await update_ui_with_data("ip_count", str(len(all_miners)))
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in miners])
ip_table_data = window["ip_table"].Values
ordered_all_ips = [item[0] for item in ip_table_data]
progress_bar_len += (network_size - len(miners))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "Getting Data")
logging.debug("Getting data on miners.")
for all_data in data_gen:
data_point = await all_data
if data_point["IP"] in ordered_all_ips:
ip_table_index = ordered_all_ips.index(data_point["IP"])
ip_table_data[ip_table_index] = [
data_point["IP"], data_point["model"], data_point["host"], str(data_point['TH/s']) + " TH/s",
data_point["IP"],
data_point["model"],
data_point["host"],
str(data_point["TH/s"]) + " TH/s ",
data_point["temp"],
data_point['user'], str(data_point['wattage']) + " W"
data_point["user"],
str(data_point["wattage"]) + " W",
]
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
hashrate_list = [float(item[3].replace(" TH/s", "")) for item in window["ip_table"].Values if not item[3] == '']
hashrate_list = [
float(item[3].replace(" TH/s ", ""))
for item in window["ip_table"].Values
if not item[3] == ""
]
total_hr = round(sum(hashrate_list), 2)
await update_ui_with_data("hr_total", f"{total_hr} TH/s")
await update_ui_with_data("status", "")
async def get_formatted_data(ip: ipaddress.ip_address):
miner = await miner_factory.get_miner(ip)
warnings.filterwarnings('ignore')
miner = await MinerFactory().get_miner(ip)
logging.debug(f"Getting data for miner: {miner.ip}")
warnings.filterwarnings("ignore")
miner_data = None
host = await miner.get_hostname()
try:
@@ -298,80 +394,148 @@ async def get_formatted_data(ip: ipaddress.ip_address):
user = "?"
try:
miner_data = await miner.api.multicommand("summary", "devs", "temps", "tunerstatus", "pools", "stats")
miner_data = await miner.api.multicommand(
"summary", "devs", "temps", "tunerstatus", "pools", "stats"
)
except APIError:
try:
# no devs command, it will fail in this case
miner_data = await miner.api.multicommand("summary", "temps", "tunerstatus", "pools", "stats")
miner_data = await miner.api.multicommand(
"summary", "temps", "tunerstatus", "pools", "stats"
)
except APIError as e:
print(e)
return {'TH/s': 0, 'IP': str(miner.ip), 'model': 'Unknown', 'temp': 0, 'host': 'Unknown', 'user': 'Unknown',
'wattage': 0}
logging.warning(f"{str(ip)}: {e}")
return {
"TH/s": 0,
"IP": str(miner.ip),
"model": "Unknown",
"temp": 0,
"host": "Unknown",
"user": "Unknown",
"wattage": 0,
}
if miner_data:
logging.info(f"Received miner data for miner: {miner.ip}")
# get all data from summary
if "summary" in miner_data.keys():
if not miner_data["summary"][0].get("SUMMARY") == []:
if (
not miner_data["summary"][0].get("SUMMARY") == []
and "SUMMARY" in miner_data["summary"][0].keys()
):
# temperature data, this is the ideal spot to get this
if "Temperature" in miner_data['summary'][0]['SUMMARY'][0].keys():
if not round(miner_data['summary'][0]['SUMMARY'][0]["Temperature"]) == 0:
temps = miner_data['summary'][0]['SUMMARY'][0]["Temperature"]
# hashrate data, this is the only place to get this for most miners as far as I know
if 'MHS av' in miner_data['summary'][0]['SUMMARY'][0].keys():
th5s = round(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'MHS av') / 1000000, 2)
elif 'GHS av' in miner_data['summary'][0]['SUMMARY'][0].keys():
if not miner_data['summary'][0]['SUMMARY'][0]['GHS av'] == "":
th5s = round(
float(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'GHS av')) / 1000,
2)
if "Temperature" in miner_data["summary"][0]["SUMMARY"][0].keys():
if (
not round(miner_data["summary"][0]["SUMMARY"][0]["Temperature"])
== 0
):
temps = miner_data["summary"][0]["SUMMARY"][0]["Temperature"]
# hashrate data
if "MHS av" in miner_data["summary"][0]["SUMMARY"][0].keys():
th5s = format(
round(
await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "MHS av"
)
/ 1000000,
2,
),
".2f",
).rjust(6, " ")
elif "GHS av" in miner_data["summary"][0]["SUMMARY"][0].keys():
if not miner_data["summary"][0]["SUMMARY"][0]["GHS av"] == "":
th5s = format(
round(
float(
await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "GHS av"
)
)
/ 1000,
2,
),
".2f",
).rjust(6, " ")
# alternate temperature data, for BraiinsOS
if "temps" in miner_data.keys():
if not miner_data["temps"][0]['TEMPS'] == []:
if "Chip" in miner_data["temps"][0]['TEMPS'][0].keys():
for board in miner_data["temps"][0]['TEMPS']:
if not miner_data["temps"][0].get("TEMPS") == []:
if "Chip" in miner_data["temps"][0]["TEMPS"][0].keys():
for board in miner_data["temps"][0]["TEMPS"]:
if board["Chip"] is not None and not board["Chip"] == 0.0:
temps = board["Chip"]
# alternate temperature data, for Whatsminers
if "devs" in miner_data.keys():
if not miner_data["devs"][0].get('DEVS') == []:
if "Chip Temp Avg" in miner_data["devs"][0]['DEVS'][0].keys():
for board in miner_data["devs"][0]['DEVS']:
if board['Chip Temp Avg'] is not None and not board['Chip Temp Avg'] == 0.0:
temps = board['Chip Temp Avg']
if not miner_data["devs"][0].get("DEVS") == []:
if "Chip Temp Avg" in miner_data["devs"][0]["DEVS"][0].keys():
for board in miner_data["devs"][0]["DEVS"]:
if (
board["Chip Temp Avg"] is not None
and not board["Chip Temp Avg"] == 0.0
):
temps = board["Chip Temp Avg"]
# alternate temperature data
if "stats" in miner_data.keys():
if not miner_data["stats"][0]['STATS'] == []:
if not miner_data["stats"][0]["STATS"] == []:
for temp in ["temp2", "temp1", "temp3"]:
if temp in miner_data["stats"][0]['STATS'][1].keys():
if miner_data["stats"][0]['STATS'][1][temp] is not None and not miner_data["stats"][0]['STATS'][1][temp] == 0.0:
temps = miner_data["stats"][0]['STATS'][1][temp]
if temp in miner_data["stats"][0]["STATS"][1].keys():
if (
miner_data["stats"][0]["STATS"][1][temp] is not None
and not miner_data["stats"][0]["STATS"][1][temp] == 0.0
):
temps = miner_data["stats"][0]["STATS"][1][temp]
# alternate temperature data, for Avalonminers
miner_data["stats"][0]['STATS'][0].keys()
if any("MM ID" in string for string in miner_data["stats"][0]['STATS'][0].keys()):
miner_data["stats"][0]["STATS"][0].keys()
if any(
"MM ID" in string
for string in miner_data["stats"][0]["STATS"][0].keys()
):
temp_all = []
for key in [string for string in miner_data["stats"][0]['STATS'][0].keys() if "MM ID" in string]:
for value in [string for string in miner_data["stats"][0]['STATS'][0][key].split(" ") if
"TMax" in string]:
for key in [
string
for string in miner_data["stats"][0]["STATS"][0].keys()
if "MM ID" in string
]:
for value in [
string
for string in miner_data["stats"][0]["STATS"][0][key].split(" ")
if "TMax" in string
]:
temp_all.append(int(value.split("[")[1].replace("]", "")))
temps = round(sum(temp_all) / len(temp_all))
# pool information
if "pools" in miner_data.keys():
if not miner_data['pools'][0].get('POOLS') == []:
user = await safe_parse_api_data(miner_data, 'pools', 0, 'POOLS', 0, 'User')
if not miner_data["pools"][0].get("POOLS") == []:
user = await safe_parse_api_data(
miner_data, "pools", 0, "POOLS", 0, "User"
)
else:
print(miner_data['pools'][0])
print(miner_data["pools"][0])
user = "Blank"
# braiins tuner status / wattage
if "tunerstatus" in miner_data.keys():
wattage = await safe_parse_api_data(miner_data, "tunerstatus", 0, 'TUNERSTATUS', 0, "PowerLimit")
wattage = await safe_parse_api_data(
miner_data, "tunerstatus", 0, "TUNERSTATUS", 0, "PowerLimit"
)
elif "Power" in miner_data["summary"][0]["SUMMARY"][0].keys():
wattage = await safe_parse_api_data(miner_data, "summary", 0, 'SUMMARY', 0, "Power")
wattage = await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "Power"
)
return {'TH/s': th5s, 'IP': str(miner.ip), 'model': model,
'temp': round(temps), 'host': host, 'user': user,
'wattage': wattage}
ret_data = {
"TH/s": th5s,
"IP": str(miner.ip),
"model": model,
"temp": round(temps),
"host": host,
"user": user,
"wattage": wattage,
}
logging.debug(f"{ret_data}")
return ret_data
async def generate_config(username, workername, v2_allowed):
@@ -383,45 +547,37 @@ async def generate_config(username, workername, v2_allowed):
return
if v2_allowed:
url_1 = 'stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt'
url_2 = 'stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt'
url_3 = 'stratum+tcp://stratum.slushpool.com:3333'
url_1 = "stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
url_2 = "stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
url_3 = "stratum+tcp://stratum.slushpool.com:3333"
else:
url_1 = 'stratum+tcp://ca.stratum.slushpool.com:3333'
url_2 = 'stratum+tcp://us-east.stratum.slushpool.com:3333'
url_3 = 'stratum+tcp://stratum.slushpool.com:3333'
url_1 = "stratum+tcp://ca.stratum.slushpool.com:3333"
url_2 = "stratum+tcp://us-east.stratum.slushpool.com:3333"
url_3 = "stratum+tcp://stratum.slushpool.com:3333"
config = {'group': [{
'name': 'group',
'quota': 1,
'pool': [{
'url': url_1,
'user': user,
'password': '123'
}, {
'url': url_2,
'user': user,
'password': '123'
}, {
'url': url_3,
'user': user,
'password': '123'
}]
}],
'format': {
'version': '1.2+',
'model': 'Antminer S9',
'generator': 'upstream_config_util',
'timestamp': int(time.time())
config = {
"group": [
{
"name": "group",
"quota": 1,
"pool": [
{"url": url_1, "user": user, "password": "123"},
{"url": url_2, "user": user, "password": "123"},
{"url": url_3, "user": user, "password": "123"},
],
}
],
"format": {
"version": "1.2+",
"model": "Antminer S9",
"generator": "upstream_config_util",
"timestamp": int(time.time()),
},
'temp_control': {
'target_temp': 80.0,
'hot_temp': 90.0,
'dangerous_temp': 120.0
"temp_control": {
"target_temp": 80.0,
"hot_temp": 90.0,
"dangerous_temp": 120.0,
},
'autotuning': {
'enabled': True,
'psu_power_limit': 900
}
"autotuning": {"enabled": True, "psu_power_limit": 900},
}
window['config'].update(await bos_config_convert(config))
window["config"].update(await bos_config_convert(config))

View File

@@ -4,7 +4,7 @@ from API import APIError
# noinspection PyPep8
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
path = [*path]
if len(path) == idx+1:
if len(path) == idx + 1:
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
@@ -17,34 +17,50 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
elif isinstance(path[idx], int):
if isinstance(data, list):
if len(data) > path[idx]:
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
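
In practice, safe_parse_api_data is awaited with the full key/index path into the multicommand response, so a malformed reply surfaces as an APIError naming the failing step instead of a bare KeyError. A sketch of the hashrate lookup used above, against an assumed, trimmed response:

import asyncio

from tools.cfg_util.cfg_util_sg.func.parse_data import safe_parse_api_data

# assumed, trimmed multicommand response
miner_data = {"summary": [{"SUMMARY": [{"MHS av": 13_500_000.0}]}]}

async def example():
    mhs = await safe_parse_api_data(miner_data, "summary", 0, "SUMMARY", 0, "MHS av")
    return round(mhs / 1_000_000, 2)

print(asyncio.run(example()))  # 13.5 (TH/s)
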

View File

@@ -6,6 +6,12 @@ from tools.cfg_util.cfg_util_sg.layout import window
import pyperclip
def table_select_all():
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
def copy_from_table(table):
selection = table.selection()
copy_values = []
@@ -19,6 +25,19 @@ def copy_from_table(table):
pyperclip.copy(copy_string)
def copy_from_ssh_table(table):
selection = table.selection()
copy_values = []
for each in selection:
try:
value = ", ".join(table.item(each)["values"])
copy_values.append(str(value))
except:
pass
copy_string = "\n".join(copy_values)
pyperclip.copy(copy_string)
async def update_ui_with_data(key, message, append=False):
if append:
message = window[key].get_text() + message
@@ -27,7 +46,7 @@ async def update_ui_with_data(key, message, append=False):
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window['progress'].maxlen)
percent_done = 100 * (amount / window["progress"].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
@@ -40,33 +59,66 @@ async def set_progress_bar_len(amount):
async def sort_data(index: int or str):
if window["scan"].Disabled:
return
await update_ui_with_data("status", "Sorting Data")
data_list = window['ip_table'].Values
data_list = window["ip_table"].Values
table = window["ip_table"].Widget
all_data = []
for idx, item in enumerate(data_list):
all_data.append({"data": item, "tags": table.item(int(idx) + 1)["tags"]})
# wattage
if re.match("[0-9]* W", str(data_list[0][index])):
new_list = sorted(data_list, key=lambda x: int(x[index].replace(" W", "")))
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: int(x[index].replace(" W", "")))
if re.match("[0-9]* W", str(all_data[0]["data"][index])):
new_list = sorted(
all_data, key=lambda x: int(x["data"][index].replace(" W", ""))
)
if all_data == new_list:
new_list = sorted(
all_data,
reverse=True,
key=lambda x: int(x["data"][index].replace(" W", "")),
)
# hashrate
elif re.match("[0-9]*\.?[0-9]* TH\/s", str(data_list[0][index])):
new_list = sorted(data_list, key=lambda x: float(x[index].replace(" TH/s", "")))
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: float(x[index].replace(" TH/s", "")))
elif re.match("[0-9]*\.?[0-9]* TH\/s", str(all_data[0]["data"][index])):
new_list = sorted(
all_data, key=lambda x: float(x["data"][index].replace(" TH/s", ""))
)
if all_data == new_list:
new_list = sorted(
all_data,
reverse=True,
key=lambda x: float(x["data"][index].replace(" TH/s", "")),
)
# ip addresses
elif re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(data_list[0][index])):
new_list = sorted(data_list, key=lambda x: ipaddress.ip_address(x[index]))
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: ipaddress.ip_address(x[index]))
elif re.match(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index]),
):
new_list = sorted(
all_data, key=lambda x: ipaddress.ip_address(x["data"][index])
)
if all_data == new_list:
new_list = sorted(
all_data,
reverse=True,
key=lambda x: ipaddress.ip_address(x["data"][index]),
)
# everything else, hostname, temp, and user
else:
new_list = sorted(data_list, key=lambda x: x[index])
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: x[index])
new_list = sorted(all_data, key=lambda x: x["data"][index])
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: x["data"][index])
new_data = []
for item in new_list:
new_data.append(item["data"])
await update_ui_with_data("ip_table", new_data)
for idx, item in enumerate(new_list):
table.item(idx + 1, tags=item["tags"])
await update_ui_with_data("ip_table", new_list)
await update_ui_with_data("status", "")
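
sort_data above toggles direction by re-sorting in reverse whenever the freshly sorted list equals the current one, and it carries each row's tkinter tags along so fault-light highlighting survives the sort. The direction toggle on the IP column reduces to a pattern like this (rows assumed):

import ipaddress

rows = [["10.0.0.12", "S9"], ["10.0.0.2", "S19"], ["10.0.0.7", "M20S"]]

def toggle_sort_by_ip(rows):
    new_rows = sorted(rows, key=lambda r: ipaddress.ip_address(r[0]))
    if new_rows == rows:
        # already ascending, so flip to descending
        new_rows = sorted(
            rows, key=lambda r: ipaddress.ip_address(r[0]), reverse=True
        )
    return new_rows

rows = toggle_sort_by_ip(rows)  # ascending
rows = toggle_sort_by_ip(rows)  # descending on the second click
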

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +0,0 @@
"""
This file stores the MinerFactory instance used by the ConfigUtility for use in other files.
"""
from miners.miner_factory import MinerFactory
miner_factory = MinerFactory()

View File

@@ -1,13 +1,38 @@
import asyncio
import sys
import PySimpleGUI as sg
import tkinter as tk
from tools.cfg_util.cfg_util_sg.layout import window, generate_config_layout
from tools.cfg_util.cfg_util_sg.func.miners import send_config, miner_light, refresh_data, generate_config, import_config, \
scan_and_get_data, restart_miners_backend, reboot_miners
from tools.cfg_util.cfg_util_sg.func.files import import_iplist, \
import_config_file, export_iplist, export_config_file, export_csv
from tools.cfg_util.cfg_util_sg.func.ui import sort_data, copy_from_table
from tools.cfg_util.cfg_util_sg.layout import (
window,
generate_config_layout,
send_ssh_cmd_layout,
)
from tools.cfg_util.cfg_util_sg.func.miners import (
send_config,
miner_light,
refresh_data,
generate_config,
import_config,
scan_and_get_data,
restart_miners_backend,
reboot_miners,
send_miners_ssh_commands,
)
from tools.cfg_util.cfg_util_sg.func.files import (
import_iplist,
import_config_file,
export_iplist,
export_config_file,
export_csv,
)
from tools.cfg_util.cfg_util_sg.func.decorators import disable_buttons
from tools.cfg_util.cfg_util_sg.func.ui import (
sort_data,
copy_from_table,
table_select_all,
copy_from_ssh_table,
)
from network import MinerNetwork
@@ -18,53 +43,120 @@ async def ui():
window.read(timeout=0)
table = window["ip_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_table(table))
table.bind("<Control-Key-a>", lambda x: table_select_all())
# light tag shows red row for fault lights
table.tag_configure("light", foreground="white", background="red")
# left justify the hostnames
table.column(2, anchor=tk.W)
while True:
event, value = window.read(timeout=10)
if event in (None, 'Close', sg.WIN_CLOSED):
event, value = window.read(timeout=0)
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == 'ip_table':
if event[0] == "ip_table":
if event[2][0] == -1:
await sort_data(event[2][1])
if event == 'open_in_web':
if event == "open_in_web":
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == 'scan':
if len(value['miner_network'].split("/")) > 1:
network = value['miner_network'].split("/")
if event == "scan":
if len(value["miner_network"].split("/")) > 1:
network = value["miner_network"].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value['miner_network'])
miner_network = MinerNetwork(value["miner_network"])
asyncio.create_task(scan_and_get_data(miner_network))
if event == 'select_all_ips':
if event == "select_all_ips":
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(select_rows=([row for row in range(len(window["ip_table"].Values))]))
if event == 'import_config':
if 2 > len(value['ip_table']) > 0:
asyncio.create_task(import_config(value['ip_table']))
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
if event == "import_config":
if 2 > len(value["ip_table"]) > 0:
asyncio.create_task(import_config(value["ip_table"]))
if event == "restart_miner_backend":
asyncio.create_task(restart_miners_backend([window['ip_table'].Values[item][0] for item in value['ip_table']]))
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
restart_miners_backend(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "reboot_miners":
asyncio.create_task(reboot_miners([window['ip_table'].Values[item][0] for item in value['ip_table']]))
if event == 'light':
asyncio.create_task(miner_light([window['ip_table'].Values[item][0] for item in value['ip_table']]))
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
reboot_miners(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "send_miner_ssh_command_window":
ips = [window["ip_table"].Values[item][0] for item in value["ip_table"]]
if len(ips) == 0:
ips = [item[0] for item in window["ip_table"].Values]
if not len(ips) == 0:
await generate_ssh_cmd_ui(ips)
if event == "light":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
miner_light(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(export_iplist(value["file_iplist"], [window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
export_iplist(
value["file_iplist"],
[window["ip_table"].Values[item][0] for item in value["ip_table"]],
)
)
if event == "export_csv":
asyncio.create_task(export_csv(value["file_iplist"], [window['ip_table'].Values[item] for item in value['ip_table']]))
asyncio.create_task(
export_csv(
value["file_iplist"],
[window["ip_table"].Values[item] for item in value["ip_table"]],
)
)
if event == "send_config":
asyncio.create_task(send_config([window['ip_table'].Values[item][0] for item in value['ip_table']], value['config']))
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
send_config(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
],
value["config"],
last_octet_ip=value["last_octet_user"],
)
)
if event == "import_file_config":
asyncio.create_task(import_config_file(value['file_config']))
asyncio.create_task(import_config_file(value["file_config"]))
if event == "export_file_config":
asyncio.create_task(export_config_file(value['file_config'], value["config"]))
asyncio.create_task(
export_config_file(value["file_config"], value["config"])
)
if event == "refresh_data":
asyncio.create_task(refresh_data([window["ip_table"].Values[item][0] for item in value["ip_table"]]))
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
refresh_data(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "generate_config":
await generate_config_ui()
if event == "__TIMEOUT__":
@@ -72,15 +164,43 @@ async def ui():
async def generate_config_ui():
generate_config_window = sg.Window("Generate Config", generate_config_layout(), modal=True)
generate_config_window = sg.Window(
"Generate Config", generate_config_layout(), modal=True
)
while True:
event, values = generate_config_window.read()
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "generate_config_window_generate":
if values['generate_config_window_username']:
await generate_config(values['generate_config_window_username'],
values['generate_config_window_workername'],
values['generate_config_window_allow_v2'])
if values["generate_config_window_username"]:
await generate_config(
values["generate_config_window_username"],
values["generate_config_window_workername"],
values["generate_config_window_allow_v2"],
)
generate_config_window.close()
break
@disable_buttons
async def generate_ssh_cmd_ui(selected_miners: list):
ssh_cmd_window = sg.Window(
"Send Command", send_ssh_cmd_layout(selected_miners), modal=True
)
ssh_cmd_window.read(timeout=0)
table = ssh_cmd_window["ssh_cmd_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_ssh_table(table))
# left justify the results
table.column(1, anchor=tk.W)
while True:
event, values = ssh_cmd_window.read(timeout=0)
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "ssh_command_window_send_cmd":
asyncio.create_task(
send_miners_ssh_commands(
selected_miners, values["ssh_command_window_cmd"], ssh_cmd_window
)
)
if event == "__TIMEOUT__":
await asyncio.sleep(0)

View File

@@ -4,7 +4,7 @@ from API import APIError
# noinspection PyPep8
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
path = [*path]
if len(path) == idx+1:
if len(path) == idx + 1:
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
@@ -17,34 +17,50 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
elif isinstance(path[idx], int):
if isinstance(data, list):
if len(data) > path[idx]:
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
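A minimal usage sketch of safe_parse_api_data (the data shape below is hypothetical, mirroring the cgminer-style summary responses parsed elsewhere in this changeset; it must be awaited inside an async context):

# hypothetical cgminer-style response
data = {"SUMMARY": [{"MHS av": 13500000.0}]}
# walks data["SUMMARY"][0]["MHS av"]; raises APIError if any step along the path is missing
hashrate = await safe_parse_api_data(data, "SUMMARY", 0, "MHS av")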

View File

@@ -0,0 +1,10 @@
from tools.web_monitor.app import app
import uvicorn
def main():
uvicorn.run("app:app", host="0.0.0.0", port=80)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,36 @@
from fastapi import Request, APIRouter
from fastapi.responses import RedirectResponse
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from tools.web_monitor._settings.func import get_current_settings, update_settings
router = APIRouter()
@router.route("/", methods=["GET", "POST"])
async def settings(request: Request):
return templates.TemplateResponse(
"settings.html",
{
"request": request,
"cur_miners": get_current_miner_list(),
"settings": get_current_settings(),
},
)
@router.post("/update")
async def update_settings_page(request: Request):
data = await request.form()
graph_data_sleep_time = data.get("graph_data_sleep_time")
miner_data_timeout = data.get("miner_data_timeout")
miner_identify_timeout = data.get("miner_identify_timeout")
new_settings = {
"graph_data_sleep_time": int(graph_data_sleep_time),
"miner_data_timeout": int(miner_data_timeout),
"miner_identify_timeout": int(miner_identify_timeout),
}
update_settings(new_settings)
return RedirectResponse(request.url_for("settings"))
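A client-side sketch of this update endpoint (the host is an assumption; the field names match the form handler above), using the requests package:

import requests

# form-encoded POST, matching the fields read via request.form()
requests.post(
    "http://localhost/settings/update",
    data={
        "graph_data_sleep_time": 1,
        "miner_data_timeout": 5,
        "miner_identify_timeout": 5,
    },
)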

View File

@@ -0,0 +1,24 @@
import toml
import os
def get_current_settings():
try:
with open(
os.path.join(os.getcwd(), "settings/web_settings.toml"), "r"
) as settings_file:
settings = toml.loads(settings_file.read())
except:
settings = {
"graph_data_sleep_time": 1,
"miner_data_timeout": 5,
"miner_identify_timeout": 5,
}
return settings
def update_settings(settings):
with open(
os.path.join(os.getcwd(), "settings/web_settings.toml"), "w"
) as settings_file:
settings_file.write(toml.dumps(settings))
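A short usage sketch; it assumes the process was started from the repository root, since both helpers build the path from os.getcwd():

# read, tweak, and persist the web settings
settings = get_current_settings()
settings["graph_data_sleep_time"] = 2
update_settings(settings)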

View File

@@ -0,0 +1,3 @@
graph_data_sleep_time = 1
miner_data_timeout = 5
miner_identify_timeout = 5

tools/web_monitor/app.py
View File

@@ -0,0 +1,35 @@
import os
import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from tools.web_monitor.dashboard import router as dashboard_router
from tools.web_monitor.miner import router as miner_router
from tools.web_monitor.scan import router as scan_router
from tools.web_monitor._settings import router as settings_router
app = FastAPI()
app.mount(
"/static",
StaticFiles(directory=os.path.join(os.path.dirname(__file__), "static")),
name="static",
)
app.include_router(dashboard_router, tags=["dashboard"])
app.include_router(miner_router, tags=["miner"], prefix="/miner")
app.include_router(scan_router, tags=["scan"], prefix="/scan")
app.include_router(settings_router, tags=["settings"], prefix="/settings")
@app.get("/remove_all_miners")
async def remove_all_miners(request: Request):
file = open("miner_list.txt", "w")
file.close()
return RedirectResponse(request.url_for("settings"))
if __name__ == "__main__":
uvicorn.run("app:app", host="0.0.0.0", port=80)

View File

@@ -0,0 +1,22 @@
from fastapi import Request, APIRouter
from fastapi.responses import RedirectResponse
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from .ws import router as ws_router
router = APIRouter()
router.include_router(ws_router)
@router.get("/")
def index(request: Request):
return RedirectResponse(request.url_for("dashboard"))
@router.get("/dashboard")
def dashboard(request: Request):
return templates.TemplateResponse(
"index.html", {"request": request, "cur_miners": get_current_miner_list()}
)

View File

@@ -0,0 +1,43 @@
import asyncio
from miners.miner_factory import MinerFactory
from tools.web_monitor._settings.func import get_current_settings
async def get_miner_data_dashboard(miner_ip):
try:
settings = get_current_settings()
miner_identify_timeout = settings["miner_identify_timeout"]
miner_data_timeout = settings["miner_data_timeout"]
        # bind the resolved miner to a separate name so the original IP string
        # is still available to the error handlers below
        cur_miner = await asyncio.wait_for(
            MinerFactory().get_miner(miner_ip), miner_identify_timeout
        )
        miner_summary = await asyncio.wait_for(
            cur_miner.api.summary(), miner_data_timeout
        )
        if miner_summary:
            if "MHS av" in miner_summary["SUMMARY"][0].keys():
                hashrate = format(
                    round(miner_summary["SUMMARY"][0]["MHS av"] / 1000000, 2), ".2f"
                )
            elif "GHS av" in miner_summary["SUMMARY"][0].keys():
                hashrate = format(
                    round(miner_summary["SUMMARY"][0]["GHS av"] / 1000, 2), ".2f"
                )
            else:
                hashrate = 0
        else:
            hashrate = 0
        return {"ip": str(cur_miner.ip), "hashrate": hashrate}
except asyncio.exceptions.TimeoutError:
return {"ip": miner_ip, "error": "The miner is not responding."}
except KeyError:
return {
"ip": miner_ip,
"error": "The miner returned unusable/unsupported data.",
}
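Both branches above normalize the reported hashrate to TH/s; with hypothetical values, an "MHS av" of 13,500,000 gives 13500000 / 1000000 = 13.50 TH/s, and a "GHS av" of 13,500 gives 13500 / 1000 = 13.50 TH/s.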

View File

@@ -0,0 +1,41 @@
import asyncio
import datetime
import websockets.exceptions
from fastapi import WebSocket, WebSocketDisconnect, APIRouter
from tools.web_monitor.func import get_current_miner_list
from tools.web_monitor._settings.func import get_current_settings
from tools.web_monitor.dashboard.func import get_miner_data_dashboard
router = APIRouter()
@router.websocket("/dashboard/ws")
async def dashboard_websocket(websocket: WebSocket):
await websocket.accept()
graph_sleep_time = get_current_settings()["graph_data_sleep_time"]
try:
while True:
miners = get_current_miner_list()
all_miner_data = []
data_gen = asyncio.as_completed(
[get_miner_data_dashboard(miner_ip) for miner_ip in miners]
)
for all_data in data_gen:
data_point = await all_data
all_miner_data.append(data_point)
all_miner_data.sort(key=lambda x: x["ip"])
await websocket.send_json(
{
"datetime": datetime.datetime.now().isoformat(),
"miners": all_miner_data,
}
)
await asyncio.sleep(graph_sleep_time)
except WebSocketDisconnect:
print("Websocket disconnected.")
pass
except websockets.exceptions.ConnectionClosedOK:
pass

View File

@@ -0,0 +1,12 @@
import os
import ipaddress
def get_current_miner_list():
cur_miners = []
if os.path.exists(os.path.join(os.getcwd(), "miner_list.txt")):
with open(os.path.join(os.getcwd(), "miner_list.txt")) as file:
for line in file.readlines():
cur_miners.append(line.strip())
cur_miners = sorted(cur_miners, key=lambda x: ipaddress.ip_address(x))
return cur_miners

View File

@@ -0,0 +1,22 @@
from fastapi import Request, APIRouter
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from .ws import router as ws_router
router = APIRouter()
router.include_router(ws_router)
@router.get("/")
def miner(_request: Request, _miner_ip):
return get_miner
@router.get("/{miner_ip}")
def get_miner(request: Request, miner_ip):
return templates.TemplateResponse(
"miner.html",
{"request": request, "cur_miners": get_current_miner_list(), "miner": miner_ip},
)

View File

@@ -0,0 +1,16 @@
from fastapi import Request
from fastapi.responses import RedirectResponse
from tools.web_monitor.app import app
from tools.web_monitor.func import get_current_miner_list
@app.get("/{miner_ip}/remove")
def get_miner(request: Request, miner_ip):
miners = get_current_miner_list()
miners.remove(miner_ip)
with open("miner_list.txt", "w") as file:
for miner_ip in miners:
file.write(miner_ip + "\n")
return RedirectResponse(request.url_for("dashboard"))

View File

@@ -0,0 +1,167 @@
import asyncio
import datetime
import websockets.exceptions
from fastapi import WebSocket, WebSocketDisconnect, APIRouter
from miners.miner_factory import MinerFactory
from tools.web_monitor._settings.func import get_current_settings
router = APIRouter()
@router.websocket("/{miner_ip}/ws")
async def miner_websocket(websocket: WebSocket, miner_ip):
await websocket.accept()
settings = get_current_settings()
miner_identify_timeout = settings["miner_identify_timeout"]
miner_data_timeout = settings["miner_data_timeout"]
try:
while True:
try:
cur_miner = await asyncio.wait_for(
MinerFactory().get_miner(str(miner_ip)), miner_identify_timeout
)
data = await asyncio.wait_for(
cur_miner.api.multicommand(
"summary", "fans", "stats", "devs", "temps"
),
miner_data_timeout,
)
miner_model = await cur_miner.get_model()
miner_summary = None
miner_fans = None
if "summary" in data.keys():
miner_summary = data["summary"][0]
elif "SUMMARY" in data.keys():
miner_summary = data
miner_fans = {"FANS": []}
for item in ["Fan Speed In", "Fan Speed Out"]:
if item in miner_summary["SUMMARY"][0].keys():
miner_fans["FANS"].append(
{"RPM": miner_summary["SUMMARY"][0][item]}
)
if "fans" in data.keys():
miner_fans = data["fans"][0]
miner_temp_list = []
if "temps" in data.keys():
miner_temps = data["temps"][0]
for board in miner_temps["TEMPS"]:
if board["Chip"] is not None and not board["Chip"] == 0.0:
miner_temp_list.append(board["Chip"])
if "devs" in data.keys() and not len(miner_temp_list) > 0:
if not data["devs"][0].get("DEVS") == []:
if "Chip Temp Avg" in data["devs"][0]["DEVS"][0].keys():
for board in data["devs"][0]["DEVS"]:
if (
board["Chip Temp Avg"] is not None
and not board["Chip Temp Avg"] == 0.0
):
miner_temp_list.append(board["Chip Temp Avg"])
if "stats" in data.keys() and not len(miner_temp_list) > 0:
if not data["stats"][0]["STATS"] == []:
for temp in ["temp2", "temp1", "temp3"]:
if temp in data["stats"][0]["STATS"][1].keys():
if (
data["stats"][0]["STATS"][1][temp] is not None
and not data["stats"][0]["STATS"][1][temp] == 0.0
):
miner_temp_list.append(
data["stats"][0]["STATS"][1][temp]
)
data["stats"][0]["STATS"][0].keys()
if any(
"MM ID" in string
for string in data["stats"][0]["STATS"][0].keys()
):
temp_all = []
for key in [
string
for string in data["stats"][0]["STATS"][0].keys()
if "MM ID" in string
]:
for value in [
string
for string in data["stats"][0]["STATS"][0][key].split(
" "
)
if "TMax" in string
]:
temp_all.append(
int(value.split("[")[1].replace("]", ""))
)
miner_temp_list.append(round(sum(temp_all) / len(temp_all)))
if "stats" in data.keys() and not miner_fans:
miner_stats = data["stats"][0]
miner_fans = {"FANS": []}
for item in ["fan1", "fan2", "fan3", "fan4"]:
if item in miner_stats["STATS"][1].keys():
miner_fans["FANS"].append(
{"RPM": miner_stats["STATS"][1][item]}
)
if miner_summary:
if "MHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = float(
format(
round(
miner_summary["SUMMARY"][0]["MHS av"] / 1000000, 2
),
".2f",
)
)
elif "GHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = float(
format(
round(miner_summary["SUMMARY"][0]["GHS av"] / 1000, 2),
".2f",
)
)
else:
hashrate = 0
else:
hashrate = 0
fan_speeds = []
if miner_fans:
for fan in miner_fans["FANS"]:
fan_speeds.append(fan["RPM"])
while len(fan_speeds) < 4:
fan_speeds.append(0)
if len(miner_temp_list) == 0:
miner_temp_list = [0]
data = {
"hashrate": hashrate,
"fans": fan_speeds,
"temp": round(sum(miner_temp_list) / len(miner_temp_list), 2),
"datetime": datetime.datetime.now().isoformat(),
"model": miner_model,
}
print(data)
await websocket.send_json(data)
await asyncio.sleep(settings["graph_data_sleep_time"])
except asyncio.exceptions.TimeoutError:
data = {"error": "The miner is not responding."}
await websocket.send_json(data)
await asyncio.sleep(0.5)
except KeyError as e:
print(e)
data = {"error": "The miner returned unusable/unsupported data."}
await websocket.send_json(data)
await asyncio.sleep(0.5)
except WebSocketDisconnect:
print("Websocket disconnected.")
except websockets.exceptions.ConnectionClosedOK:
pass

Binary file not shown.

View File

@@ -0,0 +1,25 @@
from fastapi import Request, APIRouter
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from .ws import router as ws_router
router = APIRouter()
router.include_router(ws_router)
@router.get("/")
def scan(request: Request):
return templates.TemplateResponse(
"scan.html", {"request": request, "cur_miners": get_current_miner_list()}
)
@router.post("/add_miners")
async def add_miners_scan(request: Request):
miners = await request.json()
with open("miner_list.txt", "a+") as file:
for miner_ip in miners["miners"]:
file.write(miner_ip + "\n")
    # echo the accepted payload back instead of returning the route function object
    return miners

View File

@@ -0,0 +1,39 @@
import asyncio
from fastapi import WebSocket
from network import MinerNetwork
from tools.web_monitor.func import get_current_miner_list
from miners.miner_factory import MinerFactory
async def do_websocket_scan(websocket: WebSocket, network_ip: str):
cur_miners = get_current_miner_list()
try:
if "/" in network_ip:
network_ip, network_subnet = network_ip.split("/")
network = MinerNetwork(network_ip, mask=network_subnet)
else:
network = MinerNetwork(network_ip)
miner_generator = network.scan_network_generator()
miners = []
async for miner_ip in miner_generator:
if miner_ip and str(miner_ip) not in cur_miners:
miners.append(miner_ip)
get_miner_generator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_generator:
all_miners.append(
{"ip": found_miner.ip, "model": await found_miner.get_model()}
)
all_miners.sort(key=lambda x: x["ip"])
send_miners = []
for miner_ip in all_miners:
send_miners.append(
{"ip": str(miner_ip["ip"]), "model": miner_ip["model"]}
)
await websocket.send_json(send_miners)
await websocket.send_text("Done")
except asyncio.CancelledError:
raise

View File

@@ -0,0 +1,35 @@
import asyncio
import websockets.exceptions
from fastapi import WebSocket, WebSocketDisconnect, APIRouter
from tools.web_monitor.scan.func import do_websocket_scan
router = APIRouter()
@router.websocket("/ws")
async def websocket_scan(websocket: WebSocket):
await websocket.accept()
cur_task = None
try:
while True:
ws_data = await websocket.receive_text()
if "-Cancel-" in ws_data:
if cur_task:
cur_task.cancel()
print("Cancelling scan...")
try:
await cur_task
except asyncio.CancelledError:
cur_task = None
await websocket.send_text("Cancelled")
else:
cur_task = asyncio.create_task(do_websocket_scan(websocket, ws_data))
if cur_task and cur_task.done():
cur_task = None
except WebSocketDisconnect:
print("Websocket disconnected.")
except websockets.exceptions.ConnectionClosedOK:
pass
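A minimal client sketch of the scan websocket protocol (host, port, and subnet are assumptions; the third-party websockets package is used): send a subnet string to start a scan, send "-Cancel-" to abort, and read JSON miner lists until the literal "Done" or "Cancelled" arrives.

import asyncio
import json
import websockets

async def scan(subnet="192.168.1.0/24"):
    # hypothetical host; point this at wherever the web monitor is served
    async with websockets.connect("ws://localhost/scan/ws") as ws:
        await ws.send(subnet)  # start scanning the subnet
        while True:
            msg = await ws.recv()
            if msg in ("Done", "Cancelled"):  # completion signals from the server
                break
            print(json.loads(msg))  # list of {"ip": ..., "model": ...} dicts

asyncio.run(scan())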

View File

@@ -0,0 +1,165 @@
body {
min-height: 100vh;
min-height: -webkit-fill-available;
}
html {
height: -webkit-fill-available;
}
main {
display: flex;
flex-wrap: nowrap;
height: 100vh;
height: -webkit-fill-available;
max-height: 100vh;
overflow-x: auto;
overflow-y: hidden;
}
.bi {
vertical-align: -.125em;
pointer-events: none;
fill: currentColor;
}
.dropdown-toggle { outline: 0; }
.nav-flush .nav-link {
border-radius: 0;
}
.btn-toggle-nav a {
display: inline-flex;
padding: .1875rem .5rem;
margin-top: .125rem;
margin-left: 1.25rem;
text-decoration: none;
}
.btn-toggle-nav a:hover,
.btn-toggle-nav a:focus {
background-color: #0d6efd;
}
.scrollarea {
overflow-y: auto;
}
.fw-semibold { font-weight: 600; }
.sidebar {
position: fixed;
top: 0;
/* rtl:raw:
right: 0;
*/
bottom: 0;
/* rtl:remove */
left: 0;
z-index: 100; /* Behind the navbar */
padding: 0px 0 0; /* Height of navbar */
box-shadow: inset -1px 0 0 rgba(0, 0, 0, .1);
}
@media (max-width: 991.98px) {
.sidebar {
top: 48px;
}
}
.sidebar-sticky {
position: relative;
top: 0;
height: calc(100vh - 48px);
padding-top: .5rem;
overflow-x: hidden;
overflow-y: auto;
}
.sidebar .nav-link {
font-weight: 500;
color: #333;
}
.sidebar .nav-link .feather {
margin-right: 4px;
color: #727272;
}
.sidebar .nav-link.active {
color: #2470dc;
}
.sidebar .nav-link:hover .feather,
.sidebar .nav-link.active .feather {
color: inherit;
}
.sidebar-heading {
font-size: .75rem;
text-transform: uppercase;
}
.navbar-brand {
padding-top: .75rem;
padding-bottom: .75rem;
font-size: 1rem;
background-color: rgba(0, 0, 0, .25);
box-shadow: inset -1px 0 0 rgba(0, 0, 0, .25);
}
.navbar .navbar-toggler {
top: .25rem;
right: 1rem;
}
.navbar .form-control {
padding: .75rem 1rem;
border-width: 0;
border-radius: 0;
}
.form-control-dark {
color: #fff;
background-color: rgba(255, 255, 255, .1);
border-color: rgba(255, 255, 255, .1);
}
.form-control-dark:focus {
border-color: transparent;
box-shadow: 0 0 0 3px rgba(255, 255, 255, .25);
}
.btn-toggle-nav{
max-height: 300px;
-webkit-overflow-scrolling: touch;
}
/* Scrollbar */
.btn-toggle-nav::-webkit-scrollbar {
width: 5px;
}
.btn-toggle-nav::-webkit-scrollbar-track {
box-shadow: inset 0 0 5px grey;
border-radius: 10px;
}
.btn-toggle-nav::-webkit-scrollbar-thumb {
background-image: linear-gradient(180deg, #D0368A 0%, #708AD4 99%);
box-shadow: inset 2px 2px 5px 0 rgba(#fff, 0.5);
border-radius: 100px;
}
.nav-pills .nav-link.active {
color: #212529;
background-image: linear-gradient(180deg, #D0368A 0%, #708AD4 99%);
}
.navbar-link:hover {
background-image: linear-gradient(180deg, #760A45 0%, #23449F 99%);
}
.navbar-link {
transition: unset;
color: unset;
}

View File

@@ -0,0 +1,7 @@
import os
from fastapi.templating import Jinja2Templates
templates = Jinja2Templates(
directory=os.path.join(os.path.dirname(__file__), "templates")
)

View File

@@ -0,0 +1,128 @@
{% extends 'navbar.html'%}
{% block content %}
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script src="https://cdn.jsdelivr.net/npm/luxon@2.3.1/build/global/luxon.min.js"></script>
<canvas id="line-chart" class="grad-border mt-3 mb-4" width="600" height="360"></canvas>
{% if cur_miners|length == 0 %}<a role="button" href="/scan" id="noMiners" class="w-100 btn btn-info">Click here to add miners.</a>{% endif %}
<div id="errors"></div>
<script>
if (!window.WebSocket) alert("WebSocket not supported by this browser");
var ws = new WebSocket("ws://{{request.url.hostname}}:{% if request.port %}{{request.url.port}}{% else %}80{% endif %}/dashboard/ws");
let all_data = []
let all_labels = []
ws.onmessage = function(event) {
var new_data = JSON.parse(event.data)
if (!new_data["miners"].length == 0) {
total_hashrate = parseFloat(0)
errors = document.getElementById("errors")
for (i = 0; i< new_data["miners"].length; i++) {
if (new_data["miners"][i].hasOwnProperty("error")) {
if (!document.getElementById(new_data["miners"][i]["ip"] + "_error")) {
errors.innerHTML += "<div id='" + new_data["miners"][i]["ip"] + "_error" +
"' class='d-flex align-items-center p-1 mb-1 alert alert-danger'><strong class='p-0 m-0'>" +
new_data["miners"][i]["ip"] + ": " +
new_data["miners"][i]["error"] +
"</strong><div class='spinner-border spinner-border-sm ms-auto'></div></div>"
}
} else {
if (document.getElementById(new_data["miners"][i]["ip"] + "_error")) {
document.getElementById(new_data["miners"][i]["ip"] + "_error").remove()
}
total_hashrate += parseFloat(new_data["miners"][i]["hashrate"])
}
};
var chart = document.getElementById("line-chart")
datetime = luxon.DateTime.fromISO(new_data["datetime"]).toLocal();
if (minerDataChart.data.labels.length > 50) minerDataChart.data.labels.shift();
if (minerDataChart.data.datasets[0].data.length > 50) minerDataChart.data.datasets[0].data.shift();
minerDataChart.data.labels.push(datetime.toLocaleString(luxon.DateTime.TIME_WITH_SECONDS));
minerDataChart.data.datasets[0].data.push(total_hashrate.toFixed(2));
minerDataChart.update();
}
};
var ctx = document.getElementById("line-chart").getContext("2d");
var width = document.getElementById("line-chart").width;
var chartGradient = ctx.createLinearGradient(0, 0, width, 0)
chartGradient.addColorStop(0, '#D0368A');
chartGradient.addColorStop(1, '#708AD4');
const chartAreaBorder = {
id: 'chartAreaBorder',
beforeDraw(chart, args, options) {
const {ctx, chartArea: {left, top, width, height}} = chart;
ctx.save();
ctx.strokeStyle = options.borderColor;
ctx.lineWidth = options.borderWidth;
ctx.strokeRect(left, top, width, height);
ctx.restore();
}
};
var minerDataChart = new Chart(document.getElementById("line-chart"), {
type: 'line',
data: {
labels: [
],
datasets: [{
label: "Hashrate",
borderColor: chartGradient,
pointBorderColor: chartGradient,
pointBackgroundColor: chartGradient,
pointHoverBackgroundColor: chartGradient,
pointHoverBorderColor: chartGradient,
data: [
],
}
]
},
plugins: [chartAreaBorder],
options: {
animation: {
easing: 'easeInSine',
duration: 0
},
plugins: {
chartAreaBorder: {
borderColor: chartGradient,
borderWidth: 1
},
legend: {
labels: {
color: chartGradient
}
},
tooltip: {
callbacks: {
label: function(data) {
return data.dataset.data[data.dataIndex] + " TH/s";
}
}
}
},
scales: {
y: {
min: 0, // minimum value
suggestedMax: 100,
stepSize: 10,
ticks: {
callback: function(value, index, ticks) {
return value + " TH/s";
}
}
},
x: {
ticks: {
maxTicksLimit: 6,
maxRotation: 0,
}
}
}
}
});
</script>
{% endblock content %}

View File

@@ -0,0 +1,376 @@
{% extends 'navbar.html'%}
{% block content %}
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script src="https://cdn.jsdelivr.net/npm/luxon@2.3.1/build/global/luxon.min.js"></script>
<div class="row mt-2">
<div class="col">
<h2 class="ms-3 mt-1">{{miner}}</h2>
</div>
<div class="col">
<div class="d-flex flex-row-reverse">
<button type="button" class="btn btn-outline-danger mx-1" data-bs-toggle="modal" data-bs-target="#removeModal">
Remove Miner
</button>
<!-- Modal -->
<div class="modal fade" id="removeModal" tabindex="-1" aria-labelledby="removeModalLabel" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="removeModalLabel">Remove Miner</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
Do you really want to remove this miner?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
<a class="btn btn-danger" href="{{url_for('miner')}}/{{miner}}/remove" role="button">Remove</a>
</div>
</div>
</div>
</div>
<a class="btn btn-primary mx-1" target="_blank" href="http://{{miner}}" role="button">Web Interface</a>
</div>
</div>
</div>
<div class="row">
<ul class="nav nav-tabs" id="myTab" role="tablist">
<li class="nav-item" role="presentation">
<button class="nav-link active text-dark" id="hashrate-tab" data-bs-toggle="tab" data-bs-target="#hashrate" type="button" role="tab" aria-controls="hashrate" aria-selected="true">Hashrate</button>
</li>
<li class="nav-item" role="presentation">
<button class="nav-link text-dark" id="temp-tab" data-bs-toggle="tab" data-bs-target="#temp" type="button" role="tab" aria-controls="temp" aria-selected="false">Temperature</button>
</li>
</ul>
<div class="tab-content" id="hashrateTempTabs">
<div class="tab-pane fade show active" id="hashrate" role="tabpanel" aria-labelledby="hashrate-tab">
<div class="col-12 line_chart">
<canvas id="hr-chart" class="grad-border mt-3" width="600" height="200"></canvas>
</div>
</div>
<div class="tab-pane fade" id="temp" role="tabpanel" aria-labelledby="temp-tab">
<div class="col-12 line_chart">
<canvas id="temp-chart" class="grad-border mt-3" width="600" height="200"></canvas>
</div>
</div>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan1">Fan 1</div>
<canvas class="mb-2" id="fan-chart-1" width="100" height="100"></canvas>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan2">Fan 2</div>
<canvas class="mb-2" id="fan-chart-2" width="100" height="100"></canvas>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan3">Fan 3</div>
<canvas class="mb-2" id="fan-chart-3" width="100" height="100"></canvas>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan4">Fan 4</div>
<canvas class="mb-2" id="fan-chart-4" width="100" height="100"></canvas>
</div>
</div>
<div class="d-flex align-items-center mt-4 alert alert-secondary">
<div class="mx-auto">Model:</div>
<div class="mx-auto fw-bolder" id="minerModel">?</div>
<div class="mx-auto" style="border-left: 1px solid grey; height: 50px;"></div>
<div class="mx-auto">Hashrate:</div>
<div class="mx-auto fw-bolder" id="minerHashrate">?</div>
</div>
<div id="errorContainer" class="d-flex align-items-center mt-4 alert alert-danger invisible">
<strong id="errorCode"></strong>
<div class="spinner-border ms-auto"></div>
</div>
<script>
var ws = new WebSocket("ws://{{request.url.hostname}}:{% if request.port %}{{request.url.port}}{% else %}80{% endif %}/miner/{{miner}}/ws");
let all_data = []
let all_labels = []
ws.onmessage = function(event) {
var new_data = JSON.parse(event.data)
if (new_data.hasOwnProperty("error")) {
var err_container = document.getElementById("errorContainer")
var err_code = document.getElementById("errorCode")
err_code.innerHTML = new_data['error']
err_container.classList.remove("invisible")
var miner_hr = document.getElementById("minerHashrate")
miner_hr.innerHTML = "?"
} else {
var chart = document.getElementById("hr-chart")
var err_container = document.getElementById("errorContainer")
        if (!err_container.classList.contains("invisible")) {
err_container.classList.add("invisible")
}
datetime = luxon.DateTime.fromISO(new_data["datetime"]).toLocal();
if (minerDataChart.data.labels.length > 50) minerDataChart.data.labels.shift();
if (minerDataChart.data.datasets[0].data.length > 50) minerDataChart.data.datasets[0].data.shift();
if (minerTempChart.data.labels.length > 50) minerTempChart.data.labels.shift();
if (minerTempChart.data.datasets[0].data.length > 50) minerTempChart.data.datasets[0].data.shift();
minerDataChart.data.labels.push(datetime.toLocaleString(luxon.DateTime.TIME_WITH_SECONDS));
minerDataChart.data.datasets[0].data.push(new_data["hashrate"].toFixed(2));
minerTempChart.data.labels.push(datetime.toLocaleString(luxon.DateTime.TIME_WITH_SECONDS));
minerTempChart.data.datasets[0].data.push(new_data["temp"].toFixed(2));
fan1Chart.data.datasets[0].data = [new_data["fans"][0], 6000-new_data["fans"][0]]
fan2Chart.data.datasets[0].data = [new_data["fans"][1], 6000-new_data["fans"][1]]
fan3Chart.data.datasets[0].data = [new_data["fans"][2], 6000-new_data["fans"][2]]
fan4Chart.data.datasets[0].data = [new_data["fans"][3], 6000-new_data["fans"][3]]
document.getElementById("fan1").innerHTML = "Fan 1: " + new_data["fans"][0]
document.getElementById("fan2").innerHTML = "Fan 2: " + new_data["fans"][1]
document.getElementById("fan3").innerHTML = "Fan 3: " + new_data["fans"][2]
document.getElementById("fan4").innerHTML = "Fan 4: " + new_data["fans"][3]
fan1Chart.update();
fan2Chart.update();
fan3Chart.update();
fan4Chart.update();
minerDataChart.update();
minerTempChart.update();
var miner_hr = document.getElementById("minerHashrate")
miner_hr.innerHTML = new_data["hashrate"].toFixed(2) + " TH/s"
var miner_model = document.getElementById("minerModel")
miner_model.innerHTML = new_data["model"]
};
};
var ctx = document.getElementById("hr-chart").getContext("2d");
var width = document.getElementById("hr-chart").width;
var chartGradient = ctx.createLinearGradient(0, 0, width, 0)
chartGradient.addColorStop(0, '#D0368A');
chartGradient.addColorStop(1, '#708AD4');
const chartAreaBorder = {
id: 'chartAreaBorder',
beforeDraw(chart, args, options) {
const {ctx, chartArea: {left, top, width, height}} = chart;
ctx.save();
ctx.strokeStyle = options.borderColor;
ctx.lineWidth = options.borderWidth;
ctx.strokeRect(left, top, width, height);
ctx.restore();
}
};
var minerDataChart = new Chart(document.getElementById("hr-chart"), {
type: 'line',
data: {
labels: [
],
datasets: [{
label: "Hashrate",
borderColor: chartGradient,
pointBorderColor: chartGradient,
pointBackgroundColor: chartGradient,
pointHoverBackgroundColor: chartGradient,
pointHoverBorderColor: chartGradient,
data: [
],
}
]
},
plugins: [chartAreaBorder],
options: {
animation: {
easing: 'easeInSine',
duration: 0
},
plugins: {
chartAreaBorder: {
borderColor: chartGradient,
borderWidth: 1
},
legend: {
labels: {
color: chartGradient
}
},
tooltip: {
callbacks: {
label: function(data) {
return data.dataset.data[data.dataIndex] + " TH/s";
}
}
}
},
scales: {
y: {
min: 0, // minimum value
suggestedMax: 10,
stepSize: 1,
ticks: {
callback: function(value, index, ticks) {
return value + " TH/s";
}
}
},
x: {
ticks: {
maxTicksLimit: 6,
maxRotation: 0
}
}
}
}
});
var minerTempChart = new Chart(document.getElementById("temp-chart"), {
type: 'line',
data: {
labels: [
],
datasets: [{
label: "Temperature",
borderColor: chartGradient,
pointBorderColor: chartGradient,
pointBackgroundColor: chartGradient,
pointHoverBackgroundColor: chartGradient,
pointHoverBorderColor: chartGradient,
data: [
],
}
]
},
plugins: [chartAreaBorder],
options: {
animation: {
easing: 'easeInSine',
duration: 0
},
plugins: {
chartAreaBorder: {
borderColor: chartGradient,
borderWidth: 1
},
legend: {
labels: {
color: chartGradient
}
},
tooltip: {
callbacks: {
label: function(data) {
return data.dataset.data[data.dataIndex] + " °C";
}
}
}
},
scales: {
y: {
min: 0, // minimum value
suggestedMax: 100,
stepSize: 5,
ticks: {
callback: function(value, index, ticks) {
return value + " °C";
}
}
},
x: {
ticks: {
maxTicksLimit: 6,
maxRotation: 0
}
}
}
}
});
var options_fans = {
animation: {
easing: 'easeInSine',
duration: 250,
},
aspectRatio: 1.5,
events: [],
responsive: true,
plugins: {
legend: {
display: false,
}
}
};
var fanCtx = document.getElementById("fan-chart-1").getContext("2d");
var fanWidth = document.getElementById("fan-chart-1").width;
var fanChartGradient = fanCtx.createLinearGradient(0, 0, fanWidth, -fanWidth)
fanChartGradient.addColorStop(0, '#D0368A');
fanChartGradient.addColorStop(1, '#708AD4');
var fan1Chart = new Chart(document.getElementById("fan-chart-1"), {
type: "doughnut",
data: {
labels: ["Fan 1"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
var fan2Chart = new Chart(document.getElementById("fan-chart-2"), {
type: "doughnut",
data: {
labels: ["Fan 2"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
var fan3Chart = new Chart(document.getElementById("fan-chart-3"), {
type: "doughnut",
data: {
labels: ["Fan 3"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
var fan4Chart = new Chart(document.getElementById("fan-chart-4"), {
type: "doughnut",
data: {
labels: ["Fan 4"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
</script>
{% endblock content %}

View File

@@ -0,0 +1,108 @@
<!DOCTYPE html>
<html lang="en">
<head>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/js/bootstrap.bundle.min.js" integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM" crossorigin="anonymous"></script>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-EVSTQN3/azprG1Anm3QDgpJLIm9Nao0Yz1ztcQTwFspd3yD65VohhpuuCOmLASjC" crossorigin="anonymous">
<link href="{{ url_for('static', path='/navbar.css')}}" rel="stylesheet">
<meta charset="UTF-8">
<title>Title</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="Content-Security-Policy" content="default-src * gap: data: blob: 'unsafe-inline' 'unsafe-eval' ws: wss:;">
</head>
<body>
<svg xmlns="http://www.w3.org/2000/svg" style="display: none;">
<symbol id="dashboard" viewBox="0 0 16 16">
<path d="M8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4.5a.5.5 0 0 0 .5-.5v-4h2v4a.5.5 0 0 0 .5.5H14a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146zM2.5 14V7.707l5.5-5.5 5.5 5.5V14H10v-4a.5.5 0 0 0-.5-.5h-3a.5.5 0 0 0-.5.5v4H2.5z"></path>
</symbol>
<symbol id="miners" viewBox="0 0 16 16">
<path d="M8 4a.5.5 0 0 1 .5.5V6a.5.5 0 0 1-1 0V4.5A.5.5 0 0 1 8 4zM3.732 5.732a.5.5 0 0 1 .707 0l.915.914a.5.5 0 1 1-.708.708l-.914-.915a.5.5 0 0 1 0-.707zM2 10a.5.5 0 0 1 .5-.5h1.586a.5.5 0 0 1 0 1H2.5A.5.5 0 0 1 2 10zm9.5 0a.5.5 0 0 1 .5-.5h1.5a.5.5 0 0 1 0 1H12a.5.5 0 0 1-.5-.5zm.754-4.246a.389.389 0 0 0-.527-.02L7.547 9.31a.91.91 0 1 0 1.302 1.258l3.434-4.297a.389.389 0 0 0-.029-.518z"></path>
<path fill-rule="evenodd" d="M0 10a8 8 0 1 1 15.547 2.661c-.442 1.253-1.845 1.602-2.932 1.25C11.309 13.488 9.475 13 8 13c-1.474 0-3.31.488-4.615.911-1.087.352-2.49.003-2.932-1.25A7.988 7.988 0 0 1 0 10zm8-7a7 7 0 0 0-6.603 9.329c.203.575.923.876 1.68.63C4.397 12.533 6.358 12 8 12s3.604.532 4.923.96c.757.245 1.477-.056 1.68-.631A7 7 0 0 0 8 3z"></path>
</symbol>
<symbol id="settings" viewBox="0 0 16 16">
<path d="M8 4.754a3.246 3.246 0 1 0 0 6.492 3.246 3.246 0 0 0 0-6.492zM5.754 8a2.246 2.246 0 1 1 4.492 0 2.246 2.246 0 0 1-4.492 0z"/>
<path d="M9.796 1.343c-.527-1.79-3.065-1.79-3.592 0l-.094.319a.873.873 0 0 1-1.255.52l-.292-.16c-1.64-.892-3.433.902-2.54 2.541l.159.292a.873.873 0 0 1-.52 1.255l-.319.094c-1.79.527-1.79 3.065 0 3.592l.319.094a.873.873 0 0 1 .52 1.255l-.16.292c-.892 1.64.901 3.434 2.541 2.54l.292-.159a.873.873 0 0 1 1.255.52l.094.319c.527 1.79 3.065 1.79 3.592 0l.094-.319a.873.873 0 0 1 1.255-.52l.292.16c1.64.893 3.434-.902 2.54-2.541l-.159-.292a.873.873 0 0 1 .52-1.255l.319-.094c1.79-.527 1.79-3.065 0-3.592l-.319-.094a.873.873 0 0 1-.52-1.255l.16-.292c.893-1.64-.902-3.433-2.541-2.54l-.292.159a.873.873 0 0 1-1.255-.52l-.094-.319zm-2.633.283c.246-.835 1.428-.835 1.674 0l.094.319a1.873 1.873 0 0 0 2.693 1.115l.291-.16c.764-.415 1.6.42 1.184 1.185l-.159.292a1.873 1.873 0 0 0 1.116 2.692l.318.094c.835.246.835 1.428 0 1.674l-.319.094a1.873 1.873 0 0 0-1.115 2.693l.16.291c.415.764-.42 1.6-1.185 1.184l-.291-.159a1.873 1.873 0 0 0-2.693 1.116l-.094.318c-.246.835-1.428.835-1.674 0l-.094-.319a1.873 1.873 0 0 0-2.692-1.115l-.292.16c-.764.415-1.6-.42-1.184-1.185l.159-.291A1.873 1.873 0 0 0 1.945 8.93l-.319-.094c-.835-.246-.835-1.428 0-1.674l.319-.094A1.873 1.873 0 0 0 3.06 4.377l-.16-.292c-.415-.764.42-1.6 1.185-1.184l.292.159a1.873 1.873 0 0 0 2.692-1.115l.094-.319z"/>
</symbol>
<symbol id="scan" viewBox="0 0 16 16">
<path d="M14 1a1 1 0 0 1 1 1v12a1 1 0 0 1-1 1H2a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h12zM2 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2H2z"/>
<path d="M8 4a.5.5 0 0 1 .5.5v3h3a.5.5 0 0 1 0 1h-3v3a.5.5 0 0 1-1 0v-3h-3a.5.5 0 0 1 0-1h3v-3A.5.5 0 0 1 8 4z"/>
</symbol>
<symbol id="miner" viewBox="0 0 16 16">
<path d="M11.5 2a.5.5 0 0 1 .5.5v7a.5.5 0 0 1-1 0v-7a.5.5 0 0 1 .5-.5Zm2 0a.5.5 0 0 1 .5.5v7a.5.5 0 0 1-1 0v-7a.5.5 0 0 1 .5-.5Zm-10 8a.5.5 0 0 0 0 1h6a.5.5 0 0 0 0-1h-6Zm0 2a.5.5 0 0 0 0 1h6a.5.5 0 0 0 0-1h-6ZM5 3a1 1 0 0 0-1 1h-.5a.5.5 0 0 0 0 1H4v1h-.5a.5.5 0 0 0 0 1H4a1 1 0 0 0 1 1v.5a.5.5 0 0 0 1 0V8h1v.5a.5.5 0 0 0 1 0V8a1 1 0 0 0 1-1h.5a.5.5 0 0 0 0-1H9V5h.5a.5.5 0 0 0 0-1H9a1 1 0 0 0-1-1v-.5a.5.5 0 0 0-1 0V3H6v-.5a.5.5 0 0 0-1 0V3Zm0 1h3v3H5V4Zm6.5 7a.5.5 0 0 0-.5.5v1a.5.5 0 0 0 .5.5h2a.5.5 0 0 0 .5-.5v-1a.5.5 0 0 0-.5-.5h-2Z"/>
<path d="M1 2a2 2 0 0 1 2-2h11a2 2 0 0 1 2 2v11a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2v-2H.5a.5.5 0 0 1-.5-.5v-1A.5.5 0 0 1 .5 9H1V8H.5a.5.5 0 0 1-.5-.5v-1A.5.5 0 0 1 .5 6H1V5H.5a.5.5 0 0 1-.5-.5v-2A.5.5 0 0 1 .5 2H1Zm1 11a1 1 0 0 0 1 1h11a1 1 0 0 0 1-1V2a1 1 0 0 0-1-1H3a1 1 0 0 0-1 1v11Z"/>
</symbol>
</svg>
<header class="navbar navbar-dark sticky-top bg-dark flex-xl-nowrap p-0 shadow">
<a class="d-lg-none col-lg-3 col-xl-2 me-0 px-3" style="height: 50px;" href="#"></a>
<button class="navbar-toggler position-absolute d-lg-none collapsed" type="button" data-bs-toggle="collapse" data-bs-target="#sidebarMenu" aria-controls="sidebarMenu" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<ul class="navbar-nav px-3">
<li class="nav-item text-nowrap">
</li>
</ul>
</header>
<div class="container-fluid">
<div class="row">
<nav id="sidebarMenu" class="text-white bg-dark col-lg-3 col-xl-2 d-lg-block sidebar collapse">
<div class="position-sticky pt-3">
<ul class="nav nav-pills flex-column">
<li class="nav-item mb-1 mx-2">
<a href="/dashboard" class="nav-link navbar-link {% if request.path.strip('/') == 'dashboard' %}active{% else %}text-white{% endif %}">
<svg class="bi me-2" width="16" height="16"><use xlink:href="#dashboard"></use></svg>
Dashboard
</a>
</li>
<li class="nav-item mb-1 mx-2">
<a href="" class="nav-link navbar-link {% if request.path.strip('/') == 'scan' or request.path.split('/')[1] == 'miner' %}active{% else %}text-white{% endif %}" data-bs-toggle="collapse" data-bs-target="#miners-collapse" aria-expanded="false">
<svg class="bi me-2" width="16" height="16"><use xlink:href="#miners"></use></svg>
Miners
</a>
<div class="collapse mt-1" id="miners-collapse" style="">
<ul id="navMiners" class="btn-toggle-nav overflow-auto list-unstyled fw-normal pb-1 small">
<li>
<a href="/scan" class="nav-link navbar-link {% if request.path.strip('/') == 'scan' %}active{% else %}text-white{% endif %}">
<svg class="bi me-2 mt-1" width="16" height="16"><use xlink:href="#scan"></use></svg>
Add Miners
</a>
</li>
{% for miner in cur_miners %}
<li>
<a href="/miner/{{miner}}" class="nav-link navbar-link {% if request.path.strip('/') == 'miner/' + miner %}active{% else %}text-white{% endif %}">
<svg class="bi me-2 mt-1" width="16" height="16"><use xlink:href="#miner"></use></svg>
{{miner}}
</a>
</li>
{% endfor %}
</ul>
</div>
</li>
<li class="border-top my-3"></li>
<li class="nav-item mb-1 mx-2">
<a href="/settings" class="nav-link navbar-link {% if request.path.strip('/') == 'settings' %}active{% else %}text-white{% endif %}">
<svg class="bi me-2" width="16" height="16"><use xlink:href="#settings"></use></svg>
Settings
</a>
</li>
</ul>
</div>
</nav>
<div class="col-lg-9 ms-md-auto col-xl-10 px-lg-4 ps-4">
{% block content %}
{% endblock content %}
</div>
</div>
</div>
</body>
</html>

View File

@@ -0,0 +1,158 @@
{% extends 'navbar.html'%}
{% block content %}
<div class="row w-100 my-4">
<form action="" onsubmit="sendMessage(event)">
<div class="input-group mb-3">
<span class="input-group-text" id="scan-ip">Subnet</span>
<input type="text" class="form-control" id="messageText" placeholder="192.168.1.0/24" aria-describedby="scan-ip">
<button class="btn btn-danger" onclick="cancelScan()" style="display:none;" type="button" id="cancelButton">Cancel</button>
<button class="btn btn-primary" onclick="scanMiners()" type="button" id="scanButton">Scan</button>
</div>
</form>
</div>
<div class="row w-100">
<button class="btn btn-primary mb-4 mx-1" onclick="addMiners()" type="button" id="addButton">Add Selected Miners</button>
</div>
<div class="row w-100">
<table class="table table-striped table-responsive" style="max-height:300px;">
<thead>
<tr>
<th class="active col-1">
<input type="checkbox" class="select-all checkbox" name="select-all" id="selectAllCheckbox"/>
</th>
<th>IP</th>
<th>Model</th>
<th id="scanStatus" class="col-2">0 Miners</th>
</tr>
</thead>
<tbody id="minerTable">
</tbody>
</table>
</div>
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo" crossorigin="anonymous"></script>
<script>
$(function(){
//column checkbox select all or cancel
$("input.select-all").click(function () {
var checked = this.checked;
$("input.select-item").each(function (index,item) {
item.checked = checked;
});
});
//check selected items
$("input.select-item").click(function () {
var checked = this.checked;
var all = $("input.select-all")[0];
var total = $("input.select-item").length;
var len = $("input.select-item:checked:checked").length;
all.checked = len===total;
});
});
</script>
<script>
window.post = function(url, data) {
return fetch(url, {method: "POST", headers: {'Content-Type': 'application/json'}, body: JSON.stringify(data)});
}
var ws = new WebSocket("ws://{{request.url.hostname}}:{{request.url.port}}/scan/ws");
ws.onmessage = function(event) {
if (event.data == "Done") {
document.getElementById("scanButton").innerHTML = "Scan"
document.getElementById("scanButton").disabled = false
document.getElementById("selectAllCheckbox").disabled = false
document.getElementById("scanStatus").innerHTML = document.getElementById('minerTable').rows.length + " Miners"
document.getElementById("cancelButton").style = "display:none;"
enableCheckboxes();
} else if (event.data == "Cancelled") {
document.getElementById("scanButton").innerHTML = "Scan"
document.getElementById("scanButton").disabled = false
document.getElementById("selectAllCheckbox").disabled = false
document.getElementById("scanStatus").innerHTML = document.getElementById('minerTable').rows.length + " Miners"
document.getElementById("cancelButton").style = "display:none;"
enableCheckboxes();
} else {
var miner_data = JSON.parse(event.data)
var miners = document.getElementById('minerTable')
miners.innerHTML = ""
miner_data.forEach(function(miner) {
var tr = document.createElement('tr')
tr.id = miner["ip"]
var checkbox_td = document.createElement('td')
checkbox_td.innerHTML = '<input type="checkbox" class="select-item checkbox" name="minerCheckboxes" value="' + miner["ip"] + '" />'
checkbox_td.className = "active"
var ip_td = document.createElement('td')
ip_td.innerHTML = miner["ip"]
var model_td = document.createElement('td')
model_td.innerHTML = miner["model"]
var empty_td = document.createElement('td')
tr.append(checkbox_td)
tr.append(ip_td)
tr.append(model_td)
tr.append(empty_td)
miners.append(tr)
});
disableCheckboxes();
};
};
function scanMiners(event) {
var input = document.getElementById("messageText")
var miners = document.getElementById('minerTable')
miners.innerHTML = ""
document.getElementById("scanStatus").innerHTML = "<span class='spinner-border spinner-border-sm'></span> Scanning"
document.getElementById("scanButton").innerHTML = "<span class='spinner-border spinner-border-sm'></span> Scanning"
document.getElementById("scanButton").disabled = true
document.getElementById("selectAllCheckbox").disabled = true
document.getElementById("cancelButton").style = ""
if (input.value != "") {
ws.send(input.value)
event.preventDefault()
} else {
ws.send("192.168.1.0/24")
};
};
function cancelScan(event) {
document.getElementById("scanStatus").innerHTML = "Canceling..."
document.getElementById("scanButton").innerHTML = "Canceling..."
document.getElementById("cancelButton").style = "display:none;"
ws.send("-Cancel-")
};
function addMiners(event) {
var checkedBoxes = document.querySelectorAll('input[name=minerCheckboxes]:checked');
if (checkedBoxes.length != 0) {
var minerList = [];
for (i = 0; i< checkedBoxes.length; i++) {
minerList.push(checkedBoxes[i].defaultValue);
}
post("{{url_for('add_miners_scan')}}", {miners: minerList})
for (i = 0; i< minerList.length; i++) {
var tr_to_remove = document.getElementById(minerList[i])
tr_to_remove.remove()
var navbar_miners = document.getElementById("navMiners")
navbar_miners.innerHTML += '<li><a href="/miner/' + minerList[i] + '" class="nav-link text-white"><svg class="bi me-2 mt-1" width="16" height="16"><use xlink:href="#miner"></use></svg>' + minerList[i] + '</a></li>'
}
document.getElementById("scanStatus").innerHTML = document.getElementById('minerTable').rows.length + " Miners"
};
};
function disableCheckboxes() {
var checkBoxes = document.querySelectorAll('input[name=minerCheckboxes]');
for (i = 0; i< checkBoxes.length; i++) {
checkBoxes[i].disabled = true
};
};
function enableCheckboxes() {
var checkBoxes = document.querySelectorAll('input[name=minerCheckboxes]');
for (i = 0; i< checkBoxes.length; i++) {
checkBoxes[i].disabled = false
};
};
</script>
{% endblock content %}

View File

@@ -0,0 +1,46 @@
{% extends 'navbar.html'%}
{% block content %}
<div class="row my-2">
<div class="col">
<div class="d-flex flex-row-reverse">
<button type="button" class="btn btn-outline-danger mx-1" data-bs-toggle="modal" data-bs-target="#removeModal">
Remove All Miners
</button>
<!-- Modal -->
<div class="modal fade" id="removeModal" tabindex="-1" aria-labelledby="removeModalLabel" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="removeModalLabel">Remove Miner</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
Do you really want to remove all miners?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
<a class="btn btn-danger" href="{{url_for('remove_all_miners')}}" role="button">Remove</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<form method="post" action="/settings/update">
<div class="input-group mb-3">
<span class="input-group-text">Graph Data Sleep Time</span>
<input type="number" class="form-control" value="{{settings['graph_data_sleep_time']}}" name="graph_data_sleep_time" id="graph_data_sleep_time">
</div>
<div class="input-group mb-3">
<span class="input-group-text">Miner Data Timeout</span>
<input type="number" class="form-control" value="{{settings['miner_data_timeout']}}" name="miner_data_timeout" id="miner_data_timeout">
</div>
<div class="input-group mb-3">
<span class="input-group-text">Miner Identification Timeout</span>
<input type="number" class="form-control" value="{{settings['miner_identify_timeout']}}" name="miner_identify_timeout" id=" ">
</div>
<button type="submit" class="btn btn-primary w-100">Submit</button>
</form>
{% endblock content %}

View File

View File

@@ -0,0 +1,345 @@
from ipaddress import ip_address
import asyncio
import os
import logging
import datetime
from network import ping_miner
from miners.miner_factory import MinerFactory
from miners.antminer.S9.bosminer import BOSMinerS9
from tools.web_testbench.connections import ConnectionManager
from tools.web_testbench.feeds import get_local_versions
from settings import NETWORK_PING_TIMEOUT as PING_TIMEOUT
REFERRAL_FILE_S9 = os.path.join(os.path.dirname(__file__), "files", "referral.ipk")
UPDATE_FILE_S9 = os.path.join(os.path.dirname(__file__), "files", "update.tar")
CONFIG_FILE = os.path.join(os.path.dirname(__file__), "files", "config.toml")
# static states
(START, UNLOCK, INSTALL, UPDATE, REFERRAL, DONE, ERROR) = range(7)
class TestbenchMiner:
def __init__(self, host: ip_address):
self.host = host
self.state = START
self.latest_version = None
self.start_time = None
async def get_bos_version(self):
miner = await MinerFactory().get_miner(self.host)
result = await miner.send_ssh_command("cat /etc/bos_version")
version_base = result.stdout
version_base = version_base.strip()
version_base = version_base.split("-")
version = version_base[-2]
return version
def get_online_time(self):
online_time = "0:00:00"
if self.start_time:
online_time = str(datetime.datetime.now() - self.start_time).split(".")[0]
return online_time
async def add_to_output(self, message):
data = {
"IP": str(self.host),
"text": str(message).replace("\r", "") + "\n",
"Light": "hide",
"online": self.get_online_time(),
}
await ConnectionManager().broadcast_json(data)
return
async def remove_from_cache(self):
if self.host in MinerFactory().miners.keys():
            MinerFactory().miners.pop(self.host)  # miners is keyed by host; dicts have no .remove()
async def wait_for_disconnect(self, wait_time: int = 1):
await self.add_to_output("Waiting for disconnect...")
while await ping_miner(self.host):
await asyncio.sleep(wait_time)
self.state = START
async def install_start(self):
try:
if not await ping_miner(self.host, 80):
await self.add_to_output("Waiting for miner connection...")
return
except asyncio.exceptions.TimeoutError:
await self.add_to_output("Waiting for miner connection...")
return
self.start_time = datetime.datetime.now()
await ConnectionManager().broadcast_json(
{"IP": str(self.host), "Light": "hide", "online": self.get_online_time()}
)
await self.remove_from_cache()
miner = await MinerFactory().get_miner(self.host)
await self.add_to_output("Found miner: " + str(miner))
if isinstance(miner, BOSMinerS9):
try:
if await self.get_bos_version() == self.latest_version:
await self.add_to_output(
f"Already running the latest version of BraiinsOS, {self.latest_version}, configuring."
)
self.state = REFERRAL
return
except AttributeError:
return
await self.add_to_output("Already running BraiinsOS, updating.")
self.state = UPDATE
return
if await ping_miner(self.host, 22):
await self.add_to_output("Miner is unlocked, installing.")
self.state = INSTALL
return
await self.add_to_output("Miner needs unlock, unlocking.")
self.state = UNLOCK
async def install_unlock(self):
if await self.ssh_unlock():
await self.add_to_output("Unlocked miner, installing.")
self.state = INSTALL
return
await self.add_to_output("Failed to unlock miner, please pin reset.")
self.state = START
await self.wait_for_disconnect()
async def ssh_unlock(self):
proc = await asyncio.create_subprocess_shell(
f'{os.path.join(os.path.dirname(__file__), "files", "asicseer_installer.exe")} -p -f {str(self.host)} root',
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
stdout, stderr = await proc.communicate()
if str(stdout).find("webUI") != -1:
return False
return True
async def fix_file_exists_bug(self):
miner = await MinerFactory().get_miner(self.host)
await miner.send_ssh_command(
"rm /lib/ld-musl-armhf.so.1; rm /usr/lib/openssh/sftp-server; rm /usr/sbin/fw_printenv"
)
async def do_install(self):
await self.add_to_output("Running install...")
error = None
proc = await asyncio.create_subprocess_shell(
f'{os.path.join(os.path.dirname(__file__), "files", "bos-toolbox", "bos-toolbox.bat")} install {str(self.host)} --no-keep-pools --psu-power-limit 900 --no-nand-backup --feeds-url file:./feeds/ -p root',
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
stdin=asyncio.subprocess.PIPE
)
# get stdout of the install
stdout = None
await self.add_to_output("Getting output...")
while True:
try:
stdout = await asyncio.wait_for(proc.stderr.readuntil(b"\r"), 10)
except asyncio.exceptions.IncompleteReadError:
break
except asyncio.exceptions.TimeoutError:
if not stdout:
await self.add_to_output("Miner encountered an error when installing, attempting to re-unlock. If this fails, you may need to factory reset the miner.")
self.state = UNLOCK
proc.kill()
return
continue
stdout_data = stdout.decode("utf-8").strip()
if "ERROR:File" in stdout_data:
error = "FILE"
if "ERROR:Auth" in stdout_data:
error = "AUTH"
proc.kill()
await self.add_to_output(stdout_data)
if stdout == b"":
break
await self.add_to_output("Waiting for process to complete...")
await proc.wait()
if not error:
await self.add_to_output("Waiting for miner to finish rebooting...")
while not await ping_miner(self.host):
await asyncio.sleep(3)
await asyncio.sleep(5)
if error == "FILE":
await self.add_to_output("Encountered error, attempting to fix.")
await self.fix_file_exists_bug()
self.state = START
return
elif error == "AUTH":
await self.add_to_output("Encountered unlock error, please pin reset.")
self.state = ERROR
return
await self.add_to_output("Install complete, configuring.")
self.state = REFERRAL
async def install_update(self):
await self.add_to_output("Updating miner...")
await self.remove_from_cache()
miner = await MinerFactory().get_miner(self.host)
try:
await miner.send_file(UPDATE_FILE_S9, "/tmp/firmware.tar")
await miner.send_ssh_command("sysupgrade /tmp/firmware.tar")
except Exception as e:
logging.warning(f"{str(self.host)} Exception: {e}")
await self.add_to_output("Failed to update, restarting.")
self.state = START
return
await asyncio.sleep(10)
await self.add_to_output("Update complete, configuring.")
self.state = REFERRAL
async def install_referral(self):
while not await ping_miner(self.host):
await asyncio.sleep(1)
miner = await MinerFactory().get_miner(self.host)
try:
await miner.send_file(REFERRAL_FILE_S9, "/tmp/referral.ipk")
await miner.send_file(CONFIG_FILE, "/etc/bosminer.toml")
await miner.send_ssh_command(
"opkg install /tmp/referral.ipk && /etc/init.d/bosminer restart"
)
except Exception as e:
logging.warning(f"{str(self.host)} Exception: {e}")
await self.add_to_output(
"Failed to add referral and configure, restarting."
)
self.state = START
return
await asyncio.sleep(5)
await self.add_to_output("Configuration complete.")
self.state = DONE
async def get_web_data(self):
miner = await MinerFactory().get_miner(self.host)
if not isinstance(miner, BOSMinerS9):
await self.add_to_output("Miner type changed, restarting.")
self.state = START
return
try:
all_data = await miner.api.multicommand(
"devs", "temps", "fans", "tunerstatus"
)
devs_raw = all_data["devs"][0]
temps_raw = all_data["temps"][0]
fans_raw = all_data["fans"][0]
tunerstatus_raw = all_data["tunerstatus"][0]
# parse temperature data
temps_data = {}
for board in range(len(temps_raw["TEMPS"])):
temps_data[f"board_{temps_raw['TEMPS'][board]['ID']}"] = {}
temps_data[f"board_{temps_raw['TEMPS'][board]['ID']}"][
"Board"
] = temps_raw["TEMPS"][board]["Board"]
temps_data[f"board_{temps_raw['TEMPS'][board]['ID']}"][
"Chip"
] = temps_raw["TEMPS"][board]["Chip"]
if len(temps_data.keys()) < 3:
for board in [6, 7, 8]:
if f"board_{board}" not in temps_data.keys():
temps_data[f"board_{board}"] = {"Chip": 0, "Board": 0}
# parse individual board and chip temperature data
for board in temps_data.keys():
if "Board" not in temps_data[board].keys():
temps_data[board]["Board"] = 0
if "Chip" not in temps_data[board].keys():
temps_data[board]["Chip"] = 0
# parse hashrate data
hr_data = {}
for board in range(len(devs_raw["DEVS"])):
hr_data[f"board_{devs_raw['DEVS'][board]['ID']}"] = {}
hr_data[f"board_{devs_raw['DEVS'][board]['ID']}"]["HR"] = round(
devs_raw["DEVS"][board]["MHS 5s"] / 1000000, 2
)
# parse fan data
fans_data = {}
for fan in range(len(fans_raw["FANS"])):
fans_data[f"fan_{fans_raw['FANS'][fan]['ID']}"] = {}
fans_data[f"fan_{fans_raw['FANS'][fan]['ID']}"]["RPM"] = fans_raw[
"FANS"
][fan]["RPM"]
# parse tuner data
tuner_data = {}
if tunerstatus_raw:
for board in tunerstatus_raw["TUNERSTATUS"][0]["TunerChainStatus"]:
tuner_data[f"board_{board['HashchainIndex']}"] = {
"power_limit": board["PowerLimitWatt"],
"real_power": board["ApproximatePowerConsumptionWatt"],
"status": board["Status"],
}
if len(tuner_data.keys()) < 3:
for board in [6, 7, 8]:
if f"board_{board}" not in tuner_data.keys():
temps_data[f"board_{board}"] = {
"power_limit": 0,
"real_power": 0,
"status": "ERROR: No board found!",
}
# set the miner data
miner_data = {
"IP": str(self.host),
"Light": "show",
"Fans": fans_data,
"HR": hr_data,
"Temps": temps_data,
"online": self.get_online_time(),
"Tuner": tuner_data,
}
# return stats
return miner_data
except Exception:
# data can be missing or malformed while the miner reboots or drops offline
return
async def install_done(self):
await self.add_to_output("Waiting for disconnect...")
try:
while (
await asyncio.wait_for(ping_miner(self.host), PING_TIMEOUT + 3)
and self.state == DONE
):
data = await self.get_web_data()
await ConnectionManager().broadcast_json(data)
await asyncio.sleep(1)
except Exception:
# the ping timed out or failed, meaning the miner has disconnected
self.state = START
await self.add_to_output("Miner disconnected, waiting for new miner.")
self.start_time = None
return
self.state = START
await self.add_to_output("Miner disconnected, waiting for new miner.")
self.start_time = None
async def install_loop(self):
self.latest_version = sorted(await get_local_versions(), reverse=True)[0]
while True:
try:
if self.state == START:
self.start_time = None
await self.install_start()
if self.state == UNLOCK:
await self.install_unlock()
if self.state == INSTALL:
await self.do_install()
if self.state == UPDATE:
await self.install_update()
if self.state == REFERRAL:
await self.install_referral()
if self.state == DONE:
await self.install_done()
if self.state == ERROR:
await self.wait_for_disconnect(wait_time=5)
except Exception as E:
logging.error(f"{self.host}: {E}")
await self.add_to_output(f"Error: {E}")

View File

@@ -0,0 +1,3 @@
from network import MinerNetwork
miner_network = MinerNetwork("192.168.1.11-192.168.1.34").get_network()

View File

@@ -0,0 +1,86 @@
from fastapi import FastAPI, WebSocket, Request
from fastapi.websockets import WebSocketDisconnect
import asyncio
from fastapi.staticfiles import StaticFiles
import uvicorn
import os
from fastapi.templating import Jinja2Templates
from tools.web_testbench.feeds import update_installer_files
from miners.miner_factory import MinerFactory
from tools.web_testbench.connections import ConnectionManager
from tools.web_testbench._miners import TestbenchMiner
from tools.web_testbench._network import miner_network
app = FastAPI()
app.mount(
"/public",
StaticFiles(directory=os.path.join(os.path.dirname(__file__), "public")),
name="public",
)
templates = Jinja2Templates(
directory=os.path.join(os.path.dirname(__file__), "templates")
)
@app.websocket("/ws")
async def ws(websocket: WebSocket):
await ConnectionManager().connect(websocket)
try:
while True:
data = await websocket.receive_json()
if "IP" in data.keys():
miner = await MinerFactory().get_miner(data["IP"])
try:
if data["Data"] == "unlight":
if data["IP"] in ConnectionManager.lit_miners:
ConnectionManager.lit_miners.remove(data["IP"])
await miner.fault_light_off()
if data["Data"] == "light":
if data["IP"] not in ConnectionManager().lit_miners:
ConnectionManager.lit_miners.append(data["IP"])
await miner.fault_light_on()
except AttributeError:
await ConnectionManager().broadcast_json(
{
"IP": data["IP"],
"text": "Fault light command failed, miner is not running BraiinsOS.",
}
)
except (WebSocketDisconnect, RuntimeError):
ConnectionManager().disconnect(websocket)
@app.get("/")
def dashboard(request: Request):
return templates.TemplateResponse(
"index.html",
{
"request": request,
},
)
@app.on_event("startup")
async def update_installer():
await update_installer_files()
@app.on_event("startup")
def start_install():
asyncio.create_task(install())
async def install():
for host in miner_network.hosts():
miner = TestbenchMiner(host)
asyncio.create_task(miner.install_loop())
if __name__ == "__main__":
uvicorn.run("app:app", host="0.0.0.0", port=80)

View File

@@ -0,0 +1,43 @@
from fastapi import WebSocket
import logging
from miners.miner_factory import MinerFactory
from tools.web_testbench._network import miner_network
class ConnectionManager:
_instance = None
_connections = []
lit_miners = []
def __new__(cls):
if not cls._instance:
cls._instance = super(ConnectionManager, cls).__new__(cls)
return cls._instance
async def connect(self, websocket: WebSocket):
await websocket.accept()
miners = []
for host in miner_network.hosts():
if str(host) in ConnectionManager.lit_miners:
miners.append(
{
"IP": str(host),
"Light_On": True,
}
)
else:
miners.append({"IP": str(host), "Light_On": False})
await websocket.send_json({"miners": miners})
ConnectionManager._connections.append(websocket)
def disconnect(self, websocket: WebSocket):
logging.info("Disconnected")
ConnectionManager._connections.remove(websocket)
async def broadcast_json(self, data: dict):
# iterate over a copy so disconnect() can safely remove dead connections
for connection in list(ConnectionManager._connections):
try:
await connection.send_json(data)
except Exception:
self.disconnect(connection)
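Because __new__ always hands back the same instance and the connection/light lists are class attributes, every ConnectionManager() call in the app and in the TestbenchMiner tasks shares state. A small sketch illustrating that (the IP below is hypothetical):

from tools.web_testbench.connections import ConnectionManager

a = ConnectionManager()
b = ConnectionManager()
assert a is b  # singleton: both names point at the same object

ConnectionManager.lit_miners.append("192.168.1.11")  # hypothetical miner IP
assert "192.168.1.11" in b.lit_miners  # the class-level list is shared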

View File

@@ -0,0 +1,151 @@
import aiohttp
import shutil
import aiofiles
import asyncio
from bs4 import BeautifulSoup
import re
import os
import logging
async def get_latest_version(session):
feeds_url = "http://feeds.braiins-os.com"
async with session.get(feeds_url) as resp:
data = await resp.read()
soup = BeautifulSoup(data, "html.parser")
versions = []
for link in soup.find_all("td", {"class": "link"}):
link_title = link.text.strip("/")
if re.match("(\d+)\.(\d+)(\.\d+)?", link_title):
versions.append(link_title)
versions = sorted(versions, reverse=True)
latest_version = versions[0]
return latest_version
async def get_feeds_file(session, version):
feeds_url = "http://feeds.braiins-os.com"
async with session.get(feeds_url + "/" + version) as resp:
data = await resp.read()
soup = BeautifulSoup(data, "html.parser")
file = None
for link in soup.find_all("a", href=True):
href = link["href"]
if re.match("braiins-os_am1-s9_ssh_.+\.tar.gz", href):
if not href.endswith(".asc"):
file = href
if file:
return file
async def get_update_file(session, version):
feeds_url = "http://feeds.braiins-os.com"
async with session.get(feeds_url + "/am1-s9") as resp:
data = await resp.read()
soup = BeautifulSoup(data, "html.parser")
file = None
for link in soup.find_all("a", href=True):
href = link["href"]
if re.match(f"firmware_(.+)-{version}-plus_arm_cortex-a9_neon\.tar", href):
if not href.endswith(".asc"):
file = href
if file:
return file
async def get_latest_update_file(session, update_file):
update_file_loc = f"http://feeds.braiins-os.com/am1-s9/{update_file}"
update_file_dir = os.path.join(os.path.dirname(__file__), "files", "update.tar")
if os.path.exists(update_file_dir):
os.remove(update_file_dir)
async with session.get(update_file_loc) as update_file_data:
if update_file_data.status == 200:
f = await aiofiles.open(
os.path.join(os.path.dirname(__file__), "files", "update.tar"),
mode="wb",
)
await f.write(await update_file_data.read())
await f.close()
async def get_latest_install_file(session, version, feeds_path, install_file):
install_file_loc = f"http://feeds.braiins-os.com/{version}/{install_file}"
feeds_file_path = os.path.join(feeds_path, "toolbox_bos_am1-s9")
with open(feeds_file_path, "a+") as feeds_file:
feeds_file.write(version + "\t" + install_file + "\n")
install_file_folder = os.path.join(feeds_path, version)
if os.path.exists(install_file_folder):
shutil.rmtree(install_file_folder)
os.mkdir(install_file_folder)
async with session.get(install_file_loc) as install_file_data:
if install_file_data.status == 200:
f = await aiofiles.open(
os.path.join(install_file_folder, install_file), mode="wb"
)
await f.write(await install_file_data.read())
await f.close()
async def update_installer_files():
feeds_path = os.path.join(
os.path.dirname(__file__), "files", "bos-toolbox", "feeds"
)
feeds_versions = await get_local_versions()
async with aiohttp.ClientSession() as session:
version = await get_latest_version(session)
if version not in feeds_versions:
update_file = await get_update_file(session, version)
install_file = await get_feeds_file(session, version)
await get_latest_update_file(session, update_file)
await get_latest_install_file(session, version, feeds_path, install_file)
else:
logging.info("Feeds are up to date.")
async def get_local_versions():
feeds_versions = []
feeds_path = os.path.join(
os.path.dirname(__file__), "files", "bos-toolbox", "feeds"
)
if not os.path.exists(feeds_path):
os.mkdir(feeds_path)
feeds_file_path = os.path.join(feeds_path, "toolbox_bos_am1-s9")
if not os.path.exists(feeds_file_path):
feeds_file = open(feeds_file_path, "w+")
feeds_file.close()
with open(feeds_file_path) as feeds_file:
for line in feeds_file.readlines():
ver = line.strip().split("\t")[0]
feeds_versions.append(ver)
return feeds_versions
if __name__ == "__main__":
asyncio.run(update_installer_files())
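For clarity, get_latest_install_file() appends one "<version>\t<install file>" record per line to files/bos-toolbox/feeds/toolbox_bos_am1-s9, and get_local_versions() reads only the version column back. A small sketch with hypothetical entries:

# hypothetical index contents; real entries come from feeds.braiins-os.com
example_feeds_index = (
    "22.02\tbraiins-os_am1-s9_ssh_22.02.tar.gz\n"
    "22.04\tbraiins-os_am1-s9_ssh_22.04.tar.gz\n"
)
local_versions = [line.split("\t")[0] for line in example_feeds_index.splitlines()]
print(local_versions)  # ['22.02', '22.04']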

View File

@@ -0,0 +1,261 @@
import { generate_graphs } from "./generate_graphs.js"
function pauseMiner(ip, checkbox) {
// if the checkbox is checked we need to pause, unchecked is unpause
if (checkbox.checked){
sio.emit("pause", ip)
} else if (!checkbox.checked) {
sio.emit("unpause", ip)
}
}
function checkPause(ip, checkbox) {
// make sure the checkbox exists, removes an error
if (checkbox) {
// get status of pause and set checkbox to this status
sio.emit("check_pause", ip, (result) => {
checkbox.checked = result
}
);
}
}
function lightMiner(ip, checkbox) {
// if the checkbox is checked turn the light on, otherwise off
if (checkbox.checked){
sio.emit("light", ip)
} else if (!checkbox.checked) {
sio.emit("unlight", ip)
}
}
function checkLight(ip, checkbox) {
// make sure the checkbox exists, removes an error
if (checkbox) {
// get status of light and set checkbox to this status
sio.emit("check_light", ip, (result) => {
checkbox.checked = result
}
);
}
}
export function generate_layout(miners) {
// get the container for all the charts and data
var container_all = document.getElementById('chart_container');
// empty the container out
container_all.innerHTML = ""
miners.forEach(function(miner) {
// create main div column for all data to sit inside
var column = document.createElement('div');
column.className = "col border border-dark p-3"
// create IP address header
var header = document.createElement('button');
header.className = "text-center btn btn-primary w-100"
header.onclick = function(){window.open("http://" + miner.IP, '_blank');}
header.innerHTML += miner.IP
// add the header to col first
column.append(header)
// create light button container
var container_light = document.createElement('div');
container_light.className = "form-check form-switch d-flex justify-content-evenly"
// create light button
var light_switch = document.createElement('input');
light_switch.type = "checkbox"
light_switch.id = "light_" + miner.IP
light_switch.className = "form-check-input"
// check if the light is turned on and add click listener
checkLight(miner.IP, light_switch);
light_switch.addEventListener("click", function(){lightMiner(miner.IP, light_switch);}, false);
// add a light label to the button
var label_light = document.createElement("label");
label_light.setAttribute("for", "light_" + miner.IP);
label_light.innerHTML = "Light";
// add the button and label to the container
container_light.append(light_switch)
container_light.append(label_light)
if (miner.hasOwnProperty('text')) {
// create text row
var row_text = document.createElement('div');
row_text.className = "row"
// create text container
var text_container = document.createElement('div')
text_container.className = "col w-100 p-3"
// create text area for data
var text_area = document.createElement('textarea');
text_area.rows = "10"
text_area.className = "form-control"
text_area.style = "font-size: 12px"
text_area.disabled = true
text_area.readOnly = true
// add data to the text area
var text = miner.text
text += text_area.innerHTML
text_area.innerHTML = text
// add the text area to the row
row_text.append(text_area)
// create a row for buttons
var row_buttons = document.createElement('div');
row_buttons.className = "row mt-3"
// create pause button container
var container_pause = document.createElement('div');
container_pause.className = "form-check form-switch d-flex justify-content-evenly"
// create the pause button
var pause_switch = document.createElement('input');
pause_switch.type = "checkbox"
pause_switch.id = "pause_" + miner.IP
pause_switch.className = "form-check-input"
// check if it is paused and add the click listener
checkPause(miner.IP, pause_switch);
pause_switch.addEventListener("click", function(){pauseMiner(miner.IP, pause_switch);}, false);
// add a pause label
var label_pause = document.createElement("label");
label_pause.setAttribute("for", "pause_" + miner.IP);
label_pause.innerHTML = "Pause";
// add the label and button to the container
container_pause.append(pause_switch);
container_pause.append(label_pause);
text_container.append(row_text);
// add the container to the row
row_buttons.append(container_pause);
if (miner.Light == "show") {
// add the light container to the row
row_buttons.append(container_light)
}
//add the row to the main column
column.append(text_container);
column.append(row_buttons);
// add the column onto the page
container_all.append(column);
} else {
// get fan rpm
var fan_rpm_1 = miner.Fans.fan_0.RPM;
var fan_rpm_2 = miner.Fans.fan_1.RPM;
// create hr canvas
var hr_canvas = document.createElement('canvas');
// create temp canvas
var temp_canvas = document.createElement('canvas');
// create fan 1 title
var fan_1_title = document.createElement('p');
fan_1_title.innerHTML += "Fan L: " + fan_rpm_1 + " RPM";
fan_1_title.className = "text-center"
// create fan 2 title
var fan_2_title = document.createElement('p');
fan_2_title.innerHTML += "Fan R: " + fan_rpm_2 + " RPM";
fan_2_title.className = "text-center"
// create fan 1 canvas
var fan_1_canvas = document.createElement('canvas');
// create fan 2 canvas
var fan_2_canvas = document.createElement('canvas');
// create row for hr and temp data
var row_hr = document.createElement('div');
row_hr.className = "row"
// create row for titles of fans
var row_fan_title = document.createElement('div');
row_fan_title.className = "row"
// create row for fan graphs
var row_fan = document.createElement('div');
row_fan.className = "row"
// create hr container
var container_col_hr = document.createElement('div');
container_col_hr.className = "col w-50 ps-0 pe-4"
// create temp container
var container_col_temp = document.createElement('div');
container_col_temp.className = "col w-50 ps-0 pe-4"
// create fan title 1 container
var container_col_title_fan_1 = document.createElement('div');
container_col_title_fan_1.className = "col"
// create fan title 2 container
var container_col_title_fan_2 = document.createElement('div');
container_col_title_fan_2.className = "col"
// create fan 1 data container
var container_col_fan_1 = document.createElement('div');
container_col_fan_1.className = "col w-50 ps-3 pe-1"
// create fan 2 data container
var container_col_fan_2 = document.createElement('div');
container_col_fan_2.className = "col w-50 ps-3 pe-1"
// append canvases to the appropriate container columns
container_col_hr.append(hr_canvas)
container_col_temp.append(temp_canvas)
container_col_title_fan_1.append(fan_1_title)
container_col_title_fan_2.append(fan_2_title)
container_col_fan_1.append(fan_1_canvas)
container_col_fan_2.append(fan_2_canvas)
// add container columns to the correct rows
row_hr.append(container_col_hr)
row_hr.append(container_col_temp)
row_fan_title.append(container_col_title_fan_1)
row_fan_title.append(container_col_title_fan_2)
row_fan.append(container_col_fan_1)
row_fan.append(container_col_fan_2)
// append the rows to the columns
column.append(row_hr)
column.append(row_fan_title)
column.append(row_fan)
// create a row for buttons
var row_buttons = document.createElement('div');
row_buttons.className = "row mt-3"
if (miner.Light == "show") {
// add the light container to the row
row_buttons.append(container_light)
}
// add the row to the main column
column.append(row_buttons)
// add the column to the page
container_all.append(column);
// generate the graphs
generate_graphs(miner, hr_canvas, temp_canvas, fan_1_canvas, fan_2_canvas);
}
});
}

View File

@@ -0,0 +1,7 @@
import {generate_layout} from "./create_layout.js"
// when miner data is sent
ws.onmessage = function(event) {
// generate the layout of the page
generate_layout(JSON.parse(event.data));
};

View File

@@ -0,0 +1,135 @@
import { options_hr, options_temp, options_fans } from "./graph_options.js";
// generate graphs used for the layout
export function generate_graphs(miner, hr_canvas, temp_canvas, fan_1_canvas, fan_2_canvas) {
var hr_data = []
var count = 0
// get data on all 3 boards
for (const board_num of [6, 7, 8]) {
// check if that board exists in the data
if (("board_" + board_num) in miner.HR) {
// set the key used to get the data
var key = "board_"+board_num
// add the hr info to the hr_data
hr_data.push({label: board_num, data: [miner.HR[key].HR], backgroundColor: []})
// set the colors to be used in the graphs (shades of blue)
if (board_num == 6) {
hr_data[count].backgroundColor = ["rgba(0, 19, 97, 1)"]
} else if (board_num == 7) {
hr_data[count].backgroundColor = ["rgba(0, 84, 219, 1)"]
} else if (board_num == 8) {
hr_data[count].backgroundColor = ["rgba(36, 180, 224, 1)"]
}
count += 1
}
}
// create the hr chart
var chart_hr = new Chart(hr_canvas, {
type: "bar",
data: {
labels: ["Hashrate"],
// data from above
datasets: hr_data
},
// options imported from graph_options.js
options: options_hr
});
var temps_data = []
// get temp data for each board
for (const board_num of [6, 7, 8]) {
// check if the board is in the keys list
if (("board_" + board_num) in miner.Temps) {
// set the key to be used to access the data
key = "board_"+board_num
// add chip and board temps to the temps_data along with colors
temps_data.push({label: board_num + " Chip", data: [miner.Temps[key].Chip], backgroundColor: ["rgba(6, 92, 39, 1)"]});
temps_data.push({label: board_num + " Board", data: [miner.Temps[key].Board], backgroundColor: ["rgba(255, 15, 58, 1)"]});
}
}
var chart_temp = new Chart(temp_canvas, {
type: "bar",
data: {
labels: ["Temps"],
// data from above
datasets: temps_data
},
// options imported from graph_options.js
options: options_temp,
});
// get fan rpm
var fan_rpm_1 = miner.Fans.fan_0.RPM;
// use a dark red track when the fan is stopped, grey otherwise
var secondary_col_1 = fan_rpm_1 == 0 ? "rgba(97, 4, 4, 1)" : "rgba(199, 199, 199, 1)";
var fan_rpm_2 = miner.Fans.fan_1.RPM;
var secondary_col_2 = fan_rpm_2 == 0 ? "rgba(97, 4, 4, 1)" : "rgba(199, 199, 199, 1)";
// set the fan data to be rpm and the rest to go up to 6000
var fan_data_1 = [fan_rpm_1, (6000-fan_rpm_1)];
// create the fan 1 chart
var chart_fan_1 = new Chart(fan_1_canvas, {
type: "doughnut",
data: {
labels: ["Fan L"],
datasets: [
{
// data from above, no colors included
data: fan_data_1,
// add colors
backgroundColor: [
"rgba(103, 0, 221, 1)",
secondary_col_1
]
},
]
},
// options imported from graph_options.js
options: options_fans
});
var fan_data_2 = [fan_rpm_2, (6000-fan_rpm_2)];
// create the fan 2 chart
var chart_fan_2 = new Chart(fan_2_canvas, {
type: "doughnut",
data: {
labels: ["Fan R"],
datasets: [
{
// data from above, no colors included
data: fan_data_2,
// add colors
backgroundColor: [
"rgba(103, 0, 221, 1)",
secondary_col_2
]
},
]
},
// options imported from graph_options.js
options: options_fans
});
}

View File

@@ -0,0 +1,59 @@
// All options for creation of graphs in ./generate_graphs.js
export var options_hr = {
animation: {
duration: 0,
},
responsive: true,
aspectRatio: .75,
plugins: {
legend: {
display: false,
}
},
scales: {
y: {
ticks: { stepSize: .6 },
min: 0,
suggestedMax: 3.6,
grid: {
color: function(context) {
if (context.tick.value == 2.4) {
return "rgba(0, 0, 0, 1)";
} else if (context.tick.value > 2.4) {
return "rgba(103, 221, 0, 1)";
} else if (context.tick.value < 2.4) {
return "rgba(221, 0, 103, 1)";
}
}
}
}
}
};
export var options_temp = {
animation: {
duration: 0,
},
responsive: true,
plugins: {
legend: {
display: false,
}
},
aspectRatio: .75,
};
export var options_fans = {
animation: {
duration: 0,
},
aspectRatio: 1.5,
events: [],
responsive: true,
plugins: {
legend: {
display: false,
}
}
};

View File

@@ -0,0 +1,419 @@
<!DOCTYPE html>
<html lang="en">
<head>
<!-- Include chart.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/3.5.1/chart.min.js" integrity="sha512-Wt1bJGtlnMtGP0dqNFH1xlkLBNpEodaiQ8ZN5JLA5wpc1sUlk/O5uuOMNgvzddzkpvZ9GLyYNa8w2s7rqiTk5Q==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<!-- Include Bootstrap -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.0/dist/css/bootstrap.min.css" integrity="sha384-KyZXEAg3QhqLMpG8r+8fhAXLRk2vvoC2f3B09zVXn8CA5QIVfZOJ3BCsw2P0p/We" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.0/dist/js/bootstrap.min.js" integrity="sha384-cn7l7gDp0eyniUwwAZgrzD06kc/tftFf19TOAs2zVinnD/C7E91j9yyk5//jjpt/" crossorigin="anonymous"></script>
<meta charset="UTF-8">
<title>Web Testbench</title>
</head>
<body>
<div class="py-2">
<div class="container">
<div id="chart_container" class="row row-cols-1 row-cols-sm-2 row-cols-md-4" style="height:1500px;"></div>
</div>
</div>
<script>
var options_hr = {
animation: {
duration: 0,
},
responsive: true,
aspectRatio: .75,
plugins: {
legend: {
display: false,
}
},
scales: {
y: {
ticks: { stepSize: .6 },
min: 0,
suggestedMax: 3.6,
grid: {
color: function(context) {
if (context.tick.value == 2.4) {
return "rgba(0, 0, 0, 1)";
} else if (context.tick.value > 2.4) {
return "rgba(103, 221, 0, 1)";
} else if (context.tick.value < 2.4) {
return "rgba(221, 0, 103, 1)";
}
}
}
}
}
};
var options_temp = {
animation: {
duration: 0,
},
responsive: true,
plugins: {
legend: {
display: false,
}
},
aspectRatio: .75,
};
var options_fans = {
animation: {
duration: 0,
},
aspectRatio: 1.5,
events: [],
responsive: true,
plugins: {
legend: {
display: false,
}
}
};
var ws = new WebSocket("ws://{{request.url.hostname}}:{% if request.url.port %}{{request.url.port}}{% else %}80{% endif %}/ws");
function lightMiner(ip, checkbox) {
// if the checkbox is checked turn the light on, otherwise off
if (checkbox.checked){
ws.send(JSON.stringify({"IP": ip, "Data": "light"}))
} else if (!checkbox.checked) {
ws.send(JSON.stringify({"IP": ip, "Data": "unlight"}))
}
};
ws.onmessage = function(event) {
var data = JSON.parse(event.data)
if (data.hasOwnProperty("online")) {
var timer = document.getElementById(data["IP"] + "-timer")
if (data["online"] == "0:00:00") {
if (timer.classList.contains("btn-success")) {
timer.classList.remove("btn-success")
timer.className += " btn-secondary"
}
} else {
if (timer.classList.contains("btn-secondary")) {
timer.classList.remove("btn-secondary")
timer.className += " btn-success"
}
}
timer.innerHTML = data["online"]
};
if (data.hasOwnProperty("miners")) {
var container_all = document.getElementById('chart_container');
container_all.innerHTML = ""
data["miners"].forEach(function(miner) {
// create column with ID being the IP for later use
var column = document.createElement('div');
column.className = "col border border-dark p-3"
column.id = miner["IP"]
// create button group
var button_group = document.createElement("div");
button_group.className = "btn-group w-100"
// create IP address header
var header = document.createElement('button');
header.className = "text-center btn btn-primary"
header.onclick = function(){window.open("http://" + miner["IP"], '_blank');}
header.innerHTML += miner["IP"]
// create online timer
var timer = document.createElement('button');
timer.className = "text-center btn btn-secondary"
timer.disabled = true
timer.innerHTML = "0:00:00"
timer.id = miner["IP"] + "-timer"
button_group.append(header)
button_group.append(timer)
column.append(button_group)
// create install stdout
var row_text = document.createElement('div');
row_text.className = "row p-3"
row_text.id = miner["IP"] + "-stdout"
// create text area for data
var text_area = document.createElement('textarea');
text_area.id = miner["IP"] + "-stdout_text"
text_area.rows = "15"
text_area.className = "form-control"
text_area.style = "font-size: 12px"
text_area.disabled = true
text_area.readOnly = true
row_text.append(text_area)
column.append(row_text)
// create hr and temp canvas
var hr_canvas = document.createElement('canvas');
hr_canvas.width = 125
hr_canvas.height = 125
hr_canvas.id = miner["IP"] + "-hr"
var temp_canvas = document.createElement('canvas');
temp_canvas.width = 125
temp_canvas.height = 125
temp_canvas.id = miner["IP"] + "-temp"
// create fan 1 title
var fan_1_title = document.createElement('p');
fan_1_title.innerHTML += "Fan L: 0 RPM";
fan_1_title.className = "text-center"
fan_1_title.id = miner["IP"] + "-fan_l"
// create fan 2 title
var fan_2_title = document.createElement('p');
fan_2_title.innerHTML += "Fan R: 0 RPM";
fan_2_title.className = "text-center"
fan_2_title.id = miner["IP"] + "-fan_r"
// create fan 1 canvas
var fan_1_canvas = document.createElement('canvas');
fan_1_canvas.width = 100
fan_1_canvas.height = 100
fan_1_canvas.id = miner["IP"] + "-fan-1"
// create fan 2 canvas
var fan_2_canvas = document.createElement('canvas');
fan_2_canvas.width = 100
fan_2_canvas.height = 100
fan_2_canvas.id = miner["IP"] + "-fan-2"
// create row for hr and temp data
var row_hr = document.createElement('div');
row_hr.className = "row"
// create row for titles of fans
var row_fan_title = document.createElement('div');
row_fan_title.className = "row"
// create row for fan graphs
var row_fan = document.createElement('div');
row_fan.className = "row mb-4"
// create hr container
var container_col_hr = document.createElement('div');
container_col_hr.className = "col w-50 ps-0 pe-4"
// create temp container
var container_col_temp = document.createElement('div');
container_col_temp.className = "col w-50 ps-0 pe-4"
// create fan title 1 container
var container_col_title_fan_1 = document.createElement('div');
container_col_title_fan_1.className = "col"
// create fan title 2 container
var container_col_title_fan_2 = document.createElement('div');
container_col_title_fan_2.className = "col"
// create fan 1 data container
var container_col_fan_1 = document.createElement('div');
container_col_fan_1.className = "col w-50 ps-3 pe-1"
// create fan 2 data container
var container_col_fan_2 = document.createElement('div');
container_col_fan_2.className = "col w-50 ps-3 pe-1"
// append canvases to the appropriate container columns
container_col_hr.append(hr_canvas)
container_col_temp.append(temp_canvas)
container_col_title_fan_1.append(fan_1_title)
container_col_title_fan_2.append(fan_2_title)
container_col_fan_1.append(fan_1_canvas)
container_col_fan_2.append(fan_2_canvas)
// add container columns to the correct rows
row_hr.append(container_col_hr)
row_hr.append(container_col_temp)
row_fan_title.append(container_col_title_fan_1)
row_fan_title.append(container_col_title_fan_2)
row_fan.append(container_col_fan_1)
row_fan.append(container_col_fan_2)
// create miner graph container
var miner_graphs = document.createElement('div');
miner_graphs.id = miner["IP"] + "-graphs"
miner_graphs.hidden = true
// append the rows to the column
miner_graphs.append(row_hr)
miner_graphs.append(row_fan_title)
miner_graphs.append(row_fan)
column.append(miner_graphs)
// create light button container
var container_light = document.createElement('div');
container_light.className = "form-check form-switch justify-content-evenly"
container_light.style = "display: none;"
container_light.id = miner["IP"] + "-light_container"
// create light button
var light_switch = document.createElement('input');
light_switch.type = "checkbox"
if (miner["Light_On"] == true) {
light_switch.checked = true
}
light_switch.id = miner["IP"] + "-light"
light_switch.className = "form-check-input"
light_switch.addEventListener("click", function(){lightMiner(miner["IP"], light_switch);}, false);
// add a light label to the button
var label_light = document.createElement("label");
label_light.setAttribute("for", miner["IP"] + "-light");
label_light.innerHTML = "Light";
// add the button and label to the container
container_light.append(light_switch)
container_light.append(label_light)
column.append(container_light)
container_all.append(column)
var chart_hr = new Chart(hr_canvas, {
type: "bar",
data: {
labels: ["Hashrate"],
datasets: [],
},
options: options_hr
});
var chart_temp = new Chart(temp_canvas, {
type: "bar",
data: {
labels: ["Temps"],
datasets: [],
},
options: options_temp,
});
var chart_fan_1 = new Chart(fan_1_canvas, {
type: "doughnut",
data: {
labels: ["Fan L"],
datasets: [
{
data: [],
// add colors
backgroundColor: [
"rgba(103, 0, 221, 1)",
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
// create the fan 2 chart
var chart_fan_2 = new Chart(fan_2_canvas, {
type: "doughnut",
data: {
labels: ["Fan R"],
datasets: [
{
data: [],
backgroundColor: [
"rgba(103, 0, 221, 1)",
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
});
}
else if (data.hasOwnProperty("HR")) {
var miner_stdout = document.getElementById(data["IP"] + "-stdout")
miner_stdout.hidden = true
var miner_graphs = document.getElementById(data["IP"] + "-graphs")
miner_graphs.hidden = false
var hr_graph = Chart.getChart(data["IP"] + "-hr")
var temp_graph = Chart.getChart(data["IP"] + "-temp")
var fan_1_graph = Chart.getChart(data["IP"] + "-fan-1")
var fan_2_graph = Chart.getChart(data["IP"] + "-fan-2")
// update hr graph data and call the Update method
var hr_data = []
hr_data.push({label: "Board 6", data: [data["HR"]["board_6"]["HR"]], backgroundColor: ["rgba(0, 19, 97, 1)"]});
hr_data.push({label: "Board 7", data: [data["HR"]["board_7"]["HR"]], backgroundColor: ["rgba(0, 84, 219, 1)"]});
hr_data.push({label: "Board 8", data: [data["HR"]["board_8"]["HR"]], backgroundColor: ["rgba(36, 180, 224, 1)"]});
hr_graph.data.datasets = hr_data;
hr_graph.update();
// update temp graph data and call the Update method
var temp_data = []
temp_data.push({label: "Board 6 Chips", data: [data["Temps"]["board_6"]["Chip"]], backgroundColor: ["rgba(6, 92, 39, 1)"]});
temp_data.push({label: "Board 6", data: [data["Temps"]["board_6"]["Board"]], backgroundColor: ["rgba(255, 15, 58, 1)"]});
temp_data.push({label: "Board 7 Chips", data: [data["Temps"]["board_7"]["Chip"]], backgroundColor: ["rgba(6, 92, 39, 1)"]});
temp_data.push({label: "Board 7", data: [data["Temps"]["board_7"]["Board"]], backgroundColor: ["rgba(255, 15, 58, 1)"]});
temp_data.push({label: "Board 8 Chips", data: [data["Temps"]["board_8"]["Chip"]], backgroundColor: ["rgba(6, 92, 39, 1)"]});
temp_data.push({label: "Board 8", data: [data["Temps"]["board_8"]["Board"]], backgroundColor: ["rgba(255, 15, 58, 1)"]});
temp_graph.data.datasets = temp_data;
temp_graph.update();
// update fan 1 graph data and call the Update method
var fan_1_rpm = data["Fans"]["fan_0"]["RPM"]
var fan_1_title = document.getElementById(data["IP"] + "-fan_l");
fan_1_title.innerHTML = "Fan L: " + fan_1_rpm + " RPM";
if (fan_1_rpm == 0){
var secondary_col_1 = "rgba(97, 4, 4, 1)"
} else {
var secondary_col_1 = "rgba(199, 199, 199, 1)"
}
var fan_1_data = [{label: "Fan Speed", data: [fan_1_rpm, 6000-fan_1_rpm], backgroundColor: ["rgba(103, 0, 221, 1)", secondary_col_1]}]
fan_1_graph.data.datasets = fan_1_data;
fan_1_graph.update();
// update fan 2 graph data and call the Update method
var fan_2_rpm = data["Fans"]["fan_1"]["RPM"]
var fan_2_title = document.getElementById(data["IP"] + "-fan_r");
fan_2_title.innerHTML = "Fan R: " + fan_2_rpm + " RPM";
if (fan_2_rpm == 0){
var secondary_col_2 = "rgba(97, 4, 4, 1)"
} else {
var secondary_col_2 = "rgba(199, 199, 199, 1)"
}
var fan_2_data = [{label: "Fan Speed", data: [fan_2_rpm, 6000-fan_2_rpm], backgroundColor: ["rgba(103, 0, 221, 1)", secondary_col_2]}]
fan_2_graph.data.datasets = fan_2_data;
fan_2_graph.update();
} else if (data.hasOwnProperty("text")) {
var miner_graphs = document.getElementById(data["IP"] + "-graphs")
miner_graphs.hidden = true
var miner_stdout = document.getElementById(data["IP"] + "-stdout")
var miner_stdout_text = document.getElementById(data["IP"] + "-stdout_text")
miner_stdout.hidden = false
miner_stdout_text.innerHTML = data["text"] + miner_stdout_text.innerHTML
}
if (data.hasOwnProperty("Light")) {
var light_box = document.getElementById(data["IP"] + "-light_container")
if (data["Light"] == "show") {
light_box.style = "display: flex;"
} else if (data["Light"] == "hide") {
light_box.style = "display: none;"
}
}
}
</script>
</body>
</html>

10
web_monitor.py Normal file
View File

@@ -0,0 +1,10 @@
from tools.web_monitor.app import app
import uvicorn
def main():
uvicorn.run("web_monitor:app", host="0.0.0.0", port=80)
if __name__ == "__main__":
main()

10
web_testbench.py Normal file
View File

@@ -0,0 +1,10 @@
from tools.web_testbench.app import app
import uvicorn
from logger import logger
def main():
uvicorn.run("web_testbench:app", host="0.0.0.0", port=80)
if __name__ == "__main__":
main()