Compare commits

...

217 Commits

Author SHA1 Message Date
UpstreamData
8677eff491 moved miner count and hashrate to top of tool 2022-05-10 14:00:50 -06:00
UpstreamData
63a21ea9aa updated formatting on scrollbars 2022-05-10 13:53:18 -06:00
UpstreamData
1c9d3dc84d updated formatting on page 2022-05-10 13:44:08 -06:00
UpstreamData
0dacd3d294 changed sorting to show up on the table headers 2022-05-10 11:51:26 -06:00
UpstreamData
6fa74613b4 updated look of CFG util 2022-05-10 11:13:27 -06:00
UpstreamData
f7fb7a3acb update requirements.txt 2022-05-09 10:25:25 -06:00
UpstreamData
666c5bfc64 added new text buttons to show total hashrate and current sort key 2022-05-09 10:24:48 -06:00
UpstreamData
1f8d92f6bb fixed some bugs with sorting 2022-05-09 09:59:48 -06:00
UpstreamData
ef336a9e23 added asyncio event loop policy update to fix some bugs 2022-05-09 09:20:21 -06:00
UpstreamData
7fe6fd47fb added sorting to command table (Tree) 2022-05-09 09:14:32 -06:00
UpstreamData
91a0298d96 fix a bug where unknown miners would break configuration 2022-05-06 16:29:07 -06:00
UpstreamData
ed3d8fc815 Merge branch 'pyqt_gui_cfg_util' 2022-05-06 16:22:28 -06:00
UpstreamData
4f2d630746 fix formatting on readme 2022-05-06 16:22:13 -06:00
UpstreamData
a8c685a883 switched cfg_util over to new version 2022-05-06 16:20:02 -06:00
UpstreamData
09660e1934 added indicators of what function is running 2022-05-06 16:12:17 -06:00
UpstreamData
c01908ff9a added custom command functionality 2022-05-06 16:01:50 -06:00
UpstreamData
267c388a95 added restarting and rebooting miner backends 2022-05-06 15:52:21 -06:00
UpstreamData
8215d33241 added configuration button 2022-05-06 15:39:18 -06:00
UpstreamData
f4258a304a add importing configuration from miners 2022-05-06 15:14:49 -06:00
UpstreamData
514fafea58 add generate command and change config converters to non async 2022-05-06 15:06:18 -06:00
UpstreamData
e324369fe0 fixed some bugs with sorting when refreshing data and added refreshing data 2022-05-06 14:55:58 -06:00
UpstreamData
3bc9287668 add scan retries to getting data 2022-05-06 13:51:20 -06:00
UpstreamData
d90bf190c5 added reverse sorting and fixed hashrate sorting 2022-05-06 13:34:12 -06:00
UpstreamData
8cc6f66458 added sorting to the 3 main tables 2022-05-06 12:03:43 -06:00
UpstreamData
a2b071af4f fully implemented fault light command 2022-05-06 11:36:57 -06:00
UpstreamData
b7b589802f added avalon1 1066 to board util tentatively 2022-05-06 09:11:08 -06:00
UpstreamData
93912a6df6 fixed a bug with hashrate data not getting sent with some miners 2022-05-06 08:41:04 -06:00
UpstreamData
ffce15f653 fixed some bugs with latest version of toolbox 2022-05-06 08:41:04 -06:00
UpstreamData
725b14e583 added table manager, to manage tables and handle the treeview 2022-05-05 15:53:13 -06:00
UpstreamData
26c6e47f1e added the ability to update the treeview and images in it no longer are as buggy 2022-05-05 14:47:18 -06:00
UpstreamData
51dae7375f added select all button and functionality 2022-05-05 13:48:57 -06:00
UpstreamData
801cfc4ff8 updated some formatting and improved pool return format 2022-05-05 13:02:00 -06:00
UpstreamData
ac3ff7a63e justify hostname to the left 2022-05-05 12:12:10 -06:00
UpstreamData
1b22810f4b fixed formatting on hashrate 2022-05-05 12:07:57 -06:00
UpstreamData
b756c9e4a1 added getting data for btminer 2022-05-05 11:37:04 -06:00
UpstreamData
64b5e6c032 added getting data for bmminer and cgminer 2022-05-05 11:19:11 -06:00
UpstreamData
a13f5dd2d1 fix some bugs and start adding bmminer get_data function 2022-05-05 10:52:18 -06:00
UpstreamData
e6ea8d3e16 added hostname logging and a generalized get dta function for braiins OS 2022-05-05 10:35:47 -06:00
UpstreamData
af37850289 greatly improved functionality of miner factory 2022-05-05 09:17:20 -06:00
UpstreamData
6ecdfa1cf8 scanning now gets data 2022-05-04 16:04:46 -06:00
UpstreamData
c0b21ebc23 fixed scanning to the tree for commands 2022-05-04 15:06:15 -06:00
UpstreamData
184ada417f added tables and basic scanning 2022-05-04 14:44:19 -06:00
UpstreamData
b636860ecb started basic cfg util changes 2022-05-04 13:08:58 -06:00
UpstreamData
0107fdacde update requirements.txt 2022-05-02 10:36:20 -06:00
UpstreamData
ce5e1cad40 added the option to append the last octet of the IP address to the username when configuring 2022-04-29 15:37:07 -06:00
UpstreamData
d877ba01a0 fix spelling issue 2022-04-29 15:02:54 -06:00
UpstreamData
b0ed990d5a update requirements.txt 2022-04-29 14:38:33 -06:00
UpstreamData
89c8a16900 fix light functionality to work as intended 2022-04-29 13:25:08 -06:00
UpstreamData
247cf0ccc2 added fault light option to the board utility 2022-04-29 10:18:16 -06:00
UpstreamData
d0aa219a7a add first page and pie chart to board report 2022-04-28 11:12:33 -06:00
UpstreamData
87291e2a89 change some formatting with the board report and fix some bugs 2022-04-27 16:58:47 -06:00
UpstreamData
9c88d21db6 add basic board report to board util 2022-04-27 16:35:11 -06:00
UpstreamData
8b7415042f fixed a bug with the webserver 2022-04-25 14:40:32 -06:00
UpstreamData
59ab6e6c8a reformatted and clarified some code 2022-04-21 10:09:30 -06:00
UpstreamData
0724a376ea refactored some code in board util 2022-04-21 09:43:22 -06:00
UpstreamData
f9f26a5587 added better logging and process to testbench 2022-04-20 11:36:09 -06:00
UpstreamData
ed4122fb21 added better logging to testbench 2022-04-19 10:29:13 -06:00
UpstreamData
0739a7f689 added a try except block for logging errors per miner in the testbench 2022-04-19 10:15:12 -06:00
UpstreamData
c7b7a6e7c5 made sure there will always be board 6, 7, and 8 in tunerstatus 2022-04-18 16:12:12 -06:00
UpstreamData
2a132c8325 addded basic tuner status data on testbench 2022-04-18 16:02:21 -06:00
UpstreamData
154882a668 fixed an issue with pinging when done. 2022-04-18 14:29:54 -06:00
UpstreamData
3f64c9dd67 Merge remote-tracking branch 'origin/testbench-webserver' into testbench-webserver
# Conflicts:
#	miners/bosminer.py
#	tools/bad_board_util/func/decorators.py
#	tools/bad_board_util/layout.py
#	tools/bad_board_util/ui.py
2022-04-18 14:17:29 -06:00
UpstreamData
d8d66e4244 fixed a bug with not hiding the light button 2022-04-18 14:17:04 -06:00
UpstreamData
a9cdefcd43 finished adding timer 2022-04-18 14:17:04 -06:00
UpstreamData
029d3ef596 added online timer for testing 2022-04-18 14:17:04 -06:00
UpstreamData
0e474402c0 reformatted files 2022-04-18 14:17:04 -06:00
UpstreamData
b6560cdedb added fixing file exists bug 2022-04-18 14:17:04 -06:00
UpstreamData
767575703e fixed some bugs with finishing the install 2022-04-18 14:17:04 -06:00
UpstreamData
4b4d9060ed changed some printing to logging logs 2022-04-18 14:17:04 -06:00
UpstreamData
ad75b1d25c added web testbench to main apps 2022-04-18 14:17:03 -06:00
UpstreamData
4b767c5427 fixed more bugs 2022-04-18 14:17:03 -06:00
UpstreamData
a6df7a83d6 fixed many remaining bugs in testbench webserver, should be ready for use. 2022-04-18 14:17:03 -06:00
UpstreamData
93f2990399 finished miner install to be tested 2022-04-18 14:17:03 -06:00
UpstreamData
e74f67089e finished light functionality 2022-04-18 14:17:03 -06:00
UpstreamData
41a6078790 added partial fault light functionality and fixed stdout output direction 2022-04-18 14:17:03 -06:00
UpstreamData
4d93926fee added output when running install process 2022-04-18 14:17:03 -06:00
UpstreamData
03f5cafe76 added sending output from miners 2022-04-18 14:17:03 -06:00
UpstreamData
4f6ebff880 set graphs to show and hide when getting data 2022-04-18 14:17:03 -06:00
UpstreamData
af27cbbe2c set graphs to update when receiving data 2022-04-18 14:17:03 -06:00
UpstreamData
3604957c83 added auto port finding to both web apps 2022-04-18 14:17:03 -06:00
UpstreamData
3670a02aec add feeds updater to startup process 2022-04-18 14:17:03 -06:00
UpstreamData
7ebfdb3f33 added feeds auto-updater for web testbench 2022-04-18 14:17:03 -06:00
UpstreamData
b9b7da8746 add base files for web interface 2022-04-18 14:17:03 -06:00
UpstreamData
eaaf137b9b added temp fake data to the app for it to send to the JS side. 2022-04-18 14:15:46 -06:00
UpstreamData
a0311e3ce3 add base files for web interface 2022-04-18 14:15:44 -06:00
UpstreamData
8864aa7b4b added install file to do the basic install 2022-04-18 14:15:24 -06:00
UpstreamData
4d58129eee fixed a bug with not hiding the light button 2022-04-18 13:12:08 -06:00
UpstreamData
4468fe9fbb finished adding timer 2022-04-18 12:29:55 -06:00
UpstreamData
3b716a044b added online timer for testing 2022-04-18 12:13:41 -06:00
UpstreamData
25e657729c reformatted files 2022-04-18 10:24:53 -06:00
UpstreamData
cace399ed2 added fixing file exists bug 2022-04-18 10:13:48 -06:00
UpstreamData
045e1ca6ba fixed some bugs with finishing the install 2022-04-18 09:52:45 -06:00
UpstreamData
4f86dec560 changed some printing to logging logs 2022-04-18 08:49:21 -06:00
UpstreamData
13f033440d added web testbench to main apps 2022-04-14 18:43:36 -06:00
UpstreamData
b5c455ffa4 fixed more bugs 2022-04-14 18:38:29 -06:00
UpstreamData
eb5a00b706 fixed many remaining bugs in testbench webserver, should be ready for use. 2022-04-14 18:17:23 -06:00
UpstreamData
3a560472e6 finished miner install to be tested 2022-04-14 14:40:31 -06:00
UpstreamData
4776dce038 finished light functionality 2022-04-14 13:16:16 -06:00
UpstreamData
2d6891c6d2 added partial fault light functionality and fixed stdout output direction 2022-04-14 11:34:21 -06:00
UpstreamData
f5a41f7b13 added output when running install process 2022-04-14 11:08:52 -06:00
UpstreamData
4a2926df94 added sending output from miners 2022-04-14 10:57:32 -06:00
UpstreamData
8736f33a56 set graphs to show and hide when getting data 2022-04-14 10:43:26 -06:00
UpstreamData
89eb77588f set graphs to update when receiving data 2022-04-14 10:34:51 -06:00
UpstreamData
c930510226 added auto port finding to both web apps 2022-04-14 09:43:43 -06:00
UpstreamData
b7c58e5d34 add feeds updater to startup process 2022-04-14 09:37:06 -06:00
UpstreamData
ce48ae020b added feeds auto-updater for web testbench 2022-04-11 16:13:04 -06:00
UpstreamData
7809bfc0d1 added exporting a report from bad board utility 2022-04-01 15:19:12 -06:00
UpstreamData
d84fcaafdf added bos get version 2022-04-01 13:33:05 -06:00
UpstreamData
a9f600b797 add base files for web interface 2022-03-31 11:32:42 -06:00
UpstreamData
f0a8e7ba9f reformatted all files to use the Black formatting style 2022-03-31 11:30:34 -06:00
UpstreamData
c57a523553 reformatted all files to use the Black formatting style 2022-03-31 11:27:57 -06:00
UpstreamData
d905f6f414 added temp fake data to the app for it to send to the JS side. 2022-03-30 08:42:21 -06:00
UpstreamData
22f78ac405 add base files for web interface 2022-03-25 16:02:50 -06:00
UpstreamData
7a098b1c7e added install file to do the basic install 2022-03-25 15:29:30 -06:00
UpstreamData
e1383f2002 Added support for X19 models with BraiinsOS 2022-03-25 09:06:25 -06:00
UpstreamData
c3b23313ba added changing model when configuring for BOS S9s 2022-03-25 08:58:02 -06:00
UpstreamData
02581e917d add temperature graph to miner page 2022-03-21 10:02:11 -06:00
UpstreamData
e267073f76 add the start of a temperature graph to miner page 2022-03-21 09:39:54 -06:00
UpstreamData
4038dae446 fixed some bugs on linux with pipes 2022-03-18 12:02:42 -06:00
UpstreamData
134b5fe0ff added CTRL+A select all binding to cfg util and board util tables 2022-03-17 16:10:12 -06:00
UpstreamData
d452ca36b7 fixed copying from the board util table 2022-03-17 16:05:48 -06:00
UpstreamData
fdec35cd2e added disable button decorator to board util 2022-03-17 16:01:02 -06:00
UpstreamData
d488c8458c added the ability to scan a range of IPs as part of the miner network by passing a string formatted as {ip_range_1_start}-{ip_range_1_end}, {ip_range_2_start}-{ip_range_2_end} to the miner network 2022-03-17 12:05:58 -06:00
UpstreamData
6d2e40c81d added support for avalon10xx miners 2022-03-16 15:21:09 -06:00
UpstreamData
594b5d0448 improved logging format and sent output to a file 2022-03-16 14:03:32 -06:00
UpstreamData
1be12e5d4c moved _get_ssh_connection to the base miner class 2022-03-16 13:34:18 -06:00
UpstreamData
bae2ee4245 changed MinerFactory to a singleton class to ensure clearing its cache is easier and removed creation of independant miner factories for each utility 2022-03-16 12:05:44 -06:00
UpstreamData
57bd606f21 add logging to base miner API 2022-03-16 10:56:33 -06:00
UpstreamData
eb8cefa461 add logging to btminer and fix some bugs 2022-03-16 08:40:41 -06:00
UpstreamData
9edcd866bb added more logging for bosminer models. 2022-03-15 09:07:07 -06:00
UpstreamData
07a8b00a93 added logging to bmminer and X19 models 2022-03-14 16:07:47 -06:00
UpstreamData
c22be7ded8 started adding some basic logging functionality 2022-03-14 15:52:46 -06:00
UpstreamData
2380b94db1 update unknown API docstring 2022-03-14 14:12:31 -06:00
UpstreamData
d8e59afee0 Upsdated bosminer API docstrings, and fixed some errors in CGMiner API docstings 2022-03-14 14:07:17 -06:00
UpstreamData
05e14baa68 added some todos 2022-03-14 11:26:53 -06:00
UpstreamData
ff56148732 fixed some bugs with cgminer, and included VC redistributables in CXFreeze build for CFG util 2022-03-14 10:18:28 -06:00
UpstreamData
bfc5668d24 fixed some bugs with running the web app from docker 2022-03-09 10:53:26 -07:00
UpstreamData
b3103ae700 fixed fan formatting on smaller devices 2022-03-08 12:23:38 -07:00
UpstreamData
43834203a8 reformatted file structure and reformatted for phones, as well as fixed web sockets for remote devices 2022-03-08 11:39:10 -07:00
UpstreamData
7ba8044564 added dockerfile and removed cxfreeze from web_monitor requirements due to it breaking the docker setup 2022-03-08 09:09:28 -07:00
UpstreamData
7e91fe12e7 updated some ports and fixed a bug with summary keys when getting data 2022-03-07 14:54:36 -07:00
UpstreamData
02114aac65 Merge pull request #6 from UpstreamData/web_monitor
Web monitor
2022-03-07 12:40:11 -07:00
UpstreamData
244dac76af finished adding settings page 2022-03-07 12:38:56 -07:00
UpstreamData
2bd25c3f35 started adding settings page 2022-03-07 11:17:41 -07:00
UpstreamData
23350ea4b6 updated requirements, and fixed some formatting issues 2022-03-07 10:36:38 -07:00
UpstreamData
8a6917878e Merge remote-tracking branch 'origin/web_monitor' into web_monitor 2022-03-07 09:40:42 -07:00
UpstreamData
7dd00954e4 fixed some issues with the rounding on floats in the JS 2022-03-07 09:39:56 -07:00
UpstreamData
f3710f618e added miner model and hashrate as a table in the per miner stuff 2022-03-07 09:39:56 -07:00
UpstreamData
8ecdb6f5e8 fixed a bug with scanning and adding miner which didnt append to the navbar 2022-03-07 09:39:56 -07:00
UpstreamData
309b4d44fc updated some formatting on charts 2022-03-07 09:39:56 -07:00
UpstreamData
80f941d912 added remove miner functionality 2022-03-07 09:39:56 -07:00
UpstreamData
4534b09532 added custom TH/s formatting to graphs 2022-03-07 09:39:56 -07:00
UpstreamData
97a9b59acc added dashboard hashrate info 2022-03-07 09:39:56 -07:00
UpstreamData
87b8de9029 strated on basic framework for dashboard in web_monitor 2022-03-07 09:39:56 -07:00
UpstreamData
42f5146632 added different select gradient 2022-03-07 09:39:56 -07:00
UpstreamData
f613cc039f added spinner to scan 2022-03-07 09:39:56 -07:00
UpstreamData
e974c77359 added fan and hashrate data for S19s and Whatsminers 2022-03-07 09:39:56 -07:00
UpstreamData
0f324177cb added fan data for braiins OS 2022-03-07 09:39:56 -07:00
UpstreamData
46a4508cd7 updated more gradient formatting an added gradients to navbar 2022-03-07 09:39:56 -07:00
UpstreamData
d4d9b1ad3c added gradients to fan data 2022-03-07 09:39:56 -07:00
UpstreamData
322ee05fdf added bounding box to the chart 2022-03-07 09:39:56 -07:00
UpstreamData
85569366a2 sorted current miners for the navbar 2022-03-07 09:39:56 -07:00
UpstreamData
dea6ff2a96 improved chart functionality in the web monitor and added handlers for errors such as no response from the miner 2022-03-07 09:39:56 -07:00
UpstreamData
3fcd2edf6f charts on miner pages work now, they gather data from miners and put it into the graph, with a max size of 49 entried per graph 2022-03-07 09:39:56 -07:00
UpstreamData
16b84310ec added graph with fake data on each miner page, and added basic formatting to it. 2022-03-07 09:39:56 -07:00
UpstreamData
f8899521bc improved navbar formatting, added active formats for all miners, moved add miners to a miner subtab 2022-03-07 09:39:56 -07:00
UpstreamData
3558a1a6b1 finished up scan page, added the ability to add miners and them get listed in the miner list, and started adding the individual miner pages 2022-03-07 09:39:56 -07:00
UpstreamData
385943755d further improved formatting of scan page, added disabled checkboxes on scan, updated miner count on add 2022-03-07 09:39:56 -07:00
UpstreamData
3002cb4e97 added basic addition of miners to the list and improved some functionality of the web tool 2022-03-07 09:39:56 -07:00
UpstreamData
6d711520fc added add selected miners button 2022-03-07 09:39:56 -07:00
UpstreamData
584de40983 improved formatting on scan page and made the scan a bit more robust 2022-03-07 09:39:56 -07:00
UpstreamData
81911ba549 fixed some formwatting on the scan page 2022-03-07 09:39:55 -07:00
UpstreamData
e37e9e2251 added the scan page to scan for miners on a subnet 2022-03-07 09:39:55 -07:00
UpstreamData
92a65c8977 switched to fastAPi and jinja 2 for templates and html 2022-03-07 09:39:55 -07:00
UpstreamData
ae8b2cbd07 added the required directories for settings and scanning 2022-03-07 09:39:55 -07:00
UpstreamData
cda13edf85 improved formatting of index.html 2022-03-07 09:39:55 -07:00
UpstreamData
610ee57963 started adding HTML files for the web monitor program 2022-03-07 09:39:55 -07:00
UpstreamData
2ef809db54 fixed some issues with the rounding on floats in the JS 2022-03-07 09:32:06 -07:00
UpstreamData
f315c0c051 added miner model and hashrate as a table in the per miner stuff 2022-03-04 16:10:27 -07:00
UpstreamData
936c230aa3 fixed a bug with scanning and adding miner which didnt append to the navbar 2022-03-04 15:48:17 -07:00
UpstreamData
2c93f1f395 updated some formatting on charts 2022-03-04 14:36:43 -07:00
UpstreamData
727ebd9c42 added remove miner functionality 2022-03-04 14:08:27 -07:00
UpstreamData
1e4fc897e3 added custom TH/s formatting to graphs 2022-03-04 13:39:23 -07:00
UpstreamData
3945a86004 added dashboard hashrate info 2022-03-04 11:53:31 -07:00
UpstreamData
58cc64d17b strated on basic framework for dashboard in web_monitor 2022-03-04 11:24:06 -07:00
UpstreamData
b66cf6f0ba added different select gradient 2022-03-02 15:54:49 -07:00
UpstreamData
1db15a741e added spinner to scan 2022-03-02 15:47:17 -07:00
UpstreamData
5f355c833b added fan and hashrate data for S19s and Whatsminers 2022-03-02 15:38:29 -07:00
UpstreamData
a76b32e3ff added fan data for braiins OS 2022-03-02 15:15:20 -07:00
UpstreamData
f2c01dca25 updated more gradient formatting an added gradients to navbar 2022-03-02 14:36:34 -07:00
UpstreamData
abc542a0ca added gradients to fan data 2022-03-02 13:12:20 -07:00
UpstreamData
9e598ebd8c added bounding box to the chart 2022-03-02 12:15:46 -07:00
UpstreamData
7801ca5819 sorted current miners for the navbar 2022-03-02 11:16:02 -07:00
UpstreamData
482edabd27 improved chart functionality in the web monitor and added handlers for errors such as no response from the miner 2022-03-02 11:11:34 -07:00
UpstreamData
3e5998de6e charts on miner pages work now, they gather data from miners and put it into the graph, with a max size of 49 entried per graph 2022-03-01 16:17:28 -07:00
UpstreamData
c3d19607f6 added graph with fake data on each miner page, and added basic formatting to it. 2022-03-01 16:01:39 -07:00
UpstreamData
2c2648cbe7 improved navbar formatting, added active formats for all miners, moved add miners to a miner subtab 2022-03-01 12:51:49 -07:00
UpstreamData
a72c4f7797 finished up scan page, added the ability to add miners and them get listed in the miner list, and started adding the individual miner pages 2022-03-01 12:28:36 -07:00
UpstreamData
19ee9eb18f further improved formatting of scan page, added disabled checkboxes on scan, updated miner count on add 2022-03-01 11:30:48 -07:00
UpstreamData
3ae29c3883 added basic addition of miners to the list and improved some functionality of the web tool 2022-02-28 16:28:40 -07:00
UpstreamData
d9f8f53a10 added add selected miners button 2022-02-28 15:15:57 -07:00
UpstreamData
6b3e525f45 improved formatting on scan page and made the scan a bit more robust 2022-02-28 14:10:43 -07:00
UpstreamData
c8824f86af fixed some formwatting on the scan page 2022-02-25 16:11:06 -07:00
UpstreamData
cf3163dccf added the scan page to scan for miners on a subnet 2022-02-25 15:58:01 -07:00
UpstreamData
da5a784214 switched to fastAPi and jinja 2 for templates and html 2022-02-24 15:59:48 -07:00
UpstreamData
30b3315084 added the required directories for settings and scanning 2022-02-24 15:25:49 -07:00
UpstreamData
5a7dcc7fcf fixed some bugs in getting ssh connections 2022-02-24 14:42:34 -07:00
UpstreamData
c6305c57cf improved formatting of index.html 2022-02-24 09:13:07 -07:00
UpstreamData
d330e2e978 started adding HTML files for the web monitor program 2022-02-24 08:57:23 -07:00
UpstreamData
1ec2a2a4a6 update CFG-Util-README.md 2022-02-23 14:39:29 -07:00
UpstreamData
c97d384cf4 updated red row color on fault light to work with tkinter tags and be sortable. 2022-02-23 14:35:29 -07:00
UpstreamData
ca52e40a6a fixed a bug with fault lighting bugging the tool 2022-02-23 11:56:21 -07:00
UpstreamData
4a10efd7a4 added send command option in the window 2022-02-22 13:53:07 -07:00
UpstreamData
128aab1b88 switched to a monospace font in the board util. 2022-02-22 11:01:00 -07:00
UpstreamData
bb89be64f4 switched to a monospace font in the cfg tool, padded the hashrates to appear as decimal centered, and left justified hostnames for better readability. 2022-02-22 10:49:23 -07:00
UpstreamData
ef0a507306 changed the disabling buttons to use a decorator as it looks much cleaner 2022-02-18 11:10:44 -07:00
UpstreamData
908594970e disabled the buttons that can break each other when another coroutine is running 2022-02-18 10:59:10 -07:00
108 changed files with 7994 additions and 1398 deletions

.dockerignore (new file, 8 additions)

@@ -0,0 +1,8 @@
# Ignore VENV
venv
# Ignore builds
build
# Ignore github files
.github


@@ -2,6 +2,7 @@ import asyncio
import json
import ipaddress
import warnings
import logging
class APIError(Exception):
@@ -41,36 +42,44 @@ class BaseMinerAPI:
def get_commands(self) -> list:
"""Get a list of command accessible to a specific type of API on the miner."""
return [func for func in
# each function in self
dir(self) if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func not in
[func for func in
dir(BaseMinerAPI) if callable(getattr(BaseMinerAPI, func))
]
]
return [
func
for func in
# each function in self
dir(self)
if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func
not in [
func
for func in dir(BaseMinerAPI)
if callable(getattr(BaseMinerAPI, func))
]
]
async def multicommand(self, *commands: str) -> dict:
"""Creates and sends multiple commands as one command to the miner."""
logging.debug(f"{self.ip}: Sending multicommand: {[*commands]}")
# split the commands into a proper list
user_commands = [*commands]
allowed_commands = self.get_commands()
# make sure we can actually run the command, otherwise it will fail
commands = [command for command in user_commands if command in allowed_commands]
for item in list(set(user_commands) - set(commands)):
warnings.warn(f"""Removing incorrect command: {item}
warnings.warn(
f"""Removing incorrect command: {item}
If you are sure you want to use this command please use API.send_command("{item}", ignore_errors=True) instead.""",
APIWarning)
APIWarning,
)
# standard multicommand format is "command1+command2"
# doesnt work for S19 which is dealt with in the send command function
command = "+".join(commands)
data = None
try:
data = await self.send_command(command)
except APIError:
except APIError as e:
try:
data = {}
# S19 handler, try again
@@ -80,11 +89,17 @@ If you are sure you want to use this command please use API.send_command("{item}
except APIError as e:
raise APIError(e)
except Exception as e:
print(e)
logging.warning(f"{self.ip}: API Multicommand Error: {e}")
if data:
logging.debug(f"{self.ip}: Received multicommand data.")
return data
async def send_command(self, command: str, parameters: str or int or bool = None, ignore_errors: bool = False) -> dict:
async def send_command(
self,
command: str,
parameters: str or int or bool = None,
ignore_errors: bool = False,
) -> dict:
"""Send an API command to the miner and return the result."""
try:
# get reader and writer streams
@@ -92,7 +107,7 @@ If you are sure you want to use this command please use API.send_command("{item}
# handle OSError 121
except OSError as e:
if e.winerror == "121":
print("Semaphore Timeout has Expired.")
logging.warning("Semaphore Timeout has Expired.")
return {}
# create the command
@@ -101,7 +116,7 @@ If you are sure you want to use this command please use API.send_command("{item}
cmd["parameter"] = parameters
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
@@ -115,7 +130,7 @@ If you are sure you want to use this command please use API.send_command("{item}
break
data += d
except Exception as e:
print(e)
logging.warning(f"{self.ip}: API Command Error: {e}")
data = self.load_api_data(data)
@@ -128,12 +143,13 @@ If you are sure you want to use this command please use API.send_command("{item}
# validate the command succeeded
validation = self.validate_command_output(data)
if not validation[0]:
logging.warning(f"{self.ip}: API Command Error: {validation[1]}")
raise APIError(validation[1])
return data
@staticmethod
def validate_command_output(data: dict) -> tuple[bool, str | None]:
def validate_command_output(data: dict) -> tuple:
"""Check if the returned command output is correctly formatted."""
# check if the data returned is correct or an error
# if status isn't a key, it is a multicommand
@@ -160,14 +176,15 @@ If you are sure you want to use this command please use API.send_command("{item}
@staticmethod
def load_api_data(data: bytes) -> dict:
"""Convert API data from JSON to dict"""
str_data = None
try:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
@@ -180,6 +197,5 @@ If you are sure you want to use this command please use API.send_command("{item}
parsed_data = json.loads(str_data)
# handle bad json
except json.decoder.JSONDecodeError as e:
print(e)
raise APIError(f"Decode Error: {str_data}")
raise APIError(f"Decode Error {e}: {str_data}")
return parsed_data
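
The hunks above restructure `BaseMinerAPI.get_commands()`, `multicommand()` and `send_command()` and swap the remaining `print()` calls for `logging`. As a minimal usage sketch of that interface (assuming the `API` package is importable as in the README examples further down; the IP address is a placeholder):

```python
import asyncio

from API.bosminer import BOSMinerAPI  # any BaseMinerAPI subclass works the same way


async def main():
    api = BOSMinerAPI("192.168.1.69")  # API port defaults to 4028
    # multicommand() joins the supported commands into "pools+summary" and
    # warns about, then drops, any command the API does not expose
    data = await api.multicommand("pools", "summary")
    print(data.get("pools"), data.get("summary"))


if __name__ == "__main__":
    asyncio.run(main())
```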


@@ -18,6 +18,7 @@ class BMMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, port: int = 4028) -> None:
super().__init__(ip, port)
@@ -115,11 +116,7 @@ class BMMinerAPI(BaseMinerAPI):
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
@@ -128,11 +125,9 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
return await self.send_command(
"addpool", parameters=f"{url}, " f"{username}, " f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
@@ -142,8 +137,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
@@ -153,10 +147,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
return await self.send_command("poolquota", parameters=f"{n}, " f"{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
@@ -292,9 +283,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
@@ -337,10 +326,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
return await self.send_command("setconfig", parameters=f"{name}, " f"{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
@@ -368,15 +354,11 @@ class BMMinerAPI(BaseMinerAPI):
:return: Confirmation of setting PGA n with opt[,val].
"""
if val:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}, "
f"{val}"
)
return await self.send_command(
"pgaset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}")
return await self.send_command("pgaset", parameters=f"{n}, " f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.


@@ -2,64 +2,64 @@ from API import BaseMinerAPI
class BOSMinerAPI(BaseMinerAPI):
"""
A class that abstracts the BOSMiner API in the miners.
"""An abstraction of the BOSMiner API.
Each method corresponds to an API command in BOSMiner.
BOSMiner API documentation:
https://docs.braiins.com/os/plus-en/Development/1_api.html
Parameters:
ip: the IP address of the miner.
port (optional): the port of the API on the miner (standard is 4028)
This class abstracts use of the BOSMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
async def asccount(self) -> dict:
"""
API 'asccount' command.
"""Get data on the number of ASC devices and their info.
Returns a dict containing the number of ASC devices.
:return: Data on all ASC devices.
"""
return await self.send_command("asccount")
async def asc(self, n: int) -> dict:
"""
API 'asc' command.
"""Get data for ASC device n.
Returns a dict containing the details of a single ASC of number N.
:param n: The device to get data for.
n: the ASC device to get details of.
:return: The data for ASC device n.
"""
return await self.send_command("asc", parameters=n)
async def devdetails(self) -> dict:
"""
API 'devdetails' command.
"""Get data on all devices with their static details.
Returns a dict containing all devices with their static details.
:return: Data on all devices with their static details.
"""
return await self.send_command("devdetails")
async def devs(self) -> dict:
"""
API 'devs' command.
"""Get data on each PGA/ASC with their details.
Returns a dict containing each PGA/ASC with their details.
:return: Data on each PGA/ASC with their details.
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""
API 'edevs' command.
"""Get data on each PGA/ASC with their details, ignoring
blacklisted and zombie devices.
Returns a dict containing each PGA/ASC with their details,
ignoring blacklisted devices and zombie devices.
:param old: Include zombie devices that became zombies less
than 'old' seconds ago
Parameters:
old (optional): include zombie devices that became zombies less than 'old' seconds ago
:return: Data on each PGA/ASC with their details.
"""
if old:
return await self.send_command("edevs", parameters="old")
@@ -67,77 +67,76 @@ class BOSMinerAPI(BaseMinerAPI):
return await self.send_command("edevs")
async def pools(self) -> dict:
"""
API 'pools' command.
"""Get pool information.
Returns a dict containing the status of each pool.
:return: Miner pool information.
"""
return await self.send_command("pools")
async def summary(self) -> dict:
"""
API 'summary' command.
"""Get the status summary of the miner.
Returns a dict containing the status summary of the miner.
:return: The status summary of the miner.
"""
return await self.send_command("summary")
async def stats(self) -> dict:
"""
API 'stats' command.
"""Get stats of each device/pool with more than 1 getwork.
Returns a dict containing stats for all device/pool with more than 1 getwork.
:return: Stats of each device/pool with more than 1 getwork.
"""
return await self.send_command("stats")
async def version(self) -> dict:
"""
API 'version' command.
"""Get miner version info.
Returns a dict containing version information.
:return: Miner version information.
"""
return await self.send_command("version")
async def estats(self) -> dict:
"""
API 'estats' command.
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork,
ignoring zombie devices.
Returns a dict containing stats for all device/pool with more than 1 getwork,
:param old: Include zombie devices that became zombies less
than 'old' seconds ago.
:return: Stats of each device/pool with more than 1 getwork,
ignoring zombie devices.
"""
return await self.send_command("estats")
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""
API 'check' command.
"""Check if the command command exists in BOSMiner.
Returns information about a command:
:param command: The command to check.
:return: Information about a command:
Exists (Y/N) <- the command exists in this version
Access (Y/N) <- you have access to use the command
Parameters:
command: the command to get information about.
"""
return await self.send_command("check", parameters=command)
async def coin(self) -> dict:
"""
API 'coin' command.
"""Get information on the current coin.
Returns information about the current coin being mined:
:return: Information about the current coin being mined:
Hash Method <- the hashing algorithm
Current Block Time <- blocktime as a float, 0 means none
Current Block Hash <- the hash of the current block, blank means none
Current Block Hash <- the hash of the current block, blank
means none
LP <- whether LP is in use on at least 1 pool
Network Difficulty: the current network difficulty
"""
return await self.send_command("coin")
async def lcd(self) -> dict:
"""
API 'lcd' command.
"""Get a general all-in-one status summary of the miner.
Returns a dict containing an all in one status summary of the miner.
:return: An all-in-one status summary of the miner.
"""
return await self.send_command("lcd")
@@ -167,53 +166,43 @@ class BOSMinerAPI(BaseMinerAPI):
# return await self.send_command("removepool", parameters=n)
async def fans(self) -> dict:
"""
API 'fans' command.
"""Get fan data.
Returns a dict containing information on fans and fan speeds.
:return: Data on the fans of the miner.
"""
return await self.send_command("fans")
async def tempctrl(self) -> dict:
"""
API 'tempctrl' command.
"""Get temperature control data.
Returns a dict containing temp control configuration.
:return: Data about the temp control settings of the miner.
"""
return await self.send_command("tempctrl")
async def temps(self) -> dict:
"""
API 'temps' command.
"""Get temperature data.
Returns a dict containing temperature information.
:return: Data on the temps of the miner.
"""
return await self.send_command("temps")
async def tunerstatus(self) -> dict:
"""
API 'tunerstatus' command.
"""Get tuner status data
Returns a dict containing tuning stats.
:return: Data on the status of autotuning.
"""
return await self.send_command("tunerstatus")
async def pause(self) -> dict:
"""
API 'pause' command.
"""Pause mining.
Pauses mining and stops power consumption and waits for resume command.
Returns a dict stating that the miner paused mining.
:return: Confirmation of pausing mining.
"""
return await self.send_command("pause")
async def resume(self) -> dict:
"""
API 'pause' command.
"""Resume mining.
Resumes mining on the miner.
Returns a dict stating that the miner resumed mining.
:return: Confirmation of resuming mining.
"""
return await self.send_command("resume")
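
Since this hunk rewrites the BOSMinerAPI docstrings into `:param:`/`:return:` form, here is a short sketch of the status commands it documents (same assumptions as above: the `API` package is importable and the IP is a placeholder):

```python
import asyncio

from API.bosminer import BOSMinerAPI


async def main():
    api = BOSMinerAPI("192.168.1.69")
    temps = await api.temps()        # board temperature data
    fans = await api.fans()          # fan speeds
    tuner = await api.tunerstatus()  # autotuning status
    print(temps, fans, tuner)


asyncio.run(main())
```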


@@ -4,16 +4,15 @@ import json
import hashlib
import binascii
import base64
import logging
from passlib.handlers.md5_crypt import md5_crypt
from cryptography.hazmat.primitives.ciphers import \
Cipher, algorithms, modes
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from API import BaseMinerAPI, APIError
from settings import WHATSMINER_PWD
### IMPORTANT ###
# you need to change the password of the miners using the Whatsminer
# tool, then you can set them back to admin with this tool, but they
@@ -35,7 +34,7 @@ def _crypt(word: str, salt: str) -> str:
:return: An MD5 hash of the word with the salt.
"""
# compile a standard format for the salt
standard_salt = re.compile('\s*\$(\d+)\$([\w\./]*)\$')
standard_salt = re.compile("\s*\$(\d+)\$([\w\./]*)\$")
# check if the salt matches
match = standard_salt.match(salt)
# if the matching fails, the salt is incorrect
@@ -58,7 +57,7 @@ def _add_to_16(string: str) -> bytes:
length.
"""
while len(string) % 16 != 0:
string += '\0'
string += "\0"
return str.encode(string) # return bytes
@@ -74,20 +73,20 @@ def parse_btminer_priviledge_data(token_data: dict, data: dict):
:return: A decoded dict version of the privileged command output.
"""
# get the encoded data from the dict
enc_data = data['enc']
enc_data = data["enc"]
# get the aes key from the token data
aeskey = hashlib.sha256(
token_data['host_passwd_md5'].encode()
).hexdigest()
aeskey = hashlib.sha256(token_data["host_passwd_md5"].encode()).hexdigest()
# unhexlify the aes key
aeskey = binascii.unhexlify(aeskey.encode())
# create the required decryptor
aes = Cipher(algorithms.AES(aeskey), modes.ECB())
decryptor = aes.decryptor()
# decode the message with the decryptor
ret_msg = json.loads(decryptor.update(
base64.decodebytes(bytes(enc_data, encoding='utf8'))
).rstrip(b'\0').decode("utf8"))
ret_msg = json.loads(
decryptor.update(base64.decodebytes(bytes(enc_data, encoding="utf8")))
.rstrip(b"\0")
.decode("utf8")
)
return ret_msg
@@ -104,11 +103,9 @@ def create_privileged_cmd(token_data: dict, command: dict) -> bytes:
:return: The encrypted privileged command to be sent to the miner.
"""
# add token to command
command['token'] = token_data['host_sign']
command["token"] = token_data["host_sign"]
# encode host_passwd data and get hexdigest
aeskey = hashlib.sha256(
token_data['host_passwd_md5'].encode()
).hexdigest()
aeskey = hashlib.sha256(token_data["host_passwd_md5"].encode()).hexdigest()
# unhexlify the encoded host_passwd
aeskey = binascii.unhexlify(aeskey.encode())
# create a new AES key
@@ -117,18 +114,16 @@ def create_privileged_cmd(token_data: dict, command: dict) -> bytes:
# dump the command to json
api_json_str = json.dumps(command)
# encode the json command with the aes key
api_json_str_enc = base64.encodebytes(
encryptor.update(
_add_to_16(
api_json_str
)
)
).decode("utf-8").replace("\n", "")
api_json_str_enc = (
base64.encodebytes(encryptor.update(_add_to_16(api_json_str)))
.decode("utf-8")
.replace("\n", "")
)
# label the data as being encoded
data_enc = {'enc': 1, 'data': api_json_str_enc}
data_enc = {"enc": 1, "data": api_json_str_enc}
# dump the labeled data to json
api_packet_str = json.dumps(data_enc)
return api_packet_str.encode('utf-8')
return api_packet_str.encode("utf-8")
class BTMinerAPI(BaseMinerAPI):
@@ -157,16 +152,18 @@ class BTMinerAPI(BaseMinerAPI):
:param port: The port to reference the API on. Default is 4028.
:param pwd: The admin password of the miner. Default is admin.
"""
def __init__(self, ip, port=4028, pwd: str = WHATSMINER_PWD):
super().__init__(ip, port)
self.admin_pwd = pwd
self.current_token = None
async def send_command(self,
command: str | bytes,
parameters: str or int or bool = None,
ignore_errors: bool = False
) -> dict:
async def send_command(
self,
command: str or bytes,
parameters: str or int or bool = None,
ignore_errors: bool = False,
) -> dict:
"""Send a command to the miner API.
Send a command using an asynchronous connection, load the data,
@@ -187,10 +184,7 @@ class BTMinerAPI(BaseMinerAPI):
command = json.dumps({"command": command}).encode("utf-8")
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(
str(self.ip),
self.port
)
reader, writer = await asyncio.open_connection(str(self.ip), self.port)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
@@ -212,7 +206,7 @@ class BTMinerAPI(BaseMinerAPI):
break
data += d
except Exception as e:
print(e)
logging.info(f"{str(self.ip)}: {e}")
data = self.load_api_data(data)
@@ -221,15 +215,12 @@ class BTMinerAPI(BaseMinerAPI):
await writer.wait_closed()
# check if the returned data is encoded
if 'enc' in data.keys():
if "enc" in data.keys():
# try to parse the encoded data
try:
data = parse_btminer_priviledge_data(
self.current_token,
data
)
data = parse_btminer_priviledge_data(self.current_token, data)
except Exception as e:
print(e)
logging.info(f"{str(self.ip)}: {e}")
if not ignore_errors:
# if it fails to validate, it is likely an error
@@ -250,25 +241,24 @@ class BTMinerAPI(BaseMinerAPI):
data = await self.send_command("get_token")
# encrypt the admin password with the salt
pwd = _crypt(self.admin_pwd, "$1$" + data["Msg"]["salt"] + '$')
pwd = pwd.split('$')
pwd = _crypt(self.admin_pwd, "$1$" + data["Msg"]["salt"] + "$")
pwd = pwd.split("$")
# take the 4th item from the pwd split
host_passwd_md5 = pwd[3]
# encrypt the pwd with the time and new salt
tmp = _crypt(pwd[3] + data["Msg"]["time"],
"$1$" + data["Msg"]["newsalt"] + '$'
)
tmp = tmp.split('$')
tmp = _crypt(pwd[3] + data["Msg"]["time"], "$1$" + data["Msg"]["newsalt"] + "$")
tmp = tmp.split("$")
# take the 4th item from the encrypted pwd split
host_sign = tmp[3]
# set the current token
self.current_token = {'host_sign': host_sign,
'host_passwd_md5': host_passwd_md5
}
self.current_token = {
"host_sign": host_sign,
"host_passwd_md5": host_passwd_md5,
}
return self.current_token
#### PRIVILEGED COMMANDS ####
@@ -276,19 +266,18 @@ class BTMinerAPI(BaseMinerAPI):
# how to configure the Whatsminer API to
# use these commands.
async def update_pools(self,
pool_1: str,
worker_1: str,
passwd_1: str,
pool_2: str = None,
worker_2: str = None,
passwd_2: str = None,
pool_3: str = None,
worker_3: str = None,
passwd_3: str = None
):
async def update_pools(
self,
pool_1: str,
worker_1: str,
passwd_1: str,
pool_2: str = None,
worker_2: str = None,
passwd_2: str = None,
pool_3: str = None,
worker_3: str = None,
passwd_3: str = None,
):
"""Update the pools of the miner using the API.
Update the pools of the miner using the API, only works after
@@ -314,15 +303,12 @@ class BTMinerAPI(BaseMinerAPI):
elif pool_2 and pool_3:
command = {
"cmd": "update_pools",
"pool1": pool_1,
"worker1": worker_1,
"passwd1": passwd_1,
"pool2": pool_2,
"worker2": worker_2,
"passwd2": passwd_2,
"pool3": pool_3,
"worker3": worker_3,
"passwd3": passwd_3,
@@ -333,10 +319,9 @@ class BTMinerAPI(BaseMinerAPI):
"pool1": pool_1,
"worker1": worker_1,
"passwd1": passwd_1,
"pool2": pool_2,
"worker2": worker_2,
"passwd2": passwd_2
"passwd2": passwd_2,
}
else:
command = {
@@ -406,12 +391,13 @@ class BTMinerAPI(BaseMinerAPI):
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
async def set_led(self,
color: str = "red",
period: int = 2000,
duration: int = 1000,
start: int = 0
):
async def set_led(
self,
color: str = "red",
period: int = 2000,
duration: int = 1000,
start: int = 0,
):
"""Set the LED on the miner using the API.
Set the LED on the miner using the API, only works after
@@ -423,12 +409,13 @@ class BTMinerAPI(BaseMinerAPI):
:param start: LED on time offset in the cycle in ms.
:return: A reply informing of the status of setting the LED.
"""
command = {"cmd": "set_led",
"color": color,
"period": period,
"duration": duration,
"start": start
}
command = {
"cmd": "set_led",
"color": color,
"period": period,
"duration": duration,
"start": start,
}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
@@ -486,10 +473,11 @@ class BTMinerAPI(BaseMinerAPI):
password.
"""
# check if password length is greater than 8 bytes
if len(new_pwd.encode('utf-8')) > 8:
if len(new_pwd.encode("utf-8")) > 8:
return APIError(
f"New password too long, the max length is 8. "
f"Password size: {len(new_pwd.encode('utf-8'))}")
f"Password size: {len(new_pwd.encode('utf-8'))}"
)
command = {"cmd": "update_pwd", "old": old_pwd, "new": new_pwd}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -507,9 +495,11 @@ class BTMinerAPI(BaseMinerAPI):
frequency.
"""
if not -10 < percent < 100:
return APIError(f"Frequency % is outside of the allowed "
f"range. Please set a % between -10 and "
f"100")
return APIError(
f"Frequency % is outside of the allowed "
f"range. Please set a % between -10 and "
f"100"
)
command = {"cmd": "set_target_freq", "percent": str(percent)}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -596,9 +586,11 @@ class BTMinerAPI(BaseMinerAPI):
"""
if not 0 < percent < 100:
return APIError(f"Power PCT % is outside of the allowed "
f"range. Please set a % between 0 and "
f"100")
return APIError(
f"Power PCT % is outside of the allowed "
f"range. Please set a % between 0 and "
f"100"
)
command = {"cmd": "set_power_pct", "percent": str(percent)}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -618,12 +610,9 @@ class BTMinerAPI(BaseMinerAPI):
:return: A reply informing of the status of pre power on.
"""
if not msg == \
"wait for adjust temp" or \
"adjust complete" or \
"adjust continue":
if not msg == "wait for adjust temp" or "adjust complete" or "adjust continue":
return APIError(
'Message is incorrect, please choose one of '
"Message is incorrect, please choose one of "
'["wait for adjust temp", '
'"adjust complete", '
'"adjust continue"]'
@@ -632,10 +621,7 @@ class BTMinerAPI(BaseMinerAPI):
complete = "true"
else:
complete = "false"
command = {"cmd": "pre_power_on",
"complete": complete,
"msg": msg
}
command = {"cmd": "pre_power_on", "complete": complete, "msg": msg}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
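
The BTMiner changes above reformat the privileged-command helpers (`get_token()`, `create_privileged_cmd()`, `parse_btminer_priviledge_data()`). A sketch of how a privileged call such as `set_led()` flows through them, under the assumptions that the module path is `API.btminer`, the IP and password are placeholders, and the miner's password has already been changed with the Whatsminer tool as the comment in the diff requires:

```python
import asyncio

from API.btminer import BTMinerAPI  # assumed module path


async def main():
    api = BTMinerAPI("192.168.1.69", pwd="admin")
    # set_led() fetches a token via get_token(), wraps the command with
    # create_privileged_cmd(), and sends the AES-encrypted payload
    reply = await api.set_led(color="red", period=2000, duration=1000, start=0)
    print(reply)


asyncio.run(main())
```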


@@ -2,14 +2,14 @@ from API import BaseMinerAPI
class CGMinerAPI(BaseMinerAPI):
"""An abstraction of the BMMiner API.
"""An abstraction of the CGMiner API.
Each method corresponds to an API command in BMMiner.
Each method corresponds to an API command in GGMiner.
CGMiner API documentation:
https://github.com/ckolivas/cgminer/blob/master/API-README
This class abstracts use of the BMMiner API, as well as the
This class abstracts use of the CGMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
@@ -18,6 +18,7 @@ class CGMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
@@ -111,11 +112,7 @@ class CGMinerAPI(BaseMinerAPI):
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
@@ -124,11 +121,9 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
return await self.send_command(
"addpool", parameters=f"{url}, " f"{username}, " f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
@@ -138,8 +133,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
@@ -149,10 +143,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
return await self.send_command("poolquota", parameters=f"{n}, " f"{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
@@ -185,7 +176,7 @@ class CGMinerAPI(BaseMinerAPI):
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit BMMiner.
"""Quit CGMiner.
:return: A single "BYE" before CGMiner quits.
"""
@@ -270,7 +261,7 @@ class CGMinerAPI(BaseMinerAPI):
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in BMMiner.
"""Check if the command command exists in CGMiner.
:param command: The command to check.
@@ -288,9 +279,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
@@ -333,10 +322,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
return await self.send_command("setconfig", parameters=f"{name}, " f"{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
@@ -364,9 +350,11 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting PGA n with opt[,val].
"""
if val:
return await self.send_command("pgaset", parameters=f"{n}, {opt}, {val}")
return await self.send_command(
"pgaset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("pgaset", parameters=f"{n}, {opt}")
return await self.send_command("pgaset", parameters=f"{n}, " f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
@@ -381,7 +369,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: the STATUS section with info on the zero and optional
summary.
"""
return await self.send_command("zero", parameters=f"{which}, {summary}")
return await self.send_command("zero", parameters=f"{which}, " f"{summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
@@ -482,9 +470,11 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting option opt to value val.
"""
if val:
return await self.send_command("ascset", parameters=f"{n}, {opt}, {val}")
return await self.send_command(
"ascset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("ascset", parameters=f"{n}, {opt}")
return await self.send_command("ascset", parameters=f"{n}, " f"{opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.


@@ -2,6 +2,13 @@ from API import BaseMinerAPI
class UnknownAPI(BaseMinerAPI):
"""An abstraction of an API for a miner which is unknown.
This class is designed to try to be a intersection of as many miner APIs
and API commands as possible (API ⋂ API), to ensure that it can be used
with as many APIs as possible.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
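
UnknownAPI abstracts the API of a miner whose type could not be identified, exposing only commands common to the supported APIs. A brief sketch of that fallback (assumed module path `API.unknown`; placeholder IP):

```python
import asyncio

from API.unknown import UnknownAPI  # assumed module path


async def main():
    api = UnknownAPI("192.168.1.69")
    # get_commands() lists the commands this API class exposes
    print(api.get_commands())
    print(await api.send_command("summary"))


asyncio.run(main())
```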

Dockerfile (new file, 13 additions)

@@ -0,0 +1,13 @@
FROM python:3.10-slim-buster
EXPOSE 80
WORKDIR /minerInterface-web_monitor
COPY tools/web_monitor/requirements.txt .
RUN pip install --no-cache-dir --upgrade -r requirements.txt
COPY . .
CMD ["uvicorn", "tools.web_monitor.app:app", "--host", "0.0.0.0", "--port", "80"]


@@ -48,7 +48,7 @@ A basic script to find all miners on the network and get the hashrate from them
```python
import asyncio
from network import MinerNetwork
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_hashrate():
@@ -84,7 +84,7 @@ You can also create your own miner without scanning if you know the IP:
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_hashrate(ip: str):
@@ -103,7 +103,8 @@ async def get_miner_hashrate(ip: str):
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate(str("192.168.1.69")))
asyncio.new_event_loop().run_until_complete(
get_miner_hashrate(str("192.168.1.69")))
```
<br>
@@ -112,7 +113,7 @@ Or generate a miner directly without the factory:
```python
import asyncio
from miners.bosminer import BOSMiner
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_hashrate(ip: str):
@@ -127,7 +128,8 @@ async def get_miner_hashrate(ip: str):
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate(str("192.168.1.69")))
asyncio.new_event_loop().run_until_complete(
get_miner_hashrate(str("192.168.1.69")))
```
<br>
@@ -136,7 +138,7 @@ Or finally, just get the API directly:
```python
import asyncio
from API.bosminer import BOSMinerAPI
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_hashrate(ip: str):
@@ -152,7 +154,8 @@ async def get_miner_hashrate(ip: str):
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate(str("192.168.1.69")))
asyncio.new_event_loop().run_until_complete(
get_miner_hashrate(str("192.168.1.69")))
```
@@ -165,7 +168,7 @@ Now that you know that, lets move on to some common API functions that you might
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_pool_data(ip: str):
@@ -189,7 +192,8 @@ async def get_miner_pool_data(ip: str):
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_pool_data(str("192.168.1.69")))
asyncio.new_event_loop().run_until_complete(
get_miner_pool_data(str("192.168.1.69")))
```
* Getting temperature data:
@@ -204,7 +208,7 @@ A pretty good example of really trying to make this robust is in ```cfg_util.fun
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_temperature_data(ip: str):
@@ -223,7 +227,8 @@ async def get_miner_temperature_data(ip: str):
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_temperature_data(str("192.168.1.69")))
asyncio.new_event_loop().run_until_complete(
get_miner_temperature_data(str("192.168.1.69")))
```
* Getting power data:
@@ -234,7 +239,7 @@ How about data on the power usage of the miner? This one only works for Whatsmi
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_power_data(ip: str):
@@ -249,7 +254,8 @@ async def get_miner_power_data(ip: str):
# send the command
tunerstatus = await miner.api.tunerstatus()
# parse the return
data = await safe_parse_api_data(tunerstatus, 'TUNERSTATUS', 0, "PowerLimit")
data = await safe_parse_api_data(tunerstatus, 'TUNERSTATUS', 0,
"PowerLimit")
else:
# send the command
# whatsminers have the power info in summary
@@ -261,7 +267,8 @@ async def get_miner_power_data(ip: str):
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_power_data(str("192.168.1.69")))
asyncio.new_event_loop().run_until_complete(
get_miner_power_data(str("192.168.1.69")))
```
* Multicommands:
@@ -273,7 +280,7 @@ How about we get the current pool user and hashrate in 1 command?
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.func.parse_data import safe_parse_api_data
async def get_miner_hashrate_and_pool(ip: str):
@@ -286,15 +293,16 @@ async def get_miner_hashrate_and_pool(ip: str):
# Get the API data
api_data = await miner.api.multicommand("pools", "summary")
if "pools" in api_data.keys():
user = await safe_parse_api_data(api_data, "pools", 0, "POOLS", 0, "User")
user = await safe_parse_api_data(api_data, "pools", 0, "POOLS", 0,
"User")
print(user)
if "summary" in api_data.keys():
hashrate = await safe_parse_api_data(api_data, "summary", 0, "SUMMARY", 0, "MHS av")
hashrate = await safe_parse_api_data(api_data, "summary", 0, "SUMMARY",
0, "MHS av")
print(hashrate)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate_and_pool(str("192.168.1.9")))
asyncio.new_event_loop().run_until_complete(
get_miner_hashrate_and_pool(str("192.168.1.9")))
```
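All of these snippets lean on `safe_parse_api_data` to dig values out of the nested API responses without blowing up on missing keys. The real helper lives in `tools/cfg_util/func/parse_data.py`; as a rough sketch of the idea only (the actual implementation may log or return a default instead), it amounts to an awaitable lookup that walks keys and indices and swallows failures:
```python
# Sketch only -- see tools/cfg_util/func/parse_data.py for the real helper.
async def safe_parse_api_data(response: dict, *path):
    """Walk nested keys/indices in an API response; return None if any step is missing."""
    data = response
    try:
        for step in path:
            data = data[step]
        return data
    except (KeyError, IndexError, TypeError):
        return None
```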

View File

@@ -1,4 +1,4 @@
from tools.bad_board_util import main
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()

View File

@@ -75,4 +75,4 @@ SAMPLE CONFIG
"shutdown_duration": 3.0, # -> (default = 3.0, float, (bos: power_scaling.shutdown_duration))
}
}
"""
"""

View File

@@ -3,12 +3,12 @@ import yaml
import toml
async def bos_config_convert(config: dict):
def bos_config_convert(config: dict):
out_config = {}
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["generator"] = "upstream_config_util"
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temp_control":
out_config["temperature"] = {}
@@ -47,20 +47,28 @@ async def bos_config_convert(config: dict):
out_config["pool_groups"][idx]["pools"] = []
out_config["pool_groups"][idx] = {}
if "name" in config[opt][idx].keys():
out_config["pool_groups"][idx]["group_name"] = config[opt][idx]["name"]
out_config["pool_groups"][idx]["group_name"] = config[opt][idx][
"name"
]
else:
out_config["pool_groups"][idx]["group_name"] = f"group_{idx}"
if "quota" in config[opt][idx].keys():
out_config["pool_groups"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["pool_groups"][idx]["quota"] = 1
out_config["pool_groups"][idx]["pools"] = [{} for _item in range(len(config[opt][idx]["pool"]))]
out_config["pool_groups"][idx]["pools"] = [
{} for _item in range(len(config[opt][idx]["pool"]))
]
for pool_idx in range(len(config[opt][idx]["pool"])):
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[opt][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx]["username"] = config[opt][idx]["pool"][pool_idx][
"user"]
out_config["pool_groups"][idx]["pools"][pool_idx]["password"] = config[opt][idx]["pool"][pool_idx][
"password"]
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[
opt
][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx][
"username"
] = config[opt][idx]["pool"][pool_idx]["user"]
out_config["pool_groups"][idx]["pools"][pool_idx][
"password"
] = config[opt][idx]["pool"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
@@ -82,27 +90,33 @@ async def bos_config_convert(config: dict):
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
out_config["power_scaling"]["min_psu_power_limit"] = config[opt][
"min_psu_power_limit"
]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
out_config["power_scaling"]["shutdown_enabled"] = config[opt][
"shutdown_enabled"
]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
out_config["power_scaling"]["shutdown_duration"] = config[opt][
"shutdown_duration"
]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return yaml.dump(out_config, sort_keys=False)
async def general_config_convert_bos(yaml_config):
def general_config_convert_bos(yaml_config, user_suffix: str = None):
config = yaml.load(yaml_config, Loader=yaml.SafeLoader)
out_config = {}
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["generator"] = "upstream_config_util"
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temperature":
out_config["temp_control"] = {}
@@ -148,11 +162,24 @@ async def general_config_convert_bos(yaml_config):
out_config["group"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["group"][idx]["quota"] = 1
out_config["group"][idx]["pool"] = [{} for _item in range(len(config[opt][idx]["pools"]))]
out_config["group"][idx]["pool"] = [
{} for _item in range(len(config[opt][idx]["pools"]))
]
for pool_idx in range(len(config[opt][idx]["pools"])):
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][idx]["pools"][pool_idx]["url"]
out_config["group"][idx]["pool"][pool_idx]["user"] = config[opt][idx]["pools"][pool_idx]["username"]
out_config["group"][idx]["pool"][pool_idx]["password"] = config[opt][idx]["pools"][pool_idx]["password"]
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][
idx
]["pools"][pool_idx]["url"]
username = config[opt][idx]["pools"][pool_idx]["username"]
if user_suffix:
if "." in username:
username = f"{username}x{user_suffix}"
else:
username = f"{username}.{user_suffix}"
out_config["group"][idx]["pool"][pool_idx]["user"] = username
out_config["group"][idx]["pool"][pool_idx]["password"] = config[
opt
][idx]["pools"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
@@ -174,15 +201,21 @@ async def general_config_convert_bos(yaml_config):
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
out_config["power_scaling"]["min_psu_power_limit"] = config[opt][
"min_psu_power_limit"
]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
out_config["power_scaling"]["shutdown_enabled"] = config[opt][
"shutdown_enabled"
]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
out_config["power_scaling"]["shutdown_duration"] = config[opt][
"shutdown_duration"
]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return toml.dumps(out_config)
return out_config
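Note that both converters are now plain synchronous functions and `general_config_convert_bos` returns a dict instead of a TOML string, so callers (see the `send_config` change further down) serialize it themselves with `toml.dumps`. The new `user_suffix` argument appends an identifier to every pool username, joining with `x` when the name already carries a `.worker` part and with `.` otherwise; a tiny illustration of just that rule, with made-up values:
```python
def apply_suffix(username: str, user_suffix: str) -> str:
    # Mirrors the suffix logic in general_config_convert_bos.
    if "." in username:
        return f"{username}x{user_suffix}"
    return f"{username}.{user_suffix}"

print(apply_suffix("account.worker1", "69"))  # -> account.worker1x69
print(apply_suffix("account", "69"))          # -> account.69
```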

View File

@@ -1,4 +1,4 @@
from tools.cfg_util import main
if __name__ == '__main__':
if __name__ == "__main__":
main()

logger/__init__.py Normal file
View File

@@ -0,0 +1,18 @@
import logging
from settings import DEBUG
logging.basicConfig(
# filename="logfile.txt",
# filemode="a",
format="[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
logger = logging.getLogger()
if DEBUG:
logger.setLevel(logging.DEBUG)
logging.getLogger("asyncssh").setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logging.getLogger("asyncssh").setLevel(logging.WARNING)

View File

@@ -19,11 +19,26 @@ version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\"
},
},
executables=[Executable("board_util.py", base=base, icon="icon.ico", target_name="UpstreamBoardUtil.exe")]
)
setup(
name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\",
"include_files": [
os.path.join(os.getcwd(), "settings/settings.toml"),
],
"include_msvcr": True,
"add_to_path": True,
},
},
executables=[
Executable(
"board_util.py",
base=base,
icon="icon.ico",
target_name="UpstreamBoardUtil.exe",
)
],
)

View File

@@ -19,13 +19,25 @@ version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md")],
},
},
executables=[Executable("config_tool.py", base=base, icon="icon.ico", target_name="UpstreamCFGUtil.exe")]
)
setup(
name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [
os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md"),
],
},
},
executables=[
Executable(
"config_tool.py",
base=base,
icon="icon.ico",
target_name="UpstreamCFGUtil.exe",
)
],
)

View File

@@ -4,14 +4,69 @@ from API.cgminer import CGMinerAPI
from API.btminer import BTMinerAPI
from API.unknown import UnknownAPI
import ipaddress
import asyncssh
import logging
class BaseMiner:
def __init__(self, ip: str, api: BMMinerAPI | BOSMinerAPI | CGMinerAPI | BTMinerAPI | UnknownAPI) -> None:
def __init__(
self,
ip: str,
api: BMMinerAPI or BOSMinerAPI or CGMinerAPI or BTMinerAPI or UnknownAPI,
) -> None:
self.ip = ipaddress.ip_address(ip)
self.uname = None
self.pwd = None
self.api = api
self.api_type = None
self.model = None
self.light = None
self.hostname = None
self.nominal_chips = 1
async def _get_ssh_connection(self) -> asyncssh.connect:
"""Create a new asyncssh connection"""
try:
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=["ssh-rsa"],
)
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=["ssh-rsa"],
)
return conn
except Exception as e:
# logging.warning(f"{self} raised an exception: {e}")
raise e
except OSError:
logging.warning(f"Connection refused: {self}")
return None
except Exception as e:
# logging.warning(f"{self} raised an exception: {e}")
raise e
async def fault_light_on(self) -> bool:
return False
async def fault_light_off(self) -> bool:
return False
async def send_file(self, src, dest):
async with (await self._get_ssh_connection()) as conn:
await asyncssh.scp(src, (conn, dest))
async def check_light(self):
return self.light
async def get_board_info(self):
return None
@@ -26,14 +81,27 @@ class BaseMiner:
return None
async def reboot(self):
return None
return False
async def restart_backend(self):
return False
async def send_config(self, *args, **kwargs):
return None
async def send_config(self, yaml_config):
return None
async def get_data(self):
data = {
"IP": str(self.ip),
"Model": "Unknown",
"Hostname": "Unknown",
"Hashrate": 0,
"Temperature": 0,
"Pool User": "Unknown",
"Wattage": 0,
"Split": "0",
"Pool 1": "Unknown",
"Pool 1 User": "Unknown",
"Pool 2": "",
"Pool 2 User": "",
}
return data

View File

@@ -1,4 +1,8 @@
import logging
import toml
from miners.bosminer import BOSMiner
from config.bos import general_config_convert_bos
class BOSMinerS9(BOSMiner):

View File

@@ -13,7 +13,7 @@ class HiveonT9(BMMiner):
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
board_stats = await self.api.stats()
stats = board_stats['STATS'][1]
stats = board_stats["STATS"][1]
boards = {}
board_chains = {0: [2, 9, 10], 1: [3, 11, 12], 2: [4, 13, 14]}
for idx, board in enumerate(board_chains):
@@ -25,12 +25,14 @@ class HiveonT9(BMMiner):
nominal = False
else:
nominal = True
boards[board].append({
"chain": chain,
"chip_count": count,
"chip_status": chips,
"nominal": nominal
})
boards[board].append(
{
"chain": chain,
"chip_count": count,
"chip_status": chips,
"nominal": nominal,
}
)
return boards
async def get_bad_boards(self) -> dict:
@@ -43,4 +45,4 @@ class HiveonT9(BMMiner):
if board not in bad_boards.keys():
bad_boards[board] = []
bad_boards[board].append(chain)
return bad_boards
return bad_boards

View File

@@ -1,4 +1,5 @@
from miners.bmminer import BMMiner
import logging
class BMMinerX19(BMMiner):
@@ -10,9 +11,12 @@ class BMMinerX19(BMMiner):
async def get_model(self):
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.version()
if version_data:
self.model = version_data["VERSION"][0]["Type"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None

View File

@@ -0,0 +1,11 @@
from miners.bosminer import BOSMiner
class BOSMinerX19(BOSMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.api_type = "BOSMiner"
self.nominal_chips = 114
def __repr__(self) -> str:
return f"BOSminerX19: {str(self.ip)}"

View File

@@ -1,4 +1,5 @@
from miners.cgminer import CGMiner
import logging
class CGMinerX19(CGMiner):
@@ -11,9 +12,12 @@ class CGMinerX19(CGMiner):
async def get_model(self):
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.version()
if version_data:
self.model = version_data["VERSION"][0]["Type"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None

View File

@@ -0,0 +1,61 @@
from miners.cgminer import CGMiner
import logging
class CGMinerAvalon10(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "Avalon 10"
self.api_type = "CGMiner"
self.nominal_chips = 114
async def get_hostname(self):
try:
devdetails = await self.api.devdetails()
if devdetails:
if len(devdetails.get("DEVDETAILS")) > 0:
if "Name" in devdetails["DEVDETAILS"][0]:
host = devdetails["DEVDETAILS"][0]["Name"]
logging.debug(f"Found hostname for {self.ip}: {host}")
return host
except Exception as e:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def get_board_info(self):
boards_chips = 0
logging.debug(f"{self}: Getting board info.")
stats = await self.api.stats()
if not stats.get("STATS") and not stats.get("STATS") == []:
print("stats error", stats)
return {0: [], 1: [], 2: []}
stats = stats["STATS"][0]
for key in stats.keys():
if key.startswith("MM") and not stats[key] == 1:
data = stats[key]
for line in data.split("]"):
if "TA[" in line:
total_chips = line.replace("TA[", "")
boards_chips = round(int(total_chips)/3)
boards = {}
for board in [0, 1, 2]:
if not boards_chips == self.nominal_chips:
nominal = False
else:
nominal = True
boards[board] = []
boards[board].append({
"chain": board,
"chip_count": boards_chips,
"chip_status": "o" * boards_chips,
"nominal": nominal,
})
return boards

View File

@@ -2,168 +2,170 @@ from miners.cgminer import CGMiner
import re
class CGMinerAvalon(CGMiner):
class CGMinerAvalon8(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "Avalon"
self.model = "Avalon 8"
self.api_type = "CGMiner"
self.pattern = re.compile(r'Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s'
'DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s'
'Elapsed\[(?P<Elapsed>[-0-9]+)\]\s'
'MW\[(?P<MW>[-\s0-9]+)\]\s'
'LW\[(?P<LW>[-0-9]+)\]\s'
'MH\[(?P<MH>[-\s0-9]+)\]\s'
'HW\[(?P<HW>[-0-9]+)\]\s'
'Temp\[(?P<Temp>[0-9]+)\]\s'
'TMax\[(?P<TMax>[0-9]+)\]\s'
'Fan\[(?P<Fan>[0-9]+)\]\s'
'FanR\[(?P<FanR>[0-9]+)%\]\s'
'Vi\[(?P<Vi>[-\s0-9]+)\]\s'
'Vo\[(?P<Vo>[-\s0-9]+)\]\s'
'('
'PLL0\[(?P<PLL0>[-\s0-9]+)\]\s'
'PLL1\[(?P<PLL1>[-\s0-9]+)\]\s'
'PLL2\[(?P<PLL2>[-\s0-9]+)\]\s'
'PLL3\[(?P<PLL3>[-\s0-9]+)\]\s'
')?'
'GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s'
'WU\[(?P<WU>[-.0-9]+)\]\s'
'Freq\[(?P<Freq>[.0-9]+)\]\s'
'PG\[(?P<PG>[0-9]+)\]\s'
'Led\[(?P<LED>0|1)\]\s'
'MW0\[(?P<MW0>[0-9\s]+)\]\s'
'MW1\[(?P<MW1>[0-9\s]+)\]\s'
'MW2\[(?P<MW2>[0-9\s]+)\]\s'
'MW3\[(?P<MW3>[0-9\s]+)\]\s'
'TA\[(?P<TA>[0-9]+)\]\s'
'ECHU\[(?P<ECHU>[0-9\s]+)\]\s'
'ECMM\[(?P<ECMM>[0-9]+)\]\s.*'
'FAC0\[(?P<FAC0>[-0-9]+)\]\s'
'OC\[(?P<OC>[0-9]+)\]\s'
'SF0\[(?P<SF0>[-\s0-9]+)\]\s'
'SF1\[(?P<SF1>[-\s0-9]+)\]\s'
'SF2\[(?P<SF2>[-\s0-9]+)\]\s'
'SF3\[(?P<SF3>[-\s0-9]+)\]\s'
'PMUV\[(?P<PMUV>[-\s\S*]+)\]\s'
'PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s'
'PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s'
'PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s'
'PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s'
'PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s'
'PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s'
'PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s'
'PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s'
'PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s'
'PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s'
'PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s'
'PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s'
'PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s'
'PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s'
'PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s'
'PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s'
'PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s'
'PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s'
'PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s'
'PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s'
'PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s'
'PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s'
'PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s'
'PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s'
'PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s'
'PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s'
'PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s'
'PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s'
'PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s'
'PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s'
'PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s'
'PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s'
'PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s'
'PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s'
'PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s'
'PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s'
'PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s'
'PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s'
'PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s'
'PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s'
'PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s'
'PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s'
'PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s'
'PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s'
'PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s'
'PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s'
'PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s'
'PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s'
'PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s'
'PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s'
'PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s'
'PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s'
'PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s'
'PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s'
'PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s'
'PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s'
'PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s'
'PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s'
'PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s'
'PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s'
'PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s'
'PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s'
'PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s'
'PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s'
'PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s'
'PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s'
'PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s'
'PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s'
'PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s'
'PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s'
'PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s'
'PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s'
'PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s'
'PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s'
'PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s'
'PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s'
'PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s'
'PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s'
'PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s'
'PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s'
'PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s'
'PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s'
'PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s'
'PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s'
'PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s'
'PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s'
'PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s'
'PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s'
'PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s'
'PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s'
'PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s'
'PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s'
'PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s'
'PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s'
'PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s'
'PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s'
'PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s'
'PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s'
'PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s'
'PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s'
'PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s'
'PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s'
'PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s'
'PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s'
'PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s'
'PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s'
'PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s'
'PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s'
'FM\[(?P<FM>[0-9]+)\]\s'
'CRC\[(?P<CRC>[0-9\s]+)\]', re.X
)
self.pattern = re.compile(
r"Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s"
"DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s"
"Elapsed\[(?P<Elapsed>[-0-9]+)\]\s"
"MW\[(?P<MW>[-\s0-9]+)\]\s"
"LW\[(?P<LW>[-0-9]+)\]\s"
"MH\[(?P<MH>[-\s0-9]+)\]\s"
"HW\[(?P<HW>[-0-9]+)\]\s"
"Temp\[(?P<Temp>[0-9]+)\]\s"
"TMax\[(?P<TMax>[0-9]+)\]\s"
"Fan\[(?P<Fan>[0-9]+)\]\s"
"FanR\[(?P<FanR>[0-9]+)%\]\s"
"Vi\[(?P<Vi>[-\s0-9]+)\]\s"
"Vo\[(?P<Vo>[-\s0-9]+)\]\s"
"("
"PLL0\[(?P<PLL0>[-\s0-9]+)\]\s"
"PLL1\[(?P<PLL1>[-\s0-9]+)\]\s"
"PLL2\[(?P<PLL2>[-\s0-9]+)\]\s"
"PLL3\[(?P<PLL3>[-\s0-9]+)\]\s"
")?"
"GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s"
"WU\[(?P<WU>[-.0-9]+)\]\s"
"Freq\[(?P<Freq>[.0-9]+)\]\s"
"PG\[(?P<PG>[0-9]+)\]\s"
"Led\[(?P<LED>0|1)\]\s"
"MW0\[(?P<MW0>[0-9\s]+)\]\s"
"MW1\[(?P<MW1>[0-9\s]+)\]\s"
"MW2\[(?P<MW2>[0-9\s]+)\]\s"
"MW3\[(?P<MW3>[0-9\s]+)\]\s"
"TA\[(?P<TA>[0-9]+)\]\s"
"ECHU\[(?P<ECHU>[0-9\s]+)\]\s"
"ECMM\[(?P<ECMM>[0-9]+)\]\s.*"
"FAC0\[(?P<FAC0>[-0-9]+)\]\s"
"OC\[(?P<OC>[0-9]+)\]\s"
"SF0\[(?P<SF0>[-\s0-9]+)\]\s"
"SF1\[(?P<SF1>[-\s0-9]+)\]\s"
"SF2\[(?P<SF2>[-\s0-9]+)\]\s"
"SF3\[(?P<SF3>[-\s0-9]+)\]\s"
"PMUV\[(?P<PMUV>[-\s\S*]+)\]\s"
"PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s"
"PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s"
"PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s"
"PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s"
"PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s"
"PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s"
"PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s"
"PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s"
"PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s"
"PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s"
"PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s"
"PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s"
"PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s"
"PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s"
"PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s"
"PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s"
"PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s"
"PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s"
"PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s"
"PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s"
"PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s"
"PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s"
"PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s"
"PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s"
"PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s"
"PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s"
"PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s"
"PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s"
"PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s"
"PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s"
"PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s"
"PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s"
"PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s"
"PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s"
"PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s"
"PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s"
"PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s"
"PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s"
"PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s"
"PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s"
"PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s"
"PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s"
"PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s"
"PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s"
"PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s"
"PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s"
"PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s"
"PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s"
"PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s"
"PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s"
"PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s"
"PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s"
"PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s"
"PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s"
"PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s"
"PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s"
"PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s"
"PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s"
"PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s"
"PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s"
"PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s"
"PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s"
"PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s"
"PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s"
"PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s"
"PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s"
"PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s"
"PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s"
"PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s"
"PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s"
"PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s"
"PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s"
"PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s"
"PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s"
"PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s"
"PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s"
"PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s"
"PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s"
"PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s"
"PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s"
"PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s"
"PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s"
"PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s"
"PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s"
"PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s"
"PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s"
"PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s"
"PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s"
"PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s"
"PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s"
"PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s"
"PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s"
"PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s"
"PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s"
"PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s"
"PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s"
"PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s"
"PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s"
"PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s"
"PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s"
"PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s"
"PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s"
"PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s"
"PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s"
"PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s"
"PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s"
"PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s"
"PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s"
"FM\[(?P<FM>[0-9]+)\]\s"
"CRC\[(?P<CRC>[0-9\s]+)\]",
re.X,
)
def __repr__(self) -> str:
return f"CGMinerAvalon: {str(self.ip)}"
return f"CGMinerAvalon8: {str(self.ip)}"
def parse_estats(self, estats):
for estat in estats:
for key in estat:
if key[:5] == 'MM ID':
if key[:5] == "MM ID":
self._parse_estat(estat, key)
def _parse_estat(self, estat, key):

View File

@@ -1,6 +1,7 @@
from API.bmminer import BMMinerAPI
from miners import BaseMiner
import asyncssh
import logging
from settings import MINER_FACTORY_GET_VERSION_RETRIES as DATA_RETRIES
class BMMiner(BaseMiner):
@@ -9,66 +10,222 @@ class BMMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
def __repr__(self) -> str:
return f"BMMiner: {str(self.ip)}"
async def get_model(self):
async def get_model(self) -> str or None:
"""Get miner model.
:return: Miner model or None.
"""
# check if model is cached
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
# get devdetails data
version_data = await self.api.devdetails()
# if we get data back, parse it for model
if version_data:
# handle Antminer BMMiner as a base
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
# if we don't get devdetails, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_hostname(self) -> str:
"""Get miner hostname.
:return: The hostname of the miner as a string or "?"
"""
if self.hostname:
return self.hostname
try:
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# if we get the connection, check hostname
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
# get output of the hostname file
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
# return hostname data
logging.debug(f"Found hostname for {self.ip}: {host}")
self.hostname = host
return self.hostname
else:
# return ? if we fail to get hostname with no ssh connection
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception:
except Exception as e:
# return ? if we fail to get hostname with an exception
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def _get_ssh_connection(self) -> asyncssh.connect:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
return conn
except Exception as e:
print(e)
except OSError:
print(str(self.ip) + ": Connection refused.")
return None
async def send_ssh_command(self, cmd: str) -> str or None:
"""Send a command to the miner over ssh.
async def send_ssh_command(self, cmd):
:param cmd: The command to run.
:return: Result of the command or None.
"""
result = None
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
print(f"{cmd} error: {e}")
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# return the result, either command output or None
return result
async def reboot(self) -> None:
await self.send_ssh_command("reboot")
async def get_config(self) -> list or None:
"""Get the pool configuration of the miner.
:return: Pool config data or None.
"""
# get pool data
pools = await self.api.pools()
pool_data = []
# ensure we got pool data
if not pools:
return
# parse all the pools
for pool in pools["POOLS"]:
pool_data.append({"url": pool["URL"], "user": pool["User"], "pwd": "123"})
return pool_data
async def reboot(self) -> bool:
logging.debug(f"{self}: Sending reboot command.")
_ret = await self.send_ssh_command("reboot")
logging.debug(f"{self}: Reboot command completed.")
if isinstance(_ret, str):
return True
return False
async def get_data(self):
data = {
"IP": str(self.ip),
"Model": "Unknown",
"Hostname": "Unknown",
"Hashrate": 0,
"Temperature": 0,
"Pool User": "Unknown",
"Wattage": 0,
"Split": 0,
"Pool 1": "Unknown",
"Pool 1 User": "Unknown",
"Pool 2": "",
"Pool 2 User": "",
}
model = await self.get_model()
hostname = await self.get_hostname()
if model:
data["Model"] = model
if hostname:
data["Hostname"] = hostname
miner_data = None
for i in range(DATA_RETRIES):
miner_data = await self.api.multicommand("summary", "pools", "stats")
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")[0]
pools = miner_data.get("pools")[0]
stats = miner_data.get("stats")[0]
if summary:
hr = summary.get("SUMMARY")
if hr:
if len(hr) > 0:
hr = hr[0].get("GHS 5s")
if hr:
data["Hashrate"] = round(hr / 1000, 2)
if stats:
temp = stats.get("STATS")
if temp:
if len(temp) > 1:
for item in ["temp2", "temp1", "temp3"]:
temperature = temp[1].get(item)
if temperature and not temperature == 0.0:
data["Temperature"] = round(temperature)
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
if pool_1.startswith("stratum+tcp://"):
pool_1.replace("stratum+tcp://", "")
if pool_1.startswith("stratum2+tcp://"):
pool_1.replace("stratum2+tcp://", "")
data["Pool 1"] = pool_1
if pool_1_user:
data["Pool 1 User"] = pool_1_user
data["Pool User"] = pool_1_user
if pool_2:
if pool_2.startswith("stratum+tcp://"):
pool_2.replace("stratum+tcp://", "")
if pool_2.startswith("stratum2+tcp://"):
pool_2.replace("stratum2+tcp://", "")
data["Pool 2"] = pool_2
if pool_2_user:
data["Pool 2 User"] = pool_2_user
if quota:
data["Split"] = str(quota)
return data
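Each backend's `get_data` follows the same shape: seed a dict of display defaults, retry `multicommand` up to `MINER_FACTORY_GET_VERSION_RETRIES` times, then overwrite whatever fields parse cleanly. That uniform return shape is what lets the tool render any miner as the same table row; a hypothetical driver, in the same style as the README examples:
```python
import asyncio
from miners.miner_factory import MinerFactory

async def print_miner_row(ip: str):
    # the factory caches miners by IP, so repeated lookups are cheap
    miner = await MinerFactory().get_miner(ip)
    data = await miner.get_data()
    # every get_data() returns the same keys, regardless of backend
    print(f"{data['IP']:<15} {data['Model']:<12} {data['Hashrate']:>8} TH/s  {data['Pool User']}")

if __name__ == "__main__":
    asyncio.new_event_loop().run_until_complete(print_miner_row("192.168.1.69"))
```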

View File

@@ -1,8 +1,9 @@
from miners import BaseMiner
from API.bosminer import BOSMinerAPI
import asyncssh
import toml
from config.bos import bos_config_convert, general_config_convert_bos
import logging
from settings import MINER_FACTORY_GET_VERSION_RETRIES as DATA_RETRIES
class BOSMiner(BaseMiner):
@@ -11,126 +12,209 @@ class BOSMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.version = None
self.uname = "root"
self.pwd = "admin"
self.nominal_chips = 63
def __repr__(self) -> str:
return f"BOSminer: {str(self.ip)}"
async def _get_ssh_connection(self) -> asyncssh.connect:
"""Create a new asyncssh connection"""
conn = await asyncssh.connect(str(self.ip), known_hosts=None, username=self.uname, password=self.pwd,
server_host_key_algs=['ssh-rsa'])
# return created connection
return conn
async def send_ssh_command(self, cmd: str) -> str or None:
"""Send a command to the miner over ssh.
async def send_ssh_command(self, cmd: str) -> None:
"""Sends SSH command to miner."""
# creates result variable
:return: Result of the command or None.
"""
result = None
# runs the command on the miner
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# attempt to run command up to 3 times
# 3 retries
for i in range(3):
try:
# save result of the command
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
print(f"{cmd} error: {e}")
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# return the result, either command output or None
return str(result)
# let the user know the result of the command
if result is not None:
if result.stdout != "":
print(result.stdout)
if result.stderr != "":
print("ERROR: " + result.stderr)
elif result.stderr != "":
print("ERROR: " + result.stderr)
else:
print(cmd)
async def fault_light_on(self) -> None:
async def fault_light_on(self) -> bool:
"""Sends command to turn on fault light on the miner."""
await self.send_ssh_command('miner fault_light on')
logging.debug(f"{self}: Sending fault_light on command.")
self.light = True
_ret = await self.send_ssh_command("miner fault_light on")
logging.debug(f"{self}: fault_light on command completed.")
if isinstance(_ret, str):
return True
return False
async def fault_light_off(self) -> None:
async def fault_light_off(self) -> bool:
"""Sends command to turn off fault light on the miner."""
await self.send_ssh_command('miner fault_light off')
logging.debug(f"{self}: Sending fault_light off command.")
self.light = False
_ret = await self.send_ssh_command("miner fault_light off")
logging.debug(f"{self}: fault_light off command completed.")
if isinstance(_ret, str):
return True
return False
async def restart_backend(self):
await self.restart_bosminer()
async def restart_backend(self) -> bool:
return await self.restart_bosminer()
async def restart_bosminer(self) -> None:
async def restart_bosminer(self) -> bool:
"""Restart bosminer hashing process."""
await self.send_ssh_command('/etc/init.d/bosminer restart')
logging.debug(f"{self}: Sending bosminer restart command.")
_ret = await self.send_ssh_command("/etc/init.d/bosminer restart")
logging.debug(f"{self}: bosminer restart command completed.")
if isinstance(_ret, str):
return True
return False
async def reboot(self) -> None:
async def reboot(self) -> bool:
"""Reboots power to the physical miner."""
await self.send_ssh_command('/sbin/reboot')
logging.debug(f"{self}: Sending reboot command.")
_ret = await self.send_ssh_command("/sbin/reboot")
logging.debug(f"{self}: Reboot command completed.")
if isinstance(_ret, str):
return True
return False
async def get_config(self) -> None:
logging.debug(f"{self}: Getting config.")
async with (await self._get_ssh_connection()) as conn:
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
async with sftp.open('/etc/bosminer.toml') as file:
logging.debug(f"{self}: Reading config file.")
async with sftp.open("/etc/bosminer.toml") as file:
toml_data = toml.loads(await file.read())
cfg = await bos_config_convert(toml_data)
logging.debug(f"{self}: Converting config file.")
cfg = bos_config_convert(toml_data)
self.config = cfg
async def get_hostname(self) -> str:
"""Attempts to get hostname from miner."""
"""Get miner hostname.
:return: The hostname of the miner as a string or "?"
"""
if self.hostname:
return self.hostname
try:
async with (await self._get_ssh_connection()) as conn:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
if conn is not None:
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
logging.debug(f"Found hostname for {self.ip}: {host}")
self.hostname = host
return self.hostname
else:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception as e:
print(self.ip, e)
return "BOSMiner Unknown"
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def get_model(self):
async def get_model(self) -> str or None:
"""Get miner model.
:return: Miner model or None.
"""
# check if model is cached
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
# get devdetails data
version_data = await self.api.devdetails()
# if we get data back, parse it for model
if version_data:
if not version_data["DEVDETAILS"] == []:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
# handle Antminer BOSMiner as a base
self.model = version_data["DEVDETAILS"][0]["Model"].replace(
"Antminer ", ""
)
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
# if we don't get devdetails, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def send_config(self, yaml_config) -> None:
async def get_version(self):
"""Get miner firmware version.
:return: Miner firmware version or None.
"""
# check if version is cached
if self.version:
logging.debug(f"Found version for {self.ip}: {self.version}")
return self.version
# get output of bos version file
version_data = await self.send_ssh_command("cat /etc/bos_version")
# if we get the version data, parse it
if version_data:
self.version = version_data.stdout.split("-")[5]
logging.debug(f"Found version for {self.ip}: {self.version}")
return self.version
# if we fail to get version, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def send_config(self, yaml_config, ip_user: bool = False) -> None:
"""Configures miner with yaml config."""
toml_conf = await general_config_convert_bos(yaml_config)
logging.debug(f"{self}: Sending config.")
if ip_user:
suffix = str(self.ip).split(".")[-1]
toml_conf = toml.dumps(
general_config_convert_bos(yaml_config, user_suffix=suffix)
)
else:
toml_conf = toml.dumps(general_config_convert_bos(yaml_config))
async with (await self._get_ssh_connection()) as conn:
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
async with sftp.open('/etc/bosminer.toml', 'w+') as file:
logging.debug(f"{self}: Opening config file.")
async with sftp.open("/etc/bosminer.toml", "w+") as file:
await file.write(toml_conf)
logging.debug(f"{self}: Restarting BOSMiner")
await conn.run("/etc/init.d/bosminer restart")
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
logging.debug(f"{self}: Getting board info.")
devdetails = await self.api.devdetails()
if not devdetails.get("DEVDETAILS"):
print("devdetails error", devdetails)
return {0: [], 1: [], 2: []}
devs = devdetails['DEVDETAILS']
devs = devdetails["DEVDETAILS"]
boards = {}
offset = devs[0]["ID"]
for board in devs:
boards[board["ID"] - offset] = []
if not board['Chips'] == self.nominal_chips:
if not board["Chips"] == self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Chips'],
"chip_status": "o" * board['Chips'],
"nominal": nominal
})
boards[board["ID"] - offset].append(
{
"chain": board["ID"] - offset,
"chip_count": board["Chips"],
"chip_status": "o" * board["Chips"],
"nominal": nominal,
}
)
logging.debug(f"Found board data for {self}: {boards}")
return boards
async def get_bad_boards(self) -> dict:
@@ -145,14 +229,122 @@ class BOSMiner(BaseMiner):
bad_boards[board].append(chain)
return bad_boards
async def check_good_boards(self) -> str:
"""Checks for and provides list for working boards."""
devs = await self.api.devdetails()
bad = 0
chains = devs['DEVDETAILS']
chains = devs["DEVDETAILS"]
for chain in chains:
if chain['Chips'] == 0:
if chain["Chips"] == 0:
bad += 1
if not bad > 0:
return str(self.ip)
async def get_data(self):
data = {
"IP": str(self.ip),
"Model": "Unknown",
"Hostname": "Unknown",
"Hashrate": 0,
"Temperature": 0,
"Pool User": "Unknown",
"Wattage": 0,
"Split": "0",
"Pool 1": "Unknown",
"Pool 1 User": "Unknown",
"Pool 2": "",
"Pool 2 User": "",
}
model = await self.get_model()
hostname = await self.get_hostname()
if model:
data["Model"] = model
if hostname:
data["Hostname"] = hostname
miner_data = None
for i in range(DATA_RETRIES):
miner_data = await self.api.multicommand(
"summary", "temps", "tunerstatus", "pools"
)
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")[0]
temps = miner_data.get("temps")[0]
tunerstatus = miner_data.get("tunerstatus")[0]
pools = miner_data.get("pools")[0]
if summary:
hr = summary.get("SUMMARY")
if hr:
if len(hr) > 0:
hr = hr[0].get("MHS 5s")
if hr:
data["Hashrate"] = round(hr / 1000000, 2)
if temps:
temp = temps.get("TEMPS")
if temp:
if len(temp) > 0:
temp = temp[0].get("Chip")
if temp:
data["Temperature"] = round(temp)
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
pool_1 = pool_1.replace("stratum+tcp://", "")
pool_1 = pool_1.replace("stratum2+tcp://", "")
data["Pool 1"] = pool_1
if pool_1_user:
data["Pool 1 User"] = pool_1_user
data["Pool User"] = pool_1_user
if pool_2:
pool_2 = pool_2.replace("stratum+tcp://", "")
pool_2 = pool_2.replace("stratum2+tcp://", "")
data["Pool 2"] = pool_2
if pool_2_user:
data["Pool 2 User"] = pool_2_user
if quota:
data["Split"] = str(quota)
if tunerstatus:
tuner = tunerstatus.get("TUNERSTATUS")
if tuner:
if len(tuner) > 0:
wattage = tuner[0].get("PowerLimit")
if wattage:
data["Wattage"] = wattage
return data

View File

@@ -1,6 +1,8 @@
from API.btminer import BTMinerAPI
from miners import BaseMiner
from API import APIError
import logging
from settings import MINER_FACTORY_GET_VERSION_RETRIES as DATA_RETRIES
class BTMiner(BaseMiner):
@@ -15,24 +17,36 @@ class BTMiner(BaseMiner):
async def get_model(self):
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.devdetails()
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].split("V")[0]
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_hostname(self) -> str:
if self.hostname:
return self.hostname
try:
host_data = await self.api.get_miner_info()
if host_data:
return host_data["Msg"]["hostname"]
host = host_data["Msg"]["hostname"]
logging.debug(f"Found hostname for {self.ip}: {host}")
self.hostname = host
return self.hostname
except APIError:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception as e:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
logging.debug(f"{self}: Getting board info.")
devs = await self.api.devs()
if not devs.get("DEVS"):
print("devs error", devs)
@@ -43,17 +57,129 @@ class BTMiner(BaseMiner):
for board in devs:
boards[board["ID"] - offset] = []
if "Effective Chips" in board.keys():
if not board['Effective Chips'] in self.nominal_chips:
if not board["Effective Chips"] in self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Effective Chips'],
"chip_status": "o" * board['Effective Chips'],
"nominal": nominal
})
boards[board["ID"] - offset].append(
{
"chain": board["ID"] - offset,
"chip_count": board["Effective Chips"],
"chip_status": "o" * board["Effective Chips"],
"nominal": nominal,
}
)
else:
logging.warning(f"Incorrect board data from {self}: {board}")
print(board)
logging.debug(f"Found board data for {self}: {boards}")
return boards
async def get_data(self):
data = {
"IP": str(self.ip),
"Model": "Unknown",
"Hostname": "Unknown",
"Hashrate": 0,
"Temperature": 0,
"Pool User": "Unknown",
"Wattage": 0,
"Split": 0,
"Pool 1": "Unknown",
"Pool 1 User": "Unknown",
"Pool 2": "",
"Pool 2 User": "",
}
model = await self.get_model()
hostname = await self.get_hostname()
if model:
data["Model"] = model
if hostname:
data["Hostname"] = hostname
miner_data = None
for i in range(DATA_RETRIES):
miner_data = await self.api.multicommand("summary", "devs", "pools")
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")[0]
devs = miner_data.get("devs")[0]
pools = miner_data.get("pools")[0]
if summary:
summary_data = summary.get("SUMMARY")
if summary_data:
if len(summary_data) > 0:
hr = summary_data[0].get("MHS 5s")
if hr:
data["Hashrate"] = round(hr / 1000000, 2)
wattage = summary_data[0].get("Power")
if wattage:
data["Wattage"] = round(wattage)
if devs:
temp_data = devs.get("DEVS")
if temp_data:
for board in temp_data:
temp = board.get("Chip Temp Avg")
if temp and not temp == 0.0:
data["Temperature"] = round(temp)
break
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
if pool_1.startswith("stratum+tcp://"):
pool_1.replace("stratum+tcp://", "")
if pool_1.startswith("stratum2+tcp://"):
pool_1.replace("stratum2+tcp://", "")
data["Pool 1"] = pool_1
if pool_1_user:
data["Pool 1 User"] = pool_1_user
data["Pool User"] = pool_1_user
if pool_2:
if pool_2.startswith("stratum+tcp://"):
pool_2.replace("stratum+tcp://", "")
if pool_2.startswith("stratum2+tcp://"):
pool_2.replace("stratum2+tcp://", "")
data["Pool 2"] = pool_2
if pool_2_user:
data["Pool 2 User"] = pool_2_user
if quota:
data["Split"] = str(quota)
return data

View File

@@ -1,7 +1,8 @@
from miners import BaseMiner
from API.cgminer import CGMinerAPI
from API import APIError
import asyncssh
from settings import MINER_FACTORY_GET_VERSION_RETRIES as DATA_RETRIES
import logging
class CGMiner(BaseMiner):
@@ -10,8 +11,8 @@ class CGMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
def __repr__(self) -> str:
return f"CGMiner: {str(self.ip)}"
@@ -29,108 +30,181 @@ class CGMiner(BaseMiner):
return None
async def get_hostname(self) -> str:
if self.hostname:
return self.hostname
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
self.hostname = host
return self.hostname
else:
return "?"
except Exception:
return "?"
async def _get_ssh_connection(self) -> asyncssh.connect:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
return conn
except Exception as e:
print(e)
except OSError:
print(str(self.ip) + " Connection refused.")
return None
async def send_ssh_command(self, cmd):
result = None
async with (await self._get_ssh_connection()) as conn:
for i in range(3):
try:
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
print(f"{cmd} error: {e}")
if i == 3:
return
continue
# handle result
self._result_handler(result)
return result
@staticmethod
def _result_handler(result: asyncssh.process.SSHCompletedProcess) -> None:
if result is not None:
# noinspection PyUnresolvedReferences
if len(result.stdout) > 0:
# noinspection PyUnresolvedReferences
print("ssh stdout: \n" + result.stdout)
# noinspection PyUnresolvedReferences
if len(result.stderr) > 0:
# noinspection PyUnresolvedReferences
print("ssh stderr: \n" + result.stderrr)
# noinspection PyUnresolvedReferences
if len(result.stdout) <= 0 and len(result.stderr) <= 0:
print("ssh stdout stderr empty")
# if result.stdout != "":
# print(result.stdout)
# if result.stderr != "":
# print("ERROR: " + result.stderr)
# elif result.stderr != "":
# print("ERROR: " + result.stderr)
# else:
# print(cmd)
async def restart_backend(self) -> bool:
return await self.restart_cgminer()
async def restart_backend(self) -> None:
await self.restart_cgminer()
async def restart_cgminer(self) -> bool:
commands = ["cgminer-api restart", "/usr/bin/cgminer-monitor >/dev/null 2>&1"]
commands = ";".join(commands)
_ret = await self.send_ssh_command(commands)
if isinstance(_ret, str):
return True
return False
async def restart_cgminer(self) -> None:
commands = ['cgminer-api restart',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
await self.send_ssh_command(commands)
async def reboot(self) -> None:
await self.send_ssh_command("reboot")
async def reboot(self) -> bool:
logging.debug(f"{self}: Sending reboot command.")
_ret = await self.send_ssh_command("reboot")
logging.debug(f"{self}: Reboot command completed.")
if isinstance(_ret, str):
return True
return False
async def start_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"*/3 * * * * /usr/bin/cgminer-monitor\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
commands = [
"mkdir -p /etc/tmp/",
'echo "*/3 * * * * /usr/bin/cgminer-monitor" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"/usr/bin/cgminer-monitor >/dev/null 2>&1",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def stop_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'killall cgminer']
commands = ';'.join(commands)
commands = [
"mkdir -p /etc/tmp/",
'echo "" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"killall cgminer",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def get_config(self) -> None:
async with (await self._get_ssh_connection()) as conn:
command = 'cat /etc/config/cgminer'
command = "cat /etc/config/cgminer"
result = await conn.run(command, check=True)
self._result_handler(result)
self.config = result.stdout
print(str(self.config))
async def get_data(self):
data = {
"IP": str(self.ip),
"Model": "Unknown",
"Hostname": "Unknown",
"Hashrate": 0,
"Temperature": 0,
"Pool User": "Unknown",
"Wattage": 0,
"Split": 0,
"Pool 1": "Unknown",
"Pool 1 User": "Unknown",
"Pool 2": "",
"Pool 2 User": "",
}
model = await self.get_model()
hostname = await self.get_hostname()
if model:
data["Model"] = model
if hostname:
data["Hostname"] = hostname
miner_data = None
for i in range(DATA_RETRIES):
miner_data = await self.api.multicommand("summary", "pools", "stats")
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")[0]
pools = miner_data.get("pools")[0]
stats = miner_data.get("stats")[0]
if summary:
hr = summary.get("SUMMARY")
if hr:
if len(hr) > 0:
hr = hr[0].get("GHS 5s")
if hr:
data["Hashrate"] = round(hr / 1000, 2)
if stats:
temp = stats.get("STATS")
if temp:
if len(temp) > 1:
for item in ["temp2", "temp1", "temp3"]:
temperature = temp[1].get(item)
if temperature and not temperature == 0.0:
data["Temperature"] = round(temperature)
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
if pool_1.startswith("stratum+tcp://"):
pool_1.replace("stratum+tcp://", "")
if pool_1.startswith("stratum2+tcp://"):
pool_1.replace("stratum2+tcp://", "")
data["Pool 1"] = pool_1
if pool_1_user:
data["Pool 1 User"] = pool_1_user
data["Pool User"] = pool_1_user
if pool_2:
if pool_2.startswith("stratum+tcp://"):
pool_2.replace("stratum+tcp://", "")
if pool_2.startswith("stratum2+tcp://"):
pool_2.replace("stratum2+tcp://", "")
data["Pool 2"] = pool_2
if pool_2_user:
data["Pool 2 User"] = pool_2_user
if quota:
data["Split"] = str(quota)
return data

View File

@@ -12,6 +12,7 @@ from miners.antminer.X17.cgminer import CGMinerX17
from miners.antminer.X19.bmminer import BMMinerX19
from miners.antminer.X19.cgminer import CGMinerX19
from miners.antminer.X19.bosminer import BOSMinerX19
from miners.whatsminer.M20 import BTMinerM20
from miners.whatsminer.M21 import BTMinerM21
@@ -19,7 +20,8 @@ from miners.whatsminer.M30 import BTMinerM30
from miners.whatsminer.M31 import BTMinerM31
from miners.whatsminer.M32 import BTMinerM32
from miners.avalonminer import CGMinerAvalon
from miners.avalonminer.Avalon8 import CGMinerAvalon8
from miners.avalonminer.Avalon10 import CGMinerAvalon10
from miners.cgminer import CGMiner
from miners.bmminer import BMMiner
@@ -32,11 +34,24 @@ from API import APIError
import asyncio
import ipaddress
import json
import logging
from settings import MINER_FACTORY_GET_VERSION_RETRIES as GET_VERSION_RETRIES
from settings import (
MINER_FACTORY_GET_VERSION_RETRIES as GET_VERSION_RETRIES,
NETWORK_PING_TIMEOUT as PING_TIMEOUT,
)
class MinerFactory:
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
class MinerFactory(metaclass=Singleton):
def __init__(self):
self.miners = {}
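A quick illustration of what the Singleton metaclass above buys: every construction of MinerFactory returns the same object, so the miner cache is shared across the tools. A small self-contained sketch of the same pattern (class names here are illustrative, not from the source):

class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]

class SharedCache(metaclass=Singleton):
    def __init__(self):
        self.items = {}

assert SharedCache() is SharedCache()  # both constructions return the one shared instance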
@@ -62,8 +77,10 @@ class MinerFactory:
for miner in scanned:
yield await miner
async def get_miner(self, ip: ipaddress.ip_address):
async def get_miner(self, ip: ipaddress.ip_address or str):
"""Decide a miner type using the IP address of the miner."""
if isinstance(ip, str):
ip = ipaddress.ip_address(ip)
# check if the miner already exists in cache
if ip in self.miners:
return self.miners[ip]
@@ -74,31 +91,35 @@ class MinerFactory:
# try to get the API multiple times based on retries
for i in range(GET_VERSION_RETRIES):
# get the API type, should be BOSMiner, CGMiner, BMMiner, BTMiner, or None
api = await self._get_api_type(ip)
# if we find the API type, we don't need to loop anymore
if api:
break
try:
# get the API type, should be BOSMiner, CGMiner, BMMiner, BTMiner, or None
new_model, new_api = await asyncio.wait_for(
self._get_miner_type(ip), timeout=PING_TIMEOUT
)
# keep track of the API and model we found first
if new_api and not api:
api = new_api
if new_model and not model:
model = new_model
# if we find the API and model, we don't need to loop anymore
if api and model:
break
except asyncio.TimeoutError:
pass
# try to get the model multiple times based on retries
for i in range(GET_VERSION_RETRIES):
# get the model, should return some miner model type, e.g. Antminer S9
model = await self._get_miner_model(ip)
# if we find the model type, we don't need to loop anymore
if model:
break
# make sure we have model information
if model:
# check if the miner is an Antminer
if "Antminer" in model:
# S9 logic
if "Antminer S9" in model:
# handle the different API types
if not api:
print(ip)
logging.warning(
f"{str(ip)}: No API data found, using BraiinsOS."
)
miner = BOSMinerS9(str(ip))
elif "BOSMiner" in api:
miner = BOSMinerS9(str(ip))
@@ -106,7 +127,7 @@ class MinerFactory:
miner = CGMinerS9(str(ip))
elif "BMMiner" in api:
miner = BMMinerS9(str(ip))
elif "Antminer T9" in model:
if "BMMiner" in api:
if "Hiveon" in model:
@@ -119,7 +140,6 @@ class MinerFactory:
# X17 model logic
elif "17" in model:
# handle the different API types
if "BOSMiner" in api:
miner = BOSMinerX17(str(ip))
@@ -130,16 +150,20 @@ class MinerFactory:
# X19 logic
elif "19" in model:
# handle the different API types
if "BOSMiner" in api:
miner = BOSMinerX19(str(ip))
if "CGMiner" in api:
miner = CGMinerX19(str(ip))
elif "BMMiner" in api:
miner = BMMinerX19(str(ip))
# Avalonminer V8
# Avalonminers
elif "avalon" in model:
miner = CGMinerAvalon(str(ip))
if model == "avalon10":
miner = CGMinerAvalon10(str(ip))
else:
miner = CGMinerAvalon8(str(ip))
# Whatsminers
elif "M20" in model:
@@ -176,82 +200,116 @@ class MinerFactory:
# empty out self.miners
self.miners = {}
async def _get_miner_model(self, ip: ipaddress.ip_address or str) -> str or None:
# instantiate model as being nothing if getting it fails
async def _get_miner_type(self, ip: ipaddress.ip_address or str) -> tuple:
model = None
api = None
devdetails = None
version = None
# try block in case of APIError or OSError 121 (Semaphore timeout)
try:
data = await self._send_api_command(str(ip), "devdetails+version")
# send the devdetails command to the miner (will fail with no boards/devices)
data = await self._send_api_command(str(ip), "devdetails")
# sometimes data is b'', check for that
if data:
# status check, make sure the command succeeded
if data.get("STATUS"):
if not isinstance(data["STATUS"], str):
# if status is E, its an error
if data["STATUS"][0].get("STATUS") not in ["I", "S"]:
validation = await self._validate_command(data)
if not validation[0]:
raise APIError(validation[1])
# try an alternate method if devdetails fails
data = await self._send_api_command(str(ip), "version")
devdetails = data["devdetails"][0]
version = data["version"][0]
# make sure we have data
if data:
# check the keys are there to get the version
if data.get("VERSION"):
if data["VERSION"][0].get("Type"):
# save the model to be returned later
model = data["VERSION"][0]["Type"]
else:
# make sure devdetails actually contains data; if it's empty, there are no devices
if "DEVDETAILS" in data.keys() and not data["DEVDETAILS"] == []:
except APIError as e:
data = None
# check for model, for most miners
if not data["DEVDETAILS"][0]["Model"] == "":
# model of most miners
model = data["DEVDETAILS"][0]["Model"]
if not data:
try:
devdetails = await self._send_api_command(str(ip), "devdetails")
validation = await self._validate_command(devdetails)
if not validation[0]:
version = await self._send_api_command(str(ip), "version")
# if model fails, try driver
else:
# some avalonminers have model in driver
model = data["DEVDETAILS"][0]["Driver"]
else:
# if all that fails, try just version
data = await self._send_api_command(str(ip), "version")
if "VERSION" in data.keys():
model = data["VERSION"][0]["Type"]
else:
print(data)
validation = await self._validate_command(version)
if not validation[0]:
raise APIError(validation[1])
except APIError as e:
logging.warning(f"{ip}: API Command Error: {e}")
return None, None
return model
if devdetails:
if "DEVDETAILS" in devdetails.keys() and not devdetails["DEVDETAILS"] == []:
# check for model, for most miners
if not devdetails["DEVDETAILS"][0]["Model"] == "":
# model of most miners
model = devdetails["DEVDETAILS"][0]["Model"]
# if there are errors, we just return None
except APIError:
return model
except OSError as e:
if e.winerror == 121:
print(e)
return model
else:
print(ip, e)
return model
# if model fails, try driver
else:
# some avalonminers have model in driver
model = devdetails["DEVDETAILS"][0]["Driver"]
if version:
# check if there are any BMMiner strings in any of the dict keys
if any("BMMiner" in string for string in version["VERSION"][0].keys()):
api = "BMMiner"
# check if there are any CGMiner strings in any of the dict keys
elif any("CGMiner" in string for string in version["VERSION"][0].keys()):
api = "CGMiner"
# check if there are any BOSMiner strings in any of the dict keys
elif any("BOSminer" in string for string in version["VERSION"][0].keys()):
api = "BOSMiner"
# if all that fails, check the Description to see if it is a whatsminer
elif version.get("Description") and "whatsminer" in version.get("Description"):
api = "BTMiner"
if version and not model:
if (
"VERSION" in version.keys()
and version.get("VERSION")
and not version.get("VERSION") == []
):
model = version["VERSION"][0]["Type"]
return model, api
async def _validate_command(self, data: dict) -> tuple:
"""Check if the returned command output is correctly formatted."""
# check if the data returned is correct or an error
if not data:
return False, "No API data."
# if status isn't a key, it is a multicommand
if "STATUS" not in data.keys():
for key in data.keys():
# make sure not to try to turn id into a dict
if not key == "id":
# make sure they succeeded
if "STATUS" in data[key][0].keys():
if data[key][0]["STATUS"][0]["STATUS"] not in ["S", "I"]:
# this is an error
return False, f"{key}: " + data[key][0]["STATUS"][0]["Msg"]
elif "id" not in data.keys():
if data["STATUS"] not in ["S", "I"]:
return False, data["Msg"]
else:
# make sure the command succeeded
if data["STATUS"][0]["STATUS"] not in ("S", "I"):
# this is an error
if data["STATUS"][0]["STATUS"] not in ("S", "I"):
return False, data["STATUS"][0]["Msg"]
return True, None
async def _send_api_command(self, ip: ipaddress.ip_address or str, command: str):
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(str(ip), 4028)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
print("Semaphore Timeout has Expired.")
logging.warning(f"{str(ip)} - Command {command}: {e}")
return {}
# create the command
cmd = {"command": command}
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
@@ -265,16 +323,16 @@ class MinerFactory:
break
data += d
except Exception as e:
print(e)
logging.debug(f"{str(ip)}: {e}")
try:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
@@ -293,47 +351,3 @@ class MinerFactory:
await writer.wait_closed()
return data
async def _get_api_type(self, ip: ipaddress.ip_address or str) -> dict or None:
"""Get data on the version of the miner to return the right miner."""
# instantiate API as None in case something fails
api = None
# try block to handle OSError 121 (Semaphore timeout)
try:
# try the version command, which works on most miners
data = await self._send_api_command(str(ip), "version")
# if we got data back, try to parse it
if data:
# make sure the command succeeded
if data.get("STATUS") and not data.get("STATUS") == "E":
if data["STATUS"][0].get("STATUS") in ["I", "S"]:
# check if there are any BMMiner strings in any of the dict keys
if any("BMMiner" in string for string in data["VERSION"][0].keys()):
api = "BMMiner"
# check if there are any CGMiner strings in any of the dict keys
elif any("CGMiner" in string for string in data["VERSION"][0].keys()):
api = "CGMiner"
# check if there are any BOSMiner strings in any of the dict keys
elif any("BOSminer" in string for string in data["VERSION"][0].keys()):
api = "BOSMiner"
# if all that fails, check the Description to see if it is a whatsminer
elif data.get("Description") and "whatsminer" in data.get("Description"):
api = "BTMiner"
# return the API if we found it
if api:
return api
# if there are errors, return None
except OSError as e:
if e.winerror == 121:
return None
else:
print(ip, e)
return None
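A standalone sketch (not the project's code) of the wire protocol _send_api_command implements above: a JSON command sent to TCP port 4028, the reply read until the socket closes, the trailing null byte stripped, and the stray ",}" some firmwares emit patched before parsing. Error handling is omitted for brevity.

import asyncio
import json

async def query_miner_api(ip: str, command: str, port: int = 4028) -> dict:
    reader, writer = await asyncio.open_connection(ip, port)
    writer.write(json.dumps({"command": command}).encode("utf-8"))
    await writer.drain()
    raw = await reader.read(-1)  # read until the miner closes the connection
    writer.close()
    await writer.wait_closed()
    text = raw.decode("utf-8").rstrip("\x00")
    text = text.replace(",}", "}")  # fix the extra comma some firmwares return
    return json.loads(text)

# e.g. data = asyncio.run(query_miner_api("192.168.1.10", "version"))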

View File

@@ -22,7 +22,7 @@ async def get_bos_bad_tuners(ip: str = "192.168.1.0", mask: int = 24):
# run all the tuner status commands
tuner_status = await asyncio.gather(*tuner_tasks)
# create a list of all miners with bad board tuner status'
# create a list of all miners with bad board tuner status
bad_tuner_miners = []
for item in tuner_status:
# loop through and get each miners' bad board count

View File

@@ -1,14 +1,21 @@
import ipaddress
import asyncio
import logging
from network.net_range import MinerNetworkRange
from miners.miner_factory import MinerFactory
from settings import NETWORK_PING_RETRIES as PING_RETRIES, NETWORK_PING_TIMEOUT as PING_TIMEOUT, \
NETWORK_SCAN_THREADS as SCAN_THREADS
from settings import (
NETWORK_PING_RETRIES as PING_RETRIES,
NETWORK_PING_TIMEOUT as PING_TIMEOUT,
NETWORK_SCAN_THREADS as SCAN_THREADS,
)
class MinerNetwork:
def __init__(self, ip_addr: str or None = None, mask: str or int or None = None) -> None:
def __init__(
self, ip_addr: str or None = None, mask: str or int or None = None
) -> None:
self.network = None
self.miner_factory = MinerFactory()
self.ip_addr = ip_addr
self.connected_miners = {}
self.mask = mask
@@ -16,28 +23,36 @@ class MinerNetwork:
def __len__(self):
return len([item for item in self.get_network().hosts()])
def __repr__(self):
return str(self.network)
def get_network(self) -> ipaddress.ip_network:
"""Get the network using the information passed to the MinerNetwork or from cache."""
# if we have a network cached already, use that
if self.network:
return self.network
# if there is no IP address passed, default to 192.168.1.0
if not self.ip_addr:
default_gateway = "192.168.1.0"
# if we do have an IP address passed, use that
if "-" in self.ip_addr:
self.network = MinerNetworkRange(self.ip_addr)
else:
default_gateway = self.ip_addr
# if there is no IP address passed, default to 192.168.1.0
if not self.ip_addr:
default_gateway = "192.168.1.0"
# if we do have an IP address passed, use that
else:
default_gateway = self.ip_addr
# if there is no subnet mask passed, default to /24
if not self.mask:
subnet_mask = "24"
# if we do have a mask passed, use that
else:
subnet_mask = str(self.mask)
# if there is no subnet mask passed, default to /24
if not self.mask:
subnet_mask = "24"
# if we do have a mask passed, use that
else:
subnet_mask = str(self.mask)
# save the network and return it
self.network = ipaddress.ip_network(f"{default_gateway}/{subnet_mask}", strict=False)
# save the network and return it
self.network = ipaddress.ip_network(
f"{default_gateway}/{subnet_mask}", strict=False
)
return self.network
async def scan_network_for_miners(self) -> None or list:
@@ -46,6 +61,9 @@ class MinerNetwork:
local_network = self.get_network()
print(f"Scanning {local_network} for miners...")
# clear cached miners
MinerFactory().clear_cached_miners()
# create a list of tasks and miner IPs
scan_tasks = []
miner_ips = []
@@ -75,13 +93,10 @@ class MinerNetwork:
# create a list of tasks to get miners
create_miners_tasks = []
# clear cached miners
self.miner_factory.clear_cached_miners()
# try to get each miner found
for miner_ip in miner_ips:
# append to the list of tasks
create_miners_tasks.append(self.miner_factory.get_miner(miner_ip))
create_miners_tasks.append(MinerFactory().get_miner(miner_ip))
# get all miners in the list
miners = await asyncio.gather(*create_miners_tasks)
@@ -127,25 +142,33 @@ class MinerNetwork:
@staticmethod
async def ping_miner(ip: ipaddress.ip_address) -> None or ipaddress.ip_address:
for i in range(PING_RETRIES):
connection_fut = asyncio.open_connection(str(ip), 4028)
try:
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(connection_fut, timeout=PING_TIMEOUT)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
await writer.wait_closed()
# ping was successful
return ip
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
except ConnectionRefusedError:
# handle for other connection errors
print(f"{str(ip)}: Connection Refused.")
# ping failed, likely with an exception
except Exception as e:
print(e)
return await ping_miner(ip)
async def ping_miner(
ip: ipaddress.ip_address, port=4028
) -> None or ipaddress.ip_address:
for i in range(PING_RETRIES):
connection_fut = asyncio.open_connection(str(ip), port)
try:
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(
connection_fut, timeout=PING_TIMEOUT
)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
await writer.wait_closed()
# ping was successful
return ip
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
return
except ConnectionRefusedError:
# handle for other connection errors
logging.debug(f"{str(ip)}: Connection Refused.")
# ping failed, likely with an exception
except Exception as e:
logging.warning(f"{str(ip)}: {e}")
continue
return
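For reference, the get_network() logic above boils down to ipaddress.ip_network with strict=False, which also accepts a host address and normalizes it to the enclosing network. A small illustration (addresses are arbitrary):

import ipaddress

net = ipaddress.ip_network("192.168.1.15/24", strict=False)
print(net)                     # 192.168.1.0/24
print(len(list(net.hosts())))  # 254 usable host addresses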

31 network/net_range.py Normal file
View File

@@ -0,0 +1,31 @@
import ipaddress
class MinerNetworkRange:
"""A MinerNetwork that takes a range of IP addresses.
:param ip_range: A range of IP addresses to put in the network.
Takes a string formatted as
{ip_range_1_start}-{ip_range_1_end}, {ip_range_2_start}-{ip_range_2_end}
"""
def __init__(self, ip_range: str):
ip_ranges = ip_range.replace(" ", "").split(",")
self.host_ips = []
for item in ip_ranges:
start, end = item.split("-")
start_ip = ipaddress.ip_address(start)
end_ip = ipaddress.ip_address(end)
networks = ipaddress.summarize_address_range(start_ip, end_ip)
for network in networks:
self.host_ips.append(network.network_address)
for host in network.hosts():
if host not in self.host_ips:
self.host_ips.append(host)
if network.broadcast_address not in self.host_ips:
self.host_ips.append(network.broadcast_address)
def hosts(self):
for x in self.host_ips:
yield x
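A usage sketch of MinerNetworkRange with the range string format described in its docstring (the addresses below are arbitrary):

rng = MinerNetworkRange("192.168.1.10-192.168.1.12, 192.168.1.50-192.168.1.50")
print([str(ip) for ip in rng.hosts()])
# ['192.168.1.10', '192.168.1.11', '192.168.1.12', '192.168.1.50']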

Binary file not shown.

View File

@@ -1,27 +1,48 @@
import toml
import os
NETWORK_PING_RETRIES: int = 3
NETWORK_PING_TIMEOUT: int = 5
NETWORK_SCAN_THREADS: int = 300
CFG_UTIL_REBOOT_THREADS: int = 300
CFG_UTIL_CONFIG_THREADS: int = 300
MINER_FACTORY_GET_VERSION_RETRIES: int = 3
WHATSMINER_PWD = "admin"
DEBUG = False
try:
with open(os.path.join(os.getcwd(), "settings.toml"), "r") as settings_file:
with open(
os.path.join(os.path.dirname(__file__), "settings.toml"), "r"
) as settings_file:
settings = toml.loads(settings_file.read())
except:
pass
settings_keys = settings.keys()
if "ping_retries" in settings_keys:
NETWORK_PING_RETRIES: int = settings["ping_retries"]
if "ping_timeout" in settings_keys:
NETWORK_PING_TIMEOUT: int = settings["ping_timeout"]
if "scan_threads" in settings_keys:
NETWORK_SCAN_THREADS: int = settings["scan_threads"]
if "reboot_threads" in settings_keys:
CFG_UTIL_REBOOT_THREADS: int = settings["reboot_threads"]
if "config_threads" in settings_keys:
CFG_UTIL_CONFIG_THREADS: int = settings["config_threads"]
if "get_version_retries" in settings_keys:
MINER_FACTORY_GET_VERSION_RETRIES: int = settings["get_version_retries"]
if "whatsminer_pwd" in settings_keys:
WHATSMINER_PWD: str = settings["whatsminer_pwd"]
except:
NETWORK_PING_RETRIES: int = 3
NETWORK_PING_TIMEOUT: int = 5
NETWORK_SCAN_THREADS: int = 300
CFG_UTIL_REBOOT_THREADS: int = 300
CFG_UTIL_CONFIG_THREADS: int = 300
MINER_FACTORY_GET_VERSION_RETRIES: int = 3
WHATSMINER_PWD = "admin"
if "debug" in settings_keys:
DEBUG: int = settings["debug"]
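The same override-the-defaults idea can be written more compactly; a sketch under the assumption that only known keys should be honored (not the project's code):

import os
import toml

DEFAULTS = {"ping_retries": 3, "ping_timeout": 5, "scan_threads": 300}

try:
    with open(os.path.join(os.path.dirname(__file__), "settings.toml"), "r") as f:
        user_settings = toml.loads(f.read())
except OSError:
    user_settings = {}

# keep only the keys we know about, falling back to the defaults otherwise
config = {**DEFAULTS, **{k: v for k, v in user_settings.items() if k in DEFAULTS}}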

View File

@@ -1,6 +1,6 @@
get_version_retries = 3
ping_retries = 3
ping_timeout = 5
ping_timeout = 5 # Seconds
scan_threads = 300
config_threads = 300
reboot_threads = 300
@@ -11,4 +11,10 @@ reboot_threads = 300
# tool or the privileged API will not work using admin as the password.
# If you change the password, you can pass that password here.
whatsminer_pwd = "admin"
whatsminer_pwd = "admin"
### DEBUG MODE ###
# change this to debug = true
# to enable debug mode.
debug = false
# debug = true

View File

@@ -36,6 +36,9 @@
* ALL: Selects all miners in the table, or deselects all if they are already all selected.
* REFRESH DATA: Refreshes data for the currently selected miners, or all miners if none are selected.
* OPEN IN WEB: Opens all currently selected miners' web interfaces in your default browser.
* REBOOT: Reboots all selected miners.
* RESTART BACKEND: Restarts the mining process on the miner (bosminer daemon, bmminer daemon, cgminer daemon, etc).
* SEND SSH COMMAND: Opens a new window to send an SSH command to all selected miners (or all miners if none are selected).
### Table:
* Click any header in the table to sort by that column.

View File

@@ -1,18 +1,29 @@
from tools.bad_board_util.miner_factory import miner_factory
from tools.bad_board_util.ui import ui
import asyncio
import sys
import logging
from logger import logger
logger.info("Initializing logger for Board Util.")
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def main():
logging.info("Starting Board Util.")
loop = asyncio.new_event_loop()
loop.run_until_complete(ui())
logging.info("Closing Board Util.")
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,29 @@
from tools.bad_board_util.layout import window
def disable_buttons(func):
button_list = [
"scan",
"import_iplist",
"export_iplist",
"select_all_ips",
"refresh_data",
"open_in_web",
"save_report_button",
"light",
]
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):
# disable the buttons
for button in button_list:
window[button].Update(disabled=True)
# call the original wrapped function
await func(*args, **kwargs)
# re-enable the buttons after the wrapped function completes
for button in button_list:
window[button].Update(disabled=False)
return inner
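One caveat with the decorator above is that the buttons are only re-enabled after func() returns, so an exception in the wrapped coroutine would leave them disabled. A self-contained sketch of the same idea hardened with try/finally (names are illustrative, not from the source):

import functools

def disable_while_running(window, buttons):
    def decorator(func):
        @functools.wraps(func)
        async def inner(*args, **kwargs):
            for button in buttons:
                window[button].Update(disabled=True)
            try:
                return await func(*args, **kwargs)
            finally:
                # re-enable the buttons even if the wrapped coroutine raises
                for button in buttons:
                    window[button].Update(disabled=False)
        return inner
    return decorator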

View File

@@ -1,11 +1,67 @@
import ipaddress
import os
import re
import xlsxwriter
import aiofiles
from tools.bad_board_util.func.ui import update_ui_with_data
from tools.bad_board_util.layout import window
from tools.bad_board_util.func.decorators import disable_buttons
from miners.miner_factory import MinerFactory
@disable_buttons
async def save_report(file_location):
data = {}
workbook = xlsxwriter.Workbook(file_location)
sheet = workbook.add_worksheet()
for line in window["ip_table"].Values:
data[line[0]] = {
"Model": line[1],
"Total Chips": line[2],
"Left Chips": line[3],
"Center Chips": line[5],
"Right Chips": line[7],
"Nominal": 1,
}
async for miner in MinerFactory().get_miner_generator([key for key in data.keys()]):
if miner:
data[miner.ip]["Nominal"] = miner.nominal
list_data = []
for ip in data.keys():
new_data = data[ip]
new_data["IP"] = ip
list_data.append(new_data)
data = sorted(data, reverse=True, key=lambda x: x["Total Chips"])
headers = [
"IP",
"Miner Model",
"Total Chip Count",
"Left Board Chips",
"Center Board Chips",
"Right Board Chips",
]
print(data)
row = 0
col = 0
for item in headers:
sheet.write(row, col, item)
col += 1
row = 1
for line in data:
col = 0
for point in line:
sheet.write(row, col, point)
col += 1
row += 1
workbook.close()
async def import_iplist(file_location):
@@ -14,10 +70,15 @@ async def import_iplist(file_location):
return
else:
ip_list = []
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
async for line in file:
ips = [x.group() for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
ips = [
x.group()
for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
line,
)
]
for ip in ips:
if ip not in ip_list:
ip_list.append(ipaddress.ip_address(ip))
@@ -33,11 +94,11 @@ async def export_iplist(file_location, ip_list_selected):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(item) + "\n")
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(str(item[0]) + "\n")
await update_ui_with_data("status", "")
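For reference, the IPv4 regex used by import_iplist above pulls the leading dotted-quad off each line; a quick illustration (the input line is made up):

import re

IPV4 = r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
line = "192.168.1.15 antminer-s9"
print([m.group() for m in re.finditer(IPV4, line)])  # ['192.168.1.15']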

View File

@@ -2,11 +2,45 @@ import asyncio
import ipaddress
import warnings
from tools.bad_board_util.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
from tools.bad_board_util.func.ui import (
update_ui_with_data,
update_prog_bar,
set_progress_bar_len,
)
from tools.bad_board_util.layout import window
from tools.bad_board_util.miner_factory import miner_factory
from miners.miner_factory import MinerFactory
from tools.bad_board_util.func.decorators import disable_buttons
@disable_buttons
async def miner_light(ips: list):
await asyncio.gather(*[flip_light(ip) for ip in ips])
async def flip_light(ip):
ip_list = window["ip_table"].Widget
miner = await MinerFactory().get_miner(ip)
index = [item[0] for item in window["ip_table"].Values].index(ip)
index_tags = ip_list.item(index + 1)["tags"]
if "light" not in index_tags and "light+bad" not in index_tags:
tag = "light"
if "bad" in index_tags:
index_tags.remove("bad")
tag = "light+bad"
index_tags.append(tag)
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_on()
else:
if "light+bad" in index_tags:
index_tags.remove("light+bad")
index_tags.append("bad")
if "light" in index_tags:
index_tags.remove("light")
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_off()
@disable_buttons
async def scan_network(network):
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("ip_count", "")
@@ -24,7 +58,7 @@ async def scan_network(network):
asyncio.create_task(update_prog_bar(progress_bar_len))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
get_miner_genenerator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
all_miners.append(found_miner)
@@ -36,11 +70,15 @@ async def scan_network(network):
await update_ui_with_data("status", "")
@disable_buttons
async def refresh_data(ip_list: list):
await update_ui_with_data("status", "Getting Data")
ips = [ipaddress.ip_address(ip) for ip in ip_list]
if len(ips) == 0:
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
ips = [
ipaddress.ip_address(ip)
for ip in [item[0] for item in window["ip_table"].Values]
]
await set_progress_bar_len(len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -65,22 +103,17 @@ async def refresh_data(ip_list: list):
board_right = ""
if data_point["data"]:
if 0 in data_point["data"].keys():
board_left = " ".join([chain["chip_status"] for chain in data_point["data"][0]]).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
board_left = " ".join(
[chain["chip_status"] for chain in data_point["data"][0]]
).replace("o", "")
if 1 in data_point["data"].keys():
board_center = " ".join([chain["chip_status"] for chain in data_point["data"][1]]).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
board_center = " ".join(
[chain["chip_status"] for chain in data_point["data"][1]]
).replace("o", "")
if 2 in data_point["data"].keys():
board_right = " ".join([chain["chip_status"] for chain in data_point["data"][2]]).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
if False in [chain["nominal"] for chain in [data_point["data"][key] for key in data_point["data"].keys()][0]]:
row_colors.append((ip_table_index, "white", "red"))
else:
row_colors.append((ip_table_index, "white", "red"))
board_right = " ".join(
[chain["chip_status"] for chain in data_point["data"][2]]
).replace("o", "")
data = [
data_point["IP"],
data_point["model"],
@@ -89,109 +122,187 @@ async def refresh_data(ip_list: list):
len(board_center),
board_center,
len(board_right),
board_right
board_right,
]
ip_table_data[ip_table_index] = data
window["ip_table"].update(ip_table_data, row_colors=row_colors)
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "")
@disable_buttons
async def scan_and_get_data(network):
# update status and reset the table
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("ip_count", "")
await update_ui_with_data("ip_table", [])
# set progress bar length to network size
network_size = len(network)
miner_generator = network.scan_network_generator()
await set_progress_bar_len(3 * network_size)
progress_bar_len = 0
miners = []
async for miner in miner_generator:
# scan the network for miners using a generator
async for miner in network.scan_network_generator():
# the generator will either return None or an IP address
if miner:
miners.append(miner)
# can output "Identifying" for each found item, but it gets a bit cluttered
# and could possibly be confusing for the end user because of timing on
# adding the IPs
# window["ip_table"].update([["Identifying..."] for miner in miners])
# add to the progress bar length after scanning an address
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
# add progress for the miners that we aren't going to identify
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
# identify different miner instances using the miner factory generator
async for found_miner in MinerFactory().get_miner_generator(miners):
# miner factory generator will always return a miner
all_miners.append(found_miner)
# sort the list of miners by IP address
all_miners.sort(key=lambda x: x.ip)
# add the new miner to the table
window["ip_table"].update([[str(miner.ip)] for miner in all_miners])
# update progress bar
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
# update the count of found miners
await update_ui_with_data("ip_count", str(len(all_miners)))
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in miners])
# update progress bar for miners we won't get data for
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
# get the list of IP addresses from the table
ip_table_data = window["ip_table"].Values
ordered_all_ips = [item[0] for item in ip_table_data]
progress_bar_len += (network_size - len(miners))
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "Getting Data")
row_colors = []
for all_data in data_gen:
# create an in place generator for getting data
for all_data in asyncio.as_completed(
[get_formatted_data(miner) for miner in miners]
):
# wait for a generator item to return
data_point = await all_data
# make sure the IP is one we have
# this will likely never fail, but a good failsafe
if data_point["IP"] in ordered_all_ips:
# get the index of the IP in the table
ip_table_index = ordered_all_ips.index(data_point["IP"])
board_left = ""
board_center = ""
board_right = ""
# make sure we have data, some miners don't allow getting board data
if data_point["data"]:
# check if the 0th board (L board) is in the data
if 0 in data_point["data"].keys():
board_left = " ".join([chain["chip_status"] for chain in data_point["data"][0]]).replace("o", "")
board_left = " ".join(
[chain["chip_status"] for chain in data_point["data"][0]]
).replace("o", "")
else:
# if the board isn't in data, highlight it red
row_colors.append((ip_table_index, "bad"))
# check if the 1st board (C board) is in the data
if 1 in data_point["data"].keys():
board_center = " ".join([chain["chip_status"] for chain in data_point["data"][1]]).replace("o", "")
board_center = " ".join(
[chain["chip_status"] for chain in data_point["data"][1]]
).replace("o", "")
else:
# if the board isn't in data, highlight it red
row_colors.append((ip_table_index, "bad"))
# check if the 2nd board (R board) is in the data
if 2 in data_point["data"].keys():
board_right = " ".join([chain["chip_status"] for chain in data_point["data"][2]]).replace("o", "")
board_right = " ".join(
[chain["chip_status"] for chain in data_point["data"][2]]
).replace("o", "")
else:
# if the board isn't in data, highlight it red
row_colors.append((ip_table_index, "bad"))
if False in [chain["nominal"] for board in [data_point["data"][key] for key in data_point["data"].keys()] for chain in board]:
# check if the miner has all nominal chips
if False in [
# True/False if the miner is nominal
chain["nominal"]
# for each board in the miner
for board in [
data_point["data"][key] for key in data_point["data"].keys()
]
# for each chain in each board in the miner
for chain in board
]:
# if the miner doesn't have all chips, highlight it red
row_colors.append((ip_table_index, "bad"))
else:
# the row is bad if we have no data
row_colors.append((ip_table_index, "bad"))
# split the chip data into thirds
board_left_chips = "\n".join(split_chips(board_left, 3))
board_center_chips = "\n".join(split_chips(board_center, 3))
board_right_chips = "\n".join(split_chips(board_right, 3))
# create data for the table
data = [
data_point["IP"],
data_point["model"],
(len(board_left) + len(board_center) + len(board_right)),
len(board_left),
board_left_chips,
len(board_center),
board_center_chips,
len(board_right),
board_right_chips
board_right_chips,
]
# put the data at the index of the IP address
ip_table_data[ip_table_index] = data
window["ip_table"].update(ip_table_data)
# configure "bad" tag to highlight red
table = window["ip_table"].Widget
table.tag_configure("bad", foreground="white", background="red")
# set tags on the row if they have been set
for row in row_colors:
table.item(row[0] + 1, tags=row[1])
# add to the progress bar
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
# reset status
await update_ui_with_data("status", "")
def split_chips(string, number_of_splits):
k, m = divmod(len(string), number_of_splits)
return (string[i*k+min(i, m):(i+1)*k+min(i+1, m)] for i in range(number_of_splits))
return (
string[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)]
for i in range(number_of_splits)
)
async def get_formatted_data(ip: ipaddress.ip_address):
miner = await miner_factory.get_miner(ip)
miner = await MinerFactory().get_miner(ip)
model = await miner.get_model()
warnings.filterwarnings('ignore')
warnings.filterwarnings("ignore")
board_data = await miner.get_board_info()
data = {"IP": str(ip), "model": str(model), "data": board_data}
return data
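A quick check of split_chips() above, which cuts a string into n nearly equal pieces with the remainder front-loaded (the input string is arbitrary):

print(list(split_chips("xxxxxxxxxx", 3)))  # ['xxxx', 'xxx', 'xxx']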

View File

@@ -0,0 +1,392 @@
import datetime
from base64 import b64decode
from io import BytesIO
from reportlab.lib.pagesizes import letter, inch
from reportlab.lib.utils import ImageReader
from reportlab.lib.styles import ParagraphStyle, TA_CENTER
from reportlab.platypus import (
SimpleDocTemplate,
KeepInFrame,
Table,
Image,
Paragraph,
TableStyle,
PageBreak,
Spacer,
)
from reportlab.lib import colors
import ipaddress
import numpy as np
import matplotlib.dates
import matplotlib.pyplot as plt
from svglib.svglib import svg2rlg
from matplotlib import cm
from matplotlib.ticker import FormatStrFormatter
from miners.miner_factory import MinerFactory
from tools.bad_board_util.func.decorators import disable_buttons
from tools.bad_board_util.img import IMAGE_SELECTION_MATRIX, LOGO
from tools.bad_board_util.layout import window
IP_STYLE = ParagraphStyle(
"IP Style",
alignment=TA_CENTER,
fontSize=7,
fontName="Helvetica-Bold",
)
TITLE_STYLE = ParagraphStyle(
"Title",
alignment=TA_CENTER,
fontSize=20,
spaceAfter=40,
fontName="Helvetica-Bold",
)
def add_first_page_number(canvas, doc):
canvas.saveState()
canvas.drawString(letter[0] - 60, 20, "Page " + str(doc.page))
canvas.restoreState()
def add_page_header(canvas, doc):
canvas.saveState()
canvas.drawCentredString(
(letter[0] / 16) * 14,
letter[1] - 57,
datetime.datetime.now().strftime("%Y-%b-%d"),
)
img_dec = b64decode(LOGO)
img = BytesIO(img_dec)
img.seek(0)
canvas.drawImage(
ImageReader(img),
30,
letter[1] - 65,
150,
35,
)
canvas.drawString(letter[0] - 60, 20, "Page " + str(doc.page))
canvas.restoreState()
@disable_buttons
async def save_report(file_location):
p1_logo, p1_title = create_first_page()
data = {}
for line in window["ip_table"].Values:
data[line[0]] = {
"Model": line[1],
"Total Chips": line[2],
"Left Chips": line[3],
"Center Chips": line[5],
"Right Chips": line[7],
"Nominal": 1,
}
async for miner in MinerFactory().get_miner_generator([key for key in data.keys()]):
if miner:
data[str(miner.ip)]["Nominal"] = miner.nominal_chips
list_data = []
for ip in data.keys():
new_data = data[ip]
new_data["IP"] = ip
list_data.append(new_data)
list_data = sorted(
list_data, reverse=False, key=lambda x: ipaddress.ip_address(x["IP"])
)
image_selection_data = {}
for miner in list_data:
miner_bad_boards = ""
if miner["Left Chips"] < miner["Nominal"]:
miner_bad_boards += "l"
if miner["Center Chips"] < miner["Nominal"]:
miner_bad_boards += "c"
if miner["Right Chips"] < miner["Nominal"]:
miner_bad_boards += "r"
image_selection_data[miner["IP"]] = miner_bad_boards
doc = SimpleDocTemplate(
file_location,
pagesize=letter,
topMargin=1 * inch,
leftMargin=1 * inch,
rightMargin=1 * inch,
bottomMargin=1 * inch,
title=f"Board Report {datetime.datetime.now().strftime('%Y/%b/%d')}",
)
pie_chart, board_table = create_boards_pie_chart(image_selection_data)
table_data = get_table_data(image_selection_data)
miner_img_table = Table(
table_data,
colWidths=0.8 * inch,
# repeatRows=1,
# rowHeights=[4 * inch],
)
miner_img_table.setStyle(
TableStyle(
[
("SPAN", (0, 0), (-1, 0)),
("LEFTPADDING", (0, 0), (-1, -1), 0),
("RIGHTPADDING", (0, 0), (-1, -1), 0),
("BOTTOMPADDING", (0, 1), (-1, -1), 0),
("TOPPADDING", (0, 1), (-1, -1), 0),
("BOTTOMPADDING", (0, 0), (-1, 0), 20),
("TOPPADDING", (0, 0), (-1, 0), 20),
]
)
)
elements = []
elements.append(p1_logo)
elements.append(p1_title)
elements.append(PageBreak())
elements.append(pie_chart)
elements.append(Spacer(0, 60))
elements.append(board_table)
elements.append(PageBreak())
elements.append(miner_img_table)
elements.append(PageBreak())
elements.append(
Paragraph(
"Board Data",
style=TITLE_STYLE,
)
)
elements.append(create_data_table(list_data))
elements.append(PageBreak())
doc.build(
elements,
onFirstPage=add_first_page_number,
onLaterPages=add_page_header,
)
def create_boards_pie_chart(data):
labels = ["All Working", "1 Bad Board", "2 Bad Boards", "3 Bad Boards"]
num_bad_boards = [0, 0, 0, 0]
for item in data.keys():
num_bad_boards[len(data[item])] += 1
cmap = plt.get_cmap("Blues")
cs = cmap(np.linspace(0.2, 0.8, num=len(num_bad_boards)))
fig1, ax = plt.subplots()
ax.pie(
num_bad_boards,
labels=labels,
autopct="%1.2f%%",
shadow=True,
startangle=180,
colors=cs,
pctdistance=0.8,
)
ax.axis("equal")
ax.set_title("Broken Boards", fontsize=24, pad=20)
imgdata = BytesIO()
fig1.savefig(imgdata, format="svg")
imgdata.seek(0) # rewind the data
drawing = svg2rlg(imgdata)
imgdata.close()
plt.close("all")
pie_chart = KeepInFrame(375, 375, [Image(drawing)], hAlign="CENTER")
table_data = [labels, num_bad_boards]
t = Table(table_data)
table_style = TableStyle(
[
# ("FONTSIZE", (0, 0), (-1, -1), 13),
# line for below titles
("LINEBELOW", (0, 0), (-1, 0), 2, colors.black),
("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
# line for above totals
("LINEABOVE", (0, -1), (-1, -1), 2, colors.black),
# line for beside unit #
("LINEAFTER", (0, 0), (0, -1), 2, colors.black),
# gridlines and outline of table
("INNERGRID", (0, 0), (-1, -1), 0.25, colors.black),
("BOX", (0, 0), (-1, -1), 2, colors.black),
("LEFTPADDING", (0, 0), (-1, -1), 3),
("RIGHTPADDING", (0, 0), (-1, -1), 3),
("BOTTOMPADDING", (0, 0), (-1, -1), 3),
("TOPPADDING", (0, 0), (-1, -1), 3),
]
)
t.setStyle(table_style)
# zebra stripes on table
for each in range(len(table_data)):
if each % 2 == 0:
bg_color = colors.whitesmoke
else:
bg_color = colors.lightgrey
t.setStyle(TableStyle([("BACKGROUND", (0, each), (-1, each), bg_color)]))
return pie_chart, t
def create_first_page():
title_style = ParagraphStyle(
"Title",
alignment=TA_CENTER,
fontSize=50,
spaceAfter=40,
spaceBefore=150,
fontName="Helvetica-Bold",
)
img_dec = b64decode(LOGO)
img = BytesIO(img_dec)
img.seek(0)
logo = KeepInFrame(450, 105, [Image(img)])
title = Paragraph("Board Report", style=title_style)
return logo, title
def create_data_table(data):
left_bad_boards = 0
right_bad_boards = 0
center_bad_boards = 0
table_data = []
for miner in data:
miner_bad_boards = 0
if miner["Left Chips"] < miner["Nominal"]:
miner_bad_boards += 1
left_bad_boards += 1
if miner["Center Chips"] < miner["Nominal"]:
miner_bad_boards += 1
center_bad_boards += 1
if miner["Right Chips"] < miner["Nominal"]:
miner_bad_boards += 1
right_bad_boards += 1
table_data.append(
[
miner["IP"],
miner["Total Chips"],
miner["Left Chips"],
miner["Center Chips"],
miner["Right Chips"],
miner_bad_boards,
]
)
table_data.append(
[
"Total",
sum([miner[1] for miner in table_data]),
sum([miner[2] for miner in table_data]),
sum([miner[3] for miner in table_data]),
sum([miner[4] for miner in table_data]),
sum([miner[5] for miner in table_data]),
]
)
table_data[:0] = (
[
"IP",
"Total Chips",
"Left Board Chips",
"Center Board Chips",
"Right Board Chips",
"Failed Boards",
],
)
# create the table
t = Table(table_data, repeatRows=1)
# generate a basic table style
table_style = TableStyle(
[
("FONTSIZE", (0, 0), (-1, -1), 8),
# line for below titles
("LINEBELOW", (0, 0), (-1, 0), 2, colors.black),
("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
# line for above totals
("LINEABOVE", (0, -1), (-1, -1), 2, colors.black),
("FONTNAME", (0, -1), (-1, -1), "Helvetica-Bold"),
# line for beside unit #
("LINEAFTER", (0, 0), (0, -1), 2, colors.black),
("FONTNAME", (0, 0), (0, -1), "Helvetica-Bold"),
# gridlines and outline of table
("INNERGRID", (0, 0), (-1, -1), 0.25, colors.black),
("BOX", (0, 0), (-1, -1), 2, colors.black),
]
)
for (
row,
values,
) in enumerate(table_data):
if not row == 0 and not row == (len(table_data) - 1):
failed_boards = values[5]
if not failed_boards == 0:
table_style.add("TEXTCOLOR", (5, row), (5, row), colors.red)
# set the styles to the table
t.setStyle(table_style)
# zebra stripes on table
for each in range(len(table_data)):
if each % 2 == 0:
bg_color = colors.whitesmoke
else:
bg_color = colors.lightgrey
t.setStyle(TableStyle([("BACKGROUND", (0, each), (-1, each), bg_color)]))
return t
def get_table_data(data):
table_elems = [[Paragraph("Hashboard Visual Representation", style=TITLE_STYLE)]]
table_row = []
table_style = TableStyle(
[
("LEFTPADDING", (0, 0), (-1, -1), 0),
("RIGHTPADDING", (0, 0), (-1, -1), 0),
("BOTTOMPADDING", (0, 0), (-1, -1), 0),
("BOX", (0, 0), (-1, -1), 2, colors.black),
]
)
table_width = 0.8 * inch
for ip in data.keys():
img_dec = b64decode(IMAGE_SELECTION_MATRIX[data[ip]])
img = BytesIO(img_dec)
img.seek(0)
image = KeepInFrame(table_width, table_width, [Image(img)])
ip_para = Paragraph(ip, style=IP_STYLE)
table_row.append(
Table([[ip_para], [image]], colWidths=table_width, style=table_style)
)
# table_row.append(image)
# table_row_txt.append(ip_para)
if len(table_row) > 7:
# table_elems.append(table_row_txt)
# table_elems.append(table_row)
table_elems.append(table_row)
# table_row_txt = []
table_row = []
if not table_row == []:
table_elems.append(table_row)
return table_elems
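A minimal, self-contained sketch of the doc.build() page-callback pattern used above (onFirstPage / onLaterPages); the file name and content are placeholders:

from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus import Paragraph, SimpleDocTemplate

def number_page(canvas, doc):
    canvas.saveState()
    canvas.drawString(letter[0] - 60, 20, f"Page {doc.page}")
    canvas.restoreState()

doc = SimpleDocTemplate("example.pdf", pagesize=letter)
doc.build(
    [Paragraph("placeholder", getSampleStyleSheet()["Normal"])],
    onFirstPage=number_page,
    onLaterPages=number_page,
)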

View File

@@ -6,6 +6,12 @@ from tools.bad_board_util.layout import window
import pyperclip
def table_select_all():
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
def copy_from_table(table):
selection = table.selection()
copy_values = []
@@ -15,10 +21,11 @@ def copy_from_table(table):
table_values = table.item(each)["values"]
ip = table_values[0]
model = table_values[1]
l_brd_chips = str(table_values[2])
c_brd_chips = str(table_values[4])
r_brd_chips = str(table_values[6])
all_values = [ip, model, l_brd_chips, c_brd_chips, r_brd_chips]
total = str(table_values[2])
l_brd_chips = str(table_values[3])
c_brd_chips = str(table_values[5])
r_brd_chips = str(table_values[7])
all_values = [ip, model, total, l_brd_chips, c_brd_chips, r_brd_chips]
value = ", ".join(all_values)
copy_values.append(str(value))
@@ -36,7 +43,7 @@ async def update_ui_with_data(key, message, append=False):
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window['progress'].maxlen)
percent_done = 100 * (amount / window["progress"].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
@@ -49,18 +56,28 @@ async def set_progress_bar_len(amount):
async def sort_data(index: int or str):
if window["scan"].Disabled:
return
await update_ui_with_data("status", "Sorting Data")
data_list = window['ip_table'].Values
data_list = window["ip_table"].Values
table = window["ip_table"].Widget
all_data = []
for idx, item in enumerate(data_list):
all_data.append({"data": item, "tags": table.item(int(idx) + 1)["tags"]})
# ip addresses
if re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index])):
new_list = sorted(all_data, key=lambda x: ipaddress.ip_address(x["data"][index]))
if re.match(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index]),
):
new_list = sorted(
all_data, key=lambda x: ipaddress.ip_address(x["data"][index])
)
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: ipaddress.ip_address(x["data"][index]))
new_list = sorted(
all_data,
reverse=True,
key=lambda x: ipaddress.ip_address(x["data"][index]),
)
# everything else, model, chips
else:

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +0,0 @@
"""
This file stores the MinerFactory instance used by the BadBoardUtility for use in other files.
"""
from miners.miner_factory import MinerFactory
miner_factory = MinerFactory()

View File

@@ -3,9 +3,14 @@ import sys
import PySimpleGUI as sg
from tools.bad_board_util.layout import window
from tools.bad_board_util.func.miners import refresh_data, scan_and_get_data
from tools.bad_board_util.func.miners import (
refresh_data,
scan_and_get_data,
miner_light,
)
from tools.bad_board_util.func.files import import_iplist, export_iplist
from tools.bad_board_util.func.ui import sort_data, copy_from_table
from tools.bad_board_util.func.pdf import save_report
from tools.bad_board_util.func.ui import sort_data, copy_from_table, table_select_all
from network import MinerNetwork
@@ -16,35 +21,66 @@ async def ui():
window.read(timeout=0)
table = window["ip_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_table(table))
table.bind("<Control-Key-a>", lambda x: table_select_all())
# light tag shows red row for fault lights
table.tag_configure("bad", foreground="white", background="orange")
table.tag_configure("light", foreground="white", background="red")
table.tag_configure("light+bad", foreground="white", background="red")
while True:
event, value = window.read(timeout=0)
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == 'ip_table':
if event[0] == "ip_table":
if event[2][0] == -1:
await sort_data(event[2][1])
if event == 'open_in_web':
if event == "open_in_web":
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == 'scan':
if len(value['miner_network'].split("/")) > 1:
network = value['miner_network'].split("/")
if event == "scan":
if len(value["miner_network"].split("/")) > 1:
network = value["miner_network"].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value['miner_network'])
miner_network = MinerNetwork(value["miner_network"])
asyncio.create_task(scan_and_get_data(miner_network))
if event == 'select_all_ips':
if event == "save_report":
if not value["save_report"] == "":
asyncio.create_task(save_report(value["save_report"]))
window["save_report"].update("")
if event == "select_all_ips":
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(select_rows=([row for row in range(len(window["ip_table"].Values))]))
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
if event == "light":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
miner_light(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(export_iplist(value["file_iplist"], [window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
export_iplist(
value["file_iplist"],
[window["ip_table"].Values[item][0] for item in value["ip_table"]],
)
)
if event == "refresh_data":
asyncio.create_task(refresh_data([window["ip_table"].Values[item][0] for item in value["ip_table"]]))
asyncio.create_task(
refresh_data(
[window["ip_table"].Values[item][0] for item in value["ip_table"]]
)
)
if event == "__TIMEOUT__":
await asyncio.sleep(0)

View File

@@ -1 +1,20 @@
from tools.cfg_util.cfg_util_sg import main
import asyncio
import sys
from .ui import ui
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def main():
asyncio.run(ui())
if __name__ == "__main__":
main()

View File

@@ -1,13 +0,0 @@
from tools.cfg_util.cfg_util_sg.ui import ui
import asyncio
import sys
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def main():
loop = asyncio.new_event_loop()
loop.run_until_complete(ui())

View File

@@ -1,72 +0,0 @@
import ipaddress
import re
from tools.cfg_util.cfg_util_sg.layout import window
import pyperclip
def copy_from_table(table):
selection = table.selection()
copy_values = []
for each in selection:
try:
value = table.item(each)["values"][0]
copy_values.append(str(value))
except:
pass
copy_string = "\n".join(copy_values)
pyperclip.copy(copy_string)
async def update_ui_with_data(key, message, append=False):
if append:
message = window[key].get_text() + message
window[key].update(message)
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window['progress'].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
async def set_progress_bar_len(amount):
window["progress"].Update(0, max=amount)
window["progress"].maxlen = amount
window["progress_percent"].Update("0.0 %")
async def sort_data(index: int or str):
await update_ui_with_data("status", "Sorting Data")
data_list = window['ip_table'].Values
# wattage
if re.match("[0-9]* W", str(data_list[0][index])):
new_list = sorted(data_list, key=lambda x: int(x[index].replace(" W", "")))
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: int(x[index].replace(" W", "")))
# hashrate
elif re.match("[0-9]*\.?[0-9]* TH\/s", str(data_list[0][index])):
new_list = sorted(data_list, key=lambda x: float(x[index].replace(" TH/s", "")))
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: float(x[index].replace(" TH/s", "")))
# ip addresses
elif re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(data_list[0][index])):
new_list = sorted(data_list, key=lambda x: ipaddress.ip_address(x[index]))
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: ipaddress.ip_address(x[index]))
# everything else, hostname, temp, and user
else:
new_list = sorted(data_list, key=lambda x: x[index])
if data_list == new_list:
new_list = sorted(data_list, reverse=True, key=lambda x: x[index])
await update_ui_with_data("ip_table", new_list)
await update_ui_with_data("status", "")

View File

@@ -1,7 +0,0 @@
"""
This file stores the MinerFactory instance used by the ConfigUtility for use in other files.
"""
from miners.miner_factory import MinerFactory
miner_factory = MinerFactory()

View File

@@ -1,86 +0,0 @@
import asyncio
import sys
import PySimpleGUI as sg
from tools.cfg_util.cfg_util_sg.layout import window, generate_config_layout
from tools.cfg_util.cfg_util_sg.func.miners import send_config, miner_light, refresh_data, generate_config, import_config, \
scan_and_get_data, restart_miners_backend, reboot_miners
from tools.cfg_util.cfg_util_sg.func.files import import_iplist, \
import_config_file, export_iplist, export_config_file, export_csv
from tools.cfg_util.cfg_util_sg.func.ui import sort_data, copy_from_table
from network import MinerNetwork
import webbrowser
async def ui():
window.read(timeout=0)
table = window["ip_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_table(table))
while True:
event, value = window.read(timeout=10)
if event in (None, 'Close', sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == 'ip_table':
if event[2][0] == -1:
await sort_data(event[2][1])
if event == 'open_in_web':
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == 'scan':
if len(value['miner_network'].split("/")) > 1:
network = value['miner_network'].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value['miner_network'])
asyncio.create_task(scan_and_get_data(miner_network))
if event == 'select_all_ips':
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(select_rows=([row for row in range(len(window["ip_table"].Values))]))
if event == 'import_config':
if 2 > len(value['ip_table']) > 0:
asyncio.create_task(import_config(value['ip_table']))
if event == "restart_miner_backend":
asyncio.create_task(restart_miners_backend([window['ip_table'].Values[item][0] for item in value['ip_table']]))
if event == "reboot_miners":
asyncio.create_task(reboot_miners([window['ip_table'].Values[item][0] for item in value['ip_table']]))
if event == 'light':
asyncio.create_task(miner_light([window['ip_table'].Values[item][0] for item in value['ip_table']]))
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(export_iplist(value["file_iplist"], [window['ip_table'].Values[item][0] for item in value['ip_table']]))
if event == "export_csv":
asyncio.create_task(export_csv(value["file_iplist"], [window['ip_table'].Values[item] for item in value['ip_table']]))
if event == "send_config":
asyncio.create_task(send_config([window['ip_table'].Values[item][0] for item in value['ip_table']], value['config']))
if event == "import_file_config":
asyncio.create_task(import_config_file(value['file_config']))
if event == "export_file_config":
asyncio.create_task(export_config_file(value['file_config'], value["config"]))
if event == "refresh_data":
asyncio.create_task(refresh_data([window["ip_table"].Values[item][0] for item in value["ip_table"]]))
if event == "generate_config":
await generate_config_ui()
if event == "__TIMEOUT__":
await asyncio.sleep(0)
async def generate_config_ui():
generate_config_window = sg.Window("Generate Config", generate_config_layout(), modal=True)
while True:
event, values = generate_config_window.read()
if event in (None, 'Close', sg.WIN_CLOSED):
break
if event == "generate_config_window_generate":
if values['generate_config_window_username']:
await generate_config(values['generate_config_window_username'],
values['generate_config_window_workername'],
values['generate_config_window_allow_v2'])
generate_config_window.close()
break

View File

@@ -0,0 +1,78 @@
from miners.miner_factory import MinerFactory
from tools.cfg_util.layout import window
from tools.cfg_util.tables import TableManager
from tools.cfg_util.decorators import disable_buttons
@disable_buttons("Flashing Lights")
async def btn_light(ip_idxs: list):
table_manager = TableManager()
_table = window["cmd_table"].Widget
iids = _table.get_children()
for idx in ip_idxs:
item = _table.item(iids[idx])
ip = item["values"][0]
new_light_val = not table_manager.data[ip]["Light"]
miner = await MinerFactory().get_miner(ip)
if new_light_val:
success = await miner.fault_light_on()
else:
success = await miner.fault_light_off()
if success:
table_manager.data[ip]["Light"] = new_light_val
table_manager.data[ip]["Command Output"] = "Fault Light command succeeded."
else:
table_manager.data[ip]["Command Output"] = "Fault Light command failed."
table_manager.update_tables()
@disable_buttons("Rebooting")
async def btn_reboot(ip_idxs: list):
table_manager = TableManager()
_table = window["cmd_table"].Widget
iids = _table.get_children()
for idx in ip_idxs:
item = _table.item(iids[idx])
ip = item["values"][0]
miner = await MinerFactory().get_miner(ip)
success = await miner.reboot()
if success:
table_manager.data[ip]["Command Output"] = "Reboot command succeeded."
else:
table_manager.data[ip]["Command Output"] = "Reboot command failed."
table_manager.update_tables()
@disable_buttons("Restarting Backend")
async def btn_backend(ip_idxs: list):
table_manager = TableManager()
_table = window["cmd_table"].Widget
iids = _table.get_children()
for idx in ip_idxs:
item = _table.item(iids[idx])
ip = item["values"][0]
miner = await MinerFactory().get_miner(ip)
success = await miner.restart_backend()
if success:
table_manager.data[ip][
"Command Output"
] = "Restart Backend command succeeded."
else:
table_manager.data[ip]["Command Output"] = "Restart Backend command failed."
table_manager.update_tables()
@disable_buttons("Sending Command")
async def btn_command(ip_idxs: list, command: str):
table_manager = TableManager()
_table = window["cmd_table"].Widget
iids = _table.get_children()
for idx in ip_idxs:
item = _table.item(iids[idx])
ip = item["values"][0]
miner = await MinerFactory().get_miner(ip)
success = await miner.send_ssh_command(command)
if not isinstance(success, str):
success = f"Command {command} failed."
table_manager.data[ip]["Command Output"] = success
table_manager.update_tables()

View File

@@ -0,0 +1,160 @@
import PySimpleGUI as sg
from config.bos import bos_config_convert
import time
from tools.cfg_util.layout import window, update_prog_bar
from tools.cfg_util.decorators import disable_buttons
from miners.miner_factory import MinerFactory
import asyncio
from settings import CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS
from tools.cfg_util.general import update_miners_data
progress_bar_len = 0
@disable_buttons("Importing")
async def btn_import(table, selected):
if not len(selected) > 0:
return
ip = [window[table].Values[row][0] for row in selected][0]
miner = await MinerFactory().get_miner(ip)
await miner.get_config()
config = miner.config
window["cfg_config_txt"].update(config)
@disable_buttons("Configuring")
async def btn_config(table, selected, config: str, last_oct_ip: bool):
ips = [window[table].Values[row][0] for row in selected]
await send_config(ips, config, last_oct_ip)
async def send_config(ips: list, config: str, last_octet_ip: bool):
global progress_bar_len
progress_bar_len = 0
await update_prog_bar(progress_bar_len, _max=(2 * len(ips)))
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
progress_bar_len += 1
await update_prog_bar(progress_bar_len)
config_sender_generator = send_config_generator(
all_miners, config, last_octet_ip_user=last_octet_ip
)
async for _config_sender in config_sender_generator:
progress_bar_len += 1
await update_prog_bar(progress_bar_len)
await asyncio.sleep(3)
await update_miners_data(ips)
async def send_config_generator(miners: list, config, last_octet_ip_user: bool):
loop = asyncio.get_event_loop()
config_tasks = []
for miner in miners:
if len(config_tasks) >= CONFIG_THREADS:
configured = asyncio.as_completed(config_tasks)
config_tasks = []
for sent_config in configured:
yield await sent_config
config_tasks.append(
loop.create_task(miner.send_config(config, ip_user=last_octet_ip_user))
)
configured = asyncio.as_completed(config_tasks)
for sent_config in configured:
yield await sent_config
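send_config_generator above is a throttle: it queues miner.send_config() tasks until CONFIG_THREADS are in flight, drains that batch with asyncio.as_completed, then keeps going, so results stream back while concurrency stays bounded. A minimal, self-contained sketch of the same batching pattern, with slow_echo, the limit of 3, and the range(10) input all invented for illustration (none of them are part of this tool):

import asyncio

async def slow_echo(n):
    # stand-in for a per-miner request
    await asyncio.sleep(0.1)
    return n

async def batched(items, limit=3):
    tasks = []
    for item in items:
        if len(tasks) >= limit:
            # drain the current batch before queueing more work
            for done in asyncio.as_completed(tasks):
                yield await done
            tasks = []
        tasks.append(asyncio.create_task(slow_echo(item)))
    for done in asyncio.as_completed(tasks):
        yield await done

async def main():
    async for result in batched(range(10)):
        print(result)

asyncio.run(main())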
def generate_config(username: str, workername: str, v2_allowed: bool):
if username and workername:
user = f"{username}.{workername}"
elif username and not workername:
user = username
else:
return
if v2_allowed:
url_1 = "stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
url_2 = "stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
url_3 = "stratum+tcp://stratum.slushpool.com:3333"
else:
url_1 = "stratum+tcp://ca.stratum.slushpool.com:3333"
url_2 = "stratum+tcp://us-east.stratum.slushpool.com:3333"
url_3 = "stratum+tcp://stratum.slushpool.com:3333"
config = {
"group": [
{
"name": "group",
"quota": 1,
"pool": [
{"url": url_1, "user": user, "password": "123"},
{"url": url_2, "user": user, "password": "123"},
{"url": url_3, "user": user, "password": "123"},
],
}
],
"format": {
"version": "1.2+",
"model": "Antminer S9",
"generator": "upstream_config_util",
"timestamp": int(time.time()),
},
"temp_control": {
"target_temp": 80.0,
"hot_temp": 90.0,
"dangerous_temp": 120.0,
},
"autotuning": {"enabled": True, "psu_power_limit": 900},
}
window["cfg_config_txt"].update(bos_config_convert(config))
async def generate_config_ui():
generate_config_window = sg.Window(
"Generate Config", generate_config_layout(), modal=True
)
while True:
event, values = generate_config_window.read()
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "generate_config_window_generate":
if values["generate_config_window_username"]:
generate_config(
values["generate_config_window_username"],
values["generate_config_window_workername"],
values["generate_config_window_allow_v2"],
)
generate_config_window.close()
break
def generate_config_layout():
config_layout = [
[
sg.Text(
"Enter your pool username and password below to generate a config for SlushPool."
)
],
[sg.Text("")],
[
sg.Text("Username:", size=(19, 1)),
sg.InputText(
key="generate_config_window_username", do_not_clear=True, size=(45, 1)
),
],
[
sg.Text("Worker Name (OPT):", size=(19, 1)),
sg.InputText(
key="generate_config_window_workername", do_not_clear=True, size=(45, 1)
),
],
[
sg.Text("Allow Stratum V2?:", size=(19, 1)),
sg.Checkbox("", key="generate_config_window_allow_v2", default=True),
],
[sg.Button("Generate", key="generate_config_window_generate")],
]
return config_layout

View File

@@ -0,0 +1,24 @@
from tools.cfg_util.layout import window
from tools.cfg_util.layout import BUTTON_KEYS
def disable_buttons(status: str = ""):
def decorator(func):
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):
# disable the buttons
for button in BUTTON_KEYS:
window[button].Update(disabled=True)
window["status"].update(status)
# call the original wrapped function
await func(*args, **kwargs)
# re-enable the buttons after the wrapped function completes
for button in BUTTON_KEYS:
window[button].Update(disabled=False)
window["status"].update("")
return inner
return decorator
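A minimal usage sketch (the do_nothing coroutine and its three-second sleep are invented for illustration, not part of the tool): while the wrapped coroutine runs, every key in BUTTON_KEYS is disabled and the status element shows the text passed to the decorator; once it finishes, the buttons come back and the status is cleared.

import asyncio

@disable_buttons("Working")
async def do_nothing():
    # buttons stay greyed out and the status reads "Working" for these three seconds
    await asyncio.sleep(3)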

View File

@@ -0,0 +1,85 @@
import asyncio
import webbrowser
from miners.miner_factory import MinerFactory
from tools.cfg_util.decorators import disable_buttons
from tools.cfg_util.layout import TABLE_KEYS
from tools.cfg_util.layout import window, update_prog_bar
from tools.cfg_util.tables import TableManager
progress_bar_len = 0
DEFAULT_DATA = [
"Model",
"Hostname",
"Hashrate",
"Temperature",
"Pool User",
"Pool 1",
"Pool 1 User",
"Pool 2",
"Pool 2 User",
"Wattage",
"Split",
]
def btn_all(table, selected):
if table in TABLE_KEYS["table"]:
if len(selected) == len(window[table].Values):
window[table].update(select_rows=())
else:
window[table].update(
select_rows=([row for row in range(len(window[table].Values))])
)
if table in TABLE_KEYS["tree"]:
if len(selected) == len(window[table].Widget.get_children()):
_tree = window[table]
_tree.Widget.selection_set([])
else:
_tree = window[table]
rows_to_select = [i for i in _tree.Widget.get_children()]
_tree.Widget.selection_set(rows_to_select)
def btn_web(table, selected):
for row in selected:
webbrowser.open("http://" + window[table].Values[row][0])
@disable_buttons("Refreshing")
async def btn_refresh(table, selected):
ips = [window[table].Values[row][0] for row in selected]
if not len(selected) > 0:
ips = [window[table].Values[row][0] for row in range(len(window[table].Values))]
await update_miners_data(ips)
async def update_miners_data(miners: list):
data = []
for miner in miners:
_data = {}
for key in DEFAULT_DATA:
_data[key] = ""
_data["IP"] = str(miner)
data.append(_data)
TableManager().update_data(data)
global progress_bar_len
progress_bar_len = 0
await update_prog_bar(progress_bar_len, _max=len(miners))
data_generator = asyncio.as_completed(
[_get_data(await MinerFactory().get_miner(miner)) for miner in miners]
)
for all_data in data_generator:
data = await all_data
TableManager().update_item(data)
progress_bar_len += 1
await update_prog_bar(progress_bar_len)
async def _get_data(miner):
return await miner.get_data()

27
tools/cfg_util/imgs.py Normal file

File diff suppressed because one or more lines are too long

661
tools/cfg_util/layout.py Normal file
View File

@@ -0,0 +1,661 @@
import PySimpleGUI as sg
from .imgs import WINDOW_ICON
WINDOW_BG = "#0F4C75"
PROGRESS_BG = "#FFFFFF"
PROGRESS_FULL = "#00A8CC"
MAIN_TABS_BG = "#0F4C75"
MAIN_TABS_SELECTED = MAIN_TABS_BG
MAIN_TABS_NORMAL = "#BBE1FA"
MAIN_TABS_TEXT_SELECTED = "#FFFFFF"
MAIN_TABS_TEXT_NORMAL = "#000000"
TAB_PAD = 0
TEXT_COLOR = "#FFFFFF"
BTN_TEXT_COLOR = "#000000"
BTN_COLOR = "#3282B8"
BTN_DISABLED_COLOR = "#BBE1FA"
BTN_DISABLED_TEXT_COLOR = "#1B262C"
BTN_DISABLED = BTN_DISABLED_TEXT_COLOR, BTN_DISABLED_COLOR
BTN_BORDER = 1
INFO_BTN_TEXT_COLOR = "#000000"
INFO_BTN_BG = "#FFFFFF"
INPUT_BG = "#BBE1FA"
INPUT_TEXT = "#000000"
POOLS_TABS_BG = "#3282B8"
POOLS_TABS_SELECTED = POOLS_TABS_BG
POOLS_TABS_NORMAL = "#BBE1FA"
POOLS_TABS_TEXT_SELECTED = "#FFFFFF"
POOLS_TABS_TEXT_NORMAL = "#000000"
POOLS_TABLE_PAD = 0
TABLE_BG = "#BBE1FA"
TABLE_TEXT = "#000000"
TABLE_HEADERS_COLOR = "#3282B8"
TABLE_HEADERS_TEXT_COLOR = "#000000"
TABLE_HEADERS_HOVER = "#27496D"
TABLE_BORDER = 1
TABLE_HEADER_BORDER = 3
TABLE_PAD = 0
SCROLLBAR_TROUGH_COLOR = "#BBE1FA"
SCROLLBAR_BACKGROUND_COLOR = "#3282B8"
SCROLLBAR_ARROW_COLOR = "#0F4C75"
SCROLLBAR_WIDTH = 16
SCROLLBAR_ARROW_WIDTH = 16
SCROLLBAR_RELIEF = sg.RELIEF_RIDGE
POOLS_TABLE_BG = TABLE_BG
POOLS_TABLE_TEXT = TABLE_TEXT
POOLS_TABLE_HEADERS_COLOR = TABLE_HEADERS_COLOR
POOLS_TABLE_HEADERS_TEXT_COLOR = TABLE_HEADERS_TEXT_COLOR
POOLS_TABLE_HEADERS_HOVER = TABLE_HEADERS_HOVER
POOLS_TABLE_BORDER = 1
POOLS_TABLE_HEADER_BORDER = 3
sg.set_options(font=("Noto Mono", 10))
# Add your new theme colors and settings
sg.LOOK_AND_FEEL_TABLE["cfg_util_theme"] = {
"BACKGROUND": WINDOW_BG,
"TEXT": TEXT_COLOR,
"INPUT": INPUT_BG,
"TEXT_INPUT": INPUT_TEXT,
"SCROLL": "#142850",
"BUTTON": (BTN_TEXT_COLOR, BTN_COLOR), # Text Color, Background
"PROGRESS": (PROGRESS_FULL, PROGRESS_BG), # Filled, Empty
"BORDER": 1,
"SLIDER_DEPTH": 0,
"PROGRESS_DEPTH": 0,
}
# Switch to use your newly created theme
sg.theme("cfg_util_theme")
TABLE_HEADERS = {
"SCAN": [
"IP",
"Model",
"Hostname",
"Hashrate",
"Temperature",
"Pool User",
"Wattage",
],
"CMD": ["IP", "Model", "Output"],
"POOLS_ALL": [
"IP",
"Split",
"Pool 1 User",
"Pool 2 User",
],
"POOLS_1": [
"IP",
"Split",
"Pool 1",
"Pool 1 User",
],
"POOLS_2": [
"IP",
"Split",
"Pool 2",
"Pool 2 User",
],
"CONFIG": ["IP", "Model", "Pool 1 User"],
}
TABLE_KEYS = {
"table": ["scan_table", "pools_table", "cfg_table"],
"tree": ["cmd_table"],
}
MINER_COUNT_BUTTONS = [
"miner_count",
]
HASHRATE_TOTAL_BUTTONS = [
"total_hashrate",
]
BUTTON_KEYS = [
"btn_scan",
"btn_cmd",
"scan_all",
"scan_refresh",
"scan_web",
"cmd_all",
"cmd_light",
"cmd_reboot",
"cmd_backend",
"pools_all",
"pools_refresh",
"pools_web",
"cfg_import",
"cfg_config",
"cfg_generate",
"cfg_all",
"cfg_web",
]
TABLE_HEIGHT = 27
IMAGE_COL_WIDTH = 8
IP_COL_WIDTH = 17
MODEL_COL_WIDTH = 15
HOST_COL_WIDTH = 15
HASHRATE_COL_WIDTH = 12
TEMP_COL_WIDTH = 14
USER_COL_WIDTH = 27
WATTAGE_COL_WIDTH = 10
SPLIT_COL_WIDTH = 8
SCAN_COL_WIDTHS = [
IP_COL_WIDTH,
MODEL_COL_WIDTH,
HOST_COL_WIDTH,
HASHRATE_COL_WIDTH,
TEMP_COL_WIDTH,
USER_COL_WIDTH,
WATTAGE_COL_WIDTH,
]
TABLE_TOTAL_WIDTH = sum(SCAN_COL_WIDTHS)
async def update_prog_bar(count: int, _max: int = None):
bar = window["progress_bar"]
bar.update_bar(count, max=_max)
if _max:
bar.maxlen = _max
if not hasattr(bar, "maxlen"):
if not _max:
_max = 100
bar.maxlen = _max
percent_done = 100 * (count / bar.maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
def get_scan_layout():
scan_layout = [
[
sg.Text("Scan IP", background_color=MAIN_TABS_BG, pad=((0, 5), (1, 1))),
sg.InputText(key="scan_ip", size=(31, 1)),
sg.Button(
"Scan",
key="btn_scan",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
mouseover_colors=BTN_DISABLED,
),
],
[
sg.Button(
"ALL",
key="scan_all",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 5), (1, 1)),
),
sg.Button(
"REFRESH DATA",
key="scan_refresh",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
sg.Button(
"OPEN IN WEB",
key="scan_web",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
],
[
sg.Table(
values=[],
headings=[heading for heading in TABLE_HEADERS["SCAN"]],
auto_size_columns=False,
max_col_width=15,
justification="center",
key="scan_table",
col_widths=SCAN_COL_WIDTHS,
background_color=TABLE_BG,
text_color=TABLE_TEXT,
header_background_color=TABLE_HEADERS_COLOR,
header_text_color=TABLE_HEADERS_TEXT_COLOR,
border_width=TABLE_BORDER,
header_border_width=TABLE_HEADER_BORDER,
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
size=(TABLE_TOTAL_WIDTH, TABLE_HEIGHT),
expand_x=True,
enable_click_events=True,
pad=TABLE_PAD,
)
],
]
return scan_layout
def get_command_layout():
data = sg.TreeData()
col_widths = [
IP_COL_WIDTH,
MODEL_COL_WIDTH,
TABLE_TOTAL_WIDTH - (IP_COL_WIDTH + MODEL_COL_WIDTH + IMAGE_COL_WIDTH + 4),
]
command_layout = [
[
sg.Text(
"Custom Command",
background_color=MAIN_TABS_BG,
pad=((0, 1), (1, 1)),
),
sg.InputText(key="cmd_txt", expand_x=True),
sg.Button(
"Send Command",
key="btn_cmd",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
],
[
sg.Button(
"ALL",
key="cmd_all",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 1), (1, 1)),
),
sg.Button(
"LIGHT",
key="cmd_light",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
sg.Button(
"REBOOT",
key="cmd_reboot",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
sg.Button(
"RESTART BACKEND",
key="cmd_backend",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
],
[
sg.Tree(
data,
headings=[heading for heading in TABLE_HEADERS["CMD"]],
auto_size_columns=False,
max_col_width=15,
justification="center",
key="cmd_table",
col_widths=col_widths,
background_color=TABLE_BG,
text_color=TABLE_TEXT,
header_background_color=TABLE_HEADERS_COLOR,
header_text_color=TABLE_HEADERS_TEXT_COLOR,
border_width=TABLE_BORDER,
header_border_width=TABLE_HEADER_BORDER,
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
expand_x=True,
expand_y=True,
col0_heading="Light",
col0_width=IMAGE_COL_WIDTH,
enable_events=True,
pad=TABLE_PAD,
)
],
]
return command_layout
def get_pools_layout():
pool_col_width = int((TABLE_TOTAL_WIDTH - (IP_COL_WIDTH + SPLIT_COL_WIDTH)) / 2)
col_widths = [
IP_COL_WIDTH,
SPLIT_COL_WIDTH,
pool_col_width,
pool_col_width,
]
pools_layout = [
[
sg.Button(
"ALL",
key="pools_all",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 5), (6, 7)),
),
sg.Button(
"REFRESH DATA",
key="pools_refresh",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
sg.Button(
"OPEN IN WEB",
key="pools_web",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
],
[
sg.TabGroup(
[
[
sg.Tab(
"All",
[
[
sg.Table(
values=[],
headings=[
heading
for heading in TABLE_HEADERS["POOLS_ALL"]
],
auto_size_columns=False,
max_col_width=15,
justification="center",
key="pools_table",
background_color=POOLS_TABLE_BG,
text_color=POOLS_TABLE_TEXT,
header_background_color=POOLS_TABLE_HEADERS_COLOR,
header_text_color=POOLS_TABLE_HEADERS_TEXT_COLOR,
border_width=POOLS_TABLE_BORDER,
header_border_width=POOLS_TABLE_HEADER_BORDER,
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
col_widths=col_widths,
size=(0, TABLE_HEIGHT),
expand_x=True,
enable_click_events=True,
pad=POOLS_TABLE_PAD,
)
]
],
background_color=POOLS_TABS_BG,
pad=TAB_PAD,
)
],
[
sg.Tab(
"Pool 1",
[
[
sg.Table(
values=[],
headings=[
heading
for heading in TABLE_HEADERS["POOLS_1"]
],
auto_size_columns=False,
max_col_width=15,
justification="center",
key="pools_1_table",
background_color=POOLS_TABLE_BG,
text_color=POOLS_TABLE_TEXT,
header_background_color=POOLS_TABLE_HEADERS_COLOR,
header_text_color=POOLS_TABLE_HEADERS_TEXT_COLOR,
border_width=POOLS_TABLE_BORDER,
header_border_width=POOLS_TABLE_HEADER_BORDER,
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
col_widths=col_widths,
size=(0, TABLE_HEIGHT),
expand_x=True,
enable_click_events=True,
pad=POOLS_TABLE_PAD,
)
]
],
background_color=POOLS_TABS_BG,
)
],
[
sg.Tab(
"Pool 2",
[
[
sg.Table(
values=[],
headings=[
heading
for heading in TABLE_HEADERS["POOLS_2"]
],
auto_size_columns=False,
max_col_width=15,
justification="center",
key="pools_2_table",
background_color=POOLS_TABLE_BG,
text_color=POOLS_TABLE_TEXT,
header_background_color=POOLS_TABLE_HEADERS_COLOR,
header_text_color=POOLS_TABLE_HEADERS_TEXT_COLOR,
border_width=POOLS_TABLE_BORDER,
header_border_width=POOLS_TABLE_HEADER_BORDER,
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
col_widths=col_widths,
size=(0, TABLE_HEIGHT),
expand_x=True,
enable_click_events=True,
pad=POOLS_TABLE_PAD,
)
]
],
background_color=POOLS_TABS_BG,
)
],
],
background_color=MAIN_TABS_BG,
title_color=POOLS_TABS_TEXT_NORMAL,
tab_background_color=POOLS_TABS_NORMAL,
selected_background_color=POOLS_TABS_SELECTED,
selected_title_color=POOLS_TABS_TEXT_SELECTED,
border_width=0,
tab_border_width=2,
pad=TAB_PAD,
)
],
]
return pools_layout
def get_config_layout():
config_layout = [
[
sg.Button(
"IMPORT",
key="cfg_import",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 5), (5, 0)),
),
sg.Button(
"CONFIG",
key="cfg_config",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 5), (5, 0)),
),
sg.Button(
"GENERATE",
key="cfg_generate",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 5), (5, 0)),
),
],
[
sg.Button(
"ALL",
key="cfg_all",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
pad=((0, 5), (1, 1)),
),
sg.Button(
"OPEN IN WEB",
key="cfg_web",
border_width=BTN_BORDER,
disabled_button_color=BTN_DISABLED,
),
sg.Push(background_color=MAIN_TABS_BG),
sg.Checkbox(
"Append IP to Username",
key="cfg_append_ip",
background_color=MAIN_TABS_BG,
),
],
[
sg.Table(
values=[],
headings=[heading for heading in TABLE_HEADERS["CONFIG"]],
auto_size_columns=False,
max_col_width=15,
justification="center",
key="cfg_table",
background_color=TABLE_BG,
text_color=TABLE_TEXT,
header_background_color=TABLE_HEADERS_COLOR,
header_text_color=TABLE_HEADERS_TEXT_COLOR,
header_border_width=TABLE_HEADER_BORDER,
border_width=TABLE_BORDER,
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
col_widths=[
IP_COL_WIDTH,
MODEL_COL_WIDTH,
TABLE_TOTAL_WIDTH - ((2 * 40) - 4),
],
size=(0, TABLE_HEIGHT),
expand_x=True,
enable_click_events=True,
pad=TABLE_PAD,
),
sg.Multiline(
size=(40, TABLE_HEIGHT + 1),
key="cfg_config_txt",
sbar_trough_color=SCROLLBAR_TROUGH_COLOR,
sbar_background_color=SCROLLBAR_BACKGROUND_COLOR,
sbar_arrow_color=SCROLLBAR_ARROW_COLOR,
sbar_width=SCROLLBAR_WIDTH,
sbar_arrow_width=SCROLLBAR_ARROW_WIDTH,
sbar_relief=SCROLLBAR_RELIEF,
),
],
]
return config_layout
layout = [
[
sg.Text("", size=(20, 1), key="status"),
sg.ProgressBar(
max_value=100, size_px=(0, 20), expand_x=True, key="progress_bar"
),
sg.Text("", size=(20, 1), key="progress_percent", justification="r"),
],
[
sg.Push(),
sg.Button(
"Hashrate: 0 TH/s",
disabled=True,
button_color=("black", "white smoke"),
disabled_button_color=("black", "white smoke"),
key="total_hashrate",
),
sg.Button(
"Miners: 0",
disabled=True,
button_color=("black", "white smoke"),
disabled_button_color=("black", "white smoke"),
key="miner_count",
),
sg.Push(),
],
[
sg.TabGroup(
[
[
sg.Tab(
"Scan",
get_scan_layout(),
background_color=MAIN_TABS_BG,
pad=TAB_PAD,
)
],
[
sg.Tab(
"Pools",
get_pools_layout(),
background_color=MAIN_TABS_BG,
pad=TAB_PAD,
)
],
[
sg.Tab(
"Configure",
get_config_layout(),
background_color=MAIN_TABS_BG,
pad=TAB_PAD,
)
],
[
sg.Tab(
"Command",
get_command_layout(),
background_color=MAIN_TABS_BG,
pad=TAB_PAD,
)
],
],
tab_background_color=MAIN_TABS_NORMAL,
title_color=MAIN_TABS_TEXT_NORMAL,
selected_background_color=MAIN_TABS_BG,
selected_title_color=MAIN_TABS_TEXT_SELECTED,
border_width=0,
tab_border_width=2,
),
],
]
window = sg.Window("Upstream Config Util", layout, icon=WINDOW_ICON)

View File

@@ -0,0 +1,95 @@
import asyncio
from miners.miner_factory import MinerFactory
from network import MinerNetwork
from tools.cfg_util.decorators import disable_buttons
from tools.cfg_util.layout import window, update_prog_bar
from tools.cfg_util.tables import clear_tables, TableManager
progress_bar_len = 0
DEFAULT_DATA = [
"Model",
"Hostname",
"Hashrate",
"Temperature",
"Pool User",
"Pool 1",
"Pool 1 User",
"Pool 2",
"Pool 2 User",
"Wattage",
"Split",
]
async def btn_all():
table = "scan_table"
window[table].update(
select_rows=([row for row in range(len(window[table].Values))])
)
async def btn_scan(scan_ip: str):
network = MinerNetwork("192.168.1.0")
if scan_ip:
if "/" in scan_ip:
ip, mask = scan_ip.split("/")
network = MinerNetwork(ip, mask=mask)
else:
network = MinerNetwork(scan_ip)
asyncio.create_task(_scan_miners(network))
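# Worked example (addresses invented): entering "10.0.12.0/23" in the scan box builds
# MinerNetwork("10.0.12.0", mask="23"), a bare "10.0.12.5" builds MinerNetwork("10.0.12.5"),
# and an empty box falls back to the 192.168.1.0 default; the subnet semantics themselves
# live in MinerNetwork, which is not shown here.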
@disable_buttons("Scanning")
async def _scan_miners(network: MinerNetwork):
clear_tables()
scan_generator = network.scan_network_generator()
MinerFactory().clear_cached_miners()
global progress_bar_len
progress_bar_len = 0
network_size = len(network)
await update_prog_bar(progress_bar_len, _max=(3 * network_size))
scanned_miners = []
async for miner in scan_generator:
if miner:
scanned_miners.append(miner)
progress_bar_len += 1
await update_prog_bar(progress_bar_len)
progress_bar_len += network_size - len(scanned_miners)
await update_prog_bar(progress_bar_len)
get_miner_genenerator = MinerFactory().get_miner_generator(scanned_miners)
resolved_miners = []
async for found_miner in get_miner_genenerator:
resolved_miners.append(found_miner)
resolved_miners.sort(key=lambda x: x.ip)
_data = {}
for key in DEFAULT_DATA:
_data[key] = ""
_data["IP"] = str(found_miner.ip)
TableManager().update_item(_data)
progress_bar_len += 1
await update_prog_bar(progress_bar_len)
progress_bar_len += network_size - len(resolved_miners)
await update_prog_bar(progress_bar_len)
await _get_miners_data(resolved_miners)
async def _get_miners_data(miners: list):
global progress_bar_len
data_generator = asyncio.as_completed([_get_data(miner) for miner in miners])
for all_data in data_generator:
data = await all_data
TableManager().update_item(data)
progress_bar_len += 1
await update_prog_bar(progress_bar_len)
async def _get_data(miner):
return await miner.get_data()

211
tools/cfg_util/tables.py Normal file
View File

@@ -0,0 +1,211 @@
from tools.cfg_util.layout import (
MINER_COUNT_BUTTONS,
HASHRATE_TOTAL_BUTTONS,
TABLE_KEYS,
TABLE_HEADERS,
window,
)
from tools.cfg_util.imgs import TkImages, LIGHT, FAULT_LIGHT
import PySimpleGUI as sg
import ipaddress
def update_miner_count(count):
for button in MINER_COUNT_BUTTONS:
window[button].update(f"Miners: {count}")
def update_total_hr(hashrate: float):
if hashrate > 999:
hashrate = f"{round(hashrate/1000, 2)} PH/s"
else:
hashrate = f"{round(hashrate)} TH/s"
for button in HASHRATE_TOTAL_BUTTONS:
window[button].update(f"Hashrate: {hashrate}")
def update_tables(data: list or None = None):
TableManager().update_data(data)
def clear_tables():
TableManager().clear_tables()
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
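What the metaclass buys the classes below: every call to the constructor returns the same instance, so TableManager() can be created from any module and still share one set of table data. A quick sketch (Counter is invented purely for the example):

class Counter(metaclass=Singleton):
    def __init__(self):
        self.count = 0

a = Counter()
b = Counter()
a.count += 1
assert a is b and b.count == 1  # the second "constructor" call returned the first instance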
class TableManager(metaclass=Singleton):
_instance = None
def __init__(self):
self.images = TkImages()
self.data = {}
self.sort_key = "IP"
self.sort_reverse = False
def update_data(self, data: list):
if not data:
return
for line in data:
self.update_item(line)
def update_sort_key(self, sort_key):
if "" in sort_key or "" in sort_key:
sort_key = sort_key[:-1]
if self.sort_key == sort_key:
self.sort_reverse = not self.sort_reverse
self.sort_key = sort_key
self.update_tables()
def update_item(self, data: dict):
if not data or data == {} or not data.get("IP"):
return
if not data.get("Light"):
data["Light"] = False
if not data["IP"] in self.data.keys():
self.data[data["IP"]] = {}
for key in data.keys():
self.data[data["IP"]][key] = data[key]
self.update_tables()
def update_tables(self):
tables = {
"SCAN": [["" for _ in TABLE_HEADERS["SCAN"]] for _ in self.data],
"CMD": [["" for _ in TABLE_HEADERS["CMD"]] for _ in self.data],
"POOLS_ALL": [["" for _ in TABLE_HEADERS["POOLS_ALL"]] for _ in self.data],
"POOLS_1": [["" for _ in TABLE_HEADERS["POOLS_1"]] for _ in self.data],
"POOLS_2": [["" for _ in TABLE_HEADERS["POOLS_2"]] for _ in self.data],
"CONFIG": [["" for _ in TABLE_HEADERS["CONFIG"]] for _ in self.data],
}
ip_sorted_keys = sorted(self.data.keys(), key=lambda x: ipaddress.ip_address(x))
sorted_keys = sorted(
ip_sorted_keys, reverse=self.sort_reverse, key=lambda x: self._get_sort(x)
)
table_names = {
"SCAN": "scan_table",
"POOLS_ALL": "pools_table",
"POOLS_1": "pools_1_table",
"POOLS_2": "pools_2_table",
"CONFIG": "cfg_table",
"CMD": "cmd_table",
}
for table in TABLE_HEADERS.keys():
widget = window[table_names[table]].Widget
for idx, header in enumerate(TABLE_HEADERS[table]):
_header = header
if header == self.sort_key:
if self.sort_reverse:
_header = f"{header}"
else:
_header = f"{header}"
widget.heading(idx, text=_header)
# reset light
window["cmd_table"].Widget.heading("#0", text="Light")
# handle light sort key
if self.sort_key == "Light":
widget = window["cmd_table"].Widget
idx = "#0"
if self.sort_reverse:
_header = f"Light▼"
else:
_header = f"Light▲"
widget.heading(idx, text=_header)
for data_idx, key in enumerate(sorted_keys):
item = self.data[key]
keys = item.keys()
if "Hashrate" in keys:
if not isinstance(item["Hashrate"], str):
item[
"Hashrate"
] = f"{format(float(item['Hashrate']), '.2f').rjust(6, ' ')} TH/s"
for key in keys:
for table in TABLE_HEADERS.keys():
for idx, header in enumerate(TABLE_HEADERS[table]):
if key == header:
tables[table][data_idx][idx] = item[key]
window["scan_table"].update(tables["SCAN"])
window["pools_table"].update(tables["POOLS_ALL"])
window["pools_1_table"].update(tables["POOLS_1"])
window["pools_2_table"].update(tables["POOLS_2"])
window["cfg_table"].update(tables["CONFIG"])
treedata = sg.TreeData()
for idx, item in enumerate(tables["CMD"]):
ico = LIGHT
if self.data[item[0]]["Light"]:
ico = FAULT_LIGHT
treedata.insert("", idx, "", item, icon=ico)
window["cmd_table"].update(treedata)
update_miner_count(len(self.data))
total_hr = 0
for key in self.data.keys():
hashrate = 0
if not self.data[key]["Hashrate"] == "":
hashrate = (
self.data[key]["Hashrate"].replace(" ", "").replace("TH/s", "")
)
total_hr += float(hashrate)
update_total_hr(round(total_hr))
def _get_sort(self, data_key: str):
if self.sort_key not in self.data[data_key]:
return ""
if self.sort_key == "IP":
return ipaddress.ip_address(self.data[data_key]["IP"])
if self.sort_key == "Hashrate":
if self.data[data_key]["Hashrate"] == "":
return -1
if not isinstance(self.data[data_key]["Hashrate"], str):
return self.data[data_key]["Hashrate"]
return float(
self.data[data_key]["Hashrate"].replace(" ", "").replace("TH/s", "")
)
if self.sort_key in ["Wattage", "Temperature"]:
if isinstance(self.data[data_key][self.sort_key], str):
return -300
if self.sort_key == "Split":
if self.data[data_key][self.sort_key] == "":
return -1
if "/" not in self.data[data_key][self.sort_key]:
return 0
if not self.sort_reverse:
return int(self.data[data_key][self.sort_key].split("/")[0])
else:
return int(self.data[data_key][self.sort_key].split("/")[1])
return self.data[data_key][self.sort_key]
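# Worked example (values invented): with sort_key "Split", a miner reporting "80/20" sorts by 80
# on a normal pass and by 20 when the sort is reversed; an empty Hashrate sorts as -1 so blank
# rows group together instead of raising on float("").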
def clear_tables(self):
self.data = {}
for table in TABLE_KEYS["table"]:
window[table].update([])
for tree in TABLE_KEYS["tree"]:
window[tree].update(sg.TreeData())
update_miner_count(0)

139
tools/cfg_util/ui.py Normal file
View File

@@ -0,0 +1,139 @@
import PySimpleGUI as sg
import asyncio
import sys
from tools.cfg_util.imgs import TkImages
from tools.cfg_util.scan import btn_scan
from tools.cfg_util.commands import (
btn_light,
btn_reboot,
btn_backend,
btn_command,
)
from tools.cfg_util.configure import (
generate_config_ui,
btn_import,
btn_config,
)
from tools.cfg_util.layout import window
from tools.cfg_util.general import btn_all, btn_web, btn_refresh
from tools.cfg_util.tables import TableManager
import tkinter as tk
def _tree_header_click_handler(event, table):
region = table.Widget.identify("region", event.x, event.y)
if region == "heading":
col = int(table.Widget.identify_column(event.x)[1:]) - 1
if col == -1:
# handle the "Light" column, which needs a key of #0
col = "#0"
heading = table.Widget.heading(col)["text"]
mgr = TableManager()
mgr.update_sort_key(heading)
async def ui():
window.read(0)
TableManager().update_tables()
# create the images used in the table; they will not show unless a reference is kept here
tk_imgs = TkImages()
# left justify hostnames
window["scan_table"].Widget.column(2, anchor=tk.W)
# cmd table sort event
window["cmd_table"].Widget.bind(
"<Button-1>", lambda x: _tree_header_click_handler(x, window["cmd_table"])
)
while True:
event, value = window.read(0)
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if event[0].endswith("_table"):
if event[2][0] == -1:
mgr = TableManager()
table = window[event[0]].Widget
mgr.update_sort_key(table.heading(event[2][1])["text"])
# scan tab
if event == "scan_all":
_table = "scan_table"
btn_all(_table, value[_table])
if event == "scan_web":
_table = "scan_table"
btn_web(_table, value[_table])
if event == "scan_refresh":
_table = "scan_table"
asyncio.create_task(btn_refresh(_table, value[_table]))
if event == "btn_scan":
asyncio.create_task(btn_scan(value["scan_ip"]))
# pools tab
if event == "pools_all":
_table = "pools_table"
btn_all(_table, value[_table])
if event == "pools_web":
_table = "pools_table"
btn_web(_table, value[_table])
if event == "pools_refresh":
_table = "pools_table"
asyncio.create_task(btn_refresh(_table, value[_table]))
# configure tab
if event == "cfg_all":
_table = "cfg_table"
btn_all(_table, value[_table])
if event == "cfg_web":
_table = "cfg_table"
btn_web(_table, value[_table])
if event == "cfg_generate":
await generate_config_ui()
if event == "cfg_import":
_table = "cfg_table"
asyncio.create_task(btn_import(_table, value[_table]))
if event == "cfg_config":
_table = "cfg_table"
asyncio.create_task(
btn_config(
_table,
value[_table],
value["cfg_config_txt"],
value["cfg_append_ip"],
)
)
# commands tab
if event == "cmd_all":
_table = "cmd_table"
btn_all(_table, value[_table])
if event == "cmd_light":
_table = "cmd_table"
_ips = value[_table]
asyncio.create_task(btn_light(_ips))
if event == "cmd_reboot":
_table = "cmd_table"
_ips = value[_table]
asyncio.create_task(btn_reboot(_ips))
if event == "cmd_backend":
_table = "cmd_table"
_ips = value[_table]
asyncio.create_task(btn_backend(_ips))
if event == "btn_cmd":
_table = "cmd_table"
_ips = value[_table]
asyncio.create_task(btn_command(_ips, value["cmd_txt"]))
if event == "__TIMEOUT__":
await asyncio.sleep(0)
if __name__ == "__main__":
asyncio.run(ui())

View File

@@ -0,0 +1 @@
from tools.cfg_util_old.cfg_util_sg import main

View File

@@ -0,0 +1,26 @@
import asyncio
import sys
import logging
from tools.cfg_util_old.cfg_util_sg.ui import ui
# initialize logger and get settings
from logger import logger
logger.info("Initializing logger for CFG Util.")
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def main():
logging.info("Starting CFG Util.")
loop = asyncio.new_event_loop()
loop.run_until_complete(ui())
logging.info("Closing CFG Util.")

View File

@@ -0,0 +1,37 @@
from tools.cfg_util_old.cfg_util_sg.layout import window
def disable_buttons(func):
button_list = [
"scan",
"import_file_config",
"export_file_config",
"import_iplist",
"export_iplist",
"export_csv",
"select_all_ips",
"refresh_data",
"open_in_web",
"reboot_miners",
"restart_miner_backend",
"import_config",
"send_config",
"light",
"generate_config",
"send_miner_ssh_command_window",
]
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):
# disable the buttons
for button in button_list:
window[button].Update(disabled=True)
# call the original wrapped function
await func(*args, **kwargs)
# re-enable the buttons after the wrapped function completes
for button in button_list:
window[button].Update(disabled=False)
return inner

View File

@@ -6,8 +6,8 @@ import time
import aiofiles
import toml
from tools.cfg_util.cfg_util_sg.func.ui import update_ui_with_data
from tools.cfg_util.cfg_util_sg.layout import window
from tools.cfg_util_old.cfg_util_sg.func.ui import update_ui_with_data
from tools.cfg_util_old.cfg_util_sg.layout import window
from config.bos import bos_config_convert, general_config_convert_bos
@@ -17,10 +17,15 @@ async def import_iplist(file_location):
return
else:
ip_list = []
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
async for line in file:
ips = [x.group() for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
ips = [
x.group()
for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
line,
)
]
for ip in ips:
if ip not in ip_list:
ip_list.append(ipaddress.ip_address(ip))
@@ -36,33 +41,34 @@ async def export_csv(file_location, ip_list_selected):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(
", ".join([str(part) for part in item])
) + "\n")
await file.write(
str(", ".join([str(part).rstrip().lstrip() for part in item]))
+ "\n"
)
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
await file.write(str(
", ".join([str(part) for part in item])
) + "\n")
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(
str(", ".join([str(part).rstrip().lstrip() for part in item]))
+ "\n"
)
await update_ui_with_data("status", "")
async def export_iplist(file_location, ip_list_selected):
await update_ui_with_data("status", "Exporting")
if not os.path.exists(file_location):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(item) + "\n")
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(str(item[0]) + "\n")
await update_ui_with_data("status", "")
@@ -72,19 +78,19 @@ async def import_config_file(file_location):
if not os.path.exists(file_location):
return
else:
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
config = await file.read()
await update_ui_with_data("config", await bos_config_convert(toml.loads(config)))
await update_ui_with_data("config", bos_config_convert(toml.loads(config)))
await update_ui_with_data("status", "")
async def export_config_file(file_location, config):
await update_ui_with_data("status", "Exporting")
config = await general_config_convert_bos(config)
config = toml.dumps(general_config_convert_bos(config))
config = toml.loads(config)
config['format']['generator'] = 'upstream_config_util'
config['format']['timestamp'] = int(time.time())
config["format"]["generator"] = "upstream_config_util"
config["format"]["timestamp"] = int(time.time())
config = toml.dumps(config)
async with aiofiles.open(file_location, mode='w+') as file:
async with aiofiles.open(file_location, mode="w+") as file:
await file.write(config)
await update_ui_with_data("status", "")

View File

@@ -2,25 +2,38 @@ import asyncio
import ipaddress
import time
import warnings
import logging
from API import APIError
from tools.cfg_util.cfg_util_sg.func.parse_data import safe_parse_api_data
from tools.cfg_util.cfg_util_sg.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
from tools.cfg_util.cfg_util_sg.layout import window
from tools.cfg_util.cfg_util_sg.miner_factory import miner_factory
from tools.cfg_util_old.cfg_util_sg.func.parse_data import safe_parse_api_data
from tools.cfg_util_old.cfg_util_sg.func.ui import (
update_ui_with_data,
update_prog_bar,
set_progress_bar_len,
)
from tools.cfg_util_old.cfg_util_sg.layout import window
from miners.miner_factory import MinerFactory
from config.bos import bos_config_convert
from settings import CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS, CFG_UTIL_REBOOT_THREADS as REBOOT_THREADS
from tools.cfg_util_old.cfg_util_sg.func.decorators import disable_buttons
from settings import (
CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS,
CFG_UTIL_REBOOT_THREADS as REBOOT_THREADS,
)
async def import_config(idx):
await update_ui_with_data("status", "Importing")
miner = await miner_factory.get_miner(ipaddress.ip_address(window["ip_table"].Values[idx[0]][0]))
miner_ip = window["ip_table"].Values[idx[0]][0]
logging.debug(f"{miner_ip}: Importing config.")
miner = await MinerFactory().get_miner(ipaddress.ip_address(miner_ip))
await miner.get_config()
config = miner.config
await update_ui_with_data("config", str(config))
logging.debug(f"{miner_ip}: Config import completed.")
await update_ui_with_data("status", "")
@disable_buttons
async def scan_network(network):
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("ip_count", "")
@@ -43,7 +56,7 @@ async def scan_network(network):
asyncio.create_task(update_prog_bar(progress_bar_len))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
get_miner_genenerator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
all_miners.append(found_miner)
@@ -55,26 +68,73 @@ async def scan_network(network):
await update_ui_with_data("status", "")
@disable_buttons
async def miner_light(ips: list):
await asyncio.gather(*[flip_light(ip) for ip in ips])
async def flip_light(ip):
ip_list = window['ip_table'].Widget
miner = await miner_factory.get_miner(ip)
ip_list = window["ip_table"].Widget
miner = await MinerFactory().get_miner(ip)
index = [item[0] for item in window["ip_table"].Values].index(ip)
index_tags = ip_list.item(index)['tags']
index_tags = ip_list.item(index + 1)["tags"]
if "light" not in index_tags:
ip_list.item(index, tags=([*index_tags, "light"]))
window['ip_table'].update(row_colors=[(index, "white", "red")])
index_tags.append("light")
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_on()
else:
index_tags.remove("light")
ip_list.item(index, tags=index_tags)
window['ip_table'].update(row_colors=[(index, "black", "white")])
ip_list.item(index + 1, tags=index_tags)
await miner.fault_light_off()
async def send_command_generator(miners: list, command: str):
loop = asyncio.get_event_loop()
command_tasks = []
for miner in miners:
if len(command_tasks) >= CONFIG_THREADS:
cmd_sent = asyncio.as_completed(command_tasks)
command_tasks = []
for done in cmd_sent:
yield await done
command_tasks.append(loop.create_task(send_ssh_command(miner, command)))
cmd_sent = asyncio.as_completed(command_tasks)
for done in cmd_sent:
yield await done
async def send_ssh_command(miner, command: str):
proc = await miner.send_ssh_command(command)
return {"IP": miner.ip, "proc": proc}
async def send_miners_ssh_commands(ips: list, command: str, ssh_cmd_window):
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
data = []
send_cmd_generator = send_command_generator(all_miners, command)
async for command_sent in send_cmd_generator:
data.append(command_sent)
proc_table_data = [[ip, ""] for ip in ips]
for item in data:
if item["proc"].returncode == 0:
return_data = item["proc"].stdout
else:
return_data = item["proc"].stderr
if str(item["IP"]) in ips:
proc_table_index = ips.index(str(item["IP"]))
proc_table_data[proc_table_index] = [
str(item["IP"]),
return_data.replace("\n", " "),
]
ssh_cmd_window["ssh_cmd_table"].update(proc_table_data)
async def reboot_generator(miners: list):
loop = asyncio.get_event_loop()
reboot_tasks = []
@@ -90,11 +150,12 @@ async def reboot_generator(miners: list):
yield await done
@disable_buttons
async def reboot_miners(ips: list):
await update_ui_with_data("status", "Rebooting")
await set_progress_bar_len(2 * len(ips))
progress_bar_len = 0
get_miner_genenerator = miner_factory.get_miner_generator(ips)
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
@@ -123,25 +184,26 @@ async def restart_backend_generator(miners: list):
yield await done
@disable_buttons
async def restart_miners_backend(ips: list):
await update_ui_with_data("status", "Restarting Backends")
await set_progress_bar_len(2 * len(ips))
progress_bar_len = 0
get_miner_genenerator = miner_factory.get_miner_generator(ips)
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
reboot_miners_generator = reboot_generator(all_miners)
async for _rebooter in reboot_miners_generator:
restart_backend_gen = restart_backend_generator(all_miners)
async for _rebooter in restart_backend_gen:
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "")
async def send_config_generator(miners: list, config):
async def send_config_generator(miners: list, config, last_octet_ip_user: bool = False):
loop = asyncio.get_event_loop()
config_tasks = []
for miner in miners:
@@ -150,25 +212,30 @@ async def send_config_generator(miners: list, config):
config_tasks = []
for sent_config in configured:
yield await sent_config
config_tasks.append(loop.create_task(miner.send_config(config)))
config_tasks.append(
loop.create_task(miner.send_config(config, ip_user=last_octet_ip_user))
)
configured = asyncio.as_completed(config_tasks)
for sent_config in configured:
yield await sent_config
async def send_config(ips: list, config):
@disable_buttons
async def send_config(ips: list, config, last_octet_ip: bool = False):
await update_ui_with_data("status", "Configuring")
await set_progress_bar_len(2 * len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(ips)
get_miner_genenerator = MinerFactory().get_miner_generator(ips)
all_miners = []
async for miner in get_miner_genenerator:
all_miners.append(miner)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
config_sender_generator = send_config_generator(all_miners, config)
config_sender_generator = send_config_generator(
all_miners, config, last_octet_ip_user=last_octet_ip
)
async for _config_sender in config_sender_generator:
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -177,12 +244,16 @@ async def send_config(ips: list, config):
await refresh_data(ips)
@disable_buttons
async def refresh_data(ip_list: list):
await update_ui_with_data("status", "Getting Data")
await update_ui_with_data("hr_total", "")
ips = [ipaddress.ip_address(ip) for ip in ip_list]
if len(ips) == 0:
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
ips = [
ipaddress.ip_address(ip)
for ip in [item[0] for item in window["ip_table"].Values]
]
await set_progress_bar_len(len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -202,9 +273,13 @@ async def refresh_data(ip_list: list):
if data_point["IP"] in ordered_all_ips:
ip_table_index = ordered_all_ips.index(data_point["IP"])
ip_table_data[ip_table_index] = [
data_point["IP"], data_point["model"], data_point["host"], str(data_point['TH/s']) + " TH/s",
data_point["IP"],
data_point["model"],
data_point["host"],
str(data_point["TH/s"]) + " TH/s ",
data_point["temp"],
data_point['user'], str(data_point['wattage']) + " W"
data_point["user"],
str(data_point["wattage"]) + " W",
]
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
@@ -214,8 +289,10 @@ async def refresh_data(ip_list: list):
hr_idx = 3
for item, _ in enumerate(window["ip_table"].Values):
if len(window["ip_table"].Values[item]) > hr_idx:
if not window["ip_table"].Values[item][hr_idx] == '':
hashrate_list.append(float(window["ip_table"].Values[item][hr_idx].replace(" TH/s", "")))
if not window["ip_table"].Values[item][hr_idx] == "":
hashrate_list.append(
float(window["ip_table"].Values[item][hr_idx].replace(" TH/s ", ""))
)
else:
hashrate_list.append(0)
else:
@@ -227,6 +304,7 @@ async def refresh_data(ip_list: list):
await update_ui_with_data("status", "")
@disable_buttons
async def scan_and_get_data(network):
await update_ui_with_data("status", "Scanning")
await update_ui_with_data("hr_total", "")
@@ -234,6 +312,10 @@ async def scan_and_get_data(network):
await update_ui_with_data("ip_table", [])
network_size = len(network)
miner_generator = network.scan_network_generator()
MinerFactory().clear_cached_miners()
logging.info(f"Scanning network: {str(network)}")
await set_progress_bar_len(3 * network_size)
progress_bar_len = 0
miners = []
@@ -246,9 +328,11 @@ async def scan_and_get_data(network):
# window["ip_table"].update([["Identifying..."] for miner in miners])
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
logging.info(f"Found {len(miners)} Miners")
logging.debug(f"Found miners: {miners}")
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
get_miner_genenerator = miner_factory.get_miner_generator(miners)
get_miner_genenerator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_genenerator:
all_miners.append(found_miner)
@@ -256,34 +340,46 @@ async def scan_and_get_data(network):
window["ip_table"].update([[str(miner.ip)] for miner in all_miners])
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
logging.info(f"Resolved {len(all_miners)} Miners")
logging.debug(f"Resolved to miner types: {all_miners}")
await update_ui_with_data("ip_count", str(len(all_miners)))
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in miners])
ip_table_data = window["ip_table"].Values
ordered_all_ips = [item[0] for item in ip_table_data]
progress_bar_len += (network_size - len(miners))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "Getting Data")
logging.debug("Getting data on miners.")
for all_data in data_gen:
data_point = await all_data
if data_point["IP"] in ordered_all_ips:
ip_table_index = ordered_all_ips.index(data_point["IP"])
ip_table_data[ip_table_index] = [
data_point["IP"], data_point["model"], data_point["host"], str(data_point['TH/s']) + " TH/s",
data_point["IP"],
data_point["model"],
data_point["host"],
str(data_point["TH/s"]) + " TH/s ",
data_point["temp"],
data_point['user'], str(data_point['wattage']) + " W"
data_point["user"],
str(data_point["wattage"]) + " W",
]
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
hashrate_list = [float(item[3].replace(" TH/s", "")) for item in window["ip_table"].Values if not item[3] == '']
hashrate_list = [
float(item[3].replace(" TH/s ", ""))
for item in window["ip_table"].Values
if not item[3] == ""
]
total_hr = round(sum(hashrate_list), 2)
await update_ui_with_data("hr_total", f"{total_hr} TH/s")
await update_ui_with_data("status", "")
async def get_formatted_data(ip: ipaddress.ip_address):
miner = await miner_factory.get_miner(ip)
warnings.filterwarnings('ignore')
miner = await MinerFactory().get_miner(ip)
logging.debug(f"Getting data for miner: {miner.ip}")
warnings.filterwarnings("ignore")
miner_data = None
host = await miner.get_hostname()
try:
@@ -298,80 +394,148 @@ async def get_formatted_data(ip: ipaddress.ip_address):
user = "?"
try:
miner_data = await miner.api.multicommand("summary", "devs", "temps", "tunerstatus", "pools", "stats")
miner_data = await miner.api.multicommand(
"summary", "devs", "temps", "tunerstatus", "pools", "stats"
)
except APIError:
try:
# no devs command, it will fail in this case
miner_data = await miner.api.multicommand("summary", "temps", "tunerstatus", "pools", "stats")
miner_data = await miner.api.multicommand(
"summary", "temps", "tunerstatus", "pools", "stats"
)
except APIError as e:
print(e)
return {'TH/s': 0, 'IP': str(miner.ip), 'model': 'Unknown', 'temp': 0, 'host': 'Unknown', 'user': 'Unknown',
'wattage': 0}
logging.warning(f"{str(ip)}: {e}")
return {
"TH/s": 0,
"IP": str(miner.ip),
"model": "Unknown",
"temp": 0,
"host": "Unknown",
"user": "Unknown",
"wattage": 0,
}
if miner_data:
logging.info(f"Received miner data for miner: {miner.ip}")
# get all data from summary
if "summary" in miner_data.keys():
if not miner_data["summary"][0].get("SUMMARY") == []:
if (
not miner_data["summary"][0].get("SUMMARY") == []
and "SUMMARY" in miner_data["summary"][0].keys()
):
# temperature data, this is the ideal spot to get this
if "Temperature" in miner_data['summary'][0]['SUMMARY'][0].keys():
if not round(miner_data['summary'][0]['SUMMARY'][0]["Temperature"]) == 0:
temps = miner_data['summary'][0]['SUMMARY'][0]["Temperature"]
# hashrate data, this is the only place to get this for most miners as far as I know
if 'MHS av' in miner_data['summary'][0]['SUMMARY'][0].keys():
th5s = round(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'MHS av') / 1000000, 2)
elif 'GHS av' in miner_data['summary'][0]['SUMMARY'][0].keys():
if not miner_data['summary'][0]['SUMMARY'][0]['GHS av'] == "":
th5s = round(
float(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'GHS av')) / 1000,
2)
if "Temperature" in miner_data["summary"][0]["SUMMARY"][0].keys():
if (
not round(miner_data["summary"][0]["SUMMARY"][0]["Temperature"])
== 0
):
temps = miner_data["summary"][0]["SUMMARY"][0]["Temperature"]
# hashrate data
if "MHS av" in miner_data["summary"][0]["SUMMARY"][0].keys():
th5s = format(
round(
await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "MHS av"
)
/ 1000000,
2,
),
".2f",
).rjust(6, " ")
elif "GHS av" in miner_data["summary"][0]["SUMMARY"][0].keys():
if not miner_data["summary"][0]["SUMMARY"][0]["GHS av"] == "":
th5s = format(
round(
float(
await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "GHS av"
)
)
/ 1000,
2,
),
".2f",
).rjust(6, " ")
# alternate temperature data, for BraiinsOS
if "temps" in miner_data.keys():
if not miner_data["temps"][0]['TEMPS'] == []:
if "Chip" in miner_data["temps"][0]['TEMPS'][0].keys():
for board in miner_data["temps"][0]['TEMPS']:
if not miner_data["temps"][0].get("TEMPS") == []:
if "Chip" in miner_data["temps"][0]["TEMPS"][0].keys():
for board in miner_data["temps"][0]["TEMPS"]:
if board["Chip"] is not None and not board["Chip"] == 0.0:
temps = board["Chip"]
# alternate temperature data, for Whatsminers
if "devs" in miner_data.keys():
if not miner_data["devs"][0].get('DEVS') == []:
if "Chip Temp Avg" in miner_data["devs"][0]['DEVS'][0].keys():
for board in miner_data["devs"][0]['DEVS']:
if board['Chip Temp Avg'] is not None and not board['Chip Temp Avg'] == 0.0:
temps = board['Chip Temp Avg']
if not miner_data["devs"][0].get("DEVS") == []:
if "Chip Temp Avg" in miner_data["devs"][0]["DEVS"][0].keys():
for board in miner_data["devs"][0]["DEVS"]:
if (
board["Chip Temp Avg"] is not None
and not board["Chip Temp Avg"] == 0.0
):
temps = board["Chip Temp Avg"]
# alternate temperature data
if "stats" in miner_data.keys():
if not miner_data["stats"][0]['STATS'] == []:
if not miner_data["stats"][0]["STATS"] == []:
for temp in ["temp2", "temp1", "temp3"]:
if temp in miner_data["stats"][0]['STATS'][1].keys():
if miner_data["stats"][0]['STATS'][1][temp] is not None and not miner_data["stats"][0]['STATS'][1][temp] == 0.0:
temps = miner_data["stats"][0]['STATS'][1][temp]
if temp in miner_data["stats"][0]["STATS"][1].keys():
if (
miner_data["stats"][0]["STATS"][1][temp] is not None
and not miner_data["stats"][0]["STATS"][1][temp] == 0.0
):
temps = miner_data["stats"][0]["STATS"][1][temp]
# alternate temperature data, for Avalonminers
miner_data["stats"][0]['STATS'][0].keys()
if any("MM ID" in string for string in miner_data["stats"][0]['STATS'][0].keys()):
miner_data["stats"][0]["STATS"][0].keys()
if any(
"MM ID" in string
for string in miner_data["stats"][0]["STATS"][0].keys()
):
temp_all = []
for key in [string for string in miner_data["stats"][0]['STATS'][0].keys() if "MM ID" in string]:
for value in [string for string in miner_data["stats"][0]['STATS'][0][key].split(" ") if
"TMax" in string]:
for key in [
string
for string in miner_data["stats"][0]["STATS"][0].keys()
if "MM ID" in string
]:
for value in [
string
for string in miner_data["stats"][0]["STATS"][0][key].split(" ")
if "TMax" in string
]:
temp_all.append(int(value.split("[")[1].replace("]", "")))
temps = round(sum(temp_all) / len(temp_all))
# pool information
if "pools" in miner_data.keys():
if not miner_data['pools'][0].get('POOLS') == []:
user = await safe_parse_api_data(miner_data, 'pools', 0, 'POOLS', 0, 'User')
if not miner_data["pools"][0].get("POOLS") == []:
user = await safe_parse_api_data(
miner_data, "pools", 0, "POOLS", 0, "User"
)
else:
print(miner_data['pools'][0])
print(miner_data["pools"][0])
user = "Blank"
# braiins tuner status / wattage
if "tunerstatus" in miner_data.keys():
wattage = await safe_parse_api_data(miner_data, "tunerstatus", 0, 'TUNERSTATUS', 0, "PowerLimit")
wattage = await safe_parse_api_data(
miner_data, "tunerstatus", 0, "TUNERSTATUS", 0, "PowerLimit"
)
elif "Power" in miner_data["summary"][0]["SUMMARY"][0].keys():
wattage = await safe_parse_api_data(miner_data, "summary", 0, 'SUMMARY', 0, "Power")
wattage = await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "Power"
)
return {'TH/s': th5s, 'IP': str(miner.ip), 'model': model,
'temp': round(temps), 'host': host, 'user': user,
'wattage': wattage}
ret_data = {
"TH/s": th5s,
"IP": str(miner.ip),
"model": model,
"temp": round(temps),
"host": host,
"user": user,
"wattage": wattage,
}
logging.debug(f"{ret_data}")
return ret_data
async def generate_config(username, workername, v2_allowed):
@@ -383,45 +547,37 @@ async def generate_config(username, workername, v2_allowed):
return
if v2_allowed:
    url_1 = "stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
    url_2 = "stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
    url_3 = "stratum+tcp://stratum.slushpool.com:3333"
else:
    url_1 = "stratum+tcp://ca.stratum.slushpool.com:3333"
    url_2 = "stratum+tcp://us-east.stratum.slushpool.com:3333"
    url_3 = "stratum+tcp://stratum.slushpool.com:3333"
config = {
    "group": [
        {
            "name": "group",
            "quota": 1,
            "pool": [
                {"url": url_1, "user": user, "password": "123"},
                {"url": url_2, "user": user, "password": "123"},
                {"url": url_3, "user": user, "password": "123"},
            ],
        }
    ],
    "format": {
        "version": "1.2+",
        "model": "Antminer S9",
        "generator": "upstream_config_util",
        "timestamp": int(time.time()),
    },
    "temp_control": {
        "target_temp": 80.0,
        "hot_temp": 90.0,
        "dangerous_temp": 120.0,
    },
    "autotuning": {"enabled": True, "psu_power_limit": 900},
}
window["config"].update(bos_config_convert(config))

View File

@@ -4,7 +4,7 @@ from API import APIError
# noinspection PyPep8
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
path = [*path]
if len(path) == idx + 1:
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
@@ -17,34 +17,50 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
if isinstance(path[idx], str):
    if isinstance(data, dict):
        if path[idx] in data.keys():
            parsed_data = await safe_parse_api_data(
                data[path[idx]], idx=idx + 1, *path
            )
            # has to be == None, or else it fails on 0.0 hashrates
            # noinspection PyPep8
            if parsed_data == None:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return parsed_data
        else:
            if idx == 0:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return False
    else:
        if idx == 0:
            raise APIError(
                f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
            )
        return False
elif isinstance(path[idx], int):
    if isinstance(data, list):
        if len(data) > path[idx]:
            parsed_data = await safe_parse_api_data(
                data[path[idx]], idx=idx + 1, *path
            )
            # has to be == None, or else it fails on 0.0 hashrates
            # noinspection PyPep8
            if parsed_data == None:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return parsed_data
        else:
            if idx == 0:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return False
    else:
        if idx == 0:
            raise APIError(
                f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
            )
        return False
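A short, hypothetical usage sketch for the parser above; the response dict mimics a cgminer-style summary and is not taken from this diff:
# Walk a nested API response safely; a bad path raises APIError at the top level.
import asyncio
from API import APIError

async def example():
    response = {"SUMMARY": [{"MHS av": 13500000.0}]}
    try:
        mhs = await safe_parse_api_data(response, "SUMMARY", 0, "MHS av")
        print(f"{mhs / 1000000:.2f} TH/s")  # 13.50 TH/s
    except APIError as err:
        print(err)

asyncio.run(example())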

View File

@@ -0,0 +1,124 @@
import ipaddress
import re
from tools.cfg_util_old.cfg_util_sg.layout import window
import pyperclip
def table_select_all():
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
def copy_from_table(table):
selection = table.selection()
copy_values = []
for each in selection:
try:
value = table.item(each)["values"][0]
copy_values.append(str(value))
except:
pass
copy_string = "\n".join(copy_values)
pyperclip.copy(copy_string)
def copy_from_ssh_table(table):
selection = table.selection()
copy_values = []
for each in selection:
try:
value = ", ".join(table.item(each)["values"])
copy_values.append(str(value))
except:
pass
copy_string = "\n".join(copy_values)
pyperclip.copy(copy_string)
async def update_ui_with_data(key, message, append=False):
if append:
message = window[key].get_text() + message
window[key].update(message)
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window["progress"].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
async def set_progress_bar_len(amount):
window["progress"].Update(0, max=amount)
window["progress"].maxlen = amount
window["progress_percent"].Update("0.0 %")
async def sort_data(index: int or str):
if window["scan"].Disabled:
return
await update_ui_with_data("status", "Sorting Data")
data_list = window["ip_table"].Values
table = window["ip_table"].Widget
all_data = []
for idx, item in enumerate(data_list):
all_data.append({"data": item, "tags": table.item(int(idx) + 1)["tags"]})
# wattage
if re.match("[0-9]* W", str(all_data[0]["data"][index])):
new_list = sorted(
all_data, key=lambda x: int(x["data"][index].replace(" W", ""))
)
if all_data == new_list:
new_list = sorted(
all_data,
reverse=True,
key=lambda x: int(x["data"][index].replace(" W", "")),
)
# hashrate
elif re.match("[0-9]*\.?[0-9]* TH\/s", str(all_data[0]["data"][index])):
new_list = sorted(
all_data, key=lambda x: float(x["data"][index].replace(" TH/s", ""))
)
if all_data == new_list:
new_list = sorted(
all_data,
reverse=True,
key=lambda x: float(x["data"][index].replace(" TH/s", "")),
)
# ip addresses
elif re.match(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index]),
):
new_list = sorted(
all_data, key=lambda x: ipaddress.ip_address(x["data"][index])
)
if all_data == new_list:
new_list = sorted(
all_data,
reverse=True,
key=lambda x: ipaddress.ip_address(x["data"][index]),
)
# everything else, hostname, temp, and user
else:
new_list = sorted(all_data, key=lambda x: x["data"][index])
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: x["data"][index])
new_data = []
for item in new_list:
new_data.append(item["data"])
await update_ui_with_data("ip_table", new_data)
for idx, item in enumerate(new_list):
table.item(idx + 1, tags=item["tags"])
await update_ui_with_data("status", "")

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,206 @@
import asyncio
import sys
import PySimpleGUI as sg
import tkinter as tk
from tools.cfg_util_old.cfg_util_sg.layout import (
window,
generate_config_layout,
send_ssh_cmd_layout,
)
from tools.cfg_util_old.cfg_util_sg.func.miners import (
send_config,
miner_light,
refresh_data,
generate_config,
import_config,
scan_and_get_data,
restart_miners_backend,
reboot_miners,
send_miners_ssh_commands,
)
from tools.cfg_util_old.cfg_util_sg.func.files import (
import_iplist,
import_config_file,
export_iplist,
export_config_file,
export_csv,
)
from tools.cfg_util_old.cfg_util_sg.func.decorators import disable_buttons
from tools.cfg_util_old.cfg_util_sg.func.ui import (
sort_data,
copy_from_table,
table_select_all,
copy_from_ssh_table,
)
from network import MinerNetwork
import webbrowser
async def ui():
window.read(timeout=0)
table = window["ip_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_table(table))
table.bind("<Control-Key-a>", lambda x: table_select_all())
# light tag shows red row for fault lights
table.tag_configure("light", foreground="white", background="red")
# left justify the hostnames
table.column(2, anchor=tk.W)
while True:
event, value = window.read(timeout=0)
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == "ip_table":
if event[2][0] == -1:
await sort_data(event[2][1])
if event == "open_in_web":
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == "scan":
if len(value["miner_network"].split("/")) > 1:
network = value["miner_network"].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value["miner_network"])
asyncio.create_task(scan_and_get_data(miner_network))
if event == "select_all_ips":
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
if event == "import_config":
if 2 > len(value["ip_table"]) > 0:
asyncio.create_task(import_config(value["ip_table"]))
if event == "restart_miner_backend":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
restart_miners_backend(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "reboot_miners":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
reboot_miners(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "send_miner_ssh_command_window":
ips = [window["ip_table"].Values[item][0] for item in value["ip_table"]]
if len(ips) == 0:
ips = [item[0] for item in window["ip_table"].Values]
if not len(ips) == 0:
await generate_ssh_cmd_ui(ips)
if event == "light":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
miner_light(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(
export_iplist(
value["file_iplist"],
[window["ip_table"].Values[item][0] for item in value["ip_table"]],
)
)
if event == "export_csv":
asyncio.create_task(
export_csv(
value["file_iplist"],
[window["ip_table"].Values[item] for item in value["ip_table"]],
)
)
if event == "send_config":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
send_config(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
],
value["config"],
last_octet_ip=value["last_octet_user"],
)
)
if event == "import_file_config":
asyncio.create_task(import_config_file(value["file_config"]))
if event == "export_file_config":
asyncio.create_task(
export_config_file(value["file_config"], value["config"])
)
if event == "refresh_data":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(
refresh_data(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "generate_config":
await generate_config_ui()
if event == "__TIMEOUT__":
await asyncio.sleep(0)
async def generate_config_ui():
generate_config_window = sg.Window(
"Generate Config", generate_config_layout(), modal=True
)
while True:
event, values = generate_config_window.read()
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "generate_config_window_generate":
if values["generate_config_window_username"]:
await generate_config(
values["generate_config_window_username"],
values["generate_config_window_workername"],
values["generate_config_window_allow_v2"],
)
generate_config_window.close()
break
@disable_buttons
async def generate_ssh_cmd_ui(selected_miners: list):
ssh_cmd_window = sg.Window(
"Send Command", send_ssh_cmd_layout(selected_miners), modal=True
)
ssh_cmd_window.read(timeout=0)
table = ssh_cmd_window["ssh_cmd_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_ssh_table(table))
# left justify the results
table.column(1, anchor=tk.W)
while True:
event, values = ssh_cmd_window.read(timeout=0)
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "ssh_command_window_send_cmd":
asyncio.create_task(
send_miners_ssh_commands(
selected_miners, values["ssh_command_window_cmd"], ssh_cmd_window
)
)
if event == "__TIMEOUT__":
await asyncio.sleep(0)
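Both run loops rely on non-blocking reads (`read(timeout=0)`) and `await asyncio.sleep(0)` on the `__TIMEOUT__` event, so tasks started with `asyncio.create_task` get a chance to run between GUI polls. A hedged sketch of how such a loop would typically be driven; the launcher below is assumed, not part of this diff:
# Assumed entry point: drive the PySimpleGUI polling loop on an asyncio event loop.
# ui() never returns on its own, so run_until_complete keeps polling the window
# while background tasks (scans, refreshes, commands) run cooperatively.
import asyncio

def run():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.run_until_complete(ui())

if __name__ == "__main__":
    run()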

View File

@@ -4,7 +4,7 @@ from API import APIError
# noinspection PyPep8
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
path = [*path]
if len(path) == idx + 1:
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
@@ -17,34 +17,50 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
if isinstance(path[idx], str):
    if isinstance(data, dict):
        if path[idx] in data.keys():
            parsed_data = await safe_parse_api_data(
                data[path[idx]], idx=idx + 1, *path
            )
            # has to be == None, or else it fails on 0.0 hashrates
            # noinspection PyPep8
            if parsed_data == None:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return parsed_data
        else:
            if idx == 0:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return False
    else:
        if idx == 0:
            raise APIError(
                f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
            )
        return False
elif isinstance(path[idx], int):
    if isinstance(data, list):
        if len(data) > path[idx]:
            parsed_data = await safe_parse_api_data(
                data[path[idx]], idx=idx + 1, *path
            )
            # has to be == None, or else it fails on 0.0 hashrates
            # noinspection PyPep8
            if parsed_data == None:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return parsed_data
        else:
            if idx == 0:
                raise APIError(
                    f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
                )
            return False
    else:
        if idx == 0:
            raise APIError(
                f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
            )
        return False

View File

@@ -0,0 +1,10 @@
from tools.web_monitor.app import app
import uvicorn
def main():
uvicorn.run("app:app", host="0.0.0.0", port=80)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,36 @@
from fastapi import Request, APIRouter
from fastapi.responses import RedirectResponse
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from tools.web_monitor._settings.func import get_current_settings, update_settings
router = APIRouter()
@router.route("/", methods=["GET", "POST"])
async def settings(request: Request):
return templates.TemplateResponse(
"settings.html",
{
"request": request,
"cur_miners": get_current_miner_list(),
"settings": get_current_settings(),
},
)
@router.post("/update")
async def update_settings_page(request: Request):
data = await request.form()
graph_data_sleep_time = data.get("graph_data_sleep_time")
miner_data_timeout = data.get("miner_data_timeout")
miner_identify_timeout = data.get("miner_identify_timeout")
new_settings = {
"graph_data_sleep_time": int(graph_data_sleep_time),
"miner_data_timeout": int(miner_data_timeout),
"miner_identify_timeout": int(miner_identify_timeout),
}
update_settings(new_settings)
return RedirectResponse(request.url_for("settings"))

View File

@@ -0,0 +1,24 @@
import toml
import os
def get_current_settings():
try:
with open(
os.path.join(os.getcwd(), "settings/web_settings.toml"), "r"
) as settings_file:
settings = toml.loads(settings_file.read())
except:
settings = {
"graph_data_sleep_time": 1,
"miner_data_timeout": 5,
"miner_identify_timeout": 5,
}
return settings
def update_settings(settings):
with open(
os.path.join(os.getcwd(), "settings/web_settings.toml"), "w"
) as settings_file:
settings_file.write(toml.dumps(settings))
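A quick round-trip sketch for the helpers above; it assumes the tool is launched from a directory that contains (or may contain) settings/web_settings.toml, since the path is built from the working directory:
# Read, tweak, and persist the web monitor settings; defaults are used if the file is missing.
settings = get_current_settings()
settings["graph_data_sleep_time"] = 2
update_settings(settings)
print(get_current_settings()["graph_data_sleep_time"])  # 2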

View File

@@ -0,0 +1,3 @@
graph_data_sleep_time = 1
miner_data_timeout = 5
miner_identify_timeout = 5

tools/web_monitor/app.py Normal file
View File

@@ -0,0 +1,35 @@
import os
import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from tools.web_monitor.dashboard import router as dashboard_router
from tools.web_monitor.miner import router as miner_router
from tools.web_monitor.scan import router as scan_router
from tools.web_monitor._settings import router as settings_router
app = FastAPI()
app.mount(
"/static",
StaticFiles(directory=os.path.join(os.path.dirname(__file__), "static")),
name="static",
)
app.include_router(dashboard_router, tags=["dashboard"])
app.include_router(miner_router, tags=["miner"], prefix="/miner")
app.include_router(scan_router, tags=["scan"], prefix="/scan")
app.include_router(settings_router, tags=["settings"], prefix="/settings")
@app.get("/remove_all_miners")
async def remove_all_miners(request: Request):
file = open("miner_list.txt", "w")
file.close()
return RedirectResponse(request.url_for("settings"))
if __name__ == "__main__":
uvicorn.run("app:app", host="0.0.0.0", port=80)

View File

@@ -0,0 +1,22 @@
from fastapi import Request, APIRouter
from fastapi.responses import RedirectResponse
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from .ws import router as ws_router
router = APIRouter()
router.include_router(ws_router)
@router.get("/")
def index(request: Request):
return RedirectResponse(request.url_for("dashboard"))
@router.get("/dashboard")
def dashboard(request: Request):
return templates.TemplateResponse(
"index.html", {"request": request, "cur_miners": get_current_miner_list()}
)

View File

@@ -0,0 +1,43 @@
import asyncio
from miners.miner_factory import MinerFactory
from tools.web_monitor._settings.func import get_current_settings
async def get_miner_data_dashboard(miner_ip):
try:
settings = get_current_settings()
miner_identify_timeout = settings["miner_identify_timeout"]
miner_data_timeout = settings["miner_data_timeout"]
miner_ip = await asyncio.wait_for(
MinerFactory().get_miner(miner_ip), miner_identify_timeout
)
miner_summary = await asyncio.wait_for(
miner_ip.api.summary(), miner_data_timeout
)
if miner_summary:
if "MHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = format(
round(miner_summary["SUMMARY"][0]["MHS av"] / 1000000, 2), ".2f"
)
elif "GHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = format(
round(miner_summary["SUMMARY"][0]["GHS av"] / 1000, 2), ".2f"
)
else:
hashrate = 0
else:
hashrate = 0
return {"ip": str(miner_ip.ip), "hashrate": hashrate}
except asyncio.exceptions.TimeoutError:
return {"ip": miner_ip, "error": "The miner is not responding."}
except KeyError:
return {
"ip": miner_ip,
"error": "The miner returned unusable/unsupported data.",
}
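The hashrate normalization above reports everything in TH/s: `MHS av` is divided by 1,000,000 and `GHS av` by 1,000. A small helper expressing the same conversion; the function name is illustrative and not part of this diff:
# Normalize a cgminer/bmminer SUMMARY record's average hashrate to TH/s.
def summary_to_ths(summary: dict) -> float:
    if "MHS av" in summary:
        return round(summary["MHS av"] / 1000000, 2)
    if "GHS av" in summary:
        return round(summary["GHS av"] / 1000, 2)
    return 0.0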

View File

@@ -0,0 +1,41 @@
import asyncio
import datetime
import websockets.exceptions
from fastapi import WebSocket, WebSocketDisconnect, APIRouter
from tools.web_monitor.func import get_current_miner_list
from tools.web_monitor._settings.func import get_current_settings
from tools.web_monitor.dashboard.func import get_miner_data_dashboard
router = APIRouter()
@router.websocket("/dashboard/ws")
async def dashboard_websocket(websocket: WebSocket):
await websocket.accept()
graph_sleep_time = get_current_settings()["graph_data_sleep_time"]
try:
while True:
miners = get_current_miner_list()
all_miner_data = []
data_gen = asyncio.as_completed(
[get_miner_data_dashboard(miner_ip) for miner_ip in miners]
)
for all_data in data_gen:
data_point = await all_data
all_miner_data.append(data_point)
all_miner_data.sort(key=lambda x: x["ip"])
await websocket.send_json(
{
"datetime": datetime.datetime.now().isoformat(),
"miners": all_miner_data,
}
)
await asyncio.sleep(graph_sleep_time)
except WebSocketDisconnect:
print("Websocket disconnected.")
pass
except websockets.exceptions.ConnectionClosedOK:
pass

View File

@@ -0,0 +1,12 @@
import os
import ipaddress
def get_current_miner_list():
cur_miners = []
if os.path.exists(os.path.join(os.getcwd(), "miner_list.txt")):
with open(os.path.join(os.getcwd(), "miner_list.txt")) as file:
for line in file.readlines():
cur_miners.append(line.strip())
cur_miners = sorted(cur_miners, key=lambda x: ipaddress.ip_address(x))
return cur_miners
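The miner list lives in a plain miner_list.txt in the working directory, one IP per line, and the helper sorts it numerically rather than lexically. A quick hypothetical sketch:
# Append two IPs and read them back; sorting is by numeric address, so .3 precedes .20.
with open("miner_list.txt", "a+") as file:
    for ip in ["192.168.1.20", "192.168.1.3"]:
        file.write(ip + "\n")
print(get_current_miner_list())  # ['192.168.1.3', '192.168.1.20']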

View File

@@ -0,0 +1,22 @@
from fastapi import Request, APIRouter
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from .ws import router as ws_router
router = APIRouter()
router.include_router(ws_router)
@router.get("/")
def miner(_request: Request, _miner_ip):
return get_miner
@router.get("/{miner_ip}")
def get_miner(request: Request, miner_ip):
return templates.TemplateResponse(
"miner.html",
{"request": request, "cur_miners": get_current_miner_list(), "miner": miner_ip},
)

View File

@@ -0,0 +1,16 @@
from fastapi import Request
from fastapi.responses import RedirectResponse
from tools.web_monitor.app import app
from tools.web_monitor.func import get_current_miner_list
@app.get("/{miner_ip}/remove")
def get_miner(request: Request, miner_ip):
miners = get_current_miner_list()
miners.remove(miner_ip)
with open("miner_list.txt", "w") as file:
for miner_ip in miners:
file.write(miner_ip + "\n")
return RedirectResponse(request.url_for("dashboard"))

View File

@@ -0,0 +1,167 @@
import asyncio
import datetime
import websockets.exceptions
from fastapi import WebSocket, WebSocketDisconnect, APIRouter
from miners.miner_factory import MinerFactory
from tools.web_monitor._settings.func import get_current_settings
router = APIRouter()
@router.websocket("/{miner_ip}/ws")
async def miner_websocket(websocket: WebSocket, miner_ip):
await websocket.accept()
settings = get_current_settings()
miner_identify_timeout = settings["miner_identify_timeout"]
miner_data_timeout = settings["miner_data_timeout"]
try:
while True:
try:
cur_miner = await asyncio.wait_for(
MinerFactory().get_miner(str(miner_ip)), miner_identify_timeout
)
data = await asyncio.wait_for(
cur_miner.api.multicommand(
"summary", "fans", "stats", "devs", "temps"
),
miner_data_timeout,
)
miner_model = await cur_miner.get_model()
miner_summary = None
miner_fans = None
if "summary" in data.keys():
miner_summary = data["summary"][0]
elif "SUMMARY" in data.keys():
miner_summary = data
miner_fans = {"FANS": []}
for item in ["Fan Speed In", "Fan Speed Out"]:
if item in miner_summary["SUMMARY"][0].keys():
miner_fans["FANS"].append(
{"RPM": miner_summary["SUMMARY"][0][item]}
)
if "fans" in data.keys():
miner_fans = data["fans"][0]
miner_temp_list = []
if "temps" in data.keys():
miner_temps = data["temps"][0]
for board in miner_temps["TEMPS"]:
if board["Chip"] is not None and not board["Chip"] == 0.0:
miner_temp_list.append(board["Chip"])
if "devs" in data.keys() and not len(miner_temp_list) > 0:
if not data["devs"][0].get("DEVS") == []:
if "Chip Temp Avg" in data["devs"][0]["DEVS"][0].keys():
for board in data["devs"][0]["DEVS"]:
if (
board["Chip Temp Avg"] is not None
and not board["Chip Temp Avg"] == 0.0
):
miner_temp_list.append(board["Chip Temp Avg"])
if "stats" in data.keys() and not len(miner_temp_list) > 0:
if not data["stats"][0]["STATS"] == []:
for temp in ["temp2", "temp1", "temp3"]:
if temp in data["stats"][0]["STATS"][1].keys():
if (
data["stats"][0]["STATS"][1][temp] is not None
and not data["stats"][0]["STATS"][1][temp] == 0.0
):
miner_temp_list.append(
data["stats"][0]["STATS"][1][temp]
)
data["stats"][0]["STATS"][0].keys()
if any(
"MM ID" in string
for string in data["stats"][0]["STATS"][0].keys()
):
temp_all = []
for key in [
string
for string in data["stats"][0]["STATS"][0].keys()
if "MM ID" in string
]:
for value in [
string
for string in data["stats"][0]["STATS"][0][key].split(
" "
)
if "TMax" in string
]:
temp_all.append(
int(value.split("[")[1].replace("]", ""))
)
miner_temp_list.append(round(sum(temp_all) / len(temp_all)))
if "stats" in data.keys() and not miner_fans:
miner_stats = data["stats"][0]
miner_fans = {"FANS": []}
for item in ["fan1", "fan2", "fan3", "fan4"]:
if item in miner_stats["STATS"][1].keys():
miner_fans["FANS"].append(
{"RPM": miner_stats["STATS"][1][item]}
)
if miner_summary:
if "MHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = float(
format(
round(
miner_summary["SUMMARY"][0]["MHS av"] / 1000000, 2
),
".2f",
)
)
elif "GHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = float(
format(
round(miner_summary["SUMMARY"][0]["GHS av"] / 1000, 2),
".2f",
)
)
else:
hashrate = 0
else:
hashrate = 0
fan_speeds = []
if miner_fans:
for fan in miner_fans["FANS"]:
fan_speeds.append(fan["RPM"])
while len(fan_speeds) < 4:
fan_speeds.append(0)
if len(miner_temp_list) == 0:
miner_temp_list = [0]
data = {
"hashrate": hashrate,
"fans": fan_speeds,
"temp": round(sum(miner_temp_list) / len(miner_temp_list), 2),
"datetime": datetime.datetime.now().isoformat(),
"model": miner_model,
}
print(data)
await websocket.send_json(data)
await asyncio.sleep(settings["graph_data_sleep_time"])
except asyncio.exceptions.TimeoutError:
data = {"error": "The miner is not responding."}
await websocket.send_json(data)
await asyncio.sleep(0.5)
except KeyError as e:
print(e)
data = {"error": "The miner returned unusable/unsupported data."}
await websocket.send_json(data)
await asyncio.sleep(0.5)
except WebSocketDisconnect:
print("Websocket disconnected.")
except websockets.exceptions.ConnectionClosedOK:
pass

Binary file not shown.

View File

@@ -0,0 +1,25 @@
from fastapi import Request, APIRouter
from tools.web_monitor.template import templates
from tools.web_monitor.func import get_current_miner_list
from .ws import router as ws_router
router = APIRouter()
router.include_router(ws_router)
@router.get("/")
def scan(request: Request):
return templates.TemplateResponse(
"scan.html", {"request": request, "cur_miners": get_current_miner_list()}
)
@router.post("/add_miners")
async def add_miners_scan(request: Request):
miners = await request.json()
with open("miner_list.txt", "a+") as file:
for miner_ip in miners["miners"]:
file.write(miner_ip + "\n")
return scan

View File

@@ -0,0 +1,39 @@
import asyncio
from fastapi import WebSocket
from network import MinerNetwork
from tools.web_monitor.func import get_current_miner_list
from miners.miner_factory import MinerFactory
async def do_websocket_scan(websocket: WebSocket, network_ip: str):
cur_miners = get_current_miner_list()
try:
if "/" in network_ip:
network_ip, network_subnet = network_ip.split("/")
network = MinerNetwork(network_ip, mask=network_subnet)
else:
network = MinerNetwork(network_ip)
miner_generator = network.scan_network_generator()
miners = []
async for miner_ip in miner_generator:
if miner_ip and str(miner_ip) not in cur_miners:
miners.append(miner_ip)
get_miner_generator = MinerFactory().get_miner_generator(miners)
all_miners = []
async for found_miner in get_miner_generator:
all_miners.append(
{"ip": found_miner.ip, "model": await found_miner.get_model()}
)
all_miners.sort(key=lambda x: x["ip"])
send_miners = []
for miner_ip in all_miners:
send_miners.append(
{"ip": str(miner_ip["ip"]), "model": miner_ip["model"]}
)
await websocket.send_json(send_miners)
await websocket.send_text("Done")
except asyncio.CancelledError:
raise

View File

@@ -0,0 +1,35 @@
import asyncio
import websockets.exceptions
from fastapi import WebSocket, WebSocketDisconnect, APIRouter
from tools.web_monitor.scan.func import do_websocket_scan
router = APIRouter()
@router.websocket("/ws")
async def websocket_scan(websocket: WebSocket):
await websocket.accept()
cur_task = None
try:
while True:
ws_data = await websocket.receive_text()
if "-Cancel-" in ws_data:
if cur_task:
cur_task.cancel()
print("Cancelling scan...")
try:
await cur_task
except asyncio.CancelledError:
cur_task = None
await websocket.send_text("Cancelled")
else:
cur_task = asyncio.create_task(do_websocket_scan(websocket, ws_data))
if cur_task and cur_task.done():
cur_task = None
except WebSocketDisconnect:
print("Websocket disconnected.")
except websockets.exceptions.ConnectionClosedOK:
pass
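A hedged client-side sketch for exercising the scan socket above; it assumes the websockets package and a server listening on localhost port 80, and uses the same "-Cancel-"/"Done" text frames as the handler:
# Hypothetical client: start a subnet scan and print result batches until "Done".
import asyncio
import websockets

async def run_scan():
    async with websockets.connect("ws://localhost:80/scan/ws") as ws:
        await ws.send("192.168.1.0/24")
        while True:
            msg = await ws.recv()
            if msg in ("Done", "Cancelled"):
                break
            print(msg)  # JSON list of {"ip": ..., "model": ...} entries

asyncio.run(run_scan())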

View File

@@ -0,0 +1,165 @@
body {
min-height: 100vh;
min-height: -webkit-fill-available;
}
html {
height: -webkit-fill-available;
}
main {
display: flex;
flex-wrap: nowrap;
height: 100vh;
height: -webkit-fill-available;
max-height: 100vh;
overflow-x: auto;
overflow-y: hidden;
}
.bi {
vertical-align: -.125em;
pointer-events: none;
fill: currentColor;
}
.dropdown-toggle { outline: 0; }
.nav-flush .nav-link {
border-radius: 0;
}
.btn-toggle-nav a {
display: inline-flex;
padding: .1875rem .5rem;
margin-top: .125rem;
margin-left: 1.25rem;
text-decoration: none;
}
.btn-toggle-nav a:hover,
.btn-toggle-nav a:focus {
background-color: #0d6efd;
}
.scrollarea {
overflow-y: auto;
}
.fw-semibold { font-weight: 600; }
.sidebar {
position: fixed;
top: 0;
/* rtl:raw:
right: 0;
*/
bottom: 0;
/* rtl:remove */
left: 0;
z-index: 100; /* Behind the navbar */
padding: 0px 0 0; /* Height of navbar */
box-shadow: inset -1px 0 0 rgba(0, 0, 0, .1);
}
@media (max-width: 991.98px) {
.sidebar {
top: 48px;
}
}
.sidebar-sticky {
position: relative;
top: 0;
height: calc(100vh - 48px);
padding-top: .5rem;
overflow-x: hidden;
overflow-y: auto;
}
.sidebar .nav-link {
font-weight: 500;
color: #333;
}
.sidebar .nav-link .feather {
margin-right: 4px;
color: #727272;
}
.sidebar .nav-link.active {
color: #2470dc;
}
.sidebar .nav-link:hover .feather,
.sidebar .nav-link.active .feather {
color: inherit;
}
.sidebar-heading {
font-size: .75rem;
text-transform: uppercase;
}
.navbar-brand {
padding-top: .75rem;
padding-bottom: .75rem;
font-size: 1rem;
background-color: rgba(0, 0, 0, .25);
box-shadow: inset -1px 0 0 rgba(0, 0, 0, .25);
}
.navbar .navbar-toggler {
top: .25rem;
right: 1rem;
}
.navbar .form-control {
padding: .75rem 1rem;
border-width: 0;
border-radius: 0;
}
.form-control-dark {
color: #fff;
background-color: rgba(255, 255, 255, .1);
border-color: rgba(255, 255, 255, .1);
}
.form-control-dark:focus {
border-color: transparent;
box-shadow: 0 0 0 3px rgba(255, 255, 255, .25);
}
.btn-toggle-nav{
max-height: 300px;
-webkit-overflow-scrolling: touch;
}
/* Scrollbar */
.btn-toggle-nav::-webkit-scrollbar {
width: 5px;
}
.btn-toggle-nav::-webkit-scrollbar-track {
box-shadow: inset 0 0 5px grey;
border-radius: 10px;
}
.btn-toggle-nav::-webkit-scrollbar-thumb {
background-image: linear-gradient(180deg, #D0368A 0%, #708AD4 99%);
box-shadow: inset 2px 2px 5px 0 rgba(#fff, 0.5);
border-radius: 100px;
}
.nav-pills .nav-link.active {
color: #212529;
background-image: linear-gradient(180deg, #D0368A 0%, #708AD4 99%);
}
.navbar-link:hover {
background-image: linear-gradient(180deg, #760A45 0%, #23449F 99%);
}
.navbar-link {
transition: unset;
color: unset;
}

View File

@@ -0,0 +1,7 @@
import os
from fastapi.templating import Jinja2Templates
templates = Jinja2Templates(
directory=os.path.join(os.path.dirname(__file__), "templates")
)

View File

@@ -0,0 +1,128 @@
{% extends 'navbar.html'%}
{% block content %}
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script src="https://cdn.jsdelivr.net/npm/luxon@2.3.1/build/global/luxon.min.js"></script>
<canvas id="line-chart" class="grad-border mt-3 mb-4" width="600" height="360"></canvas>
{% if cur_miners|length == 0 %}<a role="button" href="/scan" id="noMiners" class="w-100 btn btn-info">Click here to add miners.</a>{% endif %}
<div id="errors"></div>
<script>
if (!window.WebSocket) alert("WebSocket not supported by this browser");
var ws = new WebSocket("ws://{{request.url.hostname}}:{% if request.port %}{{request.url.port}}{% else %}80{% endif %}/dashboard/ws");
let all_data = []
let all_labels = []
ws.onmessage = function(event) {
var new_data = JSON.parse(event.data)
if (!new_data["miners"].length == 0) {
total_hashrate = parseFloat(0)
errors = document.getElementById("errors")
for (i = 0; i< new_data["miners"].length; i++) {
if (new_data["miners"][i].hasOwnProperty("error")) {
if (!document.getElementById(new_data["miners"][i]["ip"] + "_error")) {
errors.innerHTML += "<div id='" + new_data["miners"][i]["ip"] + "_error" +
"' class='d-flex align-items-center p-1 mb-1 alert alert-danger'><strong class='p-0 m-0'>" +
new_data["miners"][i]["ip"] + ": " +
new_data["miners"][i]["error"] +
"</strong><div class='spinner-border spinner-border-sm ms-auto'></div></div>"
}
} else {
if (document.getElementById(new_data["miners"][i]["ip"] + "_error")) {
document.getElementById(new_data["miners"][i]["ip"] + "_error").remove()
}
total_hashrate += parseFloat(new_data["miners"][i]["hashrate"])
}
};
var chart = document.getElementById("line-chart")
datetime = luxon.DateTime.fromISO(new_data["datetime"]).toLocal();
if (minerDataChart.data.labels.length > 50) minerDataChart.data.labels.shift();
if (minerDataChart.data.datasets[0].data.length > 50) minerDataChart.data.datasets[0].data.shift();
minerDataChart.data.labels.push(datetime.toLocaleString(luxon.DateTime.TIME_WITH_SECONDS));
minerDataChart.data.datasets[0].data.push(total_hashrate.toFixed(2));
minerDataChart.update();
}
};
var ctx = document.getElementById("line-chart").getContext("2d");
var width = document.getElementById("line-chart").width;
var chartGradient = ctx.createLinearGradient(0, 0, width, 0)
chartGradient.addColorStop(0, '#D0368A');
chartGradient.addColorStop(1, '#708AD4');
const chartAreaBorder = {
id: 'chartAreaBorder',
beforeDraw(chart, args, options) {
const {ctx, chartArea: {left, top, width, height}} = chart;
ctx.save();
ctx.strokeStyle = options.borderColor;
ctx.lineWidth = options.borderWidth;
ctx.strokeRect(left, top, width, height);
ctx.restore();
}
};
var minerDataChart = new Chart(document.getElementById("line-chart"), {
type: 'line',
data: {
labels: [
],
datasets: [{
label: "Hashrate",
borderColor: chartGradient,
pointBorderColor: chartGradient,
pointBackgroundColor: chartGradient,
pointHoverBackgroundColor: chartGradient,
pointHoverBorderColor: chartGradient,
data: [
],
}
]
},
plugins: [chartAreaBorder],
options: {
animation: {
easing: 'easeInSine',
duration: 0
},
plugins: {
chartAreaBorder: {
borderColor: chartGradient,
borderWidth: 1
},
legend: {
labels: {
color: chartGradient
}
},
tooltip: {
callbacks: {
label: function(data) {
return data.dataset.data[data.dataIndex] + " TH/s";
}
}
}
},
scales: {
y: {
min: 0, // minimum value
suggestedMax: 100,
stepSize: 10,
ticks: {
callback: function(value, index, ticks) {
return value + " TH/s";
}
}
},
x: {
ticks: {
maxTicksLimit: 6,
maxRotation: 0,
}
}
}
}
});
</script>
{% endblock content %}

View File

@@ -0,0 +1,376 @@
{% extends 'navbar.html'%}
{% block content %}
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script src="https://cdn.jsdelivr.net/npm/luxon@2.3.1/build/global/luxon.min.js"></script>
<div class="row mt-2">
<div class="col">
<h2 class="ms-3 mt-1">{{miner}}</h2>
</div>
<div class="col">
<div class="d-flex flex-row-reverse">
<button type="button" class="btn btn-outline-danger mx-1" data-bs-toggle="modal" data-bs-target="#removeModal">
Remove Miner
</button>
<!-- Modal -->
<div class="modal fade" id="removeModal" tabindex="-1" aria-labelledby="removeModalLabel" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="removeModalLabel">Remove Miner</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
Do you really want to remove this miner?
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
<a class="btn btn-danger" href="{{url_for('miner')}}/{{miner}}/remove" role="button">Remove</a>
</div>
</div>
</div>
</div>
<a class="btn btn-primary mx-1" target="_blank" href="http://{{miner}}" role="button">Web Interface</a>
</div>
</div>
</div>
<div class="row">
<ul class="nav nav-tabs" id="myTab" role="tablist">
<li class="nav-item" role="presentation">
<button class="nav-link active text-dark" id="hashrate-tab" data-bs-toggle="tab" data-bs-target="#hashrate" type="button" role="tab" aria-controls="hashrate" aria-selected="true">Hashrate</button>
</li>
<li class="nav-item" role="presentation">
<button class="nav-link text-dark" id="temp-tab" data-bs-toggle="tab" data-bs-target="#temp" type="button" role="tab" aria-controls="temp" aria-selected="false">Temperature</button>
</li>
</ul>
<div class="tab-content" id="hashrateTempTabs">
<div class="tab-pane fade show active" id="hashrate" role="tabpanel" aria-labelledby="hashrate-tab">
<div class="col-12 line_chart">
<canvas id="hr-chart" class="grad-border mt-3" width="600" height="200"></canvas>
</div>
</div>
<div class="tab-pane fade" id="temp" role="tabpanel" aria-labelledby="temp-tab">
<div class="col-12 line_chart">
<canvas id="temp-chart" class="grad-border mt-3" width="600" height="200"></canvas>
</div>
</div>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan1">Fan 1</div>
<canvas class="mb-2" id="fan-chart-1" width="100" height="100"></canvas>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan2">Fan 2</div>
<canvas class="mb-2" id="fan-chart-2" width="100" height="100"></canvas>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan3">Fan 3</div>
<canvas class="mb-2" id="fan-chart-3" width="100" height="100"></canvas>
</div>
<div class="col-3">
<div class="d-flex justify-content-center text-nowrap" id="fan4">Fan 4</div>
<canvas class="mb-2" id="fan-chart-4" width="100" height="100"></canvas>
</div>
</div>
<div class="d-flex align-items-center mt-4 alert alert-secondary">
<div class="mx-auto">Model:</div>
<div class="mx-auto fw-bolder" id="minerModel">?</div>
<div class="mx-auto" style="border-left: 1px solid grey; height: 50px;"></div>
<div class="mx-auto">Hashrate:</div>
<div class="mx-auto fw-bolder" id="minerHashrate">?</div>
</div>
<div id="errorContainer" class="d-flex align-items-center mt-4 alert alert-danger invisible">
<strong id="errorCode"></strong>
<div class="spinner-border ms-auto"></div>
</div>
<script>
var ws = new WebSocket("ws://{{request.url.hostname}}:{% if request.port %}{{request.url.port}}{% else %}80{% endif %}/miner/{{miner}}/ws");
let all_data = []
let all_labels = []
ws.onmessage = function(event) {
var new_data = JSON.parse(event.data)
if (new_data.hasOwnProperty("error")) {
var err_container = document.getElementById("errorContainer")
var err_code = document.getElementById("errorCode")
err_code.innerHTML = new_data['error']
err_container.classList.remove("invisible")
var miner_hr = document.getElementById("minerHashrate")
miner_hr.innerHTML = "?"
} else {
var chart = document.getElementById("hr-chart")
var err_container = document.getElementById("errorContainer")
if (!err_container.classList.hasOwnProperty("invisible")) {
err_container.classList.add("invisible")
}
datetime = luxon.DateTime.fromISO(new_data["datetime"]).toLocal();
if (minerDataChart.data.labels.length > 50) minerDataChart.data.labels.shift();
if (minerDataChart.data.datasets[0].data.length > 50) minerDataChart.data.datasets[0].data.shift();
if (minerTempChart.data.labels.length > 50) minerTempChart.data.labels.shift();
if (minerTempChart.data.datasets[0].data.length > 50) minerTempChart.data.datasets[0].data.shift();
minerDataChart.data.labels.push(datetime.toLocaleString(luxon.DateTime.TIME_WITH_SECONDS));
minerDataChart.data.datasets[0].data.push(new_data["hashrate"].toFixed(2));
minerTempChart.data.labels.push(datetime.toLocaleString(luxon.DateTime.TIME_WITH_SECONDS));
minerTempChart.data.datasets[0].data.push(new_data["temp"].toFixed(2));
fan1Chart.data.datasets[0].data = [new_data["fans"][0], 6000-new_data["fans"][0]]
fan2Chart.data.datasets[0].data = [new_data["fans"][1], 6000-new_data["fans"][1]]
fan3Chart.data.datasets[0].data = [new_data["fans"][2], 6000-new_data["fans"][2]]
fan4Chart.data.datasets[0].data = [new_data["fans"][3], 6000-new_data["fans"][3]]
document.getElementById("fan1").innerHTML = "Fan 1: " + new_data["fans"][0]
document.getElementById("fan2").innerHTML = "Fan 2: " + new_data["fans"][1]
document.getElementById("fan3").innerHTML = "Fan 3: " + new_data["fans"][2]
document.getElementById("fan4").innerHTML = "Fan 4: " + new_data["fans"][3]
fan1Chart.update();
fan2Chart.update();
fan3Chart.update();
fan4Chart.update();
minerDataChart.update();
minerTempChart.update();
var miner_hr = document.getElementById("minerHashrate")
miner_hr.innerHTML = new_data["hashrate"].toFixed(2) + " TH/s"
var miner_model = document.getElementById("minerModel")
miner_model.innerHTML = new_data["model"]
};
};
var ctx = document.getElementById("hr-chart").getContext("2d");
var width = document.getElementById("hr-chart").width;
var chartGradient = ctx.createLinearGradient(0, 0, width, 0)
chartGradient.addColorStop(0, '#D0368A');
chartGradient.addColorStop(1, '#708AD4');
const chartAreaBorder = {
id: 'chartAreaBorder',
beforeDraw(chart, args, options) {
const {ctx, chartArea: {left, top, width, height}} = chart;
ctx.save();
ctx.strokeStyle = options.borderColor;
ctx.lineWidth = options.borderWidth;
ctx.strokeRect(left, top, width, height);
ctx.restore();
}
};
var minerDataChart = new Chart(document.getElementById("hr-chart"), {
type: 'line',
data: {
labels: [
],
datasets: [{
label: "Hashrate",
borderColor: chartGradient,
pointBorderColor: chartGradient,
pointBackgroundColor: chartGradient,
pointHoverBackgroundColor: chartGradient,
pointHoverBorderColor: chartGradient,
data: [
],
}
]
},
plugins: [chartAreaBorder],
options: {
animation: {
easing: 'easeInSine',
duration: 0
},
plugins: {
chartAreaBorder: {
borderColor: chartGradient,
borderWidth: 1
},
legend: {
labels: {
color: chartGradient
}
},
tooltip: {
callbacks: {
label: function(data) {
return data.dataset.data[data.dataIndex] + " TH/s";
}
}
}
},
scales: {
y: {
min: 0, // minimum value
suggestedMax: 10,
stepSize: 1,
ticks: {
callback: function(value, index, ticks) {
return value + " TH/s";
}
}
},
x: {
ticks: {
maxTicksLimit: 6,
maxRotation: 0
}
}
}
}
});
var minerTempChart = new Chart(document.getElementById("temp-chart"), {
type: 'line',
data: {
labels: [
],
datasets: [{
label: "Temperature",
borderColor: chartGradient,
pointBorderColor: chartGradient,
pointBackgroundColor: chartGradient,
pointHoverBackgroundColor: chartGradient,
pointHoverBorderColor: chartGradient,
data: [
],
}
]
},
plugins: [chartAreaBorder],
options: {
animation: {
easing: 'easeInSine',
duration: 0
},
plugins: {
chartAreaBorder: {
borderColor: chartGradient,
borderWidth: 1
},
legend: {
labels: {
color: chartGradient
}
},
tooltip: {
callbacks: {
label: function(data) {
return data.dataset.data[data.dataIndex] + " °C";
}
}
}
},
scales: {
y: {
min: 0, // minimum value
suggestedMax: 100,
stepSize: 5,
ticks: {
callback: function(value, index, ticks) {
return value + " °C";
}
}
},
x: {
ticks: {
maxTicksLimit: 6,
maxRotation: 0
}
}
}
}
});
var options_fans = {
animation: {
easing: 'easeInSine',
duration: 250,
},
aspectRatio: 1.5,
events: [],
responsive: true,
plugins: {
legend: {
display: false,
}
}
};
var fanCtx = document.getElementById("fan-chart-1").getContext("2d");
var fanWidth = document.getElementById("fan-chart-1").width;
var fanChartGradient = fanCtx.createLinearGradient(0, 0, fanWidth, -fanWidth)
fanChartGradient.addColorStop(0, '#D0368A');
fanChartGradient.addColorStop(1, '#708AD4');
var fan1Chart = new Chart(document.getElementById("fan-chart-1"), {
type: "doughnut",
data: {
labels: ["Fan 1"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
var fan2Chart = new Chart(document.getElementById("fan-chart-2"), {
type: "doughnut",
data: {
labels: ["Fan 2"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
var fan3Chart = new Chart(document.getElementById("fan-chart-3"), {
type: "doughnut",
data: {
labels: ["Fan 3"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
var fan4Chart = new Chart(document.getElementById("fan-chart-4"), {
type: "doughnut",
data: {
labels: ["Fan 4"],
datasets: [
{
data: [0, 6000],
// add colors
backgroundColor: [
fanChartGradient,
"rgba(199, 199, 199, 1)"
]
},
]
},
options: options_fans
});
</script>
{% endblock content %}

View File

@@ -0,0 +1,108 @@
<!DOCTYPE html>
<html lang="en">
<head>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/js/bootstrap.bundle.min.js" integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM" crossorigin="anonymous"></script>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-EVSTQN3/azprG1Anm3QDgpJLIm9Nao0Yz1ztcQTwFspd3yD65VohhpuuCOmLASjC" crossorigin="anonymous">
<link href="{{ url_for('static', path='/navbar.css')}}" rel="stylesheet">
<meta charset="UTF-8">
<title>Title</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="Content-Security-Policy" content="default-src * gap: data: blob: 'unsafe-inline' 'unsafe-eval' ws: wss:;">
</head>
<body>
<svg xmlns="http://www.w3.org/2000/svg" style="display: none;">
<symbol id="dashboard" viewBox="0 0 16 16">
<path d="M8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4.5a.5.5 0 0 0 .5-.5v-4h2v4a.5.5 0 0 0 .5.5H14a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146zM2.5 14V7.707l5.5-5.5 5.5 5.5V14H10v-4a.5.5 0 0 0-.5-.5h-3a.5.5 0 0 0-.5.5v4H2.5z"></path>
</symbol>
<symbol id="miners" viewBox="0 0 16 16">
<path d="M8 4a.5.5 0 0 1 .5.5V6a.5.5 0 0 1-1 0V4.5A.5.5 0 0 1 8 4zM3.732 5.732a.5.5 0 0 1 .707 0l.915.914a.5.5 0 1 1-.708.708l-.914-.915a.5.5 0 0 1 0-.707zM2 10a.5.5 0 0 1 .5-.5h1.586a.5.5 0 0 1 0 1H2.5A.5.5 0 0 1 2 10zm9.5 0a.5.5 0 0 1 .5-.5h1.5a.5.5 0 0 1 0 1H12a.5.5 0 0 1-.5-.5zm.754-4.246a.389.389 0 0 0-.527-.02L7.547 9.31a.91.91 0 1 0 1.302 1.258l3.434-4.297a.389.389 0 0 0-.029-.518z"></path>
<path fill-rule="evenodd" d="M0 10a8 8 0 1 1 15.547 2.661c-.442 1.253-1.845 1.602-2.932 1.25C11.309 13.488 9.475 13 8 13c-1.474 0-3.31.488-4.615.911-1.087.352-2.49.003-2.932-1.25A7.988 7.988 0 0 1 0 10zm8-7a7 7 0 0 0-6.603 9.329c.203.575.923.876 1.68.63C4.397 12.533 6.358 12 8 12s3.604.532 4.923.96c.757.245 1.477-.056 1.68-.631A7 7 0 0 0 8 3z"></path>
</symbol>
<symbol id="settings" viewBox="0 0 16 16">
<path d="M8 4.754a3.246 3.246 0 1 0 0 6.492 3.246 3.246 0 0 0 0-6.492zM5.754 8a2.246 2.246 0 1 1 4.492 0 2.246 2.246 0 0 1-4.492 0z"/>
<path d="M9.796 1.343c-.527-1.79-3.065-1.79-3.592 0l-.094.319a.873.873 0 0 1-1.255.52l-.292-.16c-1.64-.892-3.433.902-2.54 2.541l.159.292a.873.873 0 0 1-.52 1.255l-.319.094c-1.79.527-1.79 3.065 0 3.592l.319.094a.873.873 0 0 1 .52 1.255l-.16.292c-.892 1.64.901 3.434 2.541 2.54l.292-.159a.873.873 0 0 1 1.255.52l.094.319c.527 1.79 3.065 1.79 3.592 0l.094-.319a.873.873 0 0 1 1.255-.52l.292.16c1.64.893 3.434-.902 2.54-2.541l-.159-.292a.873.873 0 0 1 .52-1.255l.319-.094c1.79-.527 1.79-3.065 0-3.592l-.319-.094a.873.873 0 0 1-.52-1.255l.16-.292c.893-1.64-.902-3.433-2.541-2.54l-.292.159a.873.873 0 0 1-1.255-.52l-.094-.319zm-2.633.283c.246-.835 1.428-.835 1.674 0l.094.319a1.873 1.873 0 0 0 2.693 1.115l.291-.16c.764-.415 1.6.42 1.184 1.185l-.159.292a1.873 1.873 0 0 0 1.116 2.692l.318.094c.835.246.835 1.428 0 1.674l-.319.094a1.873 1.873 0 0 0-1.115 2.693l.16.291c.415.764-.42 1.6-1.185 1.184l-.291-.159a1.873 1.873 0 0 0-2.693 1.116l-.094.318c-.246.835-1.428.835-1.674 0l-.094-.319a1.873 1.873 0 0 0-2.692-1.115l-.292.16c-.764.415-1.6-.42-1.184-1.185l.159-.291A1.873 1.873 0 0 0 1.945 8.93l-.319-.094c-.835-.246-.835-1.428 0-1.674l.319-.094A1.873 1.873 0 0 0 3.06 4.377l-.16-.292c-.415-.764.42-1.6 1.185-1.184l.292.159a1.873 1.873 0 0 0 2.692-1.115l.094-.319z"/>
</symbol>
<symbol id="scan" viewBox="0 0 16 16">
<path d="M14 1a1 1 0 0 1 1 1v12a1 1 0 0 1-1 1H2a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h12zM2 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2H2z"/>
<path d="M8 4a.5.5 0 0 1 .5.5v3h3a.5.5 0 0 1 0 1h-3v3a.5.5 0 0 1-1 0v-3h-3a.5.5 0 0 1 0-1h3v-3A.5.5 0 0 1 8 4z"/>
</symbol>
<symbol id="miner" viewBox="0 0 16 16">
<path d="M11.5 2a.5.5 0 0 1 .5.5v7a.5.5 0 0 1-1 0v-7a.5.5 0 0 1 .5-.5Zm2 0a.5.5 0 0 1 .5.5v7a.5.5 0 0 1-1 0v-7a.5.5 0 0 1 .5-.5Zm-10 8a.5.5 0 0 0 0 1h6a.5.5 0 0 0 0-1h-6Zm0 2a.5.5 0 0 0 0 1h6a.5.5 0 0 0 0-1h-6ZM5 3a1 1 0 0 0-1 1h-.5a.5.5 0 0 0 0 1H4v1h-.5a.5.5 0 0 0 0 1H4a1 1 0 0 0 1 1v.5a.5.5 0 0 0 1 0V8h1v.5a.5.5 0 0 0 1 0V8a1 1 0 0 0 1-1h.5a.5.5 0 0 0 0-1H9V5h.5a.5.5 0 0 0 0-1H9a1 1 0 0 0-1-1v-.5a.5.5 0 0 0-1 0V3H6v-.5a.5.5 0 0 0-1 0V3Zm0 1h3v3H5V4Zm6.5 7a.5.5 0 0 0-.5.5v1a.5.5 0 0 0 .5.5h2a.5.5 0 0 0 .5-.5v-1a.5.5 0 0 0-.5-.5h-2Z"/>
<path d="M1 2a2 2 0 0 1 2-2h11a2 2 0 0 1 2 2v11a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2v-2H.5a.5.5 0 0 1-.5-.5v-1A.5.5 0 0 1 .5 9H1V8H.5a.5.5 0 0 1-.5-.5v-1A.5.5 0 0 1 .5 6H1V5H.5a.5.5 0 0 1-.5-.5v-2A.5.5 0 0 1 .5 2H1Zm1 11a1 1 0 0 0 1 1h11a1 1 0 0 0 1-1V2a1 1 0 0 0-1-1H3a1 1 0 0 0-1 1v11Z"/>
</symbol>
</svg>
<header class="navbar navbar-dark sticky-top bg-dark flex-xl-nowrap p-0 shadow">
<a class="d-lg-none col-lg-3 col-xl-2 me-0 px-3" style="height: 50px;" href="#"></a>
<button class="navbar-toggler position-absolute d-lg-none collapsed" type="button" data-bs-toggle="collapse" data-bs-target="#sidebarMenu" aria-controls="sidebarMenu" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<ul class="navbar-nav px-3">
<li class="nav-item text-nowrap">
</li>
</ul>
</header>
<div class="container-fluid">
<div class="row">
<nav id="sidebarMenu" class="text-white bg-dark col-lg-3 col-xl-2 d-lg-block sidebar collapse">
<div class="position-sticky pt-3">
<ul class="nav nav-pills flex-column">
<li class="nav-item mb-1 mx-2">
<a href="/dashboard" class="nav-link navbar-link {% if request.path.strip('/') == 'dashboard' %}active{% else %}text-white{% endif %}">
<svg class="bi me-2" width="16" height="16"><use xlink:href="#dashboard"></use></svg>
Dashboard
</a>
</li>
<li class="nav-item mb-1 mx-2">
<a href="" class="nav-link navbar-link {% if request.path.strip('/') == 'scan' or request.path.split('/')[1] == 'miner' %}active{% else %}text-white{% endif %}" data-bs-toggle="collapse" data-bs-target="#miners-collapse" aria-expanded="false">
<svg class="bi me-2" width="16" height="16"><use xlink:href="#miners"></use></svg>
Miners
</a>
<div class="collapse mt-1" id="miners-collapse" style="">
<ul id="navMiners" class="btn-toggle-nav overflow-auto list-unstyled fw-normal pb-1 small">
<li>
<a href="/scan" class="nav-link navbar-link {% if request.path.strip('/') == 'scan' %}active{% else %}text-white{% endif %}">
<svg class="bi me-2 mt-1" width="16" height="16"><use xlink:href="#scan"></use></svg>
Add Miners
</a>
</li>
{% for miner in cur_miners %}
<li>
<a href="/miner/{{miner}}" class="nav-link navbar-link {% if request.path.strip('/') == 'miner/' + miner %}active{% else %}text-white{% endif %}">
<svg class="bi me-2 mt-1" width="16" height="16"><use xlink:href="#miner"></use></svg>
{{miner}}
</a>
</li>
{% endfor %}
</ul>
</div>
</li>
<li class="border-top my-3"></li>
<li class="nav-item mb-1 mx-2">
<a href="/settings" class="nav-link navbar-link {% if request.path.strip('/') == 'settings' %}active{% else %}text-white{% endif %}">
<svg class="bi me-2" width="16" height="16"><use xlink:href="#settings"></use></svg>
Settings
</a>
</li>
</ul>
</div>
</nav>
<div class="col-lg-9 ms-md-auto col-xl-10 px-lg-4 ps-4">
{% block content %}
{% endblock content %}
</div>
</div>
</div>
</body>
</html>

View File

@@ -0,0 +1,158 @@
{% extends 'navbar.html'%}
{% block content %}
<div class="row w-100 my-4">
<form action="" onsubmit="sendMessage(event)">
<div class="input-group mb-3">
<span class="input-group-text" id="scan-ip">Subnet</span>
<input type="text" class="form-control" id="messageText" placeholder="192.168.1.0/24" aria-describedby="scan-ip">
<button class="btn btn-danger" onclick="cancelScan()" style="display:none;" type="button" id="cancelButton">Cancel</button>
<button class="btn btn-primary" onclick="scanMiners()" type="button" id="scanButton">Scan</button>
</div>
</form>
</div>
<div class="row w-100">
<button class="btn btn-primary mb-4 mx-1" onclick="addMiners()" type="button" id="addButton">Add Selected Miners</button>
</div>
<div class="row w-100">
<table class="table table-striped table-responsive" style="max-height:300px;">
<thead>
<tr>
<th class="active col-1">
<input type="checkbox" class="select-all checkbox" name="select-all" id="selectAllCheckbox"/>
</th>
<th>IP</th>
<th>Model</th>
<th id="scanStatus" class="col-2">0 Miners</th>
</tr>
</thead>
<tbody id="minerTable">
</tbody>
</table>
</div>
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo" crossorigin="anonymous"></script>
<script>
$(function(){
//column checkbox select all or cancel
$("input.select-all").click(function () {
var checked = this.checked;
$("input.select-item").each(function (index,item) {
item.checked = checked;
});
});
//check selected items
$("input.select-item").click(function () {
var checked = this.checked;
var all = $("input.select-all")[0];
var total = $("input.select-item").length;
var len = $("input.select-item:checked:checked").length;
all.checked = len===total;
});
});
</script>
<script>
    window.post = function (url, data) {
        return fetch(url, {
            method: "POST",
            headers: {'Content-Type': 'application/json'},
            body: JSON.stringify(data)
        });
    }

    var ws = new WebSocket("ws://{{request.url.hostname}}:{{request.url.port}}/scan/ws");

    ws.onmessage = function (event) {
        if (event.data == "Done" || event.data == "Cancelled") {
            // scan finished or was cancelled, re-enable the controls
            document.getElementById("scanButton").innerHTML = "Scan"
            document.getElementById("scanButton").disabled = false
            document.getElementById("selectAllCheckbox").disabled = false
            document.getElementById("scanStatus").innerHTML = document.getElementById('minerTable').rows.length + " Miners"
            document.getElementById("cancelButton").style = "display:none;"
            enableCheckboxes();
        } else {
            // partial result: rebuild the miner table from the JSON list
            var miner_data = JSON.parse(event.data)
            var miners = document.getElementById('minerTable')
            miners.innerHTML = ""
            miner_data.forEach(function (miner) {
                var tr = document.createElement('tr')
                tr.id = miner["ip"]
                var checkbox_td = document.createElement('td')
                checkbox_td.innerHTML = '<input type="checkbox" class="select-item checkbox" name="minerCheckboxes" value="' + miner["ip"] + '" />'
                checkbox_td.className = "active"
                var ip_td = document.createElement('td')
                ip_td.innerHTML = miner["ip"]
                var model_td = document.createElement('td')
                model_td.innerHTML = miner["model"]
                var empty_td = document.createElement('td')
                tr.append(checkbox_td)
                tr.append(ip_td)
                tr.append(model_td)
                tr.append(empty_td)
                miners.append(tr)
            });
            disableCheckboxes();
        };
    };

    function scanMiners(event) {
        var input = document.getElementById("messageText")
        var miners = document.getElementById('minerTable')
        miners.innerHTML = ""
        document.getElementById("scanStatus").innerHTML = "<span class='spinner-border spinner-border-sm'></span> Scanning"
        document.getElementById("scanButton").innerHTML = "<span class='spinner-border spinner-border-sm'></span> Scanning"
        document.getElementById("scanButton").disabled = true
        document.getElementById("selectAllCheckbox").disabled = true
        document.getElementById("cancelButton").style = ""
        if (input.value != "") {
            ws.send(input.value)
            event.preventDefault()
        } else {
            // fall back to scanning the default subnet
            ws.send("192.168.1.0/24")
        };
    };

    function cancelScan(event) {
        document.getElementById("scanStatus").innerHTML = "Canceling..."
        document.getElementById("scanButton").innerHTML = "Canceling..."
        document.getElementById("cancelButton").style = "display:none;"
        ws.send("-Cancel-")
    };

    function addMiners(event) {
        var checkedBoxes = document.querySelectorAll('input[name=minerCheckboxes]:checked');
        if (checkedBoxes.length != 0) {
            var minerList = [];
            for (var i = 0; i < checkedBoxes.length; i++) {
                minerList.push(checkedBoxes[i].defaultValue);
            }
            post("{{url_for('add_miners_scan')}}", {miners: minerList})
            for (var i = 0; i < minerList.length; i++) {
                var tr_to_remove = document.getElementById(minerList[i])
                tr_to_remove.remove()
                var navbar_miners = document.getElementById("navMiners")
                navbar_miners.innerHTML += '<li><a href="/miner/' + minerList[i] + '" class="nav-link text-white"><svg class="bi me-2 mt-1" width="16" height="16"><use xlink:href="#miner"></use></svg>' + minerList[i] + '</a></li>'
            }
            document.getElementById("scanStatus").innerHTML = document.getElementById('minerTable').rows.length + " Miners"
        };
    };

    function disableCheckboxes() {
        var checkBoxes = document.querySelectorAll('input[name=minerCheckboxes]');
        for (var i = 0; i < checkBoxes.length; i++) {
            checkBoxes[i].disabled = true
        };
    };

    function enableCheckboxes() {
        var checkBoxes = document.querySelectorAll('input[name=minerCheckboxes]');
        for (var i = 0; i < checkBoxes.length; i++) {
            checkBoxes[i].disabled = false
        };
    };
</script>
{% endblock content %}
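The scan page above only shows the client side of the protocol: the browser sends a network string (or "-Cancel-") over /scan/ws and expects either a JSON list of {"ip", "model"} objects, or the literal strings "Done" / "Cancelled", back. The matching server endpoint is not part of this diff; a minimal sketch of what it could look like, assuming a FastAPI app and a stand-in discover_miners() helper (cancel handling omitted), is:

# Sketch only: the real /scan/ws endpoint is not shown in this diff.
# discover_miners() is a hypothetical stand-in for the app's actual scanning logic.
from typing import AsyncIterator, Dict
from fastapi import FastAPI, WebSocket

app = FastAPI()

async def discover_miners(network: str) -> AsyncIterator[Dict[str, str]]:
    # placeholder: yield {"ip": ..., "model": ...} dicts as miners are found
    if False:
        yield {}

@app.websocket("/scan/ws")
async def scan_ws(websocket: WebSocket):
    await websocket.accept()
    network = await websocket.receive_text()   # e.g. "192.168.1.0/24"
    found = []
    async for miner in discover_miners(network):
        found.append(miner)
        await websocket.send_json(found)       # client rebuilds the miner table
    await websocket.send_text("Done")          # client re-enables its controls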

View File

@@ -0,0 +1,46 @@
{% extends 'navbar.html' %}
{% block content %}
<div class="row my-2">
    <div class="col">
        <div class="d-flex flex-row-reverse">
            <button type="button" class="btn btn-outline-danger mx-1" data-bs-toggle="modal" data-bs-target="#removeModal">
                Remove All Miners
            </button>
            <!-- Modal -->
            <div class="modal fade" id="removeModal" tabindex="-1" aria-labelledby="removeModalLabel" aria-hidden="true">
                <div class="modal-dialog">
                    <div class="modal-content">
                        <div class="modal-header">
                            <h5 class="modal-title" id="removeModalLabel">Remove All Miners</h5>
                            <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
                        </div>
                        <div class="modal-body">
                            Do you really want to remove all miners?
                        </div>
                        <div class="modal-footer">
                            <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                            <a class="btn btn-danger" href="{{url_for('remove_all_miners')}}" role="button">Remove</a>
                        </div>
                    </div>
                </div>
            </div>
        </div>
    </div>
</div>
<form method="post" action="/settings/update">
    <div class="input-group mb-3">
        <span class="input-group-text">Graph Data Sleep Time</span>
        <input type="number" class="form-control" value="{{settings['graph_data_sleep_time']}}" name="graph_data_sleep_time" id="graph_data_sleep_time">
    </div>
    <div class="input-group mb-3">
        <span class="input-group-text">Miner Data Timeout</span>
        <input type="number" class="form-control" value="{{settings['miner_data_timeout']}}" name="miner_data_timeout" id="miner_data_timeout">
    </div>
    <div class="input-group mb-3">
        <span class="input-group-text">Miner Identification Timeout</span>
        <input type="number" class="form-control" value="{{settings['miner_identify_timeout']}}" name="miner_identify_timeout" id="miner_identify_timeout">
    </div>
    <button type="submit" class="btn btn-primary w-100">Submit</button>
</form>
{% endblock content %}
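The form above posts graph_data_sleep_time, miner_data_timeout and miner_identify_timeout to /settings/update as plain form fields; the handler itself is not shown in this diff. A minimal sketch, assuming a FastAPI app and an in-memory settings dict, might look like:

# Sketch only: the real /settings/update handler is not part of this diff.
from fastapi import FastAPI, Form
from fastapi.responses import RedirectResponse

app = FastAPI()
settings = {"graph_data_sleep_time": 60, "miner_data_timeout": 5, "miner_identify_timeout": 5}

@app.post("/settings/update")
async def update_settings(
    graph_data_sleep_time: int = Form(...),
    miner_data_timeout: int = Form(...),
    miner_identify_timeout: int = Form(...),
):
    settings.update(
        graph_data_sleep_time=graph_data_sleep_time,
        miner_data_timeout=miner_data_timeout,
        miner_identify_timeout=miner_identify_timeout,
    )
    # send the browser back to the settings page after saving
    return RedirectResponse("/settings", status_code=303)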

View File

@@ -0,0 +1,352 @@
from ipaddress import ip_address
import asyncio
import os
import logging
import datetime

from network import ping_miner
from miners.miner_factory import MinerFactory
from miners.antminer.S9.bosminer import BOSMinerS9
from tools.web_testbench.connections import ConnectionManager
from tools.web_testbench.feeds import get_local_versions
from settings import NETWORK_PING_TIMEOUT as PING_TIMEOUT

REFERRAL_FILE_S9 = os.path.join(os.path.dirname(__file__), "files", "referral.ipk")
UPDATE_FILE_S9 = os.path.join(os.path.dirname(__file__), "files", "update.tar")
CONFIG_FILE = os.path.join(os.path.dirname(__file__), "files", "config.toml")

# static states
(START, UNLOCK, INSTALL, UPDATE, REFERRAL, DONE, ERROR) = range(7)


class TestbenchMiner:
    def __init__(self, host: ip_address):
        self.host = host
        self.state = START
        self.latest_version = None
        self.start_time = None

    async def get_bos_version(self):
        miner = await MinerFactory().get_miner(self.host)
        result = await miner.send_ssh_command("cat /etc/bos_version")
        version_base = result.stdout.strip().split("-")
        version = version_base[-2]
        return version

    def get_online_time(self):
        online_time = "0:00:00"
        if self.start_time:
            online_time = str(datetime.datetime.now() - self.start_time).split(".")[0]
        return online_time

    async def add_to_output(self, message):
        data = {
            "IP": str(self.host),
            "text": str(message).replace("\r", "") + "\n",
            "Light": "hide",
            "online": self.get_online_time(),
        }
        await ConnectionManager().broadcast_json(data)
        return

    async def remove_from_cache(self):
        # MinerFactory().miners is keyed by IP, so drop the cached entry with del
        # (the original called .remove(), which is not a dict method)
        if self.host in MinerFactory().miners.keys():
            del MinerFactory().miners[self.host]

    async def wait_for_disconnect(self, wait_time: int = 1):
        await self.add_to_output("Waiting for disconnect...")
        while await ping_miner(self.host):
            await asyncio.sleep(wait_time)
        self.state = START

    async def install_start(self):
        try:
            if not await ping_miner(self.host, 80):
                await self.add_to_output("Waiting for miner connection...")
                return
        except asyncio.exceptions.TimeoutError:
            await self.add_to_output("Waiting for miner connection...")
            return
        self.start_time = datetime.datetime.now()
        await ConnectionManager().broadcast_json(
            {"IP": str(self.host), "Light": "hide", "online": self.get_online_time()}
        )
        await self.remove_from_cache()
        miner = await MinerFactory().get_miner(self.host)
        await self.add_to_output("Found miner: " + str(miner))
        if isinstance(miner, BOSMinerS9):
            try:
                if await self.get_bos_version() == self.latest_version:
                    await self.add_to_output(
                        f"Already running the latest version of BraiinsOS, {self.latest_version}, configuring."
                    )
                    self.state = REFERRAL
                    return
            except AttributeError:
                return
            await self.add_to_output("Already running BraiinsOS, updating.")
            self.state = UPDATE
            return
        if await ping_miner(self.host, 22):
            await self.add_to_output("Miner is unlocked, installing.")
            self.state = INSTALL
            return
        await self.add_to_output("Miner needs unlock, unlocking.")
        self.state = UNLOCK

    async def install_unlock(self):
        if await self.ssh_unlock():
            await self.add_to_output("Unlocked miner, installing.")
            self.state = INSTALL
            return
        await self.add_to_output("Failed to unlock miner, please pin reset.")
        self.state = START
        await self.wait_for_disconnect()

    async def ssh_unlock(self):
        proc = await asyncio.create_subprocess_shell(
            f'{os.path.join(os.path.dirname(__file__), "files", "asicseer_installer.exe")} -p -f {str(self.host)} root',
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        stdout, stderr = await proc.communicate()
        if str(stdout).find("webUI") != -1:
            return False
        return True

    async def fix_file_exists_bug(self):
        miner = await MinerFactory().get_miner(self.host)
        await miner.send_ssh_command(
            "rm /lib/ld-musl-armhf.so.1; rm /usr/lib/openssh/sftp-server; rm /usr/sbin/fw_printenv"
        )

    async def do_install(self):
        await self.add_to_output("Running install...")
        error = None
        proc = await asyncio.create_subprocess_shell(
            f'{os.path.join(os.path.dirname(__file__), "files", "bos-toolbox", "bos-toolbox.bat")} install {str(self.host)} --no-keep-pools --psu-power-limit 900 --no-nand-backup --feeds-url file:./feeds/',
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            stdin=asyncio.subprocess.PIPE,
        )
        # get stdout of the install
        stdout = None
        await self.add_to_output("Getting output...")
        while True:
            try:
                stdout = await asyncio.wait_for(proc.stderr.readuntil(b"\r"), 20)
            except asyncio.exceptions.IncompleteReadError:
                break
            except asyncio.exceptions.TimeoutError:
                if not stdout:
                    await self.add_to_output("Miner encountered an error when installing, attempting to re-unlock. If this fails, you may need to factory reset the miner.")
                    self.state = UNLOCK
                    proc.kill()
                    return
                continue
            stdout_data = stdout.decode("utf-8").strip()
            if "ERROR:File" in stdout_data:
                error = "FILE"
            if "ERROR:Auth" in stdout_data:
                error = "AUTH"
                proc.kill()
            await self.add_to_output(stdout_data)
            if stdout == b"":
                break
        await self.add_to_output("Waiting for process to complete...")
        await proc.wait()
        if not error:
            await self.add_to_output("Waiting for miner to finish rebooting...")
            while not await ping_miner(self.host):
                await asyncio.sleep(3)
            await asyncio.sleep(5)
        if error == "FILE":
            await self.add_to_output("Encountered error, attempting to fix.")
            await self.fix_file_exists_bug()
            self.state = START
            return
        elif error == "AUTH":
            await self.add_to_output("Encountered unlock error, please pin reset.")
            self.state = ERROR
            return
        await self.add_to_output("Install complete, configuring.")
        self.state = REFERRAL

    async def install_update(self):
        await self.add_to_output("Updating miner...")
        await self.remove_from_cache()
        miner = await MinerFactory().get_miner(self.host)
        try:
            await miner.send_file(UPDATE_FILE_S9, "/tmp/firmware.tar")
            await miner.send_ssh_command("sysupgrade /tmp/firmware.tar")
        except Exception as e:
            logging.warning(f"{str(self.host)} Exception: {e}")
            await self.add_to_output("Failed to update, restarting.")
            self.state = START
            return
        await asyncio.sleep(10)
        await self.add_to_output("Update complete, configuring.")
        self.state = REFERRAL

    async def install_referral(self):
        while not await ping_miner(self.host):
            await asyncio.sleep(1)
        miner = await MinerFactory().get_miner(self.host)
        try:
            await miner.send_file(REFERRAL_FILE_S9, "/tmp/referral.ipk")
            await miner.send_file(CONFIG_FILE, "/etc/bosminer.toml")
            await miner.send_ssh_command(
                "opkg install /tmp/referral.ipk && /etc/init.d/bosminer restart"
            )
        except Exception:
            await self.add_to_output("Failed to add referral and configure, restarting.")
            self.state = START
            return
        await asyncio.sleep(5)
        await self.add_to_output("Configuration complete.")
        self.state = DONE

    async def get_web_data(self):
        miner = await MinerFactory().get_miner(self.host)
        if not isinstance(miner, BOSMinerS9):
            await self.add_to_output("Miner type changed, restarting.")
            self.state = START
            return
        try:
            all_data = await miner.api.multicommand("devs", "temps", "fans", "tunerstatus")
            devs_raw = all_data["devs"][0]
            temps_raw = all_data["temps"][0]
            fans_raw = all_data["fans"][0]
            tunerstatus_raw = all_data["tunerstatus"][0]

            # parse temperature data
            temps_data = {}
            for board in range(len(temps_raw["TEMPS"])):
                temps_data[f"board_{temps_raw['TEMPS'][board]['ID']}"] = {
                    "Board": temps_raw["TEMPS"][board]["Board"],
                    "Chip": temps_raw["TEMPS"][board]["Chip"],
                }
            if len(temps_data.keys()) < 3:
                for board in [6, 7, 8]:
                    if f"board_{board}" not in temps_data.keys():
                        temps_data[f"board_{board}"] = {"Chip": 0, "Board": 0}
            # fill in missing board and chip temperature data
            for board in temps_data.keys():
                if "Board" not in temps_data[board].keys():
                    temps_data[board]["Board"] = 0
                if "Chip" not in temps_data[board].keys():
                    temps_data[board]["Chip"] = 0

            # parse hashrate data
            hr_data = {}
            for board in range(len(devs_raw["DEVS"])):
                hr_data[f"board_{devs_raw['DEVS'][board]['ID']}"] = {
                    "HR": round(devs_raw["DEVS"][board]["MHS 5s"] / 1000000, 2)
                }
            if len(hr_data.keys()) < 3:
                print(devs_raw["DEVS"])
                for board in [6, 7, 8]:
                    if f"board_{board}" not in hr_data.keys():
                        hr_data[f"board_{board}"] = {"HR": 0}

            # parse fan data
            fans_data = {}
            for fan in range(len(fans_raw["FANS"])):
                fans_data[f"fan_{fans_raw['FANS'][fan]['ID']}"] = {
                    "RPM": fans_raw["FANS"][fan]["RPM"]
                }

            # parse tuner data
            tuner_data = {}
            if tunerstatus_raw:
                for board in tunerstatus_raw["TUNERSTATUS"][0]["TunerChainStatus"]:
                    tuner_data[f"board_{board['HashchainIndex']}"] = {
                        "power_limit": board["PowerLimitWatt"],
                        "real_power": board["ApproximatePowerConsumptionWatt"],
                        "status": board["Status"],
                    }
            if len(tuner_data.keys()) < 3:
                for board in [6, 7, 8]:
                    if f"board_{board}" not in tuner_data.keys():
                        # fill missing boards in tuner_data (the original wrote these
                        # placeholder entries into temps_data by mistake)
                        tuner_data[f"board_{board}"] = {
                            "power_limit": 0,
                            "real_power": 0,
                            "status": "ERROR: No board found!",
                        }

            # set the miner data
            miner_data = {
                "IP": str(self.host),
                "Light": "show",
                "Fans": fans_data,
                "HR": hr_data,
                "Temps": temps_data,
                "online": self.get_online_time(),
                "Tuner": tuner_data,
            }
            # return stats
            return miner_data
        except Exception:
            return

    async def install_done(self):
        await self.add_to_output("Waiting for disconnect...")
        try:
            while (
                await asyncio.wait_for(ping_miner(self.host), PING_TIMEOUT + 3)
                and self.state == DONE
            ):
                data = await self.get_web_data()
                await ConnectionManager().broadcast_json(data)
                await asyncio.sleep(1)
        except Exception:
            self.state = START
            await self.add_to_output("Miner disconnected, waiting for new miner.")
            self.start_time = None
            return
        self.state = START
        await self.add_to_output("Miner disconnected, waiting for new miner.")
        self.start_time = None

    async def install_loop(self):
        self.latest_version = sorted(await get_local_versions(), reverse=True)[0]
        while True:
            try:
                if self.state == START:
                    self.start_time = None
                    await self.install_start()
                if self.state == UNLOCK:
                    await self.install_unlock()
                if self.state == INSTALL:
                    await self.do_install()
                if self.state == UPDATE:
                    await self.install_update()
                if self.state == REFERRAL:
                    await self.install_referral()
                if self.state == DONE:
                    await self.install_done()
                if self.state == ERROR:
                    await self.wait_for_disconnect(wait_time=5)
            except Exception as e:
                logging.error(f"{self.host}: {e}")
                await self.add_to_output(f"Error: {e}")

View File

@@ -0,0 +1,3 @@
from network import MinerNetwork
miner_network = MinerNetwork("192.168.1.11-192.168.1.34").get_network()

View File

@@ -0,0 +1,86 @@
from fastapi import FastAPI, WebSocket, Request
from fastapi.websockets import WebSocketDisconnect
import asyncio
from fastapi.staticfiles import StaticFiles
import uvicorn
import os
from fastapi.templating import Jinja2Templates

from tools.web_testbench.feeds import update_installer_files
from miners.miner_factory import MinerFactory
from tools.web_testbench.connections import ConnectionManager
from tools.web_testbench._miners import TestbenchMiner
from tools.web_testbench._network import miner_network

app = FastAPI()
app.mount(
    "/public",
    StaticFiles(directory=os.path.join(os.path.dirname(__file__), "public")),
    name="public",
)

templates = Jinja2Templates(
    directory=os.path.join(os.path.dirname(__file__), "templates")
)


@app.websocket("/ws")
async def ws(websocket: WebSocket):
    await ConnectionManager().connect(websocket)
    try:
        while True:
            data = await websocket.receive_json()
            if "IP" in data.keys():
                miner = await MinerFactory().get_miner(data["IP"])
                try:
                    if data["Data"] == "unlight":
                        if data["IP"] in ConnectionManager().lit_miners:
                            ConnectionManager().lit_miners.remove(data["IP"])
                        await miner.fault_light_off()
                    if data["Data"] == "light":
                        if data["IP"] not in ConnectionManager().lit_miners:
                            ConnectionManager().lit_miners.append(data["IP"])
                        await miner.fault_light_on()
                except AttributeError:
                    await ConnectionManager().broadcast_json(
                        {
                            "IP": data["IP"],
                            "text": "Fault light command failed, miner is not running BraiinsOS.",
                        }
                    )
    except WebSocketDisconnect:
        ConnectionManager().disconnect(websocket)
    except RuntimeError:
        ConnectionManager().disconnect(websocket)


@app.get("/")
def dashboard(request: Request):
    return templates.TemplateResponse(
        "index.html",
        {
            "request": request,
        },
    )


@app.on_event("startup")
async def update_installer():
    await update_installer_files()


@app.on_event("startup")
def start_install():
    asyncio.create_task(install())


async def install():
    for host in miner_network.hosts():
        miner = TestbenchMiner(host)
        asyncio.create_task(miner.install_loop())


if __name__ == "__main__":
    uvicorn.run("app:app", host="0.0.0.0", port=80)

View File

@@ -0,0 +1,43 @@
from fastapi import WebSocket
import logging

from miners.miner_factory import MinerFactory
from tools.web_testbench._network import miner_network


class ConnectionManager:
    _instance = None
    _connections = []
    lit_miners = []

    def __new__(cls):
        if not cls._instance:
            cls._instance = super(ConnectionManager, cls).__new__(cls)
        return cls._instance

    async def connect(self, websocket: WebSocket):
        await websocket.accept()
        miners = []
        for host in miner_network.hosts():
            if str(host) in ConnectionManager.lit_miners:
                miners.append({"IP": str(host), "Light_On": True})
            else:
                miners.append({"IP": str(host), "Light_On": False})
        await websocket.send_json({"miners": miners})
        ConnectionManager._connections.append(websocket)

    def disconnect(self, websocket: WebSocket):
        logging.info("Disconnected")
        ConnectionManager._connections.remove(websocket)

    async def broadcast_json(self, data: dict):
        # iterate over a copy so disconnect() can remove dead connections
        # without mutating the list being iterated
        for connection in list(ConnectionManager._connections):
            try:
                await connection.send_json(data)
            except Exception:
                self.disconnect(connection)
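Because __new__ always hands back the same instance, every module that constructs ConnectionManager() shares one connection list and one lit_miners list, which is why _miners.py above can broadcast without being passed a manager object. A quick check of that behaviour:

# The __new__ override makes the class a process-wide singleton:
a = ConnectionManager()
b = ConnectionManager()
assert a is b                         # same object everywhere
assert a.lit_miners is b.lit_miners   # shared state across callers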

Some files were not shown because too many files have changed in this diff.