Compare commits


450 Commits

Author SHA1 Message Date
UpstreamData
c9e7fa2629 bump version number 2022-07-22 13:05:13 -06:00
UpstreamData
9d3f2b5968 add support for M20 versions and update docs 2022-07-22 13:04:47 -06:00
UpstreamData
283e3d5e11 bump version number 2022-07-21 08:43:30 -06:00
UpstreamData
add4b575c2 update shields and improve typing and handling of fault light checks 2022-07-21 08:42:35 -06:00
UpstreamData
af2f1e9ad5 misc docs changes 2022-07-20 14:54:59 -06:00
UpstreamData
8258320a7b fix a bug with avalonminer imports and bump version number 2022-07-20 14:42:33 -06:00
UpstreamData
a5dc7f485b bump version number 2022-07-20 14:37:04 -06:00
UpstreamData
025b5bf6f0 improved avalonminer handler and added fault light to get_data 2022-07-20 14:36:13 -06:00
UpstreamData
3d3064d78e improve some type hinting compatibility 2022-07-20 11:19:13 -06:00
UpstreamData
2e3991355b Update README.md 2022-07-20 10:05:23 -06:00
UpstreamData
73a4cf5834 bump version number 2022-07-19 16:16:56 -06:00
UpstreamData
b120064e80 fixed a bug with miner factory not handling ConnectionRefused errors properly 2022-07-19 16:16:36 -06:00
UpstreamData
3ec833e700 add copyright license, using Apache 2.0 license 2022-07-19 15:43:24 -06:00
UpstreamData
29aeea1194 bump version number 2022-07-19 13:02:55 -06:00
UpstreamData
994d53ae3b removed arbitrary scan thread limitation dividing 2022-07-19 13:01:39 -06:00
UpstreamData
a95333eb1c removed arbitrary scan thread limitation dividing 2022-07-19 13:01:28 -06:00
UpstreamData
c5f2d71791 improved the speed of scanning by only checking secondary ports if the 4028 connection is refused 2022-07-19 13:00:15 -06:00
UpstreamData
26ae6ebfb2 bump version number 2022-07-19 11:19:32 -06:00
UpstreamData
e65cb0573d update miner factory to handle some types of stock fw S9s 2022-07-19 11:18:55 -06:00
UpstreamData
f8590b0c5f improve more typing 2022-07-18 14:46:17 -06:00
UpstreamData
43b4992cee improve logging and some documentation 2022-07-18 14:38:54 -06:00
UpstreamData
98e2cfae84 bump version number 2022-07-18 12:05:44 -06:00
UpstreamData
cb01c1a8ee update network to scan fast even if some miners are not responding properly 2022-07-18 12:05:22 -06:00
UpstreamData
36a273ec2b bump version number 2022-07-18 11:45:14 -06:00
UpstreamData
6a0dc03b9d update to a better way to handle settings 2022-07-18 11:44:22 -06:00
UpstreamData
ce7b006c8f bump version number 2022-07-18 11:23:15 -06:00
UpstreamData
88cc05bcea handle for BraiinsOS miners that dont have bosminer running for some reason 2022-07-18 11:21:40 -06:00
UpstreamData
ae749f4a90 add additional scan ports as backups in case 4028 doesn't respond 2022-07-18 10:03:39 -06:00
UpstreamData
36b30a2cdd added supported miners to the docs 2022-07-14 16:32:43 -06:00
UpstreamData
ae9f103578 bump version number 2022-07-14 11:49:34 -06:00
UpstreamData
13b583b739 fixed some bugs and added support for M20Sv10 and 20 2022-07-14 11:39:55 -06:00
UpstreamData
aaf0d7fa75 bump version number 2022-07-14 09:47:36 -06:00
UpstreamData
a8cbb6394e fix a bug with ints being passed to miner network 2022-07-14 09:45:21 -06:00
UpstreamData
ca6980b1ad update documentation and add docs from config 2022-07-13 16:17:08 -06:00
UpstreamData
c6c87a864d fix pyproject.toml 2022-07-13 14:29:06 -06:00
UpstreamData
ed17a0f436 update documentation and bump version number 2022-07-13 14:25:12 -06:00
UpstreamData
36fead3dd1 Update README.md 2022-07-13 11:57:33 -06:00
UpstreamData
ecb16c10ca Update README.md 2022-07-13 11:56:28 -06:00
UpstreamData
a540db3246 update docs requirements 2022-07-13 11:28:12 -06:00
UpstreamData
81a2f99fbf add jinja 2 to docs requirements to fix mkdocs bug 2022-07-13 11:22:00 -06:00
UpstreamData
1dd9f742ad Revert "dealete readthedocs.yaml"
This reverts commit 7bd6a0f136.
2022-07-13 11:18:56 -06:00
UpstreamData
7bd6a0f136 dealete readthedocs.yaml 2022-07-13 11:17:53 -06:00
UpstreamData
7297f12e88 update readthedocs info 2022-07-13 11:15:34 -06:00
UpstreamData
0e009c3a16 add readthedocs info 2022-07-13 11:12:29 -06:00
UpstreamData
95b0cc364b add miner data documentation 2022-07-13 11:08:12 -06:00
UpstreamData
2dcc4f0cfc add docs for miner factory and miner network 2022-07-13 10:52:42 -06:00
UpstreamData
d7e9498018 add docs for the rest of the APIs 2022-07-13 10:11:05 -06:00
UpstreamData
0324a21e79 add bmminer docs 2022-07-13 09:13:21 -06:00
UpstreamData
5700bd1c9c start adding some basic documentation 2022-07-12 16:25:05 -06:00
UpstreamData
abc6494f18 bump version number 2022-07-12 11:58:59 -06:00
UpstreamData
5de8fc064e fix a bug with hashrate parsing on braiins os devices with kh hashrates 2022-07-12 11:58:21 -06:00
UpstreamData
c9d620105b add support for braiins OS errors 2022-07-12 11:55:42 -06:00
UpstreamData
5d6fc5b26d add support for whatsminer error codes in get_data() 2022-07-12 11:41:38 -06:00
UpstreamData
6bd319355d bump version number 2022-07-12 10:26:25 -06:00
UpstreamData
31827e7dd1 fix a bug with old versions of bosminer returning not ready from fans 2022-07-12 10:25:48 -06:00
UpstreamData
26961a5d8c bump version number 2022-07-11 15:02:45 -06:00
UpstreamData
2ff09a3765 add support for getting hashrates from each board for bosminer, bmminer, and btminer 2022-07-11 15:02:04 -06:00
UpstreamData
18c26adbb6 remove dockerignore 2022-07-11 13:23:19 -06:00
UpstreamData
4bfafabe9d bump version number 2022-07-11 10:45:34 -06:00
UpstreamData
19e6ed90ec update README.md 2022-07-11 10:28:18 -06:00
UpstreamData
eca60d1eae improved whatsminer power limit handling 2022-07-11 10:15:55 -06:00
UpstreamData
8b8a592308 bump version number to 0.9.3 2022-07-11 08:29:36 -06:00
UpstreamData
c3de4188d6 Merge remote-tracking branch 'origin/master'
# Conflicts:
#	pyproject.toml
2022-07-11 08:25:36 -06:00
UpstreamData
490138fd1a bump version number 2022-07-11 08:25:24 -06:00
UpstreamData
f566b7fcb9 bump version number 2022-07-11 08:23:04 -06:00
upstreamdata
7fb4237e51 update publish workflow to use correct secret name 2022-07-07 15:44:28 -06:00
upstreamdata
eeffdecde1 update publish workflow 2022-07-07 15:34:26 -06:00
UpstreamData
477a411c87 Create python-publish.yml 2022-07-07 15:12:38 -06:00
upstreamdata
ca77573624 update pyproject.toml 2022-07-07 14:40:07 -06:00
upstreamdata
3ec147990b added power limit vs power draw in get_data 2022-07-07 14:34:21 -06:00
upstreamdata
082240bdb6 add some missing imports 2022-07-07 14:30:19 -06:00
UpstreamData
7a7fc2c5a6 Update README.md 2022-07-07 08:09:00 -06:00
UpstreamData
dcc3e07998 Dev (#12)
* changed over to package format and removed tools, added poetry

* reformat into miner_interface project

* add dist to .gitignore

* update readme and finish reformatting

* Added couple missing imports. (#13)

* change name to pyasic

Co-authored-by: upstreamdata <brett@upstreamdata.ca>
Co-authored-by: Mika Impola <mika@impola.fi>
2022-07-07 07:57:34 -06:00
UpstreamData
5261b00aad fixed logfile in settings to allow for adding or removing a logfile 2022-06-22 13:28:37 -06:00
UpstreamData
f18d37a19e add gitignore and fix a small bug with settings if the file doesn't exist 2022-06-14 09:42:20 -06:00
UpstreamData
7c3af3da41 fixed a bug with old bosminers not updating properly 2022-06-10 13:21:31 -06:00
UpstreamData
8948af55f2 fixed a small bug with bosminer MAC 2022-06-10 11:30:24 -06:00
UpstreamData
dd8fe41ad1 added estimate env temp for X19 and change format of X19 and X17 files 2022-06-10 11:22:41 -06:00
UpstreamData
198eedcd43 added env_temp for whatsminers 2022-06-10 11:03:09 -06:00
UpstreamData
f7309decdb finish adding support for a bunch of new avalonminers 2022-06-09 14:38:51 -06:00
UpstreamData
078579d8e1 add a ton of new avalonminers to be added to miner factory later. 2022-06-09 14:10:12 -06:00
UpstreamData
39eeb13409 improved the implementation of fault lights on avalonminers by fixing a bad implementation of ascset. 2022-06-09 13:49:15 -06:00
UpstreamData
dfccd67ccb added fault lights to 1066 miners, and framework for configuring (although it may not work, the documentation implementation is broken) 2022-06-08 15:43:34 -06:00
UpstreamData
10949225c0 fix generate report pie chart to fix overlapping labels when all boards are working 2022-06-08 10:59:52 -06:00
UpstreamData
3a60a3584a added support for avalon 1066 miners 2022-06-08 10:42:19 -06:00
UpstreamData
480aab550c added advanced config file generator 2022-06-07 15:55:43 -06:00
UpstreamData
fa83e61249 fix a bug with config tool generating configs 2022-06-07 14:47:21 -06:00
UpstreamData
2f3411e12d add documentation for MinerConfig 2022-06-07 13:17:44 -06:00
UpstreamData
3e7311687e Update README.md 2022-06-07 12:01:31 -06:00
UpstreamData
bc2d549ce5 moved MinerConfig to config.__init__.py and removed old config methods 2022-06-07 11:50:36 -06:00
UpstreamData
3d31d89c9e update dev-requirements.txt 2022-06-07 11:43:00 -06:00
UpstreamData
15fc27e6fa added configuration for X19 miners 2022-06-07 11:12:26 -06:00
UpstreamData
943ebc77a1 switch braiins miners over to using new config dataclass 2022-06-07 10:49:41 -06:00
UpstreamData
733437ef03 create basic config dataclass to be used to configure miners 2022-06-06 16:05:09 -06:00
UpstreamData
b444245e98 added new whatsminer types to miner factory 2022-06-06 10:09:11 -06:00
UpstreamData
481d31a0f1 added more new whatsminer types 2022-06-06 10:06:17 -06:00
UpstreamData
264db3bdd6 fix a bug with whatsminer M21S missing import 2022-06-06 09:41:10 -06:00
UpstreamData
d292b9c195 improved whatsminer handling, and added VF20 to miner dict 2022-06-06 09:26:38 -06:00
UpstreamData
dce25a679f added new miner type M30S+VF20 2022-06-06 09:17:42 -06:00
UpstreamData
c903631742 improved build process 2022-06-06 09:17:22 -06:00
Colin Crossman
e70bfdc886 Fix indent issue that caused missing MAC addresses (#10) 2022-06-05 15:50:07 -06:00
UpstreamData
8e1803add1 made slight optimizations to get_data and the way the miner gets mac data 2022-06-03 15:30:09 -06:00
UpstreamData
7d61056ea3 added whatsminer M30S+ VE40 2022-06-03 15:00:04 -06:00
UpstreamData
0d497baa45 added mac for M20 series 2022-06-03 14:55:03 -06:00
UpstreamData
d3a71c5a93 added mac addresses to get_data 2022-06-03 14:29:10 -06:00
UpstreamData
895a5b7ac8 fixed more bugs with whatsminers and added more versions 2022-06-03 11:20:34 -06:00
UpstreamData
7a5a0b287c fixed a bug with some versions of whatsminer and improved logging 2022-06-03 09:35:55 -06:00
UpstreamData
c7d73276c8 fixed a small bug with sorting 2022-06-03 08:59:15 -06:00
UpstreamData
4bbb9d0b08 added a basis for configuration of X17 and X19 miners by getting pool info from config file. 2022-06-02 16:06:36 -06:00
UpstreamData
3ee49e6fd7 fixed a warning with ylim being set to 0 2022-06-02 14:52:17 -06:00
UpstreamData
dcd3e99d73 added interval to recording 2022-06-02 14:25:55 -06:00
UpstreamData
64018cdad8 completed basic recording functionality 2022-06-02 14:17:08 -06:00
UpstreamData
e7d269008c added the basics of the recording functionality, just need to write out to file. 2022-06-02 11:08:14 -06:00
UpstreamData
7dfe25e5d2 added base for recording miner data to pdf file. 2022-06-01 16:13:30 -06:00
UpstreamData
382f9cff76 added reboot command for X19 and X17 models on BMMiner 2022-06-01 14:02:34 -06:00
UpstreamData
a5195ff1db fix a bug with testbench where toolbox finds braiins but bench does not 2022-06-01 11:44:07 -06:00
UpstreamData
b1ec726d18 added some docstrings to data 2022-06-01 11:22:30 -06:00
UpstreamData
5ae2cb2b22 fixed a bug with not all table data getting reset on data update 2022-06-01 11:22:12 -06:00
UpstreamData
472a15f4ca added fault light function for X17 BMMiner models 2022-06-01 10:54:45 -06:00
UpstreamData
7cc7973587 fixed a bug with some BOS S17e not returning data frm devdetails and fans 2022-06-01 10:19:58 -06:00
UpstreamData
ab964e4c88 fixed a bug with sorting by chip % 2022-06-01 08:15:35 -06:00
UpstreamData
4087874b4a added get hostname to X19 miners 2022-05-31 17:05:05 -06:00
UpstreamData
844deec0d3 add fault light command to X19 miners 2022-05-31 16:54:56 -06:00
UpstreamData
d36eef4c33 switched to httpx 2022-05-31 16:08:17 -06:00
UpstreamData
69d4ee5570 Revert "add .readthedocs.yaml"
This reverts commit e7b01ccdab.
2022-05-31 13:23:41 -06:00
UpstreamData
e6d3ec01fe Merge remote-tracking branch 'origin/master' 2022-05-31 13:18:59 -06:00
UpstreamData
e7b01ccdab add .readthedocs.yaml 2022-05-31 13:18:52 -06:00
UpstreamData
38506903ea fixed an issue with BMMiner get data and bosminer get data not identifying correct board number. 2022-05-31 08:45:49 -06:00
UpstreamData
c9a1560052 Merge remote-tracking branch 'origin/master' 2022-05-30 14:20:48 -06:00
UpstreamData
88f8ff10b7 fixed a bug with sorting 2022-05-30 14:20:37 -06:00
UpstreamData
11d38c9c3b fixed a bug with sorting 2022-05-30 14:19:57 -06:00
UpstreamData
0082037f45 add dev-requirements and remove cx-freeze from requirements 2022-05-30 13:29:47 -06:00
UpstreamData
dd5ccafa1e added listener function to cfg util 2022-05-30 13:27:56 -06:00
UpstreamData
739126935a fixed some bugs with differing version of BTMiner and different versions of M30S++ having different chip counts 2022-05-30 11:13:37 -06:00
UpstreamData
5c850a43a9 ignore errors with S19 multicommands 2022-05-30 09:46:05 -06:00
UpstreamData
24b037f273 fixed a bug with bmminer stats 2022-05-30 09:40:30 -06:00
UpstreamData
f847700c05 fixed a bug with antminers not reporting type because of fans in testbench, and added a long running get data for long tests 2022-05-27 11:54:51 -06:00
UpstreamData
69820dd9d2 slightly improved getting data from bmminer X9, X17, and X19 with an improvement to finding offset 2022-05-27 11:47:44 -06:00
UpstreamData
ad4b710cb7 miner is no longer cached in miner factory if it is unknown 2022-05-27 11:05:35 -06:00
UpstreamData
c53c18654b improved bmminer with a fan and board offset 2022-05-27 11:01:25 -06:00
UpstreamData
18797f4b56 added S9 data for bmminer 2022-05-27 10:41:41 -06:00
UpstreamData
e86c93e287 fixed refreshing data 2022-05-26 16:19:15 -06:00
UpstreamData
89cfde28f5 added chips for M30S 2022-05-26 16:05:48 -06:00
UpstreamData
0f2a867828 fix wrong import from collections 2022-05-26 15:53:38 -06:00
UpstreamData
4f5aef2d45 update some type hints and comments in miner factory, and remove some uneeded imports 2022-05-26 15:51:57 -06:00
UpstreamData
96801f93d1 made fault lights and async generator to make them much faster 2022-05-26 15:41:41 -06:00
UpstreamData
a8ce73c3d6 fixed an issue with the windows event loop not working with asyncio.create_subprocess_shell 2022-05-26 15:25:04 -06:00
UpstreamData
513dd2b981 fixed abug with testbench not removing miners when there were 0 online 2022-05-26 13:48:49 -06:00
UpstreamData
c35b30e949 fixed formatting issues 2022-05-26 13:23:32 -06:00
UpstreamData
942f2a1c8d improved type hinting and formatting in miner factory 2022-05-26 12:14:28 -06:00
UpstreamData
9078df680e added get_data to web_monitor dashboard 2022-05-26 11:15:01 -06:00
UpstreamData
527997cc58 fixed a bug with refreshing data 2022-05-26 11:13:39 -06:00
UpstreamData
41433bcaf5 change hashrate data to 1m as it seems more consistent, and add get_data to web monitor 2022-05-26 11:12:31 -06:00
UpstreamData
3451b88669 added temps and fans for bmminer and cgminer 2022-05-26 10:57:52 -06:00
UpstreamData
a42af2764e fixed a bug with ideal chips not getting set fast enough 2022-05-26 10:43:10 -06:00
UpstreamData
baaad73eb8 fixed a bug with pool prefix not getting removed when getting data 2022-05-26 10:39:39 -06:00
UpstreamData
34c9f85098 added btminer fan data and per chip temps 2022-05-26 10:36:39 -06:00
UpstreamData
d6638fa4d2 added fan counts to miners, and added more data to bosminer and miner data 2022-05-26 10:26:40 -06:00
UpstreamData
0f51487d3f added miner data to base miner 2022-05-26 08:41:51 -06:00
UpstreamData
3a11b173c3 added chip percent to config tool 2022-05-25 15:02:48 -06:00
UpstreamData
568f86700b improved X19 miner scan speed and implemented miner data in miners 2022-05-25 14:44:23 -06:00
UpstreamData
3b702aac2c improved handling of MinerData by improving dataclass 2022-05-25 14:01:52 -06:00
UpstreamData
6fbd9faffd updated network test to use unittest 2022-05-25 11:49:38 -06:00
UpstreamData
9eb2259aae removed extra print statements and a loop that wasnt needed in miner factory 2022-05-25 09:02:37 -06:00
UpstreamData
149c386a4c fix a bug with X17 not responding 2022-05-25 08:56:02 -06:00
UpstreamData
726e7ff0f0 add support for basic S9is 2022-05-24 14:43:22 -06:00
UpstreamData
87a690eb00 create basic dataclass for miner data 2022-05-20 10:12:51 -06:00
UpstreamData
fd5dba4036 fixed some bugs and improved testbench look 2022-05-19 15:54:29 -06:00
UpstreamData
e54847337a update testbench color palette 2022-05-19 15:31:12 -06:00
UpstreamData
3ff43c3ccd removed old tools that will no longer work 2022-05-19 11:56:12 -06:00
UpstreamData
ec5563f2f0 slightly improved network functionality and added tests for network 2022-05-19 11:55:38 -06:00
UpstreamData
40f14876cc made miner count a fixed position bar 2022-05-19 11:05:40 -06:00
UpstreamData
6abfe8a503 switch testbench to dark mode and add miner count 2022-05-19 10:47:36 -06:00
Dewey Cox
0a4d52ef03 fixed a bug with matplotlib.pyplot.subplots() causing resizing of windows 2022-05-19 09:11:28 -06:00
Colin Crossman
e4207e0120 Allow MinerFactory to take a list of discrete IPs (#7) 2022-05-18 20:16:47 -06:00
UpstreamData
ed89476866 fixed a bug with temp not displaying on the cfg tool 2022-05-18 12:50:44 -06:00
UpstreamData
7f7964526c fixed some bugs with scanning being too fast to get data and killing the tasks 2022-05-18 12:13:20 -06:00
UpstreamData
85b282740a update scanning in web interface 2022-05-18 11:39:14 -06:00
UpstreamData
8cbf3a20a3 make miner ips a true ip address, and allow sorting miners using __lt__ and __gt__ 2022-05-18 11:34:39 -06:00
UpstreamData
8ebcbd3c33 vastly improved scanning in web monitor 2022-05-18 11:12:38 -06:00
UpstreamData
c3e285a9ee fix some bugs in web monitor 2022-05-18 11:06:21 -06:00
UpstreamData
9f19b42de5 fixed some bugs with older versions of braiins OS +, and fixed a bug in testbench 2022-05-17 13:09:10 -06:00
UpstreamData
3d265e823b update README.md 2022-05-17 09:08:50 -06:00
UpstreamData
5e6bc8c8ef add mac addresses for bosminer, and reformat 2022-05-17 08:51:56 -06:00
UpstreamData
871499b77f fix some bugs in miner listener 2022-05-16 16:22:05 -06:00
UpstreamData
117a161fd5 added miner listener to listen for ip reporting 2022-05-16 16:16:22 -06:00
UpstreamData
40bacbf41c add getting mac for whatsminers 2022-05-16 15:01:04 -06:00
UpstreamData
e091863aa7 fixed a bug with suspended whatsminers 2022-05-16 14:06:23 -06:00
UpstreamData
85e8ac63f1 update light column 2022-05-16 13:46:16 -06:00
UpstreamData
a5252e3a84 update README.md 2022-05-16 12:30:04 -06:00
UpstreamData
404d6590db improved resizing and light 2022-05-16 11:55:13 -06:00
UpstreamData
1d04399daf made the window of the cfg util resizeable 2022-05-16 11:35:45 -06:00
UpstreamData
03ebcacca5 added an old version of Bosminer for non plus miners to be able to update 2022-05-16 11:24:32 -06:00
UpstreamData
75934fd7fe fixed another bug with testbench putting miners into recovery mode 2022-05-16 10:50:07 -06:00
UpstreamData
bbeca15799 attempted to fix a bug with testbench 2022-05-16 10:40:28 -06:00
UpstreamData
45befb569b updated a bunch of miner chip counts, added S19a, and fixed a bug with whatsminer M30S++ 2022-05-16 08:42:26 -06:00
UpstreamData
61334ed99e Merge remote-tracking branch 'origin/combine_board_cfg_util' into combine_board_cfg_util 2022-05-13 15:37:27 -06:00
UpstreamData
2bf059df01 remove board util 2022-05-13 15:35:42 -06:00
UpstreamData
9c2de26182 switched over to hashrate av to be more accurate when getting data 2022-05-13 15:35:42 -06:00
UpstreamData
714983cddc added exporting reports from config tool 2022-05-13 15:35:42 -06:00
UpstreamData
191f1d24b9 improved send command functionality with a generator and added progress to it 2022-05-13 15:35:42 -06:00
UpstreamData
5a0bafb964 fixed a bug for scanning S9s with no boards for testing 2022-05-13 15:35:42 -06:00
UpstreamData
67aedd319d update README.md 2022-05-13 15:35:41 -06:00
UpstreamData
44012c50d6 finished updating the miner type handlers to create subclasses of the backend and type to create a miner, each of which handles its own data to simplify creation of new miner types 2022-05-13 15:35:41 -06:00
UpstreamData
06540efc98 changed the way antminers and whatsminers are handled in the factory to allow for more precision on chip counts 2022-05-13 15:35:41 -06:00
UpstreamData
9d0d1a24d9 added S19 board handler 2022-05-13 15:35:41 -06:00
UpstreamData
8568f91482 added btminer board data 2022-05-13 15:35:41 -06:00
UpstreamData
64918e5552 added bosminer board data 2022-05-13 15:35:41 -06:00
UpstreamData
53d5ecd04a added images for boards 2022-05-13 15:35:41 -06:00
UpstreamData
1b0e80a418 added basic framework for boards in config util 2022-05-13 15:35:41 -06:00
UpstreamData
9ad506a313 remove board util 2022-05-13 15:35:11 -06:00
UpstreamData
18c4bbd09c switched over to hashrate av to be more accurate when getting data 2022-05-13 15:31:32 -06:00
UpstreamData
0d123d5dd8 added exporting reports from config tool 2022-05-13 15:23:51 -06:00
UpstreamData
b9b91293fe improved send command functionality with a generator and added progress to it 2022-05-13 14:28:51 -06:00
UpstreamData
47a702c94c fixed a bug for scanning S9s with no boards for testing 2022-05-13 14:03:27 -06:00
UpstreamData
6d5a288120 update README.md 2022-05-13 11:35:37 -06:00
UpstreamData
038aae95ac finished updating the miner type handlers to create subclasses of the backend and type to create a miner, each of which handles its own data to simplify creation of new miner types 2022-05-13 11:27:56 -06:00
UpstreamData
dd84aede25 changed the way antminers and whatsminers are handled in the factory to allow for more precision on chip counts 2022-05-12 16:42:02 -06:00
UpstreamData
dc8ad271de added S19 board handler 2022-05-12 15:16:05 -06:00
UpstreamData
b78c1cdca5 added wattage to configure tab 2022-05-12 14:42:37 -06:00
UpstreamData
0eb7ced932 added btminer board data 2022-05-12 13:20:57 -06:00
UpstreamData
8e58f4492f added bosminer board data 2022-05-12 13:12:54 -06:00
UpstreamData
95fb32de19 added images for boards 2022-05-12 12:35:33 -06:00
UpstreamData
5145dc19f8 added basic framework for boards in config util 2022-05-12 11:29:28 -06:00
UpstreamData
1808d62bba fix references to some table headers 2022-05-11 14:42:47 -06:00
UpstreamData
97ef4dfe37 fixed some bugs with testbench on the latest version 2022-05-11 14:28:53 -06:00
UpstreamData
174a132e75 attempt to fix a bug in testbench 2022-05-11 14:04:05 -06:00
UpstreamData
84d6e58ebe change progress bar completion animation 2022-05-11 13:24:54 -06:00
UpstreamData
e9a1483e5f fixed some small bugs with whatsminers and progress bar 2022-05-11 13:20:14 -06:00
UpstreamData
4eb51eed20 update CFG-Util-README.md 2022-05-11 10:51:36 -06:00
UpstreamData
066fc1a4b3 changed Temperature to Temp and added more spacing to pool user 2022-05-11 08:43:32 -06:00
UpstreamData
cc24236c0a update requirements.txt 2022-05-11 08:40:00 -06:00
UpstreamData
564cd42eae added ctrl c and ctrl a functionality to the tables 2022-05-11 08:37:17 -06:00
UpstreamData
8677eff491 moved miner count and hashrate to top of tool 2022-05-10 14:00:50 -06:00
UpstreamData
63a21ea9aa updated formatting on scrollbars 2022-05-10 13:53:18 -06:00
UpstreamData
1c9d3dc84d updated formatting on page 2022-05-10 13:44:08 -06:00
UpstreamData
0dacd3d294 changed sorting to show up on the table headers 2022-05-10 11:51:26 -06:00
UpstreamData
6fa74613b4 updated look of CFG util 2022-05-10 11:13:27 -06:00
UpstreamData
f7fb7a3acb update requirements.txt 2022-05-09 10:25:25 -06:00
UpstreamData
666c5bfc64 added new text buttons to show total hashrate and current sort key 2022-05-09 10:24:48 -06:00
UpstreamData
1f8d92f6bb fixed some bugs with sorting 2022-05-09 09:59:48 -06:00
UpstreamData
ef336a9e23 added asyncio event loop policy update to fix some bugs 2022-05-09 09:20:21 -06:00
UpstreamData
7fe6fd47fb added sorting to command table (Tree) 2022-05-09 09:14:32 -06:00
UpstreamData
91a0298d96 fix a bug where unknown miners would break configuration 2022-05-06 16:29:07 -06:00
UpstreamData
ed3d8fc815 Merge branch 'pyqt_gui_cfg_util' 2022-05-06 16:22:28 -06:00
UpstreamData
4f2d630746 fix formatting on readme 2022-05-06 16:22:13 -06:00
UpstreamData
a8c685a883 switched cfg_util over to new version 2022-05-06 16:20:02 -06:00
UpstreamData
09660e1934 added indicators of what function is running 2022-05-06 16:12:17 -06:00
UpstreamData
c01908ff9a added custom command functionality 2022-05-06 16:01:50 -06:00
UpstreamData
267c388a95 added restarting and rebooting miner backends 2022-05-06 15:52:21 -06:00
UpstreamData
8215d33241 added configuration button 2022-05-06 15:39:18 -06:00
UpstreamData
f4258a304a add importing configuration from miners 2022-05-06 15:14:49 -06:00
UpstreamData
514fafea58 add generate command and change config converters to non async 2022-05-06 15:06:18 -06:00
UpstreamData
e324369fe0 fixed some bugs with sorting when refreshing data and added refreshing data 2022-05-06 14:55:58 -06:00
UpstreamData
3bc9287668 add scan retries to getting data 2022-05-06 13:51:20 -06:00
UpstreamData
d90bf190c5 added reverse sorting and fixed hashrate sorting 2022-05-06 13:34:12 -06:00
UpstreamData
8cc6f66458 added sorting to the 3 main tables 2022-05-06 12:03:43 -06:00
UpstreamData
a2b071af4f fully implemented fault light command 2022-05-06 11:36:57 -06:00
UpstreamData
b7b589802f added avalon1 1066 to board util tentatively 2022-05-06 09:11:08 -06:00
UpstreamData
93912a6df6 fixed a bug with hashrate data not getting sent with some miners 2022-05-06 08:41:04 -06:00
UpstreamData
ffce15f653 fixed some bugs with latest version of toolbox 2022-05-06 08:41:04 -06:00
UpstreamData
725b14e583 added table manager, to manage tables and handle the treeview 2022-05-05 15:53:13 -06:00
UpstreamData
26c6e47f1e added the ability to update the treeview and images in it no longer are as buggy 2022-05-05 14:47:18 -06:00
UpstreamData
51dae7375f added select all button and functionality 2022-05-05 13:48:57 -06:00
UpstreamData
801cfc4ff8 updated some formatting and improved pool return format 2022-05-05 13:02:00 -06:00
UpstreamData
ac3ff7a63e justify hostname to the left 2022-05-05 12:12:10 -06:00
UpstreamData
1b22810f4b fixed formatting on hashrate 2022-05-05 12:07:57 -06:00
UpstreamData
b756c9e4a1 added getting data for btminer 2022-05-05 11:37:04 -06:00
UpstreamData
64b5e6c032 added getting data for bmminer and cgminer 2022-05-05 11:19:11 -06:00
UpstreamData
a13f5dd2d1 fix some bugs and start adding bmminer get_data function 2022-05-05 10:52:18 -06:00
UpstreamData
e6ea8d3e16 added hostname logging and a generalized get dta function for braiins OS 2022-05-05 10:35:47 -06:00
UpstreamData
af37850289 greatly improved functionality of miner factory 2022-05-05 09:17:20 -06:00
UpstreamData
6ecdfa1cf8 scanning now gets data 2022-05-04 16:04:46 -06:00
UpstreamData
c0b21ebc23 fixed scanning to the tree for commands 2022-05-04 15:06:15 -06:00
UpstreamData
184ada417f added tables and basic scanning 2022-05-04 14:44:19 -06:00
UpstreamData
b636860ecb started basic cfg util changes 2022-05-04 13:08:58 -06:00
UpstreamData
0107fdacde update requirements.txt 2022-05-02 10:36:20 -06:00
UpstreamData
ce5e1cad40 added the option to append the last octet of the IP address to the username when configuring 2022-04-29 15:37:07 -06:00
UpstreamData
d877ba01a0 fix spelling issue 2022-04-29 15:02:54 -06:00
UpstreamData
b0ed990d5a update requirements.txt 2022-04-29 14:38:33 -06:00
UpstreamData
89c8a16900 fix light functionality to work as intended 2022-04-29 13:25:08 -06:00
UpstreamData
247cf0ccc2 added fault light option to the board utility 2022-04-29 10:18:16 -06:00
UpstreamData
d0aa219a7a add first page and pie chart to board report 2022-04-28 11:12:33 -06:00
UpstreamData
87291e2a89 change some formatting with the board report and fix some bugs 2022-04-27 16:58:47 -06:00
UpstreamData
9c88d21db6 add basic board report to board util 2022-04-27 16:35:11 -06:00
UpstreamData
8b7415042f fixed a bug with the webserver 2022-04-25 14:40:32 -06:00
UpstreamData
59ab6e6c8a reformatted and clarified some code 2022-04-21 10:09:30 -06:00
UpstreamData
0724a376ea refactored some code in board util 2022-04-21 09:43:22 -06:00
UpstreamData
f9f26a5587 added better logging and process to testbench 2022-04-20 11:36:09 -06:00
UpstreamData
ed4122fb21 added better logging to testbench 2022-04-19 10:29:13 -06:00
UpstreamData
0739a7f689 added a try except block for logging errors per miner in the testbench 2022-04-19 10:15:12 -06:00
UpstreamData
c7b7a6e7c5 made sure there will always be board 6, 7, and 8 in tunerstatus 2022-04-18 16:12:12 -06:00
UpstreamData
2a132c8325 addded basic tuner status data on testbench 2022-04-18 16:02:21 -06:00
UpstreamData
154882a668 fixed an issue with pinging when done. 2022-04-18 14:29:54 -06:00
UpstreamData
3f64c9dd67 Merge remote-tracking branch 'origin/testbench-webserver' into testbench-webserver
# Conflicts:
#	miners/bosminer.py
#	tools/bad_board_util/func/decorators.py
#	tools/bad_board_util/layout.py
#	tools/bad_board_util/ui.py
2022-04-18 14:17:29 -06:00
UpstreamData
d8d66e4244 fixed a bug with not hiding the light button 2022-04-18 14:17:04 -06:00
UpstreamData
a9cdefcd43 finished adding timer 2022-04-18 14:17:04 -06:00
UpstreamData
029d3ef596 added online timer for testing 2022-04-18 14:17:04 -06:00
UpstreamData
0e474402c0 reformatted files 2022-04-18 14:17:04 -06:00
UpstreamData
b6560cdedb added fixing file exists bug 2022-04-18 14:17:04 -06:00
UpstreamData
767575703e fixed some bugs with finishing the install 2022-04-18 14:17:04 -06:00
UpstreamData
4b4d9060ed changed some printing to logging logs 2022-04-18 14:17:04 -06:00
UpstreamData
ad75b1d25c added web testbench to main apps 2022-04-18 14:17:03 -06:00
UpstreamData
4b767c5427 fixed more bugs 2022-04-18 14:17:03 -06:00
UpstreamData
a6df7a83d6 fixed many remaining bugs in testbench webserver, should be ready for use. 2022-04-18 14:17:03 -06:00
UpstreamData
93f2990399 finished miner install to be tested 2022-04-18 14:17:03 -06:00
UpstreamData
e74f67089e finished light functionality 2022-04-18 14:17:03 -06:00
UpstreamData
41a6078790 added partial fault light functionality and fixed stdout output direction 2022-04-18 14:17:03 -06:00
UpstreamData
4d93926fee added output when running install process 2022-04-18 14:17:03 -06:00
UpstreamData
03f5cafe76 added sending output from miners 2022-04-18 14:17:03 -06:00
UpstreamData
4f6ebff880 set graphs to show and hide when getting data 2022-04-18 14:17:03 -06:00
UpstreamData
af27cbbe2c set graphs to update when receiving data 2022-04-18 14:17:03 -06:00
UpstreamData
3604957c83 added auto port finding to both web apps 2022-04-18 14:17:03 -06:00
UpstreamData
3670a02aec add feeds updater to startup process 2022-04-18 14:17:03 -06:00
UpstreamData
7ebfdb3f33 added feeds auto-updater for web testbench 2022-04-18 14:17:03 -06:00
UpstreamData
b9b7da8746 add base files for web interface 2022-04-18 14:17:03 -06:00
UpstreamData
eaaf137b9b added temp fake data to the app for it to send to the JS side. 2022-04-18 14:15:46 -06:00
UpstreamData
a0311e3ce3 add base files for web interface 2022-04-18 14:15:44 -06:00
UpstreamData
8864aa7b4b added install file to do the basic install 2022-04-18 14:15:24 -06:00
UpstreamData
4d58129eee fixed a bug with not hiding the light button 2022-04-18 13:12:08 -06:00
UpstreamData
4468fe9fbb finished adding timer 2022-04-18 12:29:55 -06:00
UpstreamData
3b716a044b added online timer for testing 2022-04-18 12:13:41 -06:00
UpstreamData
25e657729c reformatted files 2022-04-18 10:24:53 -06:00
UpstreamData
cace399ed2 added fixing file exists bug 2022-04-18 10:13:48 -06:00
UpstreamData
045e1ca6ba fixed some bugs with finishing the install 2022-04-18 09:52:45 -06:00
UpstreamData
4f86dec560 changed some printing to logging logs 2022-04-18 08:49:21 -06:00
UpstreamData
13f033440d added web testbench to main apps 2022-04-14 18:43:36 -06:00
UpstreamData
b5c455ffa4 fixed more bugs 2022-04-14 18:38:29 -06:00
UpstreamData
eb5a00b706 fixed many remaining bugs in testbench webserver, should be ready for use. 2022-04-14 18:17:23 -06:00
UpstreamData
3a560472e6 finished miner install to be tested 2022-04-14 14:40:31 -06:00
UpstreamData
4776dce038 finished light functionality 2022-04-14 13:16:16 -06:00
UpstreamData
2d6891c6d2 added partial fault light functionality and fixed stdout output direction 2022-04-14 11:34:21 -06:00
UpstreamData
f5a41f7b13 added output when running install process 2022-04-14 11:08:52 -06:00
UpstreamData
4a2926df94 added sending output from miners 2022-04-14 10:57:32 -06:00
UpstreamData
8736f33a56 set graphs to show and hide when getting data 2022-04-14 10:43:26 -06:00
UpstreamData
89eb77588f set graphs to update when receiving data 2022-04-14 10:34:51 -06:00
UpstreamData
c930510226 added auto port finding to both web apps 2022-04-14 09:43:43 -06:00
UpstreamData
b7c58e5d34 add feeds updater to startup process 2022-04-14 09:37:06 -06:00
UpstreamData
ce48ae020b added feeds auto-updater for web testbench 2022-04-11 16:13:04 -06:00
UpstreamData
7809bfc0d1 added exporting a report from bad board utility 2022-04-01 15:19:12 -06:00
UpstreamData
d84fcaafdf added bos get version 2022-04-01 13:33:05 -06:00
UpstreamData
a9f600b797 add base files for web interface 2022-03-31 11:32:42 -06:00
UpstreamData
f0a8e7ba9f reformatted all files to use the Black formatting style 2022-03-31 11:30:34 -06:00
UpstreamData
c57a523553 reformatted all files to use the Black formatting style 2022-03-31 11:27:57 -06:00
UpstreamData
d905f6f414 added temp fake data to the app for it to send to the JS side. 2022-03-30 08:42:21 -06:00
UpstreamData
22f78ac405 add base files for web interface 2022-03-25 16:02:50 -06:00
UpstreamData
7a098b1c7e added install file to do the basic install 2022-03-25 15:29:30 -06:00
UpstreamData
e1383f2002 Added support for X19 models with BraiinsOS 2022-03-25 09:06:25 -06:00
UpstreamData
c3b23313ba added changing model when configuring for BOS S9s 2022-03-25 08:58:02 -06:00
UpstreamData
02581e917d add temperature graph to miner page 2022-03-21 10:02:11 -06:00
UpstreamData
e267073f76 add the start of a temperature graph to miner page 2022-03-21 09:39:54 -06:00
UpstreamData
4038dae446 fixed some bugs on linux with pipes 2022-03-18 12:02:42 -06:00
UpstreamData
134b5fe0ff added CTRL+A select all binding to cfg util and board util tables 2022-03-17 16:10:12 -06:00
UpstreamData
d452ca36b7 fixed copying from the board util table 2022-03-17 16:05:48 -06:00
UpstreamData
fdec35cd2e added disable button decorator to board util 2022-03-17 16:01:02 -06:00
UpstreamData
d488c8458c added the ability to scan a range of IPs as part of the miner network by passing a string formatted as {ip_range_1_start}-{ip_range_1_end}, {ip_range_2_start}-{ip_range_2_end} to the miner network 2022-03-17 12:05:58 -06:00
UpstreamData
6d2e40c81d added support for avalon10xx miners 2022-03-16 15:21:09 -06:00
UpstreamData
594b5d0448 improved logging format and sent output to a file 2022-03-16 14:03:32 -06:00
UpstreamData
1be12e5d4c moved _get_ssh_connection to the base miner class 2022-03-16 13:34:18 -06:00
UpstreamData
bae2ee4245 changed MinerFactory to a singleton class to ensure clearing its cache is easier and removed creation of independant miner factories for each utility 2022-03-16 12:05:44 -06:00
UpstreamData
57bd606f21 add logging to base miner API 2022-03-16 10:56:33 -06:00
UpstreamData
eb8cefa461 add logging to btminer and fix some bugs 2022-03-16 08:40:41 -06:00
UpstreamData
9edcd866bb added more logging for bosminer models. 2022-03-15 09:07:07 -06:00
UpstreamData
07a8b00a93 added logging to bmminer and X19 models 2022-03-14 16:07:47 -06:00
UpstreamData
c22be7ded8 started adding some basic logging functionality 2022-03-14 15:52:46 -06:00
UpstreamData
2380b94db1 update unknown API docstring 2022-03-14 14:12:31 -06:00
UpstreamData
d8e59afee0 Upsdated bosminer API docstrings, and fixed some errors in CGMiner API docstings 2022-03-14 14:07:17 -06:00
UpstreamData
05e14baa68 added some todos 2022-03-14 11:26:53 -06:00
UpstreamData
ff56148732 fixed some bugs with cgminer, and included VC redistributables in CXFreeze build for CFG util 2022-03-14 10:18:28 -06:00
UpstreamData
bfc5668d24 fixed some bugs with running the web app from docker 2022-03-09 10:53:26 -07:00
UpstreamData
b3103ae700 fixed fan formatting on smaller devices 2022-03-08 12:23:38 -07:00
UpstreamData
43834203a8 reformatted file structure and reformatted for phones, as well as fixed web sockets for remote devices 2022-03-08 11:39:10 -07:00
UpstreamData
7ba8044564 added dockerfile and removed cxfreeze from web_monitor requirements due to it breaking the docker setup 2022-03-08 09:09:28 -07:00
UpstreamData
7e91fe12e7 updated some ports and fixed a bug with summary keys when getting data 2022-03-07 14:54:36 -07:00
UpstreamData
02114aac65 Merge pull request #6 from UpstreamData/web_monitor
Web monitor
2022-03-07 12:40:11 -07:00
UpstreamData
244dac76af finished adding settings page 2022-03-07 12:38:56 -07:00
UpstreamData
2bd25c3f35 started adding settings page 2022-03-07 11:17:41 -07:00
UpstreamData
23350ea4b6 updated requirements, and fixed some formatting issues 2022-03-07 10:36:38 -07:00
UpstreamData
8a6917878e Merge remote-tracking branch 'origin/web_monitor' into web_monitor 2022-03-07 09:40:42 -07:00
UpstreamData
7dd00954e4 fixed some issues with the rounding on floats in the JS 2022-03-07 09:39:56 -07:00
UpstreamData
f3710f618e added miner model and hashrate as a table in the per miner stuff 2022-03-07 09:39:56 -07:00
UpstreamData
8ecdb6f5e8 fixed a bug with scanning and adding miner which didnt append to the navbar 2022-03-07 09:39:56 -07:00
UpstreamData
309b4d44fc updated some formatting on charts 2022-03-07 09:39:56 -07:00
UpstreamData
80f941d912 added remove miner functionality 2022-03-07 09:39:56 -07:00
UpstreamData
4534b09532 added custom TH/s formatting to graphs 2022-03-07 09:39:56 -07:00
UpstreamData
97a9b59acc added dashboard hashrate info 2022-03-07 09:39:56 -07:00
UpstreamData
87b8de9029 strated on basic framework for dashboard in web_monitor 2022-03-07 09:39:56 -07:00
UpstreamData
42f5146632 added different select gradient 2022-03-07 09:39:56 -07:00
UpstreamData
f613cc039f added spinner to scan 2022-03-07 09:39:56 -07:00
UpstreamData
e974c77359 added fan and hashrate data for S19s and Whatsminers 2022-03-07 09:39:56 -07:00
UpstreamData
0f324177cb added fan data for braiins OS 2022-03-07 09:39:56 -07:00
UpstreamData
46a4508cd7 updated more gradient formatting an added gradients to navbar 2022-03-07 09:39:56 -07:00
UpstreamData
d4d9b1ad3c added gradients to fan data 2022-03-07 09:39:56 -07:00
UpstreamData
322ee05fdf added bounding box to the chart 2022-03-07 09:39:56 -07:00
UpstreamData
85569366a2 sorted current miners for the navbar 2022-03-07 09:39:56 -07:00
UpstreamData
dea6ff2a96 improved chart functionality in the web monitor and added handlers for errors such as no response from the miner 2022-03-07 09:39:56 -07:00
UpstreamData
3fcd2edf6f charts on miner pages work now, they gather data from miners and put it into the graph, with a max size of 49 entried per graph 2022-03-07 09:39:56 -07:00
UpstreamData
16b84310ec added graph with fake data on each miner page, and added basic formatting to it. 2022-03-07 09:39:56 -07:00
UpstreamData
f8899521bc improved navbar formatting, added active formats for all miners, moved add miners to a miner subtab 2022-03-07 09:39:56 -07:00
UpstreamData
3558a1a6b1 finished up scan page, added the ability to add miners and them get listed in the miner list, and started adding the individual miner pages 2022-03-07 09:39:56 -07:00
UpstreamData
385943755d further improved formatting of scan page, added disabled checkboxes on scan, updated miner count on add 2022-03-07 09:39:56 -07:00
UpstreamData
3002cb4e97 added basic addition of miners to the list and improved some functionality of the web tool 2022-03-07 09:39:56 -07:00
UpstreamData
6d711520fc added add selected miners button 2022-03-07 09:39:56 -07:00
UpstreamData
584de40983 improved formatting on scan page and made the scan a bit more robust 2022-03-07 09:39:56 -07:00
UpstreamData
81911ba549 fixed some formwatting on the scan page 2022-03-07 09:39:55 -07:00
UpstreamData
e37e9e2251 added the scan page to scan for miners on a subnet 2022-03-07 09:39:55 -07:00
UpstreamData
92a65c8977 switched to fastAPi and jinja 2 for templates and html 2022-03-07 09:39:55 -07:00
UpstreamData
ae8b2cbd07 added the required directories for settings and scanning 2022-03-07 09:39:55 -07:00
UpstreamData
cda13edf85 improved formatting of index.html 2022-03-07 09:39:55 -07:00
UpstreamData
610ee57963 started adding HTML files for the web monitor program 2022-03-07 09:39:55 -07:00
UpstreamData
2ef809db54 fixed some issues with the rounding on floats in the JS 2022-03-07 09:32:06 -07:00
UpstreamData
f315c0c051 added miner model and hashrate as a table in the per miner stuff 2022-03-04 16:10:27 -07:00
UpstreamData
936c230aa3 fixed a bug with scanning and adding miner which didnt append to the navbar 2022-03-04 15:48:17 -07:00
UpstreamData
2c93f1f395 updated some formatting on charts 2022-03-04 14:36:43 -07:00
UpstreamData
727ebd9c42 added remove miner functionality 2022-03-04 14:08:27 -07:00
UpstreamData
1e4fc897e3 added custom TH/s formatting to graphs 2022-03-04 13:39:23 -07:00
UpstreamData
3945a86004 added dashboard hashrate info 2022-03-04 11:53:31 -07:00
UpstreamData
58cc64d17b strated on basic framework for dashboard in web_monitor 2022-03-04 11:24:06 -07:00
UpstreamData
b66cf6f0ba added different select gradient 2022-03-02 15:54:49 -07:00
UpstreamData
1db15a741e added spinner to scan 2022-03-02 15:47:17 -07:00
UpstreamData
5f355c833b added fan and hashrate data for S19s and Whatsminers 2022-03-02 15:38:29 -07:00
UpstreamData
a76b32e3ff added fan data for braiins OS 2022-03-02 15:15:20 -07:00
UpstreamData
f2c01dca25 updated more gradient formatting an added gradients to navbar 2022-03-02 14:36:34 -07:00
UpstreamData
abc542a0ca added gradients to fan data 2022-03-02 13:12:20 -07:00
UpstreamData
9e598ebd8c added bounding box to the chart 2022-03-02 12:15:46 -07:00
UpstreamData
7801ca5819 sorted current miners for the navbar 2022-03-02 11:16:02 -07:00
UpstreamData
482edabd27 improved chart functionality in the web monitor and added handlers for errors such as no response from the miner 2022-03-02 11:11:34 -07:00
UpstreamData
3e5998de6e charts on miner pages work now, they gather data from miners and put it into the graph, with a max size of 49 entried per graph 2022-03-01 16:17:28 -07:00
UpstreamData
c3d19607f6 added graph with fake data on each miner page, and added basic formatting to it. 2022-03-01 16:01:39 -07:00
UpstreamData
2c2648cbe7 improved navbar formatting, added active formats for all miners, moved add miners to a miner subtab 2022-03-01 12:51:49 -07:00
UpstreamData
a72c4f7797 finished up scan page, added the ability to add miners and them get listed in the miner list, and started adding the individual miner pages 2022-03-01 12:28:36 -07:00
UpstreamData
19ee9eb18f further improved formatting of scan page, added disabled checkboxes on scan, updated miner count on add 2022-03-01 11:30:48 -07:00
UpstreamData
3ae29c3883 added basic addition of miners to the list and improved some functionality of the web tool 2022-02-28 16:28:40 -07:00
UpstreamData
d9f8f53a10 added add selected miners button 2022-02-28 15:15:57 -07:00
UpstreamData
6b3e525f45 improved formatting on scan page and made the scan a bit more robust 2022-02-28 14:10:43 -07:00
UpstreamData
c8824f86af fixed some formwatting on the scan page 2022-02-25 16:11:06 -07:00
UpstreamData
cf3163dccf added the scan page to scan for miners on a subnet 2022-02-25 15:58:01 -07:00
UpstreamData
da5a784214 switched to fastAPi and jinja 2 for templates and html 2022-02-24 15:59:48 -07:00
UpstreamData
30b3315084 added the required directories for settings and scanning 2022-02-24 15:25:49 -07:00
UpstreamData
5a7dcc7fcf fixed some bugs in getting ssh connections 2022-02-24 14:42:34 -07:00
UpstreamData
c6305c57cf improved formatting of index.html 2022-02-24 09:13:07 -07:00
UpstreamData
d330e2e978 started adding HTML files for the web monitor program 2022-02-24 08:57:23 -07:00
UpstreamData
1ec2a2a4a6 update CFG-Util-README.md 2022-02-23 14:39:29 -07:00
UpstreamData
c97d384cf4 updated red row color on fault light to work with tkinter tags and be sortable. 2022-02-23 14:35:29 -07:00
UpstreamData
ca52e40a6a fixed a bug with fault lighting bugging the tool 2022-02-23 11:56:21 -07:00
UpstreamData
4a10efd7a4 added send command option in the window 2022-02-22 13:53:07 -07:00
UpstreamData
128aab1b88 switched to a monospace font in the board util. 2022-02-22 11:01:00 -07:00
UpstreamData
bb89be64f4 switched to a monospace font in the cfg tool, padded the hashrates to appear as decimal centered, and left justified hostnames for better readability. 2022-02-22 10:49:23 -07:00
UpstreamData
ef0a507306 changed the disabling buttons to use a decorator as it looks much cleaner 2022-02-18 11:10:44 -07:00
UpstreamData
908594970e disabled the buttons that can break each other when another coroutine is running 2022-02-18 10:59:10 -07:00
282 changed files with 12100 additions and 5113 deletions

.github/workflows/python-publish.yml (new file, +22)

@@ -0,0 +1,22 @@
name: PyPI
on:
  push:
    tags:
      - "v*.*.*"
    paths-ignore:
      - '**.md'
      - 'docs/**'
      - 'docsrc/**'
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Publish GH release
        uses: softprops/action-gh-release@v0.1.14
      - name: Build using poetry and publish to PyPi
        uses: JRubics/poetry-publish@v1.11
        with:
          pypi_token: ${{ secrets.PYPI_API_KEY }}

.gitignore (new file, +8)

@@ -0,0 +1,8 @@
venv/
build/
dist/
__pycache__/
pyvenv.cfg
.env/
bin/
lib/

.readthedocs.yaml (new file, +20)

@@ -0,0 +1,20 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Set the version of Python and other tools you might need
build:
  os: ubuntu-20.04
  tools:
    python: "3.9"
mkdocs:
  configuration: mkdocs.yml
# Optionally declare the Python requirements required to build your docs
python:
  install:
    - requirements: docs/requirements.txt

(deleted file, 185 lines removed)

@@ -1,185 +0,0 @@
import asyncio
import json
import ipaddress
import warnings


class APIError(Exception):
    def __init__(self, *args):
        if args:
            self.message = args[0]
        else:
            self.message = None

    def __str__(self):
        if self.message:
            return f"{self.message}"
        else:
            return "Incorrect API parameters."


class APIWarning(Warning):
    def __init__(self, *args):
        if args:
            self.message = args[0]
        else:
            self.message = None

    def __str__(self):
        if self.message:
            return f"{self.message}"
        else:
            return "Incorrect API parameters."


class BaseMinerAPI:
    def __init__(self, ip: str, port: int = 4028) -> None:
        # api port, should be 4028
        self.port = port
        # ip address of the miner
        self.ip = ipaddress.ip_address(ip)

    def get_commands(self) -> list:
        """Get a list of command accessible to a specific type of API on the miner."""
        return [func for func in
                # each function in self
                dir(self) if callable(getattr(self, func)) and
                # no __ methods
                not func.startswith("__") and
                # remove all functions that are in this base class
                func not in
                [func for func in
                 dir(BaseMinerAPI) if callable(getattr(BaseMinerAPI, func))
                 ]
                ]

    async def multicommand(self, *commands: str) -> dict:
        """Creates and sends multiple commands as one command to the miner."""
        # split the commands into a proper list
        user_commands = [*commands]
        allowed_commands = self.get_commands()
        # make sure we can actually run the command, otherwise it will fail
        commands = [command for command in user_commands if command in allowed_commands]
        for item in list(set(user_commands) - set(commands)):
            warnings.warn(f"""Removing incorrect command: {item}
If you are sure you want to use this command please use API.send_command("{item}", ignore_errors=True) instead.""",
                          APIWarning)
        # standard multicommand format is "command1+command2"
        # doesnt work for S19 which is dealt with in the send command function
        command = "+".join(commands)
        data = None
        try:
            data = await self.send_command(command)
        except APIError:
            try:
                data = {}
                # S19 handler, try again
                for cmd in command.split("+"):
                    data[cmd] = []
                    data[cmd].append(await self.send_command(cmd))
            except APIError as e:
                raise APIError(e)
            except Exception as e:
                print(e)
        if data:
            return data

    async def send_command(self, command: str, parameters: str or int or bool = None, ignore_errors: bool = False) -> dict:
        """Send an API command to the miner and return the result."""
        try:
            # get reader and writer streams
            reader, writer = await asyncio.open_connection(str(self.ip), self.port)
        # handle OSError 121
        except OSError as e:
            if e.winerror == "121":
                print("Semaphore Timeout has Expired.")
            return {}
        # create the command
        cmd = {"command": command}
        if parameters is not None:
            cmd["parameter"] = parameters
        # send the command
        writer.write(json.dumps(cmd).encode('utf-8'))
        await writer.drain()
        # instantiate data
        data = b""
        # loop to receive all the data
        try:
            while True:
                d = await reader.read(4096)
                if not d:
                    break
                data += d
        except Exception as e:
            print(e)
        data = self.load_api_data(data)
        # close the connection
        writer.close()
        await writer.wait_closed()
        # check for if the user wants to allow errors to return
        if not ignore_errors:
            # validate the command succeeded
            validation = self.validate_command_output(data)
            if not validation[0]:
                raise APIError(validation[1])
        return data

    @staticmethod
    def validate_command_output(data: dict) -> tuple[bool, str | None]:
        """Check if the returned command output is correctly formatted."""
        # check if the data returned is correct or an error
        # if status isn't a key, it is a multicommand
        if "STATUS" not in data.keys():
            for key in data.keys():
                # make sure not to try to turn id into a dict
                if not key == "id":
                    # make sure they succeeded
                    if "STATUS" in data[key][0].keys():
                        if data[key][0]["STATUS"][0]["STATUS"] not in ["S", "I"]:
                            # this is an error
                            return False, f"{key}: " + data[key][0]["STATUS"][0]["Msg"]
        elif "id" not in data.keys():
            if data["STATUS"] not in ["S", "I"]:
                return False, data["Msg"]
        else:
            # make sure the command succeeded
            if data["STATUS"][0]["STATUS"] not in ("S", "I"):
                # this is an error
                if data["STATUS"][0]["STATUS"] not in ("S", "I"):
                    return False, data["STATUS"][0]["Msg"]
        return True, None

    @staticmethod
    def load_api_data(data: bytes) -> dict:
        """Convert API data from JSON to dict"""
        try:
            # some json from the API returns with a null byte (\x00) on the end
            if data.endswith(b"\x00"):
                # handle the null byte
                str_data = data.decode('utf-8')[:-1]
            else:
                # no null byte
                str_data = data.decode('utf-8')
            # fix an error with a btminer return having an extra comma that breaks json.loads()
            str_data = str_data.replace(",}", "}")
            # fix an error with a btminer return having a newline that breaks json.loads()
            str_data = str_data.replace("\n", "")
            # fix an error with a bmminer return not having a specific comma that breaks json.loads()
            str_data = str_data.replace("}{", "},{")
            # fix an error with a bmminer return having a specific comma that breaks json.loads()
            str_data = str_data.replace("[,{", "[{")
            # parse the json
            parsed_data = json.loads(str_data)
        # handle bad json
        except json.decoder.JSONDecodeError as e:
            print(e)
            raise APIError(f"Decode Error: {str_data}")
        return parsed_data
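
The deleted base class above is the raw transport layer of the pre-refactor API package: it opens a TCP connection to the miner's API port, sends a JSON command, and parses the reply. A minimal, hypothetical usage sketch of that historical class (the miner IP 192.168.1.10 is a placeholder, and the `from API import BaseMinerAPI` import path is the one shown in this diff, not the post-rename pyasic layout):

import asyncio

from API import BaseMinerAPI  # pre-refactor import path as it appears in this diff


async def main():
    # placeholder IP for any device speaking the CGMiner-style JSON API on port 4028
    api = BaseMinerAPI("192.168.1.10")
    # single command over the raw JSON/TCP protocol
    summary = await api.send_command("summary")
    # joined multicommand ("summary+pools"), with per-command fallback for S19-style firmware
    combined = await api.multicommand("summary", "pools")
    print(summary, combined)


asyncio.run(main())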

(deleted file, 511 lines removed)

@@ -1,511 +0,0 @@
from API import BaseMinerAPI


class BMMinerAPI(BaseMinerAPI):
    """An abstraction of the BMMiner API.

    Each method corresponds to an API command in BMMiner.

    BMMiner API documentation:
        https://github.com/jameshilliard/bmminer/blob/master/API-README

    This class abstracts use of the BMMiner API, as well as the
    methods for sending commands to it. The self.send_command()
    function handles sending a command to the miner asynchronously, and
    as such is the base for many of the functions in this class, which
    rely on it to send the command for them.

    :param ip: The IP of the miner to reference the API on.
    :param port: The port to reference the API on. Default is 4028.
    """
    def __init__(self, ip: str, port: int = 4028) -> None:
        super().__init__(ip, port)

    async def version(self) -> dict:
        """Get miner version info.

        :return: Miner version information.
        """
        return await self.send_command("version")

    async def config(self) -> dict:
        """Get some basic configuration info.

        :return: Some miner configuration information:
            ASC Count <- the number of ASCs
            PGA Count <- the number of PGAs
            Pool Count <- the number of Pools
            Strategy <- the current pool strategy
            Log Interval <- the interval of logging
            Device Code <- list of compiled device drivers
            OS <- the current operating system
            Failover-Only <- failover-only setting
            Scan Time <- scan-time setting
            Queue <- queue setting
            Expiry <- expiry setting
        """
        return await self.send_command("config")

    async def summary(self) -> dict:
        """Get the status summary of the miner.

        :return: The status summary of the miner.
        """
        return await self.send_command("summary")

    async def pools(self) -> dict:
        """Get pool information.

        :return: Miner pool information.
        """
        return await self.send_command("pools")

    async def devs(self) -> dict:
        """Get data on each PGA/ASC with their details.

        :return: Data on each PGA/ASC with their details.
        """
        return await self.send_command("devs")

    async def edevs(self, old: bool = False) -> dict:
        """Get data on each PGA/ASC with their details, ignoring
        blacklisted and zombie devices.

        :param old: Include zombie devices that became zombies less
        than 'old' seconds ago

        :return: Data on each PGA/ASC with their details.
        """
        if old:
            return await self.send_command("edevs", parameters=old)
        else:
            return await self.send_command("edevs")

    async def pga(self, n: int) -> dict:
        """Get data from PGA n.

        :param n: The PGA number to get data from.

        :return: Data on the PGA n.
        """
        return await self.send_command("pga", parameters=n)

    async def pgacount(self) -> dict:
        """Get data fon all PGAs.

        :return: Data on the PGAs connected.
        """
        return await self.send_command("pgacount")

    async def switchpool(self, n: int) -> dict:
        """Switch pools to pool n.

        :param n: The pool to switch to.

        :return: A confirmation of switching to pool n.
        """
        return await self.send_command("switchpool", parameters=n)

    async def enablepool(self, n: int) -> dict:
        """Enable pool n.

        :param n: The pool to enable.

        :return: A confirmation of enabling pool n.
        """
        return await self.send_command("enablepool", parameters=n)

    async def addpool(self,
                      url: str,
                      username: str,
                      password: str
                      ) -> dict:
        """Add a pool to the miner.

        :param url: The URL of the new pool to add.
        :param username: The users username on the new pool.
        :param password: The worker password on the new pool.

        :return: A confirmation of adding the pool.
        """
        return await self.send_command("addpool",
                                       parameters=f"{url}, "
                                                  f"{username}, "
                                                  f"{password}"
                                       )

    async def poolpriority(self, *n: int) -> dict:
        """Set pool priority.

        :param n: Pools in order of priority.

        :return: A confirmation of setting pool priority.
        """
        pools = f"{','.join([str(item) for item in n])}"
        return await self.send_command("poolpriority",
                                       parameters=pools)

    async def poolquota(self, n: int, q: int) -> dict:
        """Set pool quota.

        :param n: Pool number to set quota on.
        :param q: Quota to set the pool to.

        :return: A confirmation of setting pool quota.
        """
        return await self.send_command("poolquota",
                                       parameters=f"{n}, "
                                                  f"{q}"
                                       )

    async def disablepool(self, n: int) -> dict:
        """Disable a pool.

        :param n: Pool to disable.

        :return: A confirmation of diabling the pool.
        """
        return await self.send_command("disablepool", parameters=n)

    async def removepool(self, n: int) -> dict:
        """Remove a pool.

        :param n: Pool to remove.

        :return: A confirmation of removing the pool.
        """
        return await self.send_command("removepool", parameters=n)

    async def save(self, filename: str = None) -> dict:
        """Save the config.

        :param filename: Filename to save the config as.

        :return: A confirmation of saving the config.
        """
        if filename:
            return await self.send_command("save", parameters=filename)
        else:
            return await self.send_command("save")

    async def quit(self) -> dict:
        """Quit BMMiner.

        :return: A single "BYE" before BMMiner quits.
        """
        return await self.send_command("quit")

    async def notify(self) -> dict:
        """Notify the user of past errors.

        :return: The last status and count of each devices problem(s).
        """
        return await self.send_command("notify")

    async def privileged(self) -> dict:
        """Check if you have privileged access.

        :return: The STATUS section with an error if you have no
        privileged access, or success if you have privileged access.
        """
        return await self.send_command("privileged")

    async def pgaenable(self, n: int) -> dict:
        """Enable PGA n.

        :param n: The PGA to enable.

        :return: A confirmation of enabling PGA n.
        """
        return await self.send_command("pgaenable", parameters=n)

    async def pgadisable(self, n: int) -> dict:
        """Disable PGA n.

        :param n: The PGA to disable.

        :return: A confirmation of disabling PGA n.
        """
        return await self.send_command("pgadisable", parameters=n)

    async def pgaidentify(self, n: int) -> dict:
        """Identify PGA n.

        :param n: The PGA to identify.

        :return: A confirmation of identifying PGA n.
        """
        return await self.send_command("pgaidentify", parameters=n)

    async def devdetails(self) -> dict:
        """Get data on all devices with their static details.

        :return: Data on all devices with their static details.
        """
        return await self.send_command("devdetails")

    async def restart(self) -> dict:
        """Restart BMMiner using the API.

        :return: A reply informing of the restart.
        """
        return await self.send_command("restart")

    async def stats(self) -> dict:
        """Get stats of each device/pool with more than 1 getwork.

        :return: Stats of each device/pool with more than 1 getwork.
        """
        return await self.send_command("stats")

    async def estats(self, old: bool = False) -> dict:
        """Get stats of each device/pool with more than 1 getwork,
        ignoring zombie devices.

        :param old: Include zombie devices that became zombies less
        than 'old' seconds ago.

        :return: Stats of each device/pool with more than 1 getwork,
        ignoring zombie devices.
        """
        if old:
            return await self.send_command("estats", parameters=old)
        else:
            return await self.send_command("estats")

    async def check(self, command: str) -> dict:
        """Check if the command command exists in BMMiner.

        :param command: The command to check.

        :return: Information about a command:
            Exists (Y/N) <- the command exists in this version
            Access (Y/N) <- you have access to use the command
        """
        return await self.send_command("check", parameters=command)

    async def failover_only(self, failover: bool) -> dict:
        """Set failover-only.

        :param failover: What to set failover-only to.

        :return: Confirmation of setting failover-only.
        """
        return await self.send_command("failover-only",
                                       parameters=failover
                                       )

    async def coin(self) -> dict:
        """Get information on the current coin.

        :return: Information about the current coin being mined:
            Hash Method <- the hashing algorithm
            Current Block Time <- blocktime as a float, 0 means none
            Current Block Hash <- the hash of the current block, blank
            means none
            LP <- whether LP is in use on at least 1 pool
            Network Difficulty: the current network difficulty
        """
        return await self.send_command("coin")

    async def debug(self, setting: str) -> dict:
        """Set a debug setting.

        :param setting: Which setting to switch to. Options are:
            Silent,
            Quiet,
            Verbose,
            Debug,
            RPCProto,
            PerDevice,
            WorkTime,
            Normal.

        :return: Data on which debug setting was enabled or disabled.
        """
        return await self.send_command("debug", parameters=setting)

    async def setconfig(self, name: str, n: int) -> dict:
        """Set config of name to value n.

        :param name: The name of the config setting to set. Options are:
            queue,
            scantime,
            expiry.
        :param n: The value to set the 'name' setting to.

        :return: The results of setting config of name to n.
        """
        return await self.send_command("setconfig",
                                       parameters=f"{name}, "
                                                  f"{n}"
                                       )

    async def usbstats(self) -> dict:
        """Get stats of all USB devices except ztex.

        :return: The stats of all USB devices except ztex.
        """
        return await self.send_command("usbstats")

    async def pgaset(self, n: int, opt: str, val: int = None) -> dict:
        """Set PGA option opt to val on PGA n.

        Options:
            MMQ -
                opt: clock
                val: 160 - 230 (multiple of 2)
            CMR -
                opt: clock
val: 100 - 220
:param n: The PGA to set the options on.
:param opt: The option to set. Setting this to 'help'
returns a help message.
:param val: The value to set the option to.
:return: Confirmation of setting PGA n with opt[,val].
"""
if val is not None:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}, "
f"{val}"
)
else:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
:param which: Which device to zero.
Setting this to 'all' zeros all devices.
Setting this to 'bestshare' zeros only the bestshare values
for each pool and global.
:param summary: Whether or not to show a full summary.
:return: the STATUS section with info on the zero and optional
summary.
"""
return await self.send_command("zero", parameters=f"{which}, {summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
:param n: The device number to set hotplug on.
:return: Information on hotplug status.
"""
return await self.send_command("hotplug", parameters=n)
async def asc(self, n: int) -> dict:
"""Get data for ASC device n.
:param n: The device to get data for.
:return: The data for ASC device n.
"""
return await self.send_command("asc", parameters=n)
async def ascenable(self, n: int) -> dict:
"""Enable ASC device n.
:param n: The device to enable.
:return: Confirmation of enabling ASC device n.
"""
return await self.send_command("ascenable", parameters=n)
async def ascdisable(self, n: int) -> dict:
"""Disable ASC device n.
:param n: The device to disable.
:return: Confirmation of disabling ASC device n.
"""
return await self.send_command("ascdisable", parameters=n)
async def ascidentify(self, n: int) -> dict:
"""Identify ASC device n.
:param n: The device to identify.
:return: Confirmation of identifying ASC device n.
"""
return await self.send_command("ascidentify", parameters=n)
async def asccount(self) -> dict:
"""Get data on the number of ASC devices and their info.
:return: Data on all ASC devices.
"""
return await self.send_command("asccount")
async def ascset(self, n: int, opt: str, val: int = None) -> dict:
"""Set ASC n option opt to value val.
Sets an option on the ASC n to a value. Allowed options are:
AVA+BTB -
opt: freq
val: 256 - 1024 (chip frequency)
BTB -
opt: millivolts
val: 1000 - 1400 (core voltage)
MBA -
opt: reset
val: 0 - # of chips (reset a chip)
opt: freq
val: 0 - # of chips, 100 - 1400 (chip frequency)
opt: ledcount
val: 0 - 100 (chip count for LED)
opt: ledlimit
val: 0 - 200 (LED off below GH/s)
opt: spidelay
val: 0 - 9999 (SPI per I/O delay)
opt: spireset
val: i or s, 0 - 9999 (SPI regular reset)
opt: spisleep
val: 0 - 9999 (SPI reset sleep in ms)
BMA -
opt: volt
val: 0 - 9
opt: clock
val: 0 - 15
:param n: The ASC to set the options on.
:param opt: The option to set. Setting this to 'help' returns a
help message.
:param val: The value to set the option to.
:return: Confirmation of setting option opt to value val.
"""
if val is not None:
return await self.send_command("ascset", parameters=f"{n}, {opt}, {val}")
else:
return await self.send_command("ascset", parameters=f"{n}, {opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.
:return: An all-in-one status summary of the miner.
"""
return await self.send_command("lcd")
async def lockstats(self) -> dict:
"""Write lockstats to STDERR.
:return: The result of writing the lock stats to STDERR.
"""
return await self.send_command("lockstats")


@@ -1,219 +0,0 @@
from API import BaseMinerAPI
class BOSMinerAPI(BaseMinerAPI):
"""
A class that abstracts the BOSMiner API in the miners.
Each method corresponds to an API command in BOSMiner.
BOSMiner API documentation:
https://docs.braiins.com/os/plus-en/Development/1_api.html
Parameters:
ip: the IP address of the miner.
port (optional): the port of the API on the miner (standard is 4028)
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
async def asccount(self) -> dict:
"""
API 'asccount' command.
Returns a dict containing the number of ASC devices.
"""
return await self.send_command("asccount")
async def asc(self, n: int) -> dict:
"""
API 'asc' command.
Returns a dict containing the details of a single ASC of number N.
n: the ASC device to get details of.
"""
return await self.send_command("asc", parameters=n)
async def devdetails(self) -> dict:
"""
API 'devdetails' command.
Returns a dict containing all devices with their static details.
"""
return await self.send_command("devdetails")
async def devs(self) -> dict:
"""
API 'devs' command.
Returns a dict containing each PGA/ASC with their details.
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""
API 'edevs' command.
Returns a dict containing each PGA/ASC with their details,
ignoring blacklisted devices and zombie devices.
Parameters:
old (optional): include zombie devices that became zombies less than 'old' seconds ago
"""
if old:
return await self.send_command("edevs", parameters="old")
else:
return await self.send_command("edevs")
async def pools(self) -> dict:
"""
API 'pools' command.
Returns a dict containing the status of each pool.
"""
return await self.send_command("pools")
async def summary(self) -> dict:
"""
API 'summary' command.
Returns a dict containing the status summary of the miner.
"""
return await self.send_command("summary")
async def stats(self) -> dict:
"""
API 'stats' command.
Returns a dict containing stats for each device/pool with more than 1 getwork.
"""
return await self.send_command("stats")
async def version(self) -> dict:
"""
API 'version' command.
Returns a dict containing version information.
"""
return await self.send_command("version")
async def estats(self) -> dict:
"""
API 'estats' command.
Returns a dict containing stats for each device/pool with more than 1 getwork,
ignoring zombie devices.
"""
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""
API 'check' command.
Returns information about a command:
Exists (Y/N) <- the command exists in this version
Access (Y/N) <- you have access to use the command
Parameters:
command: the command to get information about.
"""
return await self.send_command("check", parameters=command)
async def coin(self) -> dict:
"""
API 'coin' command.
Returns information about the current coin being mined:
Hash Method <- the hashing algorithm
Current Block Time <- blocktime as a float, 0 means none
Current Block Hash <- the hash of the current block, blank means none
LP <- whether LP is in use on at least 1 pool
Network Difficulty: the current network difficulty
"""
return await self.send_command("coin")
async def lcd(self) -> dict:
"""
API 'lcd' command.
Returns a dict containing an all in one status summary of the miner.
"""
return await self.send_command("lcd")
async def switchpool(self, n: int) -> dict:
# BOS has not implemented this yet, they will in the future
raise NotImplementedError
# return await self.send_command("switchpool", parameters=n)
async def enablepool(self, n: int) -> dict:
# BOS has not implemented this yet, they will in the future
raise NotImplementedError
# return await self.send_command("enablepool", parameters=n)
async def disablepool(self, n: int) -> dict:
# BOS has not implemented this yet, they will in the future
raise NotImplementedError
# return await self.send_command("disablepool", parameters=n)
async def addpool(self, url: str, username: str, password: str) -> dict:
# BOS has not implemented this yet, they will in the future
raise NotImplementedError
# return await self.send_command("addpool", parameters=f"{url}, {username}, {password}")
async def removepool(self, n: int) -> dict:
# BOS has not implemented this yet, they will in the future
raise NotImplementedError
# return await self.send_command("removepool", parameters=n)
async def fans(self) -> dict:
"""
API 'fans' command.
Returns a dict containing information on fans and fan speeds.
"""
return await self.send_command("fans")
async def tempctrl(self) -> dict:
"""
API 'tempctrl' command.
Returns a dict containing temp control configuration.
"""
return await self.send_command("tempctrl")
async def temps(self) -> dict:
"""
API 'temps' command.
Returns a dict containing temperature information.
"""
return await self.send_command("temps")
async def tunerstatus(self) -> dict:
"""
API 'tunerstatus' command.
Returns a dict containing tuning stats.
"""
return await self.send_command("tunerstatus")
async def pause(self) -> dict:
"""
API 'pause' command.
Pauses mining, stops power consumption, and waits for the resume command.
Returns a dict stating that the miner paused mining.
"""
return await self.send_command("pause")
async def resume(self) -> dict:
"""
API 'resume' command.
Resumes mining on the miner.
Returns a dict stating that the miner resumed mining.
"""
return await self.send_command("resume")


@@ -1,501 +0,0 @@
from API import BaseMinerAPI
class CGMinerAPI(BaseMinerAPI):
"""An abstraction of the BMMiner API.
Each method corresponds to an API command in BMMiner.
CGMiner API documentation:
https://github.com/ckolivas/cgminer/blob/master/API-README
This class abstracts use of the CGMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
async def version(self) -> dict:
"""Get miner version info.
:return: Miner version information.
"""
return await self.send_command("version")
async def config(self) -> dict:
"""Get some basic configuration info.
:return: Some miner configuration information:
ASC Count <- the number of ASCs
PGA Count <- the number of PGAs
Pool Count <- the number of Pools
Strategy <- the current pool strategy
Log Interval <- the interval of logging
Device Code <- list of compiled device drivers
OS <- the current operating system
"""
return await self.send_command("config")
async def summary(self) -> dict:
"""Get the status summary of the miner.
:return: The status summary of the miner.
"""
return await self.send_command("summary")
async def pools(self) -> dict:
"""Get pool information.
:return: Miner pool information.
"""
return await self.send_command("pools")
async def devs(self) -> dict:
"""Get data on each PGA/ASC with their details.
:return: Data on each PGA/ASC with their details.
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""Get data on each PGA/ASC with their details, ignoring
blacklisted and zombie devices.
:param old: Include zombie devices that became zombies less
than 'old' seconds ago
:return: Data on each PGA/ASC with their details.
"""
if old:
return await self.send_command("edevs", parameters="old")
else:
return await self.send_command("edevs")
async def pga(self, n: int) -> dict:
"""Get data from PGA n.
:param n: The PGA number to get data from.
:return: Data on the PGA n.
"""
return await self.send_command("pga", parameters=n)
async def pgacount(self) -> dict:
"""Get data fon all PGAs.
:return: Data on the PGAs connected.
"""
return await self.send_command("pgacount")
async def switchpool(self, n: int) -> dict:
"""Switch pools to pool n.
:param n: The pool to switch to.
:return: A confirmation of switching to pool n.
"""
return await self.send_command("switchpool", parameters=n)
async def enablepool(self, n: int) -> dict:
"""Enable pool n.
:param n: The pool to enable.
:return: A confirmation of enabling pool n.
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
:param username: The user's username on the new pool.
:param password: The worker password on the new pool.
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
:param n: Pools in order of priority.
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
:param n: Pool number to set quota on.
:param q: Quota to set the pool to.
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
:param n: Pool to disable.
:return: A confirmation of disabling the pool.
"""
return await self.send_command("disablepool", parameters=n)
async def removepool(self, n: int) -> dict:
"""Remove a pool.
:param n: Pool to remove.
:return: A confirmation of removing the pool.
"""
return await self.send_command("removepool", parameters=n)
async def save(self, filename: str = None) -> dict:
"""Save the config.
:param filename: Filename to save the config as.
:return: A confirmation of saving the config.
"""
if filename:
return await self.send_command("save", parameters=filename)
else:
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit BMMiner.
:return: A single "BYE" before CGMiner quits.
"""
return await self.send_command("quit")
async def notify(self) -> dict:
"""Notify the user of past errors.
:return: The last status and count of each device's problem(s).
"""
return await self.send_command("notify")
async def privileged(self) -> dict:
"""Check if you have privileged access.
:return: The STATUS section with an error if you have no
privileged access, or success if you have privileged access.
"""
return await self.send_command("privileged")
async def pgaenable(self, n: int) -> dict:
"""Enable PGA n.
:param n: The PGA to enable.
:return: A confirmation of enabling PGA n.
"""
return await self.send_command("pgaenable", parameters=n)
async def pgadisable(self, n: int) -> dict:
"""Disable PGA n.
:param n: The PGA to disable.
:return: A confirmation of disabling PGA n.
"""
return await self.send_command("pgadisable", parameters=n)
async def pgaidentify(self, n: int) -> dict:
"""Identify PGA n.
:param n: The PGA to identify.
:return: A confirmation of identifying PGA n.
"""
return await self.send_command("pgaidentify", parameters=n)
async def devdetails(self) -> dict:
"""Get data on all devices with their static details.
:return: Data on all devices with their static details.
"""
return await self.send_command("devdetails")
async def restart(self) -> dict:
"""Restart CGMiner using the API.
:return: A reply informing of the restart.
"""
return await self.send_command("restart")
async def stats(self) -> dict:
"""Get stats of each device/pool with more than 1 getwork.
:return: Stats of each device/pool with more than 1 getwork.
"""
return await self.send_command("stats")
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork,
ignoring zombie devices.
:param old: Include zombie devices that became zombies less
than 'old' seconds ago.
:return: Stats of each device/pool with more than 1 getwork,
ignoring zombie devices.
"""
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in BMMiner.
:param command: The command to check.
:return: Information about a command:
Exists (Y/N) <- the command exists in this version
Access (Y/N) <- you have access to use the command
"""
return await self.send_command("check", parameters=command)
async def failover_only(self, failover: bool) -> dict:
"""Set failover-only.
:param failover: What to set failover-only to.
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
async def coin(self) -> dict:
"""Get information on the current coin.
:return: Information about the current coin being mined:
Hash Method <- the hashing algorithm
Current Block Time <- blocktime as a float, 0 means none
Current Block Hash <- the hash of the current block, blank
means none
LP <- whether LP is in use on at least 1 pool
Network Difficulty: the current network difficulty
"""
return await self.send_command("coin")
async def debug(self, setting: str) -> dict:
"""Set a debug setting.
:param setting: Which setting to switch to. Options are:
Silent,
Quiet,
Verbose,
Debug,
RPCProto,
PerDevice,
WorkTime,
Normal.
:return: Data on which debug setting was enabled or disabled.
"""
return await self.send_command("debug", parameters=setting)
async def setconfig(self, name: str, n: int) -> dict:
"""Set config of name to value n.
:param name: The name of the config setting to set. Options are:
queue,
scantime,
expiry.
:param n: The value to set the 'name' setting to.
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
:return: The stats of all USB devices except ztex.
"""
return await self.send_command("usbstats")
async def pgaset(self, n: int, opt: str, val: int = None) -> dict:
"""Set PGA option opt to val on PGA n.
Options:
MMQ -
opt: clock
val: 160 - 230 (multiple of 2)
CMR -
opt: clock
val: 100 - 220
:param n: The PGA to set the options on.
:param opt: The option to set. Setting this to 'help'
returns a help message.
:param val: The value to set the option to.
:return: Confirmation of setting PGA n with opt[,val].
"""
if val is not None:
return await self.send_command("pgaset", parameters=f"{n}, {opt}, {val}")
else:
return await self.send_command("pgaset", parameters=f"{n}, {opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
:param which: Which device to zero.
Setting this to 'all' zeros all devices.
Setting this to 'bestshare' zeros only the bestshare values
for each pool and global.
:param summary: Whether or not to show a full summary.
:return: the STATUS section with info on the zero and optional
summary.
"""
return await self.send_command("zero", parameters=f"{which}, {summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
:param n: The device number to set hotplug on.
:return: Information on hotplug status.
"""
return await self.send_command("hotplug", parameters=n)
async def asc(self, n: int) -> dict:
"""Get data for ASC device n.
:param n: The device to get data for.
:return: The data for ASC device n.
"""
return await self.send_command("asc", parameters=n)
async def ascenable(self, n: int) -> dict:
"""Enable ASC device n.
:param n: The device to enable.
:return: Confirmation of enabling ASC device n.
"""
return await self.send_command("ascenable", parameters=n)
async def ascdisable(self, n: int) -> dict:
"""Disable ASC device n.
:param n: The device to disable.
:return: Confirmation of disabling ASC device n.
"""
return await self.send_command("ascdisable", parameters=n)
async def ascidentify(self, n: int) -> dict:
"""Identify ASC device n.
:param n: The device to identify.
:return: Confirmation of identifying ASC device n.
"""
return await self.send_command("ascidentify", parameters=n)
async def asccount(self) -> dict:
"""Get data on the number of ASC devices and their info.
:return: Data on all ASC devices.
"""
return await self.send_command("asccount")
async def ascset(self, n: int, opt: str, val: int = None) -> dict:
"""Set ASC n option opt to value val.
Sets an option on the ASC n to a value. Allowed options are:
AVA+BTB -
opt: freq
val: 256 - 1024 (chip frequency)
BTB -
opt: millivolts
val: 1000 - 1400 (core voltage)
MBA -
opt: reset
val: 0 - # of chips (reset a chip)
opt: freq
val: 0 - # of chips, 100 - 1400 (chip frequency)
opt: ledcount
val: 0 - 100 (chip count for LED)
opt: ledlimit
val: 0 - 200 (LED off below GH/s)
opt: spidelay
val: 0 - 9999 (SPI per I/O delay)
opt: spireset
val: i or s, 0 - 9999 (SPI regular reset)
opt: spisleep
val: 0 - 9999 (SPI reset sleep in ms)
BMA -
opt: volt
val: 0 - 9
opt: clock
val: 0 - 15
:param n: The ASC to set the options on.
:param opt: The option to set. Setting this to 'help' returns a
help message.
:param val: The value to set the option to.
:return: Confirmation of setting option opt to value val.
"""
if val is not None:
return await self.send_command("ascset", parameters=f"{n}, {opt}, {val}")
else:
return await self.send_command("ascset", parameters=f"{n}, {opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.
:return: An all-in-one status summary of the miner.
"""
return await self.send_command("lcd")
async def lockstats(self) -> dict:
"""Write lockstats to STDERR.
:return: The result of writing the lock stats to STDERR.
"""
return await self.send_command("lockstats")
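To make the parameter formatting above concrete, here is a minimal, hypothetical sketch (the import path and pool details are placeholders) showing how `addpool` joins its arguments into "url, username, password" and how `poolpriority` joins pool numbers into a comma-separated list:
```python
import asyncio

from API.cgminer import CGMinerAPI  # assumed import path, parallel to API.bosminer


async def reorder_pools(ip: str):
    api = CGMinerAPI(ip)
    # sends "addpool" with parameters "stratum+tcp://example.pool:3333, worker.1, x"
    await api.addpool("stratum+tcp://example.pool:3333", "worker.1", "x")
    # sends "poolpriority" with parameters "2,0,1"
    await api.poolpriority(2, 0, 1)
    # read back the pool list to confirm the change
    print(await api.pools())


if __name__ == "__main__":
    asyncio.run(reorder_pools("192.168.1.69"))
```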

176
LICENSE.txt Normal file

@@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

387
README.md

@@ -1,29 +1,28 @@
# minerInterface
# pyasic
*A set of modules for interfacing with many common types of ASIC bitcoin miners, using both their API and SSH.*
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![pypi](https://img.shields.io/pypi/v/pyasic.svg)](https://pypi.org/project/pyasic/)
[![python](https://img.shields.io/pypi/pyversions/pyasic.svg)](https://pypi.org/project/pyasic/)
[![Read the Docs](https://img.shields.io/readthedocs/pyasic)](https://pyasic.readthedocs.io/en/latest/)
[![GitHub](https://img.shields.io/github/license/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/blob/master/LICENSE.txt)
[![CodeFactor Grade](https://img.shields.io/codefactor/grade/github/UpstreamData/pyasic)](https://www.codefactor.io/repository/github/upstreamdata/pyasic)
## Documentation
Documentation is located on Read the Docs as [pyasic](https://pyasic.readthedocs.io/en/latest/)
## Usage
### Standard Usage
You can install pyasic directly from pip with the command `pip install pyasic`
For those of you who aren't comfortable with code and developer tools, there are Windows builds of GUI applications that use this library here -> (https://drive.google.com/drive/folders/1DjR8UOS_g0ehfiJcgmrV0FFoqFvE9akW?usp=sharing)
### Developers
To use this repo, first download it, create a virtual environment, and enter the virtual environment. Then install the relevant packages by navigating to this directory and running ```pip install -r requirements.txt``` on Windows, or ```pip3 install -r requirements.txt``` on Mac or UNIX if the first command fails.
For those of you who aren't comfortable with code and developer tools, there are Windows builds of the GUI applications here -> (https://drive.google.com/drive/folders/1DjR8UOS_g0ehfiJcgmrV0FFoqFvE9akW?usp=sharing)
### CFG Util
*CFG Util is a GUI for interfacing with the miners easily; it is mostly self-explanatory.*
To use CFG Util you have 2 options -
1. Run it directly with the file ```config_tool.py``` or import it with ```from cfg_util import main```, then run the ```main()``` function in an asyncio event loop like -
```python
from tools.cfg_util import main
if __name__ == '__main__':
main()
```
2. Make a build of the CFG Util for your system using cx_freeze and ```make_cfg_tool_exe.py```
(Alternatively, you can get a build made by me here -> https://drive.google.com/drive/folders/1nzojuGRu0IszIGpwx7SvG5RlJ2_KXIOv)
1. Open either Command Prompt on Windows or Terminal on Mac or UNIX.
2. Navigate to this directory, and run ```make_cfg_tool_exe.py build``` on Windows or ```python3 make_cfg_tool_exe.py``` on Mac or UNIX.
You can also use poetry by initializing and running ```poetry install```
### Interfacing with miners programmatically
<br>
##### Note: If you are trying to interface with Whatsminers, there is a bug in the way they are interacted with on Windows, so to fix that you need to change the event loop policy using this code:
```python
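import asyncio
import sys

# if the computer is windows, set the event loop policy to a WindowsSelector policy
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
```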
@@ -43,258 +42,130 @@ To write your own custom programs with this repo, you have many options.
It is recommended that you explore the files in this repo to familiarize yourself with them; try starting with the miners module and going from there.
A basic script to find all miners on the network and get the hashrate from them looks like this -
There are 2 main ways to get a miner and its functions: via scanning or via the MinerFactory.
#### Scanning for miners
```python
import asyncio
from network import MinerNetwork
from tools.cfg_util.func.parse_data import safe_parse_api_data
import sys
from pyasic.network import MinerNetwork
# Fix whatsminer bug
# if the computer is windows, set the event loop policy to a WindowsSelector policy
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
async def get_hashrate():
# Miner Network class allows for easy scanning of a network
# Give it any IP on a network and it will find the whole subnet
# It can also be passed a subnet mask:
# miner_network = MinerNetwork('192.168.1.55', mask=23)
miner_network = MinerNetwork('192.168.1.1')
# Miner Network scan function returns Miner classes for all miners found
miners = await miner_network.scan_network_for_miners()
# Each miner will return with its own set of functions, and an API class instance
tasks = [miner.api.summary() for miner in miners]
# define asynchronous function to scan for miners
async def scan_and_get_data():
# Define network range to be used for scanning
# This can take a list of IPs, a constructor string, or an IP and subnet mask
# The standard mask is /24, and you can pass any IP address in the subnet
net = MinerNetwork("192.168.1.69", mask=24)
# Scan the network for miners
# This function returns a list of miners of the correct type as a class
miners: list = await net.scan_network_for_miners()
# We can now get data from any of these miners
# To do them all we have to create a list of tasks and gather them
tasks = [miner.get_data() for miner in miners]
# Gather all tasks asynchronously and run them
data = await asyncio.gather(*tasks)
parse_tasks = []
# Data is now a list of MinerData, and we can reference any part of that
# Print out all data for now
for item in data:
# safe_parse_api_data parses the data from a miner API
# It will raise an APIError (from API import APIError) if there is a problem
parse_tasks.append(safe_parse_api_data(item, 'SUMMARY', 0, 'MHS 5s'))
# Gather all tasks asynchronously and run them
data = await asyncio.gather(*parse_tasks)
# Print a list of all the hashrates
print(item)
if __name__ == "__main__":
asyncio.run(scan_and_get_data())
```
</br>
#### Getting a miner if you know the IP
```python
import asyncio
import sys
from pyasic.miners.miner_factory import MinerFactory
# Fix whatsminer bug
# if the computer is windows, set the event loop policy to a WindowsSelector policy
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
# define asynchronous function to get miner and data
async def get_miner_data(miner_ip: str):
# Use MinerFactory to get miner
# MinerFactory is a singleton, so we can just get the instance in place
miner = await MinerFactory().get_miner(miner_ip)
# Get data from the miner
data = await miner.get_data()
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_hashrate())
if __name__ == "__main__":
asyncio.run(get_miner_data("192.168.1.69"))
```
<br>
You can also create your own miner without scanning if you know the IP:
### Advanced data gathering
If needed, this library exposes a wrapper for the miner API that can be used for advanced data gathering.
#### List available API commands
```python
import asyncio
import sys
from pyasic.miners.miner_factory import MinerFactory
# Fix whatsminer bug
# if the computer is windows, set the event loop policy to a WindowsSelector policy
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
async def get_api_commands(miner_ip: str):
# Get the miner
miner = await MinerFactory().get_miner(miner_ip)
# List all available commands
print(miner.api.get_commands())
if __name__ == "__main__":
asyncio.run(get_api_commands("192.168.1.69"))
```
#### Use miner API commands to gather data
The miner API commands will raise an `APIError` if they fail with a bad status code; to bypass this, you must send them manually using `miner.api.send_command(command, ignore_errors=True)` (a short sketch of this follows the example below).
```python
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
import sys
from pyasic.miners.miner_factory import MinerFactory
# Fix whatsminer bug
# if the computer is windows, set the event loop policy to a WindowsSelector policy
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
async def get_miner_hashrate(ip: str):
# Instantiate a Miner Factory to generate miners from their IP
miner_factory = MinerFactory()
# Make the string IP into an IP address
miner_ip = ipaddress.ip_address(ip)
# Wait for the factory to return the miner
miner = await miner_factory.get_miner(miner_ip)
# Get the API data
summary = await miner.api.summary()
# safe_parse_api_data parses the data from a miner API
# It will raise an APIError (from API import APIError) if there is a problem
data = await safe_parse_api_data(summary, 'SUMMARY', 0, 'MHS 5s')
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate(str("192.168.1.69")))
async def get_devdetails(miner_ip: str):
# Get the miner
miner = await MinerFactory().get_miner(miner_ip)
# Run the devdetails command
# This is equivalent to await miner.api.send_command("devdetails")
devdetails: dict = await miner.api.devdetails()
print(devdetails)
if __name__ == "__main__":
asyncio.run(get_devdetails("192.168.1.69"))
```
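If a command may not be supported by a miner's firmware, a minimal sketch of the bypass mentioned above could look like this; the command name is just an example:
```python
import asyncio
import sys
from pyasic.miners.miner_factory import MinerFactory

# Fix whatsminer bug
# if the computer is windows, set the event loop policy to a WindowsSelector policy
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())


async def try_tunerstatus(miner_ip: str):
    # Get the miner
    miner = await MinerFactory().get_miner(miner_ip)
    # Send the command manually; ignore_errors=True bypasses the APIError
    # that would normally be raised on a bad status code
    result = await miner.api.send_command("tunerstatus", ignore_errors=True)
    print(result)


if __name__ == "__main__":
    asyncio.run(try_tunerstatus("192.168.1.69"))
```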
<br>
Or generate a miner directly without the factory:
```python
import asyncio
from miners.bosminer import BOSMiner
from tools.cfg_util.func.parse_data import safe_parse_api_data
async def get_miner_hashrate(ip: str):
# Create a BOSminer miner object
miner = BOSMiner(ip)
# Get the API data
summary = await miner.api.summary()
# safe_parse_api_data parses the data from a miner API
# It will raise an APIError (from API import APIError) if there is a problem
data = await safe_parse_api_data(summary, 'SUMMARY', 0, 'MHS 5s')
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate(str("192.168.1.69")))
```
<br>
Or finally, just get the API directly:
```python
import asyncio
from API.bosminer import BOSMinerAPI
from tools.cfg_util.func.parse_data import safe_parse_api_data
async def get_miner_hashrate(ip: str):
# Create a BOSminerAPI object
# Port can be declared manually, if not it defaults to 4028
api = BOSMinerAPI(ip, port=4028)
# Get the API data
summary = await api.summary()
# safe_parse_api_data parses the data from a miner API
# It will raise an APIError (from API import APIError) if there is a problem
data = await safe_parse_api_data(summary, 'SUMMARY', 0, 'MHS 5s')
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate(str("192.168.1.69")))
```
Now that you know that, let's move on to some common API functions that you might want to use.
### Common commands:
* Getting pool data:
```python
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
async def get_miner_pool_data(ip: str):
# Instantiate a Miner Factory to generate miners from their IP
miner_factory = MinerFactory()
# Make the string IP into an IP address
miner_ip = ipaddress.ip_address(ip)
# Wait for the factory to return the miner
miner = await miner_factory.get_miner(miner_ip)
# Get the API data
pools = await miner.api.pools()
# safe_parse_api_data parses the data from a miner API
# It will raise an APIError (from API import APIError) if there is a problem
data = await safe_parse_api_data(pools, 'POOLS')
# parse further from here to get all the pool info you want.
# each pool is on a different index eg:
# data[0] is pool 1
# data[1] is pool 2
# etc
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_pool_data(str("192.168.1.69")))
```
* Getting temperature data:
This one is a bit tougher; lots of miners do this a different way, so you might need to experiment a bit to find what works for you.
BraiinsOS uses the "temps" command, Whatsminers have it in "devs", and Avalonminers (as well as some other miners) put it in "stats",
but the spot I like to try first is "summary" (a more defensive sketch follows the example below).
A pretty good example of really trying to make this robust is in ```cfg_util.func.miners``` in the ```get_formatted_data()``` function.
```python
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
async def get_miner_temperature_data(ip: str):
# Instantiate a Miner Factory to generate miners from their IP
miner_factory = MinerFactory()
# Make the string IP into an IP address
miner_ip = ipaddress.ip_address(ip)
# Wait for the factory to return the miner
miner = await miner_factory.get_miner(miner_ip)
# Get the API data
summary = await miner.api.summary()
# safe_parse_api_data parses the data from a miner API
# It will raise an APIError (from API import APIError) if there is a problem
data = await safe_parse_api_data(summary, 'SUMMARY', 0, "Temperature")
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_temperature_data(str("192.168.1.69")))
```
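Since the command varies by firmware, here is a minimal sketch of a more defensive lookup that branches on what the API supports (assumptions: the "temps" reply uses a "TEMPS" section, and `get_commands()` lists supported commands as in the power example below):
```python
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data


async def get_miner_temperature_robust(ip: str):
    # Instantiate a Miner Factory to generate miners from their IP
    miner_factory = MinerFactory()
    # Wait for the factory to return the miner
    miner = await miner_factory.get_miner(ipaddress.ip_address(ip))
    # BraiinsOS exposes a dedicated "temps" command, so try it first if available
    if "temps" in miner.api.get_commands():
        temps = await miner.api.temps()
        data = await safe_parse_api_data(temps, "TEMPS")
    else:
        # fall back to "summary", which many stock firmwares populate
        summary = await miner.api.summary()
        data = await safe_parse_api_data(summary, "SUMMARY", 0, "Temperature")
    print(data)


if __name__ == '__main__':
    asyncio.new_event_loop().run_until_complete(get_miner_temperature_robust("192.168.1.69"))
```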
* Getting power data:
How about data on the power usage of the miner? This one only works for Whatsminers and BraiinsOS for now, and the Braiins one just uses the tuning setting, but it's good enough for basic uses.
```python
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
async def get_miner_power_data(ip: str):
# Instantiate a Miner Factory to generate miners from their IP
miner_factory = MinerFactory()
# Make the string IP into an IP address
miner_ip = ipaddress.ip_address(ip)
# Wait for the factory to return the miner
miner = await miner_factory.get_miner(miner_ip)
# check if this can be sent the "tunerstatus" command, BraiinsOS only
if "tunerstatus" in miner.api.get_commands():
# send the command
tunerstatus = await miner.api.tunerstatus()
# parse the return
data = await safe_parse_api_data(tunerstatus, 'TUNERSTATUS', 0, "PowerLimit")
else:
# send the command
# whatsminers have the power info in summary
summary = await miner.api.summary()
# parse the return
data = await safe_parse_api_data(summary, 'SUMMARY', 0, "Power")
print(data)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_power_data(str("192.168.1.69")))
```
* Multicommands:
Multicommands make it much easier to get many types of data all at once. The multicommand function will also automatically remove any commands that your API can't handle.
How about we get the current pool user and hashrate in 1 command?
```python
import asyncio
import ipaddress
from miners.miner_factory import MinerFactory
from tools.cfg_util.func.parse_data import safe_parse_api_data
async def get_miner_hashrate_and_pool(ip: str):
# Instantiate a Miner Factory to generate miners from their IP
miner_factory = MinerFactory()
# Make the string IP into an IP address
miner_ip = ipaddress.ip_address(ip)
# Wait for the factory to return the miner
miner = await miner_factory.get_miner(miner_ip)
# Get the API data
api_data = await miner.api.multicommand("pools", "summary")
if "pools" in api_data.keys():
user = await safe_parse_api_data(api_data, "pools", 0, "POOLS", 0, "User")
print(user)
if "summary" in api_data.keys():
hashrate = await safe_parse_api_data(api_data, "summary", 0, "SUMMARY", 0, "MHS av")
print(hashrate)
if __name__ == '__main__':
asyncio.new_event_loop().run_until_complete(get_miner_hashrate_and_pool(str("192.168.1.9")))
```


@@ -1,4 +0,0 @@
from tools.bad_board_util import main
if __name__ == '__main__':
main()


@@ -1,78 +0,0 @@
"""
SAMPLE CONFIG
-------------------
{
"format": {
"version": "1.2+", # -> (default = "1.2+", str, (bos: format.version))
"model": "Antminer S9", # -> (default = "Antminer S9", str, (bos: format.model))
"generator": "upstream_config_util", # -> (hidden, always = "upstream_config_util", str, (bos: format.generator))
"timestamp": 1606842000, # -> (hidden, always = int(time.time()) (current unix time), int, (bos: format.timestamp))
},
"temperature": {
"mode": "auto", # -> (default = "auto", str["auto", "manual", "disabled"], (bos: temp_control.mode))
"target": 70.0, # -> (default = 70.0, float, (bos: temp_control.target_temp))
"hot": 80.0, # -> (default = 80.0, float, (bos: temp_control.hot_temp))
"danger": 90.0, # -> (default = 90.0, float, (bos: temp_control.dangerous_temp))
},
"fans": { # -> (optional, required if temperature["mode"] == "disabled", (bos: fan_control))
"min_fans": 1, # -> (default = 1, int, (bos: fan_control.min_fans))
"speed": 100, # -> (default = 100, 0 < int < 100, (bos: fan_control.speed))
},
"asicboost": True, # -> (default = True, bool, (bos : hash_chain_global.asic_boost))
"pool_groups": [
{
"group_name": "Upstream", # -> (default = "group_{index}" (group_0), str, (bos: group.[index].name))
"quota": 1, # -> (default = 1, int, (bos: group.[index].quota))
"pools": [
{
"url": "stratum+tcp://stratum.slushpool.com:3333", # -> (str, (bos: group.[index].pool.[index].url))
"username": "UpstreamDataInc.test", # -> (str, (bos: group.[index].pool.[index].user))
"password": "123", # -> (str, (bos: group.[index].pool.[index].password))
},
{
"url": "stratum+tcp://us-east.stratum.slushpool.com:3333", # -> (str, (bos: group.[index].pool.[index].url))
"username": "UpstreamDataInc.test", # -> (str, (bos: group.[index].pool.[index].user))
"password": "123", # -> (str, (bos: group.[index].pool.[index].password))
},
{
"url": "stratum+tcp://ca.stratum.slushpool.com:3333", # -> (str, (bos: group.[index].pool.[index].url))
"username": "UpstreamDataInc.test", # -> (str, (bos: group.[index].pool.[index].user))
"password": "123", # -> (str, (bos: group.[index].pool.[index].password))
},
]
},
{
"group_name": "Upstream2", # -> (default = "group_{index}" (group_1), str, (bos: group.[index].name))
"quota": 4, # -> (default = 1, int, (bos: group.[index].quota))
"pools": [
{
"url": "stratum+tcp://stratum.slushpool.com:3333", # -> (str, (bos: group.[index].pool.[index].url))
"username": "UpstreamDataTesting.test", # -> (str, (bos: group.[index].pool.[index].user))
"password": "123", # -> (str, (bos: group.[index].pool.[index].password))
},
{
"url": "stratum+tcp://us-east.stratum.slushpool.com:3333", # -> (str, (bos: group.[index].pool.[index].url))
"username": "UpstreamDataTesting.test", # -> (str, (bos: group.[index].pool.[index].user))
"password": "123", # -> (str, (bos: group.[index].pool.[index].password))
},
{
"url": "stratum+tcp://ca.stratum.slushpool.com:3333", # -> (str, (bos: group.[index].pool.[index].url))
"username": "UpstreamDataTesting.test", # -> (str, (bos: group.[index].pool.[index].user))
"password": "123", # -> (str, (bos: group.[index].pool.[index].password))
},
]
},
],
"autotuning": {
"enabled": True, # -> (default = True, bool), (bos: autotuning.enabled)
"wattage": 900, # -> (default = 900, int, (bos: autotuning.psu_power_limit))
},
"power_scaling": {
"enabled": False, # -> (default = False, bool, (bos: power_scaling.enabled))
"power_step": 100, # -> (default = 100, int, (bos: power_scaling.power_step))
"min_psu_power_limit": 800, # -> (default = 800, int, (bos: power_scaling.min_psu_power_limit))
"shutdown_enabled": True, # -> (default = False, bool, (bos: power_scaling.shutdown_enabled))
"shutdown_duration": 3.0, # -> (default = 3.0, float, (bos: power_scaling.shutdown_duration))
}
}
"""


@@ -1,188 +0,0 @@
import time
import yaml
import toml
async def bos_config_convert(config: dict):
out_config = {}
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temp_control":
out_config["temperature"] = {}
if "mode" in config[opt].keys():
out_config["temperature"]["mode"] = config[opt]["mode"]
else:
out_config["temperature"]["mode"] = "auto"
if "target_temp" in config[opt].keys():
out_config["temperature"]["target"] = config[opt]["target_temp"]
else:
out_config["temperature"]["target"] = 70.0
if "hot_temp" in config[opt].keys():
out_config["temperature"]["hot"] = config[opt]["hot_temp"]
else:
out_config["temperature"]["hot"] = 80.0
if "dangerous_temp" in config[opt].keys():
out_config["temperature"]["danger"] = config[opt]["dangerous_temp"]
else:
out_config["temperature"]["danger"] = 90.0
elif opt == "fan_control":
out_config["fans"] = {}
if "min_fans" in config[opt].keys():
out_config["fans"]["min_fans"] = config[opt]["min_fans"]
else:
out_config["fans"]["min_fans"] = 1
if "speed" in config[opt].keys():
out_config["fans"]["speed"] = config[opt]["speed"]
else:
out_config["fans"]["speed"] = 100
elif opt == "group":
out_config["pool_groups"] = [{} for _item in range(len(config[opt]))]
for idx in range(len(config[opt])):
out_config["pool_groups"][idx]["pools"] = []
out_config["pool_groups"][idx] = {}
if "name" in config[opt][idx].keys():
out_config["pool_groups"][idx]["group_name"] = config[opt][idx]["name"]
else:
out_config["pool_groups"][idx]["group_name"] = f"group_{idx}"
if "quota" in config[opt][idx].keys():
out_config["pool_groups"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["pool_groups"][idx]["quota"] = 1
out_config["pool_groups"][idx]["pools"] = [{} for _item in range(len(config[opt][idx]["pool"]))]
for pool_idx in range(len(config[opt][idx]["pool"])):
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[opt][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx]["username"] = config[opt][idx]["pool"][pool_idx][
"user"]
out_config["pool_groups"][idx]["pools"][pool_idx]["password"] = config[opt][idx]["pool"][pool_idx][
"password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
out_config["autotuning"]["enabled"] = config[opt]["enabled"]
else:
out_config["autotuning"]["enabled"] = True
if "psu_power_limit" in config[opt].keys():
out_config["autotuning"]["wattage"] = config[opt]["psu_power_limit"]
else:
out_config["autotuning"]["wattage"] = 900
elif opt == "power_scaling":
out_config["power_scaling"] = {}
if "enabled" in config[opt].keys():
out_config["power_scaling"]["enabled"] = config[opt]["enabled"]
else:
out_config["power_scaling"]["enabled"] = False
if "power_step" in config[opt].keys():
out_config["power_scaling"]["power_step"] = config[opt]["power_step"]
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return yaml.dump(out_config, sort_keys=False)
async def general_config_convert_bos(yaml_config):
config = yaml.load(yaml_config, Loader=yaml.SafeLoader)
out_config = {}
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temperature":
out_config["temp_control"] = {}
if "mode" in config[opt].keys():
out_config["temp_control"]["mode"] = config[opt]["mode"]
else:
out_config["temp_control"]["mode"] = "auto"
if "target" in config[opt].keys():
out_config["temp_control"]["target_temp"] = config[opt]["target"]
else:
out_config["temp_control"]["target_temp"] = 70.0
if "hot" in config[opt].keys():
out_config["temp_control"]["hot_temp"] = config[opt]["hot"]
else:
out_config["temp_control"]["hot_temp"] = 80.0
if "danger" in config[opt].keys():
out_config["temp_control"]["dangerous_temp"] = config[opt]["danger"]
else:
out_config["temp_control"]["dangerous_temp"] = 90.0
elif opt == "fans":
out_config["fan_control"] = {}
if "min_fans" in config[opt].keys():
out_config["fan_control"]["min_fans"] = config[opt]["min_fans"]
else:
out_config["fan_control"]["min_fans"] = 1
if "speed" in config[opt].keys():
out_config["fan_control"]["speed"] = config[opt]["speed"]
else:
out_config["fan_control"]["speed"] = 100
elif opt == "pool_groups":
out_config["group"] = [{} for _item in range(len(config[opt]))]
for idx in range(len(config[opt])):
out_config["group"][idx]["pools"] = []
out_config["group"][idx] = {}
if "group_name" in config[opt][idx].keys():
out_config["group"][idx]["name"] = config[opt][idx]["group_name"]
else:
out_config["group"][idx]["name"] = f"group_{idx}"
if "quota" in config[opt][idx].keys():
out_config["group"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["group"][idx]["quota"] = 1
out_config["group"][idx]["pool"] = [{} for _item in range(len(config[opt][idx]["pools"]))]
for pool_idx in range(len(config[opt][idx]["pools"])):
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][idx]["pools"][pool_idx]["url"]
out_config["group"][idx]["pool"][pool_idx]["user"] = config[opt][idx]["pools"][pool_idx]["username"]
out_config["group"][idx]["pool"][pool_idx]["password"] = config[opt][idx]["pools"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
out_config["autotuning"]["enabled"] = config[opt]["enabled"]
else:
out_config["autotuning"]["enabled"] = True
if "wattage" in config[opt].keys():
out_config["autotuning"]["psu_power_limit"] = config[opt]["wattage"]
else:
out_config["autotuning"]["psu_power_limit"] = 900
elif opt == "power_scaling":
out_config["power_scaling"] = {}
if "enabled" in config[opt].keys():
out_config["power_scaling"]["enabled"] = config[opt]["enabled"]
else:
out_config["power_scaling"]["enabled"] = False
if "power_step" in config[opt].keys():
out_config["power_scaling"]["power_step"] = config[opt]["power_step"]
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return toml.dumps(out_config)
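
For reference, these two helpers round-trip a config between BOSMiner's TOML layout and the generic YAML layout used by the tool. Below is a minimal usage sketch (not part of the module); it assumes the `config.bos` import path used elsewhere in this changeset, and `bosminer.toml` is a placeholder path.

```python
import asyncio

import toml

# assumed import path, matching the imports used elsewhere in this changeset
from config.bos import bos_config_convert, general_config_convert_bos


async def roundtrip(bosminer_toml: str) -> str:
    # BOSMiner TOML (parsed into a dict) -> generic YAML string
    generic_yaml = await bos_config_convert(toml.loads(bosminer_toml))
    # generic YAML string -> BOSMiner-style TOML string
    return await general_config_convert_bos(generic_yaml)


if __name__ == "__main__":
    with open("bosminer.toml") as file:  # placeholder path
        print(asyncio.run(roundtrip(file.read())))
```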

View File

@@ -1,17 +0,0 @@
config cgminer 'default'
option pool1pw 'x'
option pool2pw 'x'
option pool3pw 'x'
option voltage_level_offset '0'
option fan '10'
option api_allow 'W:0/0'
option power_mode 'balance'
option pool1url 'stratum+tcp://ca.stratum.slushpool.com:3333'
option pool1user 'poolacct.worker1'
option pool2url 'stratum+tcp://ca.stratum.slushpool.com:3333'
option pool2user 'poolacct.worker2'
option pool3url 'stratum+tcp://ca.stratum.slushpool.com:3333'
option pool3user 'poolacct.worker3'
option ntp_enable 'openwrt'

View File

@@ -1,4 +0,0 @@
from tools.cfg_util import main
if __name__ == '__main__':
main()

24
docs/API/api.md Normal file
View File

@@ -0,0 +1,24 @@
# pyasic
## Miner APIs
Each miner has a unique API that is used to communicate with it.
Each of these API types has its own set of commands, and some commands return data that others do not.
Each miner that is a subclass of `BaseMiner` should have an API linked to it as `Miner.api`.
All API implementations inherit from [`BaseMinerAPI`][pyasic.API.BaseMinerAPI], which implements the basic communications protocols.
`BaseMinerAPI` should never be used directly; inherit from it only when creating an API class for a new type of miner (which should be exceedingly rare).
Use one of these implementations instead (a short usage sketch follows the list):
#### [BMMiner API][pyasic.API.bmminer.BMMinerAPI]
#### [BOSMiner API][pyasic.API.bosminer.BOSMinerAPI]
#### [BTMiner API][pyasic.API.btminer.BTMinerAPI]
#### [CGMiner API][pyasic.API.cgminer.CGMinerAPI]
#### [Unknown API][pyasic.API.unknown.UnknownAPI]
<br>
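Once a miner has been identified, its API is used through the `Miner.api` attribute. The following is a minimal sketch, assuming the `summary` command (implemented by most of these API types) and a placeholder IP:
```python
import asyncio

from pyasic.miners.miner_factory import MinerFactory


async def print_summary():
    # the factory attaches the matching API implementation as miner.api
    miner = await MinerFactory().get_miner("192.168.1.75")  # placeholder IP
    # "summary" is available on most of these APIs; support can vary by miner
    print(await miner.api.summary())


if __name__ == "__main__":
    asyncio.run(print_summary())
```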
## BaseMinerAPI
::: pyasic.API.BaseMinerAPI
handler: python
options:
heading_level: 4

7
docs/API/bmminer.md Normal file
View File

@@ -0,0 +1,7 @@
# pyasic
## BMMinerAPI
::: pyasic.API.bmminer.BMMinerAPI
handler: python
options:
show_root_heading: false
heading_level: 4

7
docs/API/bosminer.md Normal file
View File

@@ -0,0 +1,7 @@
# pyasic
## BOSMinerAPI
::: pyasic.API.bosminer.BOSMinerAPI
handler: python
options:
show_root_heading: false
heading_level: 4

7
docs/API/btminer.md Normal file
View File

@@ -0,0 +1,7 @@
# pyasic
## BTMinerAPI
::: pyasic.API.btminer.BTMinerAPI
handler: python
options:
show_root_heading: false
heading_level: 4

7
docs/API/cgminer.md Normal file
View File

@@ -0,0 +1,7 @@
# pyasic
## CGMinerAPI
::: pyasic.API.cgminer.CGMinerAPI
handler: python
options:
show_root_heading: false
heading_level: 4

7
docs/API/unknown.md Normal file
View File

@@ -0,0 +1,7 @@
# pyasic
## UnknownAPI
::: pyasic.API.unknown.UnknownAPI
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,24 @@
# pyasic
## Miner Config
::: pyasic.config.MinerConfig
handler: python
options:
show_root_heading: false
heading_level: 4
## Pool Groups
::: pyasic.config._PoolGroup
handler: python
options:
show_root_heading: false
heading_level: 4
## Pools
::: pyasic.config._Pool
handler: python
options:
show_root_heading: false
heading_level: 4

8
docs/data/miner_data.md Normal file
View File

@@ -0,0 +1,8 @@
# pyasic
## Miner Data
::: pyasic.data.MinerData
handler: python
options:
show_root_heading: false
heading_level: 4
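Since [`MinerData`][pyasic.data.MinerData] is a dataclass, it can be flattened with the standard library for logging or export. A minimal sketch, using a placeholder IP:
```python
import asyncio
from dataclasses import asdict

from pyasic.miners.miner_factory import MinerFactory


async def dump_data():
    miner = await MinerFactory().get_miner("192.168.1.75")  # placeholder IP
    data = await miner.get_data()
    # asdict() flattens the dataclass into a plain dict, ready for JSON or a database
    print(asdict(data))


if __name__ == "__main__":
    asyncio.run(dump_data())
```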

103
docs/index.md Normal file
View File

@@ -0,0 +1,103 @@
# pyasic
*A set of modules for interfacing with many common types of ASIC bitcoin miners, using both their API and SSH.*
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![pypi](https://img.shields.io/pypi/v/pyasic.svg)](https://pypi.org/project/pyasic/)
[![python](https://img.shields.io/pypi/pyversions/pyasic.svg)](https://pypi.org/project/pyasic/)
[![Read the Docs](https://img.shields.io/readthedocs/pyasic)](https://pyasic.readthedocs.io/en/latest/)
[![GitHub](https://img.shields.io/github/license/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/blob/master/LICENSE.txt)
[![CodeFactor Grade](https://img.shields.io/codefactor/grade/github/UpstreamData/pyasic)](https://www.codefactor.io/repository/github/upstreamdata/pyasic)
## Intro
Welcome to pyasic! Pyasic uses an asynchronous method of communicating with asic miners on your network, which makes it super fast.
[Supported Miner Types](miners/supported_types.md)
Getting started with pyasic is easy. First, find your miner (or miners) on the network by scanning for them, or by letting pyasic create the correct class automatically if you already know the IP.
<br>
## Scanning for miners
To scan for miners in pyasic, we use the class [`MinerNetwork`][pyasic.network.MinerNetwork], which abstracts the search, communication, identification, setup, and return of a miner into a single command.
The command [`MinerNetwork().scan_network_for_miners()`][pyasic.network.MinerNetwork.scan_network_for_miners] returns a list that contains any miners found.
```python
import asyncio # asyncio for handling the async part
from pyasic.network import MinerNetwork # miner network handles the scanning
async def scan_miners(): # define async scan function to allow awaiting
# create a miner network
# you can pass in any IP and it will use that IP's /24 subnet (256 addresses).
network = MinerNetwork("192.168.1.50") # this uses the 192.168.1.0-255 network
# scan for miners asynchronously
# this will return the correct type of miners if they are supported with all functionality.
miners = await network.scan_network_for_miners()
print(miners)
if __name__ == "__main__":
asyncio.run(scan_miners()) # run the scan asynchronously with asyncio.run()
```
<br>
## Creating miners based on IP
If you already know the IP address of your miner or miners, you can use the [`MinerFactory`][pyasic.miners.miner_factory.MinerFactory] to communicate and identify the miners.
The function [`MinerFactory().get_miner()`][pyasic.miners.miner_factory.MinerFactory.get_miner] returns the miner found at the specified IP address, or an `UnknownMiner` if it cannot identify the miner.
```python
import asyncio # asyncio for handling the async part
from pyasic.miners.miner_factory import MinerFactory # miner factory handles miners creation
async def get_miners(): # define async scan function to allow awaiting
# get the miner with miner factory
# miner factory is a singleton, and will always use the same object and cache
# this means you can always call it as MinerFactory().get_miner()
miner_1 = await MinerFactory().get_miner("192.168.1.75")
miner_2 = await MinerFactory().get_miner("192.168.1.76")
print(miner_1, miner_2)
if __name__ == "__main__":
asyncio.run(get_miners()) # get the miners asynchronously with asyncio.run()
```
<br>
## Getting data from miners
Once you have your miner(s) identified, you will likely want to get data from them. You can do this using a built-in method on each miner called `get_data()`.
This function will return an instance of the dataclass [`MinerData`][pyasic.data.MinerData] with all the data it can gather from the miner.
Each piece of data in a [`MinerData`][pyasic.data.MinerData] instance can be referenced by getting it as an attribute, such as [`MinerData().hashrate`][pyasic.data.MinerData].
```python
import asyncio
from pyasic.miners.miner_factory import MinerFactory
async def gather_miner_data():
miner = await MinerFactory().get_miner("192.168.1.75")
miner_data = await miner.get_data()
print(miner_data) # all data from the dataclass
print(miner_data.hashrate) # hashrate of the miner in TH/s
if __name__ == "__main__":
asyncio.run(gather_miner_data())
```
You can do something similar with multiple miners; only a small change is needed to gather all the data at once.
```python
import asyncio # asyncio for handling the async part
from pyasic.network import MinerNetwork # miner network handles the scanning
async def gather_miner_data(): # define async scan function to allow awaiting
network = MinerNetwork("192.168.1.50")
miners = await network.scan_network_for_miners()
# we need to asyncio.gather() all the miners get_data() functions to make them run together
all_miner_data = await asyncio.gather(*[miner.get_data() for miner in miners])
for miner_data in all_miner_data:
print(miner_data) # print out all the data one by one
if __name__ == "__main__":
asyncio.run(gather_miner_data())
```

View File

@@ -0,0 +1,59 @@
# pyasic
## X17 Models
## S17
::: pyasic.miners.antminer.bmminer.X17.S17.BMMinerS17
handler: python
options:
show_root_heading: false
heading_level: 4
## S17+
::: pyasic.miners.antminer.bmminer.X17.S17_Plus.BMMinerS17Plus
handler: python
options:
show_root_heading: false
heading_level: 4
## S17 Pro
::: pyasic.miners.antminer.bmminer.X17.S17_Pro.BMMinerS17Pro
handler: python
options:
show_root_heading: false
heading_level: 4
## S17e
::: pyasic.miners.antminer.bmminer.X17.S17e.BMMinerS17e
handler: python
options:
show_root_heading: false
heading_level: 4
## T17
::: pyasic.miners.antminer.bmminer.X17.T17.BMMinerT17
handler: python
options:
show_root_heading: false
heading_level: 4
## T17+
::: pyasic.miners.antminer.bmminer.X17.T17_Plus.BMMinerT17Plus
handler: python
options:
show_root_heading: false
heading_level: 4
## T17e
::: pyasic.miners.antminer.bmminer.X17.T17e.BMMinerT17e
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,52 @@
# pyasic
## X19 Models
## S19
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19
handler: python
options:
show_root_heading: false
heading_level: 4
## S19 Pro
::: pyasic.miners.antminer.bmminer.X19.S19_Pro.BMMinerS19Pro
handler: python
options:
show_root_heading: false
heading_level: 4
## S19a
::: pyasic.miners.antminer.bmminer.X19.S19a.BMMinerS19a
handler: python
options:
show_root_heading: false
heading_level: 4
## S19j
::: pyasic.miners.antminer.bmminer.X19.S19j.BMMinerS19j
handler: python
options:
show_root_heading: false
heading_level: 4
## S19j Pro
::: pyasic.miners.antminer.bmminer.X19.S19j_Pro.BMMinerS19jPro
handler: python
options:
show_root_heading: false
heading_level: 4
## T19
::: pyasic.miners.antminer.bmminer.X19.T19.BMMinerT19
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,26 @@
# pyasic
## X9 Models
## S9
::: pyasic.miners.antminer.bmminer.X9.S9.BMMinerS9
handler: python
options:
show_root_heading: false
heading_level: 4
## S9i
::: pyasic.miners.antminer.bmminer.X9.S9i.BMMinerS9i
handler: python
options:
show_root_heading: false
heading_level: 4
## T9
::: pyasic.miners.antminer.bmminer.X9.T9.BMMinerT9
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,26 @@
# pyasic
## A10X Models
## A1026
::: pyasic.miners.avalonminer.cgminer.A10X.A1026.CGMinerAvalon1026
handler: python
options:
show_root_heading: false
heading_level: 4
## A1047
::: pyasic.miners.avalonminer.cgminer.A10X.A1047.CGMinerAvalon1047
handler: python
options:
show_root_heading: false
heading_level: 4
## A1066
::: pyasic.miners.avalonminer.cgminer.A10X.A1066.CGMinerAvalon1066
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,26 @@
# pyasic
## A7X Models
## A721
::: pyasic.miners.avalonminer.cgminer.A7X.A721.CGMinerAvalon721
handler: python
options:
show_root_heading: false
heading_level: 4
## A741
::: pyasic.miners.avalonminer.cgminer.A7X.A741.CGMinerAvalon741
handler: python
options:
show_root_heading: false
heading_level: 4
## A761
::: pyasic.miners.avalonminer.cgminer.A7X.A761.CGMinerAvalon761
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,26 @@
# pyasic
## A8X Models
## A821
::: pyasic.miners.avalonminer.cgminer.A8X.A821.CGMinerAvalon821
handler: python
options:
show_root_heading: false
heading_level: 4
## A841
::: pyasic.miners.avalonminer.cgminer.A8X.A841.CGMinerAvalon841
handler: python
options:
show_root_heading: false
heading_level: 4
## A851
::: pyasic.miners.avalonminer.cgminer.A8X.A851.CGMinerAvalon851
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,10 @@
# pyasic
## A9X Models
## A921
::: pyasic.miners.avalonminer.cgminer.A9X.A921.CGMinerAvalon921
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## BMMiner Backend
::: pyasic.miners._backends.bmminer.BMMiner
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## BOSMiner Backend
::: pyasic.miners._backends.bosminer.BOSMiner
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## BTMiner Backend
::: pyasic.miners._backends.btminer.BTMiner
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## CGMiner Backend
::: pyasic.miners._backends.cgminer.CGMiner
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## Hiveon Backend
::: pyasic.miners._backends.hiveon.Hiveon
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## Miner Factory
::: pyasic.miners.miner_factory.MinerFactory
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,75 @@
# pyasic
## Supported Miners
The supported miner types are listed below. If your miner (or miner version) is not on the list, please feel free to [open an issue on GitHub](https://github.com/UpstreamData/pyasic/issues) to get it added; a short sketch after the list shows how to check which class pyasic detects for a given IP.
## Miner List
##### pyasic currently supports the following miners and subtypes:
* Braiins OS+ Devices:
* All devices supported by BraiinsOS+ are supported here.
* Stock Firmware Whatsminers:
* M3X Series:
* [M30S][pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30S]:
* [VE10][pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SVE10]
* [VG20][pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SVG20]
* [VE20][pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SVE20]
* [V50][pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SV50]
* [M30S+][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlus]:
* [VF20][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVF20]
* [VE40][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVE40]
* [VG60][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVG60]
* [M30S++][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus_Plus.BTMinerM30SPlusPlus]:
* [VG30][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus_Plus.BTMinerM30SPlusPlusVG30]
* [VG40][pyasic.miners.whatsminer.btminer.M3X.M30S_Plus_Plus.BTMinerM30SPlusPlusVG40]
* [M31S][pyasic.miners.whatsminer.btminer.M3X.M31S.BTMinerM31S]
* [M31S+][pyasic.miners.whatsminer.btminer.M3X.M31S_Plus.BTMinerM31SPlus]:
* [VE20][pyasic.miners.whatsminer.btminer.M3X.M31S_Plus.BTMinerM31SPlusVE20]
* [M32S][pyasic.miners.whatsminer.btminer.M3X.M32S.BTMinerM32S]
* M2X Series:
* [M20][pyasic.miners.whatsminer.btminer.M2X.M20.BTMinerM20]:
* [V10][pyasic.miners.whatsminer.btminer.M2X.M20.BTMinerM20V10]
* [M20S][pyasic.miners.whatsminer.btminer.M2X.M20S.BTMinerM20S]:
* [V10][pyasic.miners.whatsminer.btminer.M2X.M20S.BTMinerM20SV10]
* [V20][pyasic.miners.whatsminer.btminer.M2X.M20S.BTMinerM20SV20]
* [M20S+][pyasic.miners.whatsminer.btminer.M2X.M20S_Plus.BTMinerM20SPlus]
* [M21][pyasic.miners.whatsminer.btminer.M2X.M21.BTMinerM21]
* [M21S][pyasic.miners.whatsminer.btminer.M2X.M21S.BTMinerM21S]:
* [V20][pyasic.miners.whatsminer.btminer.M2X.M21S.BTMinerM21SV20]
* [V60][pyasic.miners.whatsminer.btminer.M2X.M21S.BTMinerM21SV60]
* [M21S+][pyasic.miners.whatsminer.btminer.M2X.M21S_Plus.BTMinerM21SPlus]
* Stock Firmware Antminers:
* X19 Series:
* [S19][pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19]
* [S19 Pro][pyasic.miners.antminer.bmminer.X19.S19_Pro.BMMinerS19Pro]
* [S19a][pyasic.miners.antminer.bmminer.X19.S19a.BMMinerS19a]
* [S19j][pyasic.miners.antminer.bmminer.X19.S19j.BMMinerS19j]
* [S19j Pro][pyasic.miners.antminer.bmminer.X19.S19j_Pro.BMMinerS19jPro]
* [T19][pyasic.miners.antminer.bmminer.X19.T19.BMMinerT19]
* X17 Series:
* [S17][pyasic.miners.antminer.bmminer.X17.S17.BMMinerS17]
* [S17+][pyasic.miners.antminer.bmminer.X17.S17_Plus.BMMinerS17Plus]
* [S17 Pro][pyasic.miners.antminer.bmminer.X17.S17_Pro.BMMinerS17Pro]
* [S17e][pyasic.miners.antminer.bmminer.X17.S17e.BMMinerS17e]
* [T17][pyasic.miners.antminer.bmminer.X17.T17.BMMinerT17]
* [T17+][pyasic.miners.antminer.bmminer.X17.T17_Plus.BMMinerT17Plus]
* [T17e][pyasic.miners.antminer.bmminer.X17.T17e.BMMinerT17e]
* X9 Series:
* [S9][pyasic.miners.antminer.bmminer.X9.S9.BMMinerS9]
* [S9i][pyasic.miners.antminer.bmminer.X9.S9i.BMMinerS9i]
* [T9][pyasic.miners.antminer.bmminer.X9.T9.BMMinerT9]
* Stock Firmware Avalonminers:
* A7X Series:
* [A721][pyasic.miners.avalonminer.cgminer.A7X.A721.CGMinerAvalon721]
* [A741][pyasic.miners.avalonminer.cgminer.A7X.A741.CGMinerAvalon741]
* [A761][pyasic.miners.avalonminer.cgminer.A7X.A761.CGMinerAvalon761]
* A8X Series:
* [A821][pyasic.miners.avalonminer.cgminer.A8X.A821.CGMinerAvalon821]
* [A841][pyasic.miners.avalonminer.cgminer.A8X.A841.CGMinerAvalon841]
* [A851][pyasic.miners.avalonminer.cgminer.A8X.A851.CGMinerAvalon851]
* A9X Series:
* [A921][pyasic.miners.avalonminer.cgminer.A9X.A921.CGMinerAvalon921]
* A10X Series:
* [A1026][pyasic.miners.avalonminer.cgminer.A10X.A1026.CGMinerAvalon1026]
* [A1047][pyasic.miners.avalonminer.cgminer.A10X.A1047.CGMinerAvalon1047]
* [A1066][pyasic.miners.avalonminer.cgminer.A10X.A1066.CGMinerAvalon1066]
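If you are not sure whether a particular unit maps to one of the classes above, a quick check is to let [`MinerFactory`][pyasic.miners.miner_factory.MinerFactory] identify it and print the class it returns. A minimal sketch with a placeholder IP:
```python
import asyncio

from pyasic.miners.miner_factory import MinerFactory


async def identify(ip: str):
    miner = await MinerFactory().get_miner(ip)
    # the class name shows which supported type was matched (UnknownMiner if none)
    print(ip, "->", type(miner).__name__)


if __name__ == "__main__":
    asyncio.run(identify("192.168.1.75"))  # placeholder IP
```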

View File

@@ -0,0 +1,91 @@
# pyasic
## M2X Models
## M20
::: pyasic.miners.whatsminer.btminer.M2X.M20.BTMinerM20
handler: python
options:
show_root_heading: false
heading_level: 4
## M20V10
::: pyasic.miners.whatsminer.btminer.M2X.M20.BTMinerM20V10
handler: python
options:
show_root_heading: false
heading_level: 4
## M20S
::: pyasic.miners.whatsminer.btminer.M2X.M20S.BTMinerM20S
handler: python
options:
show_root_heading: false
heading_level: 4
## M20SV10
::: pyasic.miners.whatsminer.btminer.M2X.M20S.BTMinerM20SV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M20SV20
::: pyasic.miners.whatsminer.btminer.M2X.M20S.BTMinerM20SV20
handler: python
options:
show_root_heading: false
heading_level: 4
## M20S+
::: pyasic.miners.whatsminer.btminer.M2X.M20S_Plus.BTMinerM20SPlus
handler: python
options:
show_root_heading: false
heading_level: 4
## M21
::: pyasic.miners.whatsminer.btminer.M2X.M21.BTMinerM21
handler: python
options:
show_root_heading: false
heading_level: 4
## M21S
::: pyasic.miners.whatsminer.btminer.M2X.M21S.BTMinerM21S
handler: python
options:
show_root_heading: false
heading_level: 4
## M21SV20
::: pyasic.miners.whatsminer.btminer.M2X.M21S.BTMinerM21SV20
handler: python
options:
show_root_heading: false
heading_level: 4
## M21SV60
::: pyasic.miners.whatsminer.btminer.M2X.M21S.BTMinerM21SV60
handler: python
options:
show_root_heading: false
heading_level: 4
## M21S+
::: pyasic.miners.whatsminer.btminer.M2X.M21S_Plus.BTMinerM21SPlus
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,131 @@
# pyasic
## M3X Models
## M30S
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30S
handler: python
options:
show_root_heading: false
heading_level: 4
## M30SVE10
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SVE10
handler: python
options:
show_root_heading: false
heading_level: 4
## M30SVG20
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SVG20
handler: python
options:
show_root_heading: false
heading_level: 4
## M30SVE20
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SVE20
handler: python
options:
show_root_heading: false
heading_level: 4
## M30SV50
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SV50
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlus
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+VF20
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVF20
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+VE40
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVE40
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+VG60
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVG60
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S++
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus_Plus.BTMinerM30SPlusPlus
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S++VG30
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus_Plus.BTMinerM30SPlusPlusVG30
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+VG40
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus_Plus.BTMinerM30SPlusPlusVG40
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S
::: pyasic.miners.whatsminer.btminer.M3X.M31S.BTMinerM31S
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S+
::: pyasic.miners.whatsminer.btminer.M3X.M31S_Plus.BTMinerM31SPlus
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S+VE20
::: pyasic.miners.whatsminer.btminer.M3X.M31S_Plus.BTMinerM31SPlusVE20
handler: python
options:
show_root_heading: false
heading_level: 4
## M32S
::: pyasic.miners.whatsminer.btminer.M3X.M32S.BTMinerM32S
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,8 @@
# pyasic
## Miner Network
::: pyasic.network.MinerNetwork
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -0,0 +1,12 @@
# pyasic
## Miner Network Range
`MinerNetworkRange` is a class used by [`MinerNetwork`][pyasic.network.MinerNetwork] to handle its constructor strings.
The goal is to emulate what `ipaddress.ip_network` produces, so that [`MinerNetwork`][pyasic.network.MinerNetwork] can get a list of hosts from it.
This lets the class act as [`MinerNetwork.network`][pyasic.network.MinerNetwork] and therefore be used for scanning.
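For reference, this is the shape of output it emulates; a minimal sketch using only the standard library:
```python
import ipaddress

# a /24 network like the ones used in the other examples
network = ipaddress.ip_network("192.168.1.0/24")

# hosts() yields the scannable addresses, excluding the network and broadcast addresses
hosts = list(network.hosts())
print(hosts[0], hosts[-1], len(hosts))  # 192.168.1.1 192.168.1.254 254
```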
::: pyasic.network.net_range.MinerNetworkRange
handler: python
options:
show_root_heading: false
heading_level: 4

3
docs/requirements.txt Normal file
View File

@@ -0,0 +1,3 @@
jinja2<3.1.0
mkdocs
mkdocstrings[python]

BIN
icon.ico

Binary file not shown.


View File

@@ -1,29 +0,0 @@
"""
Make a build of the board tool.
Usage: make_board_tool_exe.py build
The build will show up in the build directory.
"""
import datetime
import sys
import os
from cx_Freeze import setup, Executable
base = None
if sys.platform == "win32":
base = "Win32GUI"
version = datetime.datetime.now()
version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\"
},
},
executables=[Executable("board_util.py", base=base, icon="icon.ico", target_name="UpstreamBoardUtil.exe")]
)

View File

@@ -1,31 +0,0 @@
"""
Make a build of the config tool.
Usage: make_config_tool.py build
The build will show up in the build directory.
"""
import datetime
import sys
import os
from cx_Freeze import setup, Executable
base = None
if sys.platform == "win32":
base = "Win32GUI"
version = datetime.datetime.now()
version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md")],
},
},
executables=[Executable("config_tool.py", base=base, icon="icon.ico", target_name="UpstreamCFGUtil.exe")]
)

View File

@@ -1,39 +0,0 @@
from API.bmminer import BMMinerAPI
from API.bosminer import BOSMinerAPI
from API.cgminer import CGMinerAPI
from API.btminer import BTMinerAPI
from API.unknown import UnknownAPI
import ipaddress
class BaseMiner:
def __init__(self, ip: str, api: BMMinerAPI | BOSMinerAPI | CGMinerAPI | BTMinerAPI | UnknownAPI) -> None:
self.ip = ipaddress.ip_address(ip)
self.api = api
self.api_type = None
self.model = None
async def get_board_info(self):
return None
async def get_config(self):
return None
async def get_hostname(self):
return None
async def get_model(self):
return None
async def reboot(self):
return None
async def restart_backend(self):
return None
async def send_config(self, yaml_config):
return None

View File

@@ -1,11 +0,0 @@
from miners.bmminer import BMMiner
class BMMinerS9(BMMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "S9"
self.api_type = "BMMiner"
def __repr__(self) -> str:
return f"BMMinerS9: {str(self.ip)}"

View File

@@ -1,11 +0,0 @@
from miners.bosminer import BOSMiner
class BOSMinerS9(BOSMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "S9"
self.api_type = "BOSMiner"
def __repr__(self) -> str:
return f"BOSminerS9: {str(self.ip)}"

View File

@@ -1,11 +0,0 @@
from miners.cgminer import CGMiner
class CGMinerS9(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "S9"
self.api_type = "CGMiner"
def __repr__(self) -> str:
return f"CGMinerS9: {str(self.ip)}"

View File

@@ -1,11 +0,0 @@
from miners.bmminer import BMMiner
class BMMinerT9(BMMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "T9"
self.api_type = "BMMiner"
def __repr__(self) -> str:
return f"BMMinerT9: {str(self.ip)}"

View File

@@ -1,11 +0,0 @@
from miners.cgminer import CGMiner
class CGMinerT9(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "T9"
self.api_type = "CGMiner"
def __repr__(self) -> str:
return f"CGMinerT9: {str(self.ip)}"

View File

@@ -1,9 +0,0 @@
from miners.bmminer import BMMiner
class BMMinerX17(BMMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
def __repr__(self) -> str:
return f"BMMinerX17: {str(self.ip)}"

View File

@@ -1,11 +0,0 @@
from miners.bosminer import BOSMiner
class BOSMinerX17(BOSMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.api_type = "BOSMiner"
self.nominal_chips = 65
def __repr__(self) -> str:
return f"BOSminerX17: {str(self.ip)}"

View File

@@ -1,10 +0,0 @@
from miners.cgminer import CGMiner
class CGMinerX17(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.api_type = "CGMiner"
def __repr__(self) -> str:
return f"CGMinerX17: {str(self.ip)}"

View File

@@ -1,18 +0,0 @@
from miners.bmminer import BMMiner
class BMMinerX19(BMMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
def __repr__(self) -> str:
return f"BMMinerX19: {str(self.ip)}"
async def get_model(self):
if self.model:
return self.model
version_data = await self.api.version()
if version_data:
self.model = version_data["VERSION"][0]["Type"].replace("Antminer ", "")
return self.model
return None

View File

@@ -1,19 +0,0 @@
from miners.cgminer import CGMiner
class CGMinerX19(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.api_type = "CGMiner"
def __repr__(self) -> str:
return f"CGMinerX19: {str(self.ip)}"
async def get_model(self):
if self.model:
return self.model
version_data = await self.api.version()
if version_data:
self.model = version_data["VERSION"][0]["Type"].replace("Antminer ", "")
return self.model
return None

View File

@@ -1,175 +0,0 @@
from miners.cgminer import CGMiner
import re
class CGMinerAvalon(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "Avalon"
self.api_type = "CGMiner"
self.pattern = re.compile(r'Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s'
'DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s'
'Elapsed\[(?P<Elapsed>[-0-9]+)\]\s'
'MW\[(?P<MW>[-\s0-9]+)\]\s'
'LW\[(?P<LW>[-0-9]+)\]\s'
'MH\[(?P<MH>[-\s0-9]+)\]\s'
'HW\[(?P<HW>[-0-9]+)\]\s'
'Temp\[(?P<Temp>[0-9]+)\]\s'
'TMax\[(?P<TMax>[0-9]+)\]\s'
'Fan\[(?P<Fan>[0-9]+)\]\s'
'FanR\[(?P<FanR>[0-9]+)%\]\s'
'Vi\[(?P<Vi>[-\s0-9]+)\]\s'
'Vo\[(?P<Vo>[-\s0-9]+)\]\s'
'('
'PLL0\[(?P<PLL0>[-\s0-9]+)\]\s'
'PLL1\[(?P<PLL1>[-\s0-9]+)\]\s'
'PLL2\[(?P<PLL2>[-\s0-9]+)\]\s'
'PLL3\[(?P<PLL3>[-\s0-9]+)\]\s'
')?'
'GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s'
'WU\[(?P<WU>[-.0-9]+)\]\s'
'Freq\[(?P<Freq>[.0-9]+)\]\s'
'PG\[(?P<PG>[0-9]+)\]\s'
'Led\[(?P<LED>0|1)\]\s'
'MW0\[(?P<MW0>[0-9\s]+)\]\s'
'MW1\[(?P<MW1>[0-9\s]+)\]\s'
'MW2\[(?P<MW2>[0-9\s]+)\]\s'
'MW3\[(?P<MW3>[0-9\s]+)\]\s'
'TA\[(?P<TA>[0-9]+)\]\s'
'ECHU\[(?P<ECHU>[0-9\s]+)\]\s'
'ECMM\[(?P<ECMM>[0-9]+)\]\s.*'
'FAC0\[(?P<FAC0>[-0-9]+)\]\s'
'OC\[(?P<OC>[0-9]+)\]\s'
'SF0\[(?P<SF0>[-\s0-9]+)\]\s'
'SF1\[(?P<SF1>[-\s0-9]+)\]\s'
'SF2\[(?P<SF2>[-\s0-9]+)\]\s'
'SF3\[(?P<SF3>[-\s0-9]+)\]\s'
'PMUV\[(?P<PMUV>[-\s\S*]+)\]\s'
'PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s'
'PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s'
'PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s'
'PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s'
'PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s'
'PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s'
'PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s'
'PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s'
'PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s'
'PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s'
'PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s'
'PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s'
'PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s'
'PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s'
'PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s'
'PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s'
'PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s'
'PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s'
'PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s'
'PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s'
'PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s'
'PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s'
'PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s'
'PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s'
'PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s'
'PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s'
'PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s'
'PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s'
'PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s'
'PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s'
'PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s'
'PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s'
'PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s'
'PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s'
'PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s'
'PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s'
'PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s'
'PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s'
'PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s'
'PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s'
'PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s'
'PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s'
'PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s'
'PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s'
'PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s'
'PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s'
'PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s'
'PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s'
'PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s'
'PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s'
'PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s'
'PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s'
'PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s'
'PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s'
'PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s'
'PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s'
'PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s'
'PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s'
'PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s'
'PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s'
'PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s'
'PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s'
'PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s'
'PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s'
'PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s'
'PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s'
'PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s'
'PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s'
'PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s'
'PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s'
'PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s'
'PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s'
'PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s'
'PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s'
'PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s'
'PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s'
'PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s'
'PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s'
'PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s'
'PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s'
'PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s'
'PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s'
'PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s'
'PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s'
'PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s'
'PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s'
'PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s'
'PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s'
'PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s'
'PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s'
'PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s'
'PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s'
'PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s'
'PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s'
'PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s'
'PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s'
'PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s'
'PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s'
'PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s'
'PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s'
'PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s'
'PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s'
'PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s'
'PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s'
'PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s'
'PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s'
'PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s'
'PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s'
'FM\[(?P<FM>[0-9]+)\]\s'
'CRC\[(?P<CRC>[0-9\s]+)\]', re.X
)
def __repr__(self) -> str:
return f"CGMinerAvalon: {str(self.ip)}"
def parse_estats(self, estats):
for estat in estats:
for key in estat:
if key[:5] == 'MM ID':
self._parse_estat(estat, key)
def _parse_estat(self, estat, key):
module = estat[key]
module_info = re.match(self.pattern, module)
if not module_info:
return None
module_info = module_info.groupdict()
print(module_info)

View File

@@ -1,11 +0,0 @@
from miners.cgminer import CGMiner
class CGMinerAvalon(CGMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.model = "Avalon"
self.api_type = "CGMiner"
def __repr__(self) -> str:
return f"CGMinerAvalon: {str(self.ip)}"

View File

@@ -1,74 +0,0 @@
from API.bmminer import BMMinerAPI
from miners import BaseMiner
import asyncssh
class BMMiner(BaseMiner):
def __init__(self, ip: str) -> None:
api = BMMinerAPI(ip)
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
def __repr__(self) -> str:
return f"BMMiner: {str(self.ip)}"
async def get_model(self):
if self.model:
return self.model
version_data = await self.api.devdetails()
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
return self.model
return None
async def get_hostname(self) -> str:
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
else:
return "?"
except Exception:
return "?"
async def _get_ssh_connection(self) -> asyncssh.connect:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
return conn
except Exception as e:
print(e)
except OSError:
print(str(self.ip) + ": Connection refused.")
return None
async def send_ssh_command(self, cmd):
result = None
async with (await self._get_ssh_connection()) as conn:
for i in range(3):
try:
result = await conn.run(cmd)
break
except Exception as e:
print(f"{cmd} error: {e}")
if i == 2:
return
continue
return result
async def reboot(self) -> None:
await self.send_ssh_command("reboot")

View File

@@ -1,158 +0,0 @@
from miners import BaseMiner
from API.bosminer import BOSMinerAPI
import asyncssh
import toml
from config.bos import bos_config_convert, general_config_convert_bos
class BOSMiner(BaseMiner):
def __init__(self, ip: str) -> None:
api = BOSMinerAPI(ip)
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.nominal_chips = 63
def __repr__(self) -> str:
return f"BOSminer: {str(self.ip)}"
async def _get_ssh_connection(self) -> asyncssh.connect:
"""Create a new asyncssh connection"""
conn = await asyncssh.connect(str(self.ip), known_hosts=None, username=self.uname, password=self.pwd,
server_host_key_algs=['ssh-rsa'])
# return created connection
return conn
async def send_ssh_command(self, cmd: str) -> None:
"""Sends SSH command to miner."""
# creates result variable
result = None
# runs the command on the miner
async with (await self._get_ssh_connection()) as conn:
# attempt to run command up to 3 times
for i in range(3):
try:
# save result of the command
result = await conn.run(cmd)
break
except Exception as e:
print(f"{cmd} error: {e}")
if i == 2:
return
continue
# let the user know the result of the command
if result is not None:
if result.stdout != "":
print(result.stdout)
if result.stderr != "":
print("ERROR: " + result.stderr)
elif result.stderr != "":
print("ERROR: " + result.stderr)
else:
print(cmd)
async def fault_light_on(self) -> None:
"""Sends command to turn on fault light on the miner."""
await self.send_ssh_command('miner fault_light on')
async def fault_light_off(self) -> None:
"""Sends command to turn off fault light on the miner."""
await self.send_ssh_command('miner fault_light off')
async def restart_backend(self):
await self.restart_bosminer()
async def restart_bosminer(self) -> None:
"""Restart bosminer hashing process."""
await self.send_ssh_command('/etc/init.d/bosminer restart')
async def reboot(self) -> None:
"""Reboots power to the physical miner."""
await self.send_ssh_command('/sbin/reboot')
async def get_config(self) -> None:
async with (await self._get_ssh_connection()) as conn:
async with conn.start_sftp_client() as sftp:
async with sftp.open('/etc/bosminer.toml') as file:
toml_data = toml.loads(await file.read())
cfg = await bos_config_convert(toml_data)
self.config = cfg
async def get_hostname(self) -> str:
"""Attempts to get hostname from miner."""
try:
async with (await self._get_ssh_connection()) as conn:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
except Exception as e:
print(self.ip, e)
return "BOSMiner Unknown"
async def get_model(self):
if self.model:
return self.model + " (BOS)"
version_data = await self.api.devdetails()
if version_data:
if not version_data["DEVDETAILS"] == []:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
return self.model + " (BOS)"
return None
async def send_config(self, yaml_config) -> None:
"""Configures miner with yaml config."""
toml_conf = await general_config_convert_bos(yaml_config)
async with (await self._get_ssh_connection()) as conn:
async with conn.start_sftp_client() as sftp:
async with sftp.open('/etc/bosminer.toml', 'w+') as file:
await file.write(toml_conf)
await conn.run("/etc/init.d/bosminer restart")
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
devdetails = await self.api.devdetails()
if not devdetails.get("DEVDETAILS"):
print("devdetails error", devdetails)
return {0: [], 1: [], 2: []}
devs = devdetails['DEVDETAILS']
boards = {}
offset = devs[0]["ID"]
for board in devs:
boards[board["ID"] - offset] = []
if not board['Chips'] == self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Chips'],
"chip_status": "o" * board['Chips'],
"nominal": nominal
})
return boards
async def get_bad_boards(self) -> dict:
"""Checks for and provides list of non working boards."""
boards = await self.get_board_info()
bad_boards = {}
for board in boards.keys():
for chain in boards[board]:
if not chain["chip_count"] == 63:
if board not in bad_boards.keys():
bad_boards[board] = []
bad_boards[board].append(chain)
return bad_boards
async def check_good_boards(self) -> str:
"""Checks for and provides list for working boards."""
devs = await self.api.devdetails()
bad = 0
chains = devs['DEVDETAILS']
for chain in chains:
if chain['Chips'] == 0:
bad += 1
if not bad > 0:
return str(self.ip)

View File

@@ -1,59 +0,0 @@
from API.btminer import BTMinerAPI
from miners import BaseMiner
from API import APIError
class BTMiner(BaseMiner):
def __init__(self, ip: str) -> None:
api = BTMinerAPI(ip)
self.model = None
super().__init__(ip, api)
self.nominal_chips = [66]  # list, so the chip-count membership check works for subclasses with multiple variants
def __repr__(self) -> str:
return f"BTMiner: {str(self.ip)}"
async def get_model(self):
if self.model:
return self.model
version_data = await self.api.devdetails()
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].split("V")[0]
return self.model
return None
async def get_hostname(self) -> str:
try:
host_data = await self.api.get_miner_info()
if host_data:
return host_data["Msg"]["hostname"]
except APIError:
return "?"
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
devs = await self.api.devs()
if not devs.get("DEVS"):
print("devs error", devs)
return {0: [], 1: [], 2: []}
devs = devs["DEVS"]
boards = {}
offset = devs[0]["ID"]
for board in devs:
boards[board["ID"] - offset] = []
if "Effective Chips" in board.keys():
if not board['Effective Chips'] in self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Effective Chips'],
"chip_status": "o" * board['Effective Chips'],
"nominal": nominal
})
else:
print(board)
return boards

View File

@@ -1,136 +0,0 @@
from miners import BaseMiner
from API.cgminer import CGMinerAPI
from API import APIError
import asyncssh
class CGMiner(BaseMiner):
def __init__(self, ip: str) -> None:
api = CGMinerAPI(ip)
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
def __repr__(self) -> str:
return f"CGMiner: {str(self.ip)}"
async def get_model(self):
if self.model:
return self.model
try:
version_data = await self.api.devdetails()
except APIError:
return None
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
return self.model
return None
async def get_hostname(self) -> str:
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
return data.stdout.strip()
else:
return "?"
except Exception:
return "?"
async def _get_ssh_connection(self) -> asyncssh.connect:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
return conn
except Exception as e:
print(e)
except OSError:
print(str(self.ip) + " Connection refused.")
return None
async def send_ssh_command(self, cmd):
result = None
async with (await self._get_ssh_connection()) as conn:
for i in range(3):
try:
result = await conn.run(cmd)
break
except Exception as e:
print(f"{cmd} error: {e}")
if i == 2:
return
continue
# handle result
self._result_handler(result)
@staticmethod
def _result_handler(result: asyncssh.process.SSHCompletedProcess) -> None:
if result is not None:
# noinspection PyUnresolvedReferences
if len(result.stdout) > 0:
# noinspection PyUnresolvedReferences
print("ssh stdout: \n" + result.stdout)
# noinspection PyUnresolvedReferences
if len(result.stderr) > 0:
# noinspection PyUnresolvedReferences
print("ssh stderr: \n" + result.stderrr)
# noinspection PyUnresolvedReferences
if len(result.stdout) <= 0 and len(result.stderr) <= 0:
print("ssh stdout stderr empty")
# if result.stdout != "":
# print(result.stdout)
# if result.stderr != "":
# print("ERROR: " + result.stderr)
# elif result.stderr != "":
# print("ERROR: " + result.stderr)
# else:
# print(cmd)
async def restart_backend(self) -> None:
await self.restart_cgminer()
async def restart_cgminer(self) -> None:
commands = ['cgminer-api restart',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
await self.send_ssh_command(commands)
async def reboot(self) -> None:
await self.send_ssh_command("reboot")
async def start_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"*/3 * * * * /usr/bin/cgminer-monitor\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
await self.send_ssh_command(commands)
async def stop_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'killall cgminer']
commands = ';'.join(commands)
await self.send_ssh_command(commands)
async def get_config(self) -> None:
async with (await self._get_ssh_connection()) as conn:
command = 'cat /etc/config/cgminer'
result = await conn.run(command, check=True)
self._result_handler(result)
self.config = result.stdout
print(str(self.config))

View File

@@ -1,339 +0,0 @@
from miners.antminer.S9.bosminer import BOSMinerS9
from miners.antminer.S9.bmminer import BMMinerS9
from miners.antminer.S9.cgminer import CGMinerS9
from miners.antminer.T9.hive import HiveonT9
from miners.antminer.T9.cgminer import CGMinerT9
from miners.antminer.T9.bmminer import BMMinerT9
from miners.antminer.X17.bosminer import BOSMinerX17
from miners.antminer.X17.bmminer import BMMinerX17
from miners.antminer.X17.cgminer import CGMinerX17
from miners.antminer.X19.bmminer import BMMinerX19
from miners.antminer.X19.cgminer import CGMinerX19
from miners.whatsminer.M20 import BTMinerM20
from miners.whatsminer.M21 import BTMinerM21
from miners.whatsminer.M30 import BTMinerM30
from miners.whatsminer.M31 import BTMinerM31
from miners.whatsminer.M32 import BTMinerM32
from miners.avalonminer import CGMinerAvalon
from miners.cgminer import CGMiner
from miners.bmminer import BMMiner
from miners.bosminer import BOSMiner
from miners.unknown import UnknownMiner
from API import APIError
import asyncio
import ipaddress
import json
from settings import MINER_FACTORY_GET_VERSION_RETRIES as GET_VERSION_RETRIES
class MinerFactory:
def __init__(self):
self.miners = {}
async def get_miner_generator(self, ips: list):
"""
Get Miner objects from ip addresses using an async generator.
Returns an asynchronous generator containing Miners.
Parameters:
ips: a list of ip addresses to get miners for.
"""
# get the event loop
loop = asyncio.get_event_loop()
# create a list of tasks
scan_tasks = []
# for each miner IP that was passed in, add a task to get its class
for miner in ips:
scan_tasks.append(loop.create_task(self.get_miner(miner)))
# asynchronously run the tasks and return them as they complete
scanned = asyncio.as_completed(scan_tasks)
# loop through and yield the miners as they complete
for miner in scanned:
yield await miner
async def get_miner(self, ip: ipaddress.ip_address):
"""Decide a miner type using the IP address of the miner."""
# check if the miner already exists in cache
if ip in self.miners:
return self.miners[ip]
# if everything fails, the miner is already set to unknown
miner = UnknownMiner(str(ip))
api = None
model = None
# try to get the API multiple times based on retries
for i in range(GET_VERSION_RETRIES):
# get the API type, should be BOSMiner, CGMiner, BMMiner, BTMiner, or None
api = await self._get_api_type(ip)
# if we find the API type, we don't need to loop anymore
if api:
break
# try to get the model multiple times based on retries
for i in range(GET_VERSION_RETRIES):
# get the model, should return some miner model type, e.g. Antminer S9
model = await self._get_miner_model(ip)
# if we find the model type, we don't need to loop anymore
if model:
break
# make sure we have model information
if model:
# check if the miner is an Antminer
if "Antminer" in model:
# S9 logic
if "Antminer S9" in model:
# handle the different API types
if not api:
print(ip)
miner = BOSMinerS9(str(ip))
elif "BOSMiner" in api:
miner = BOSMinerS9(str(ip))
elif "CGMiner" in api:
miner = CGMinerS9(str(ip))
elif "BMMiner" in api:
miner = BMMinerS9(str(ip))
elif "Antminer T9" in model:
if "BMMiner" in api:
if "Hiveon" in model:
# hiveOS, return T9 Hive
miner = HiveonT9(str(ip))
else:
miner = BMMinerT9(str(ip))
elif "CGMiner" in api:
miner = CGMinerT9(str(ip))
# X17 model logic
elif "17" in model:
# handle the different API types
if "BOSMiner" in api:
miner = BOSMinerX17(str(ip))
elif "CGMiner" in api:
miner = CGMinerX17(str(ip))
elif "BMMiner" in api:
miner = BMMinerX17(str(ip))
# X19 logic
elif "19" in model:
# handle the different API types
if "CGMiner" in api:
miner = CGMinerX19(str(ip))
elif "BMMiner" in api:
miner = BMMinerX19(str(ip))
# Avalonminer V8
elif "avalon" in model:
miner = CGMinerAvalon(str(ip))
# Whatsminers
elif "M20" in model:
miner = BTMinerM20(str(ip))
elif "M21" in model:
miner = BTMinerM21(str(ip))
elif "M30" in model:
miner = BTMinerM30(str(ip))
elif "M31" in model:
miner = BTMinerM31(str(ip))
elif "M32" in model:
miner = BTMinerM32(str(ip))
# if we can't find a model, check if we found the API
else:
# return the miner base class with some API if we found it
if api:
if "BOSMiner" in api:
miner = BOSMiner(str(ip))
elif "CGMiner" in api:
miner = CGMiner(str(ip))
elif "BMMiner" in api:
miner = BMMiner(str(ip))
# save the miner to the cache at its IP
self.miners[ip] = miner
# return the miner
return miner
def clear_cached_miners(self):
"""Clear the miner factory cache."""
# empty out self.miners
self.miners = {}
async def _get_miner_model(self, ip: ipaddress.ip_address or str) -> str or None:
# instantiate model as being nothing if getting it fails
model = None
# try block in case of APIError or OSError 121 (Semaphore timeout)
try:
# send the devdetails command to the miner (will fail with no boards/devices)
data = await self._send_api_command(str(ip), "devdetails")
# sometimes data is b'', check for that
if data:
# status check, make sure the command succeeded
if data.get("STATUS"):
if not isinstance(data["STATUS"], str):
# if status is E, it's an error
if data["STATUS"][0].get("STATUS") not in ["I", "S"]:
# try an alternate method if devdetails fails
data = await self._send_api_command(str(ip), "version")
# make sure we have data
if data:
# check the keys are there to get the version
if data.get("VERSION"):
if data["VERSION"][0].get("Type"):
# save the model to be returned later
model = data["VERSION"][0]["Type"]
else:
# make sure devdetails actually contains data; if it's empty, there are no devices
if "DEVDETAILS" in data.keys() and not data["DEVDETAILS"] == []:
# check for model, for most miners
if not data["DEVDETAILS"][0]["Model"] == "":
# model of most miners
model = data["DEVDETAILS"][0]["Model"]
# if model fails, try driver
else:
# some avalonminers have model in driver
model = data["DEVDETAILS"][0]["Driver"]
else:
# if all that fails, try just version
data = await self._send_api_command(str(ip), "version")
if "VERSION" in data.keys():
model = data["VERSION"][0]["Type"]
else:
print(data)
return model
# if there are errors, we just return None
except APIError:
return model
except OSError as e:
if e.winerror == 121:
print(e)
return model
else:
print(ip, e)
return model
async def _send_api_command(self, ip: ipaddress.ip_address or str, command: str):
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(str(ip), 4028)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
print("Semaphore Timeout has Expired.")
return {}
# create the command
cmd = {"command": command}
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
await writer.drain()
# instantiate data
data = b""
# loop to receive all the data
try:
while True:
d = await reader.read(4096)
if not d:
break
data += d
except Exception as e:
print(e)
try:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
str_data = str_data.replace("\n", "")
# fix an error with a bmminer return not having a specific comma that breaks json.loads()
str_data = str_data.replace("}{", "},{")
# parse the json
data = json.loads(str_data)
# handle bad json
except json.decoder.JSONDecodeError as e:
# raise APIError(f"Decode Error: {data}")
data = None
# close the connection
writer.close()
await writer.wait_closed()
return data
async def _get_api_type(self, ip: ipaddress.ip_address or str) -> dict or None:
"""Get data on the version of the miner to return the right miner."""
# instantiate API as None in case something fails
api = None
# try block to handle OSError 121 (Semaphore timeout)
try:
# try the version command, which works on most miners
data = await self._send_api_command(str(ip), "version")
# if we got data back, try to parse it
if data:
# make sure the command succeeded
if data.get("STATUS") and not data.get("STATUS") == "E":
if data["STATUS"][0].get("STATUS") in ["I", "S"]:
# check if there are any BMMiner strings in any of the dict keys
if any("BMMiner" in string for string in data["VERSION"][0].keys()):
api = "BMMiner"
# check if there are any CGMiner strings in any of the dict keys
elif any("CGMiner" in string for string in data["VERSION"][0].keys()):
api = "CGMiner"
# check if there are any BOSMiner strings in any of the dict keys
elif any("BOSminer" in string for string in data["VERSION"][0].keys()):
api = "BOSMiner"
# if all that fails, check the Description to see if it is a whatsminer
elif data.get("Description") and "whatsminer" in data.get("Description"):
api = "BTMiner"
# return the API if we found it
if api:
return api
# if there are errors, return None
except OSError as e:
if e.winerror == 121:
return None
else:
print(ip, e)
return None


@@ -1,17 +0,0 @@
from API.unknown import UnknownAPI
from miners import BaseMiner
class UnknownMiner(BaseMiner):
def __init__(self, ip: str) -> None:
api = UnknownAPI(ip)
super().__init__(ip, api)
def __repr__(self) -> str:
return f"Unknown: {str(self.ip)}"
async def get_model(self):
return "Unknown"
async def get_hostname(self):
return "Unknown"


@@ -1,9 +0,0 @@
from miners.btminer import BTMiner
class BTMinerM20(BTMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
def __repr__(self) -> str:
return f"M20 - BTMiner: {str(self.ip)}"


@@ -1,10 +0,0 @@
from miners.btminer import BTMiner
class BTMinerM21(BTMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.nominal_chips = [105, 66]
def __repr__(self) -> str:
return f"M21 - BTMiner: {str(self.ip)}"


@@ -1,9 +0,0 @@
from miners.btminer import BTMiner
class BTMinerM30(BTMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
def __repr__(self) -> str:
return f"M30- BTMiner: {str(self.ip)}"


@@ -1,10 +0,0 @@
from miners.btminer import BTMiner
class BTMinerM31(BTMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.nominal_chips = [78]
def __repr__(self) -> str:
return f"M31 - BTMiner: {str(self.ip)}"


@@ -1,9 +0,0 @@
from miners.btminer import BTMiner
class BTMinerM32(BTMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
def __repr__(self) -> str:
return f"M32 - BTMiner: {str(self.ip)}"


@@ -1,58 +0,0 @@
import asyncio
from network import MinerNetwork
from miners.bosminer import BOSMiner
async def get_bos_bad_tuners(ip: str = "192.168.1.0", mask: int = 24):
# create a miner network
miner_network = MinerNetwork(ip, mask=mask)
# scan for miners
miners = await miner_network.scan_network_for_miners()
# create an empty list of tasks
tuner_tasks = []
# loop checks if the miner is a BOSMiner
for miner in miners:
# can only do this if it's a subclass of BOSMiner
if BOSMiner in type(miner).__bases__:
tuner_tasks.append(_get_tuner_status(miner))
# run all the tuner status commands
tuner_status = await asyncio.gather(*tuner_tasks)
# create a list of all miners with bad board tuner statuses
bad_tuner_miners = []
for item in tuner_status:
# loop through and get each miners' bad board count
bad_boards = []
for board in item["tuner_status"]:
# if it's not stable or still testing, it's bad
if board["status"] not in ["Stable", "Testing performance profile", "Tuning individual chips"]:
# remove the part about the board refusing to start
bad_boards.append({"board": board["board"],
"error": board["status"].replace("Hashchain refused to start: ", "")})
# if this miner has bad boards, add it to the list of bad board miners
if len(bad_boards) > 0:
bad_tuner_miners.append({"ip": item["ip"], "boards": bad_boards})
# return the list of bad board miners
return bad_tuner_miners
async def _get_tuner_status(miner):
# run the tunerstatus command, since the miner will always be BOSMiner
tuner_status = await miner.api.tunerstatus()
# create a list to add the tuner data to
tuner_data = []
# if we have data, loop through to get the hashchain status
if tuner_status:
for board in tuner_status["TUNERSTATUS"][0]["TunerChainStatus"]:
tuner_data.append({"board": board["HashchainIndex"], "status": board["Status"]})
# return the data along with the IP for later tracking
return {"ip": str(miner.ip), "tuner_status": tuner_data}

44
mkdocs.yml Normal file

@@ -0,0 +1,44 @@
site_name: pyasic
repo_url: https://github.com/UpstreamData/pyasic
nav:
- Introduction: "index.md"
- Miners:
- Supported Miners: "miners/supported_types.md"
- Miner Factory: "miners/miner_factory.md"
- Backends:
- BMMiner: "miners/backends/bmminer.md"
- BOSMiner: "miners/backends/bosminer.md"
- BTMiner: "miners/backends/btminer.md"
- CGMiner: "miners/backends/cgminer.md"
- Hiveon: "miners/backends/hiveon.md"
- Classes:
- Antminer X9: "miners/antminer/X9.md"
- Antminer X17: "miners/antminer/X17.md"
- Antminer X19: "miners/antminer/X19.md"
- Avalon 7X: "miners/avalonminer/A7X.md"
- Avalon 8X: "miners/avalonminer/A8X.md"
- Avalon 9X: "miners/avalonminer/A9X.md"
- Avalon 10X: "miners/avalonminer/A10X.md"
- Whatsminer M2X: "miners/whatsminer/M2X.md"
- Whatsminer M3X: "miners/whatsminer/M3X.md"
- Network:
- Miner Network: "network/miner_network.md"
- Miner Network Range: "network/miner_network_range.md"
- Data:
- Miner Data: "data/miner_data.md"
- Config:
- Miner Config: "config/miner_config.md"
- Advanced:
- Miner APIs:
- Base: "API/api.md"
- BMMiner: "API/bmminer.md"
- BOSMiner: "API/bosminer.md"
- BTMiner: "API/btminer.md"
- CGMiner: "API/cgminer.md"
- Unknown: "API/unknown.md"
plugins:
- mkdocstrings
- search


@@ -1,151 +0,0 @@
import ipaddress
import asyncio
from miners.miner_factory import MinerFactory
from settings import NETWORK_PING_RETRIES as PING_RETRIES, NETWORK_PING_TIMEOUT as PING_TIMEOUT, \
NETWORK_SCAN_THREADS as SCAN_THREADS
class MinerNetwork:
def __init__(self, ip_addr: str or None = None, mask: str or int or None = None) -> None:
self.network = None
self.miner_factory = MinerFactory()
self.ip_addr = ip_addr
self.connected_miners = {}
self.mask = mask
def __len__(self):
return len([item for item in self.get_network().hosts()])
def get_network(self) -> ipaddress.ip_network:
"""Get the network using the information passed to the MinerNetwork or from cache."""
# if we have a network cached already, use that
if self.network:
return self.network
# if there is no IP address passed, default to 192.168.1.0
if not self.ip_addr:
default_gateway = "192.168.1.0"
# if we do have an IP address passed, use that
else:
default_gateway = self.ip_addr
# if there is no subnet mask passed, default to /24
if not self.mask:
subnet_mask = "24"
# if we do have a mask passed, use that
else:
subnet_mask = str(self.mask)
# save the network and return it
self.network = ipaddress.ip_network(f"{default_gateway}/{subnet_mask}", strict=False)
return self.network
async def scan_network_for_miners(self) -> None or list:
"""Scan the network for miners, and return found miners as a list."""
# get the network
local_network = self.get_network()
print(f"Scanning {local_network} for miners...")
# create a list of tasks and miner IPs
scan_tasks = []
miner_ips = []
# for each IP in the network
for host in local_network.hosts():
# make sure we don't exceed the allowed async tasks
if len(scan_tasks) < SCAN_THREADS:
# add the task to the list
scan_tasks.append(self.ping_miner(host))
else:
# run the scan tasks
miner_ips_scan = await asyncio.gather(*scan_tasks)
# add scanned miners to the list of found miners
miner_ips.extend(miner_ips_scan)
# empty the task list
scan_tasks = []
# do a final scan to empty out the list
miner_ips_scan = await asyncio.gather(*scan_tasks)
miner_ips.extend(miner_ips_scan)
# remove all None from the miner list
miner_ips = list(filter(None, miner_ips))
print(f"Found {len(miner_ips)} connected miners...")
# create a list of tasks to get miners
create_miners_tasks = []
# clear cached miners
self.miner_factory.clear_cached_miners()
# try to get each miner found
for miner_ip in miner_ips:
# append to the list of tasks
create_miners_tasks.append(self.miner_factory.get_miner(miner_ip))
# get all miners in the list
miners = await asyncio.gather(*create_miners_tasks)
# return the miner objects
return miners
async def scan_network_generator(self):
"""
Scan the network for miners using an async generator.
Yields the IP address of each responsive miner (or None for unresponsive hosts) as it is scanned.
"""
# get the current event loop
loop = asyncio.get_event_loop()
# get the network
local_network = self.get_network()
# create a list of scan tasks
scan_tasks = []
# for each ip on the network, loop through and scan it
for host in local_network.hosts():
# make sure we don't exceed the allowed async tasks
if len(scan_tasks) >= SCAN_THREADS:
# scanned is a loopable list of awaitables
scanned = asyncio.as_completed(scan_tasks)
# when we scan, empty the scan tasks
scan_tasks = []
# yield miners as they are scanned
for miner in scanned:
yield await miner
# add the ping to the list of tasks if we don't scan
scan_tasks.append(loop.create_task(self.ping_miner(host)))
# do one last scan at the end to close out the list
scanned = asyncio.as_completed(scan_tasks)
for miner in scanned:
yield await miner
@staticmethod
async def ping_miner(ip: ipaddress.ip_address) -> None or ipaddress.ip_address:
for i in range(PING_RETRIES):
connection_fut = asyncio.open_connection(str(ip), 4028)
try:
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(connection_fut, timeout=PING_TIMEOUT)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
await writer.wait_closed()
# ping was successful
return ip
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
except ConnectionRefusedError:
# handle for other connection errors
print(f"{str(ip)}: Connection Refused.")
# ping failed, likely with an exception
except Exception as e:
print(e)
continue
return

396
poetry.lock generated Normal file

@@ -0,0 +1,396 @@
[[package]]
name = "anyio"
version = "3.6.1"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
category = "main"
optional = false
python-versions = ">=3.6.2"
[package.dependencies]
idna = ">=2.8"
sniffio = ">=1.1"
[package.extras]
doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
trio = ["trio (>=0.16)"]
[[package]]
name = "asyncssh"
version = "2.11.0"
description = "AsyncSSH: Asynchronous SSHv2 client and server library"
category = "main"
optional = false
python-versions = ">= 3.6"
[package.dependencies]
cryptography = ">=3.1"
typing-extensions = ">=3.6"
[package.extras]
bcrypt = ["bcrypt (>=3.1.3)"]
fido2 = ["fido2 (>=0.9.2)"]
gssapi = ["gssapi (>=1.2.0)"]
libnacl = ["libnacl (>=1.4.2)"]
pkcs11 = ["python-pkcs11 (>=0.7.0)"]
pyopenssl = ["pyOpenSSL (>=17.0.0)"]
pywin32 = ["pywin32 (>=227)"]
[[package]]
name = "certifi"
version = "2022.6.15"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "cffi"
version = "1.15.1"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
pycparser = "*"
[[package]]
name = "cryptography"
version = "37.0.4"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
sdist = ["setuptools_rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
[[package]]
name = "h11"
version = "0.12.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "httpcore"
version = "0.15.0"
description = "A minimal low-level HTTP client."
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
anyio = ">=3.0.0,<4.0.0"
certifi = "*"
h11 = ">=0.11,<0.13"
sniffio = ">=1.0.0,<2.0.0"
[package.extras]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "httpx"
version = "0.23.0"
description = "The next generation HTTP client."
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
certifi = "*"
httpcore = ">=0.15.0,<0.16.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*"
[package.extras]
brotli = ["brotlicffi", "brotli"]
cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "idna"
version = "3.3"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "passlib"
version = "1.7.4"
description = "comprehensive password hashing framework supporting over 30 schemes"
category = "main"
optional = false
python-versions = "*"
[package.extras]
argon2 = ["argon2-cffi (>=18.2.0)"]
bcrypt = ["bcrypt (>=3.1.0)"]
build_docs = ["sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)", "cloud-sptheme (>=1.10.1)"]
totp = ["cryptography"]
[[package]]
name = "pyaml"
version = "21.10.1"
description = "PyYAML-based module to produce pretty and readable YAML-serialized data"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
PyYAML = "*"
[[package]]
name = "pycparser"
version = "2.21"
description = "C parser in Python"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pyyaml"
version = "6.0"
description = "YAML parser and emitter for Python"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "rfc3986"
version = "1.5.0"
description = "Validating URI References per RFC 3986"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
[package.extras]
idna2008 = ["idna"]
[[package]]
name = "sniffio"
version = "1.2.0"
description = "Sniff out which async library your code is running under"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "typing-extensions"
version = "4.3.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "8d93eafd928d7fed4b0a00d13e46982c2d4310c37acb2faec7e7a477b3f35e9c"
[metadata.files]
anyio = [
{file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"},
{file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"},
]
asyncssh = [
{file = "asyncssh-2.11.0-py3-none-any.whl", hash = "sha256:7302348cbd54c58d3259da17f13e77912de1b005e366b15c8b183d948c8a91a8"},
{file = "asyncssh-2.11.0.tar.gz", hash = "sha256:59c36ce77ba9dda8dd57ad875776e7105ddb1fa851bc039bb3aeadeac4f67b56"},
]
certifi = [
{file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
cffi = [
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
{file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
{file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
{file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
{file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
{file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
{file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
{file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
{file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
{file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
{file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
{file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
{file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
{file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
{file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
{file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
{file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
{file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
{file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
{file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
{file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
{file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
{file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
{file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
]
cryptography = [
{file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"},
{file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6"},
{file = "cryptography-37.0.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046"},
{file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5"},
{file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b"},
{file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8"},
{file = "cryptography-37.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280"},
{file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3"},
{file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59"},
{file = "cryptography-37.0.4-cp36-abi3-win32.whl", hash = "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157"},
{file = "cryptography-37.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327"},
{file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b"},
{file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"},
{file = "cryptography-37.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67"},
{file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d"},
{file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282"},
{file = "cryptography-37.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa"},
{file = "cryptography-37.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441"},
{file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596"},
{file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a"},
{file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"},
{file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"},
]
h11 = [
{file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
{file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
]
httpcore = [
{file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"},
{file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"},
]
httpx = [
{file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"},
{file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
passlib = [
{file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"},
{file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"},
]
pyaml = [
{file = "pyaml-21.10.1-py2.py3-none-any.whl", hash = "sha256:19985ed303c3a985de4cf8fd329b6d0a5a5b5c9035ea240eccc709ebacbaf4a0"},
{file = "pyaml-21.10.1.tar.gz", hash = "sha256:c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"},
]
pycparser = [
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
{file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
{file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
{file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
{file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
{file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
{file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
{file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
{file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
rfc3986 = [
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
]
sniffio = [
{file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"},
{file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
typing-extensions = [
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
{file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
]

255
pyasic/API/__init__.py Normal file

@@ -0,0 +1,255 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import ipaddress
import warnings
import logging
from typing import Union
class APIError(Exception):
def __init__(self, *args):
if args:
self.message = args[0]
else:
self.message = None
def __str__(self):
if self.message:
return f"{self.message}"
else:
return "Incorrect API parameters."
class APIWarning(Warning):
def __init__(self, *args):
if args:
self.message = args[0]
else:
self.message = None
def __str__(self):
if self.message:
return f"{self.message}"
else:
return "Incorrect API parameters."
class BaseMinerAPI:
def __init__(self, ip: str, port: int = 4028) -> None:
# api port, should be 4028
self.port = port
# ip address of the miner
self.ip = ipaddress.ip_address(ip)
def get_commands(self) -> list:
"""Get a list of command accessible to a specific type of API on the miner.
Returns:
A list of all API commands that the miner supports.
"""
return [
func
for func in
# each function in self
dir(self)
if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func
not in [
func
for func in dir(BaseMinerAPI)
if callable(getattr(BaseMinerAPI, func))
]
]
def _check_commands(self, *commands):
allowed_commands = self.get_commands()
return_commands = []
for command in [*commands]:
if command in allowed_commands:
return_commands.append(command)
else:
warnings.warn(
f"""Removing incorrect command: {command}
If you are sure you want to use this command please use API.send_command("{command}", ignore_errors=True) instead.""",
APIWarning,
)
return return_commands
async def multicommand(
self, *commands: str, ignore_x19_error: bool = False
) -> dict:
"""Creates and sends multiple commands as one command to the miner.
Parameters:
*commands: The commands to send as a multicommand to the miner.
ignore_x19_error: Whether or not to ignore errors raised by x19 miners when using the "+" delimited style.
"""
logging.debug(f"{self.ip}: Sending multicommand: {[*commands]}")
# make sure we can actually run each command, otherwise they will fail
commands = self._check_commands(*commands)
# standard multicommand format is "command1+command2"
# doesn't work for S19, which uses the backup _x19_multicommand
command = "+".join(commands)
try:
data = await self.send_command(command, x19_command=ignore_x19_error)
except APIError:
logging.debug(f"{self.ip}: Handling X19 multicommand.")
data = await self._x19_multicommand(*command.split("+"))
logging.debug(f"{self.ip}: Received multicommand data.")
return data
async def _x19_multicommand(self, *commands):
data = None
try:
data = {}
# send all commands individually
for cmd in commands:
data[cmd] = []
data[cmd].append(await self.send_command(cmd, x19_command=True))
except APIError as e:
raise APIError(e)
except Exception as e:
logging.warning(f"{self.ip}: API Multicommand Error: {e}")
return data
async def send_command(
self,
command: Union[str, bytes],
parameters: Union[str, int, bool] = None,
ignore_errors: bool = False,
x19_command: bool = False,
) -> dict:
"""Send an API command to the miner and return the result.
Parameters:
command: The command to send to the miner.
parameters: Any additional parameters to be sent with the command.
ignore_errors: Whether or not to raise APIError when the command returns an error.
x19_command: Whether this is a command for an x19 that may be an issue (such as a "+" delimited multicommand)
Returns:
The return data from the API command parsed from JSON into a dict.
"""
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(str(self.ip), self.port)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
logging.warning("Semaphore Timeout has Expired.")
return {}
# create the command
cmd = {"command": command}
if parameters:
cmd["parameter"] = parameters
# send the command
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
data = b""
# loop to receive all the data
try:
while True:
d = await reader.read(4096)
if not d:
break
data += d
except Exception as e:
logging.warning(f"{self.ip}: API Command Error: - {e}")
data = self._load_api_data(data)
# close the connection
writer.close()
await writer.wait_closed()
# check for if the user wants to allow errors to return
if not ignore_errors:
# validate the command succeeded
validation = self._validate_command_output(data)
if not validation[0]:
if not x19_command:
logging.warning(f"{self.ip}: API Command Error: {validation[1]}")
raise APIError(validation[1])
return data
@staticmethod
def _validate_command_output(data: dict) -> tuple:
# check if the data returned is correct or an error
# if status isn't a key, it is a multicommand
if "STATUS" not in data.keys():
for key in data.keys():
# make sure not to try to turn id into a dict
if not key == "id":
# make sure they succeeded
if "STATUS" in data[key][0].keys():
if data[key][0]["STATUS"][0]["STATUS"] not in ["S", "I"]:
# this is an error
return False, f"{key}: " + data[key][0]["STATUS"][0]["Msg"]
elif "id" not in data.keys():
if data["STATUS"] not in ["S", "I"]:
return False, data["Msg"]
else:
# make sure the command succeeded
if type(data["STATUS"]) == str:
if data["STATUS"] in ["RESTART"]:
return True, None
elif data["STATUS"][0]["STATUS"] not in ("S", "I"):
# this is an error
if data["STATUS"][0]["STATUS"] not in ("S", "I"):
return False, data["STATUS"][0]["Msg"]
return True, None
@staticmethod
def _load_api_data(data: bytes) -> dict:
str_data = None
try:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
str_data = str_data.replace("\n", "")
# fix an error with a bmminer return not having a specific comma that breaks json.loads()
str_data = str_data.replace("}{", "},{")
# fix an error with a bmminer return having a specific comma that breaks json.loads()
str_data = str_data.replace("[,{", "[{")
# fix an error with Avalonminers returning inf and nan
str_data = str_data.replace("inf", "0")
str_data = str_data.replace("nan", "0")
# fix whatever this garbage from avalonminers is `,"id":1}`
if str_data.startswith(","):
str_data = f"{{{str_data[1:]}"
# parse the json
parsed_data = json.loads(str_data)
# handle bad json
except json.decoder.JSONDecodeError as e:
raise APIError(f"Decode Error {e}: {str_data}")
return parsed_data

694
pyasic/API/bmminer.py Normal file

@@ -0,0 +1,694 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyasic.API import BaseMinerAPI
class BMMinerAPI(BaseMinerAPI):
"""An abstraction of the BMMiner API.
Each method corresponds to an API command in BMMiner.
[BMMiner API documentation](https://github.com/jameshilliard/bmminer/blob/master/API-README)
This class abstracts use of the BMMiner API, as well as the
methods for sending commands to it. The `self.send_command()`
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
Parameters:
ip: The IP of the miner to reference the API on.
port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, port: int = 4028) -> None:
super().__init__(ip, port)
async def version(self) -> dict:
"""Get miner version info.
<details>
<summary>Expand</summary>
Returns:
Miner version information.
</details>
"""
return await self.send_command("version")
async def config(self) -> dict:
"""Get some basic configuration info.
<details>
<summary>Expand</summary>
Returns:
## Some miner configuration information:
* ASC Count <- the number of ASCs
* PGA Count <- the number of PGAs
* Pool Count <- the number of Pools
* Strategy <- the current pool strategy
* Log Interval <- the interval of logging
* Device Code <- list of compiled device drivers
* OS <- the current operating system
* Failover-Only <- failover-only setting
* Scan Time <- scan-time setting
* Queue <- queue setting
* Expiry <- expiry setting
</details>
"""
return await self.send_command("config")
async def summary(self) -> dict:
"""Get the status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
The status summary of the miner.
</details>
"""
return await self.send_command("summary")
async def pools(self) -> dict:
"""Get pool information.
<details>
<summary>Expand</summary>
Returns:
Miner pool information.
</details>
"""
return await self.send_command("pools")
async def devs(self) -> dict:
"""Get data on each PGA/ASC with their details.
<details>
<summary>Expand</summary>
Returns:
Data on each PGA/ASC with their details.
</details>
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""Get data on each PGA/ASC with their details, ignoring blacklisted and zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago
Returns:
Data on each PGA/ASC with their details.
</details>
"""
if old:
return await self.send_command("edevs", parameters=old)
else:
return await self.send_command("edevs")
async def pga(self, n: int) -> dict:
"""Get data from PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA number to get data from.
Returns:
Data on the PGA n.
</details>
"""
return await self.send_command("pga", parameters=n)
async def pgacount(self) -> dict:
"""Get data fon all PGAs.
<details>
<summary>Expand</summary>
Returns:
Data on the PGAs connected.
</details>
"""
return await self.send_command("pgacount")
async def switchpool(self, n: int) -> dict:
"""Switch pools to pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to switch to.
Returns:
A confirmation of switching to pool n.
</details>
"""
return await self.send_command("switchpool", parameters=n)
async def enablepool(self, n: int) -> dict:
"""Enable pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to enable.
Returns:
A confirmation of enabling pool n.
</details>
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
<details>
<summary>Expand</summary>
Parameters:
url: The URL of the new pool to add.
username: The user's username on the new pool.
password: The worker password on the new pool.
Returns:
A confirmation of adding the pool.
</details>
"""
return await self.send_command(
"addpool", parameters=f"{url},{username},{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
<details>
<summary>Expand</summary>
Parameters:
*n: Pools in order of priority.
Returns:
A confirmation of setting pool priority.
</details>
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
<details>
<summary>Expand</summary>
Parameters:
n: Pool number to set quota on.
q: Quota to set the pool to.
Returns:
A confirmation of setting pool quota.
</details>
"""
return await self.send_command("poolquota", parameters=f"{n},{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to disable.
Returns:
A confirmation of disabling the pool.
</details>
"""
return await self.send_command("disablepool", parameters=n)
async def removepool(self, n: int) -> dict:
"""Remove a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to remove.
Returns:
A confirmation of removing the pool.
</details>
"""
return await self.send_command("removepool", parameters=n)
async def save(self, filename: str = None) -> dict:
"""Save the config.
<details>
<summary>Expand</summary>
Parameters:
filename: Filename to save the config as.
Returns:
A confirmation of saving the config.
</details>
"""
if filename:
return await self.send_command("save", parameters=filename)
else:
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit BMMiner.
<details>
<summary>Expand</summary>
Returns:
A single "BYE" before BMMiner quits.
</details>
"""
return await self.send_command("quit")
async def notify(self) -> dict:
"""Notify the user of past errors.
<details>
<summary>Expand</summary>
Returns:
The last status and count of each device's problem(s).
</details>
"""
return await self.send_command("notify")
async def privileged(self) -> dict:
"""Check if you have privileged access.
<details>
<summary>Expand</summary>
Returns:
The STATUS section with an error if you have no privileged access, or success if you have privileged access.
</details>
"""
return await self.send_command("privileged")
async def pgaenable(self, n: int) -> dict:
"""Enable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to enable.
Returns:
A confirmation of enabling PGA n.
</details>
"""
return await self.send_command("pgaenable", parameters=n)
async def pgadisable(self, n: int) -> dict:
"""Disable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to disable.
Returns:
A confirmation of disabling PGA n.
</details>
"""
return await self.send_command("pgadisable", parameters=n)
async def pgaidentify(self, n: int) -> dict:
"""Identify PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to identify.
Returns:
A confirmation of identifying PGA n.
</details>
"""
return await self.send_command("pgaidentify", parameters=n)
async def devdetails(self) -> dict:
"""Get data on all devices with their static details.
<details>
<summary>Expand</summary>
Returns:
Data on all devices with their static details.
</details>
"""
return await self.send_command("devdetails")
async def restart(self) -> dict:
"""Restart BMMiner using the API.
<details>
<summary>Expand</summary>
Returns:
A reply informing of the restart.
</details>
"""
return await self.send_command("restart")
async def stats(self) -> dict:
"""Get stats of each device/pool with more than 1 getwork.
<details>
<summary>Expand</summary>
Returns:
Stats of each device/pool with more than 1 getwork.
</details>
"""
return await self.send_command("stats")
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork, ignoring zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago.
Returns:
Stats of each device/pool with more than 1 getwork, ignoring zombie devices.
</details>
"""
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in BMMiner.
<details>
<summary>Expand</summary>
Parameters:
command: The command to check.
Returns:
## Information about a command:
* Exists (Y/N) <- the command exists in this version
* Access (Y/N) <- you have access to use the command
</details>
"""
return await self.send_command("check", parameters=command)
async def failover_only(self, failover: bool) -> dict:
"""Set failover-only.
<details>
<summary>Expand</summary>
Parameters:
failover: What to set failover-only to.
Returns:
Confirmation of setting failover-only.
</details>
"""
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
<details>
<summary>Expand</summary>
Returns:
## Information about the current coin being mined:
* Hash Method <- the hashing algorithm
* Current Block Time <- blocktime as a float, 0 means none
* Current Block Hash <- the hash of the current block, blank means none
* LP <- whether LP is in use on at least 1 pool
* Network Difficulty <- the current network difficulty
</details>
"""
return await self.send_command("coin")
async def debug(self, setting: str) -> dict:
"""Set a debug setting.
<details>
<summary>Expand</summary>
Parameters:
setting: Which setting to switch to.
## Options are:
* Silent
* Quiet
* Verbose
* Debug
* RPCProto
* PerDevice
* WorkTime
* Normal
Returns:
Data on which debug setting was enabled or disabled.
</details>
"""
return await self.send_command("debug", parameters=setting)
async def setconfig(self, name: str, n: int) -> dict:
"""Set config of name to value n.
<details>
<summary>Expand</summary>
Parameters:
name: The name of the config setting to set.
## Options are:
* queue
* scantime
* expiry
n: The value to set the 'name' setting to.
Returns:
The results of setting config of name to n.
</details>
"""
return await self.send_command("setconfig", parameters=f"{name},{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
<details>
<summary>Expand</summary>
Returns:
The stats of all USB devices except ztex.
</details>
"""
return await self.send_command("usbstats")
async def pgaset(self, n: int, opt: str, val: int = None) -> dict:
"""Set PGA option opt to val on PGA n.
<details>
<summary>Expand</summary>
Options:
```
MMQ -
opt: clock
val: 160 - 230 (multiple of 2)
CMR -
opt: clock
val: 100 - 220
```
Parameters:
n: The PGA to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting PGA n with opt[,val].
</details>
"""
if val:
return await self.send_command("pgaset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("pgaset", parameters=f"{n},{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
<details>
<summary>Expand</summary>
Parameters:
which: Which device to zero. Setting this to 'all' zeros all devices. Setting this to 'bestshare' zeros only the bestshare values for each pool and global.
summary: Whether or not to show a full summary.
Returns:
The STATUS section with info on the zero and optional summary.
</details>
"""
return await self.send_command("zero", parameters=f"{which},{summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
<details>
<summary>Expand</summary>
Parameters:
n: The device number to set hotplug on.
Returns:
Information on hotplug status.
</details>
"""
return await self.send_command("hotplug", parameters=n)
async def asc(self, n: int) -> dict:
"""Get data for ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to get data for.
Returns:
The data for ASC device n.
</details>
"""
return await self.send_command("asc", parameters=n)
async def ascenable(self, n: int) -> dict:
"""Enable ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to enable.
Returns:
Confirmation of enabling ASC device n.
</details>
"""
return await self.send_command("ascenable", parameters=n)
async def ascdisable(self, n: int) -> dict:
"""Disable ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to disable.
Returns:
Confirmation of disabling ASC device n.
</details>
"""
return await self.send_command("ascdisable", parameters=n)
async def ascidentify(self, n: int) -> dict:
"""Identify ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to identify.
Returns:
Confirmation of identifying ASC device n.
</details>
"""
return await self.send_command("ascidentify", parameters=n)
async def asccount(self) -> dict:
"""Get data on the number of ASC devices and their info.
<details>
<summary>Expand</summary>
Returns:
Data on all ASC devices.
</details>
"""
return await self.send_command("asccount")
async def ascset(self, n: int, opt: str, val: int = None) -> dict:
"""Set ASC n option opt to value val.
<details>
<summary>Expand</summary>
Sets an option on the ASC n to a value. Allowed options are:
```
AVA+BTB -
opt: freq
val: 256 - 1024 (chip frequency)
BTB -
opt: millivolts
val: 1000 - 1400 (core voltage)
MBA -
opt: reset
val: 0 - # of chips (reset a chip)
opt: freq
val: 0 - # of chips, 100 - 1400 (chip frequency)
opt: ledcount
val: 0 - 100 (chip count for LED)
opt: ledlimit
val: 0 - 200 (LED off below GH/s)
opt: spidelay
val: 0 - 9999 (SPI per I/O delay)
opt: spireset
val: i or s, 0 - 9999 (SPI regular reset)
opt: spisleep
val: 0 - 9999 (SPI reset sleep in ms)
BMA -
opt: volt
val: 0 - 9
opt: clock
val: 0 - 15
```
Parameters:
n: The ASC to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting option opt to value val.
</details>
"""
if val is not None:
return await self.send_command("ascset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("ascset", parameters=f"{n},{opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
An all-in-one status summary of the miner.
</details>
"""
return await self.send_command("lcd")
async def lockstats(self) -> dict:
"""Write lockstats to STDERR.
<details>
<summary>Expand</summary>
Returns:
The result of writing the lock stats to STDERR.
</details>
"""
return await self.send_command("lockstats")

275
pyasic/API/bosminer.py Normal file

@@ -0,0 +1,275 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyasic.API import BaseMinerAPI
class BOSMinerAPI(BaseMinerAPI):
"""An abstraction of the BOSMiner API.
Each method corresponds to an API command in BOSMiner.
[BOSMiner API documentation](https://docs.braiins.com/os/plus-en/Development/1_api.html)
This class abstracts use of the BOSMiner API, as well as the
methods for sending commands to it. The `self.send_command()`
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
Parameters:
ip: The IP of the miner to reference the API on.
port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, port: int = 4028):
super().__init__(ip, port)
async def asccount(self) -> dict:
"""Get data on the number of ASC devices and their info.
<details>
<summary>Expand</summary>
Returns:
Data on all ASC devices.
</details>
"""
return await self.send_command("asccount")
async def asc(self, n: int) -> dict:
"""Get data for ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to get data for.
Returns:
The data for ASC device n.
</details>
"""
return await self.send_command("asc", parameters=n)
async def devdetails(self) -> dict:
"""Get data on all devices with their static details.
<details>
<summary>Expand</summary>
Returns:
Data on all devices with their static details.
</details>
"""
return await self.send_command("devdetails")
async def devs(self) -> dict:
"""Get data on each PGA/ASC with their details.
<details>
<summary>Expand</summary>
Returns:
Data on each PGA/ASC with their details.
</details>
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""Get data on each PGA/ASC with their details, ignoring blacklisted and zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago
Returns:
Data on each PGA/ASC with their details.
</details>
"""
if old:
return await self.send_command("edevs", parameters="old")
else:
return await self.send_command("edevs")
async def pools(self) -> dict:
"""Get pool information.
<details>
<summary>Expand</summary>
Returns:
Miner pool information.
</details>
"""
return await self.send_command("pools")
async def summary(self) -> dict:
"""Get the status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
The status summary of the miner.
</details>
"""
return await self.send_command("summary")
async def stats(self) -> dict:
"""Get stats of each device/pool with more than 1 getwork.
<details>
<summary>Expand</summary>
Returns:
Stats of each device/pool with more than 1 getwork.
</details>
"""
return await self.send_command("stats")
async def version(self) -> dict:
"""Get miner version info.
<details>
<summary>Expand</summary>
Returns:
Miner version information.
</details>
"""
return await self.send_command("version")
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork, ignoring zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago.
Returns:
Stats of each device/pool with more than 1 getwork, ignoring zombie devices.
</details>
"""
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in BOSMiner.
<details>
<summary>Expand</summary>
Parameters:
command: The command to check.
Returns:
## Information about a command:
* Exists (Y/N) <- the command exists in this version
* Access (Y/N) <- you have access to use the command
</details>
"""
return await self.send_command("check", parameters=command)
async def coin(self) -> dict:
"""Get information on the current coin.
<details>
<summary>Expand</summary>
Returns:
## Information about the current coin being mined:
* Hash Method <- the hashing algorithm
* Current Block Time <- blocktime as a float, 0 means none
* Current Block Hash <- the hash of the current block, blank means none
* LP <- whether LP is in use on at least 1 pool
* Network Difficulty <- the current network difficulty
</details>
"""
return await self.send_command("coin")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
An all-in-one status summary of the miner.
</details>
"""
return await self.send_command("lcd")
async def fans(self) -> dict:
"""Get fan data.
<details>
<summary>Expand</summary>
Returns:
Data on the fans of the miner.
</details>
"""
return await self.send_command("fans")
async def tempctrl(self) -> dict:
"""Get temperature control data.
<details>
<summary>Expand</summary>
Returns:
Data about the temp control settings of the miner.
</details>
"""
return await self.send_command("tempctrl")
async def temps(self) -> dict:
"""Get temperature data.
<details>
<summary>Expand</summary>
Returns:
Data on the temps of the miner.
</details>
"""
return await self.send_command("temps")
async def tunerstatus(self) -> dict:
"""Get tuner status data
<details>
<summary>Expand</summary>
Returns:
Data on the status of autotuning.
</details>
"""
return await self.send_command("tunerstatus")
async def pause(self) -> dict:
"""Pause mining.
<details>
<summary>Expand</summary>
Returns:
Confirmation of pausing mining.
</details>
"""
return await self.send_command("pause")
async def resume(self) -> dict:
"""Resume mining.
<details>
<summary>Expand</summary>
Returns:
Confirmation of resuming mining.
</details>
"""
return await self.send_command("resume")

File diff suppressed because it is too large.

694
pyasic/API/cgminer.py Normal file

@@ -0,0 +1,694 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyasic.API import BaseMinerAPI
class CGMinerAPI(BaseMinerAPI):
"""An abstraction of the CGMiner API.
Each method corresponds to an API command in CGMiner.
[CGMiner API documentation](https://github.com/ckolivas/cgminer/blob/master/API-README)
This class abstracts use of the CGMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
Parameters:
ip: The IP of the miner to reference the API on.
port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, port: int = 4028):
super().__init__(ip, port)
async def version(self) -> dict:
"""Get miner version info.
<details>
<summary>Expand</summary>
Returns:
Miner version information.
</details>
"""
return await self.send_command("version")
async def config(self) -> dict:
"""Get some basic configuration info.
<details>
<summary>Expand</summary>
Returns:
## Some miner configuration information:
* ASC Count <- the number of ASCs
* PGA Count <- the number of PGAs
* Pool Count <- the number of Pools
* Strategy <- the current pool strategy
* Log Interval <- the interval of logging
* Device Code <- list of compiled device drivers
* OS <- the current operating system
* Failover-Only <- failover-only setting
* Scan Time <- scan-time setting
* Queue <- queue setting
* Expiry <- expiry setting
</details>
"""
return await self.send_command("config")
async def summary(self) -> dict:
"""Get the status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
The status summary of the miner.
</details>
"""
return await self.send_command("summary")
async def pools(self) -> dict:
"""Get pool information.
<details>
<summary>Expand</summary>
Returns:
Miner pool information.
</details>
"""
return await self.send_command("pools")
async def devs(self) -> dict:
"""Get data on each PGA/ASC with their details.
<details>
<summary>Expand</summary>
Returns:
Data on each PGA/ASC with their details.
</details>
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""Get data on each PGA/ASC with their details, ignoring blacklisted and zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago
Returns:
Data on each PGA/ASC with their details.
</details>
"""
if old:
return await self.send_command("edevs", parameters=old)
else:
return await self.send_command("edevs")
async def pga(self, n: int) -> dict:
"""Get data from PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA number to get data from.
Returns:
Data on the PGA n.
</details>
"""
return await self.send_command("pga", parameters=n)
async def pgacount(self) -> dict:
"""Get data fon all PGAs.
<details>
<summary>Expand</summary>
Returns:
Data on the PGAs connected.
</details>
"""
return await self.send_command("pgacount")
async def switchpool(self, n: int) -> dict:
"""Switch pools to pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to switch to.
Returns:
A confirmation of switching to pool n.
</details>
"""
return await self.send_command("switchpool", parameters=n)
async def enablepool(self, n: int) -> dict:
"""Enable pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to enable.
Returns:
A confirmation of enabling pool n.
</details>
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
<details>
<summary>Expand</summary>
Parameters:
url: The URL of the new pool to add.
username: The users username on the new pool.
password: The worker password on the new pool.
Returns:
A confirmation of adding the pool.
</details>
"""
return await self.send_command(
"addpool", parameters=f"{url},{username},{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
<details>
<summary>Expand</summary>
Parameters:
*n: Pools in order of priority.
Returns:
A confirmation of setting pool priority.
</details>
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
<details>
<summary>Expand</summary>
Parameters:
n: Pool number to set quota on.
q: Quota to set the pool to.
Returns:
A confirmation of setting pool quota.
</details>
"""
return await self.send_command("poolquota", parameters=f"{n},{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to disable.
Returns:
A confirmation of disabling the pool.
</details>
"""
return await self.send_command("disablepool", parameters=n)
async def removepool(self, n: int) -> dict:
"""Remove a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to remove.
Returns:
A confirmation of removing the pool.
</details>
"""
return await self.send_command("removepool", parameters=n)
async def save(self, filename: str = None) -> dict:
"""Save the config.
<details>
<summary>Expand</summary>
Parameters:
filename: Filename to save the config as.
Returns:
A confirmation of saving the config.
</details>
"""
if filename:
return await self.send_command("save", parameters=filename)
else:
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit CGMiner.
<details>
<summary>Expand</summary>
Returns:
A single "BYE" before CGMiner quits.
</details>
"""
return await self.send_command("quit")
async def notify(self) -> dict:
"""Notify the user of past errors.
<details>
<summary>Expand</summary>
Returns:
The last status and count of each devices problem(s).
</details>
"""
return await self.send_command("notify")
async def privileged(self) -> dict:
"""Check if you have privileged access.
<details>
<summary>Expand</summary>
Returns:
The STATUS section with an error if you have no privileged access, or success if you have privileged access.
</details>
"""
return await self.send_command("privileged")
async def pgaenable(self, n: int) -> dict:
"""Enable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to enable.
Returns:
A confirmation of enabling PGA n.
</details>
"""
return await self.send_command("pgaenable", parameters=n)
async def pgadisable(self, n: int) -> dict:
"""Disable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to disable.
Returns:
A confirmation of disabling PGA n.
</details>
"""
return await self.send_command("pgadisable", parameters=n)
async def pgaidentify(self, n: int) -> dict:
"""Identify PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to identify.
Returns:
A confirmation of identifying PGA n.
</details>
"""
return await self.send_command("pgaidentify", parameters=n)
async def devdetails(self) -> dict:
"""Get data on all devices with their static details.
<details>
<summary>Expand</summary>
Returns:
Data on all devices with their static details.
</details>
"""
return await self.send_command("devdetails")
async def restart(self) -> dict:
"""Restart CGMiner using the API.
<details>
<summary>Expand</summary>
Returns:
A reply informing of the restart.
</details>
"""
return await self.send_command("restart")
async def stats(self) -> dict:
"""Get stats of each device/pool with more than 1 getwork.
<details>
<summary>Expand</summary>
Returns:
Stats of each device/pool with more than 1 getwork.
</details>
"""
return await self.send_command("stats")
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork, ignoring zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago.
Returns:
Stats of each device/pool with more than 1 getwork, ignoring zombie devices.
</details>
"""
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in CGMiner.
<details>
<summary>Expand</summary>
Parameters:
command: The command to check.
Returns:
## Information about a command:
* Exists (Y/N) <- the command exists in this version
* Access (Y/N) <- you have access to use the command
</details>
"""
return await self.send_command("check", parameters=command)
async def failover_only(self, failover: bool) -> dict:
"""Set failover-only.
<details>
<summary>Expand</summary>
Parameters:
failover: What to set failover-only to.
Returns:
Confirmation of setting failover-only.
</details>
"""
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
<details>
<summary>Expand</summary>
Returns:
## Information about the current coin being mined:
* Hash Method <- the hashing algorithm
* Current Block Time <- blocktime as a float, 0 means none
* Current Block Hash <- the hash of the current block, blank means none
* LP <- whether LP is in use on at least 1 pool
* Network Difficulty <- the current network difficulty
</details>
"""
return await self.send_command("coin")
async def debug(self, setting: str) -> dict:
"""Set a debug setting.
<details>
<summary>Expand</summary>
Parameters:
setting: Which setting to switch to.
## Options are:
* Silent
* Quiet
* Verbose
* Debug
* RPCProto
* PerDevice
* WorkTime
* Normal
Returns:
Data on which debug setting was enabled or disabled.
</details>
"""
return await self.send_command("debug", parameters=setting)
async def setconfig(self, name: str, n: int) -> dict:
"""Set config of name to value n.
<details>
<summary>Expand</summary>
Parameters:
name: The name of the config setting to set.
## Options are:
* queue
* scantime
* expiry
n: The value to set the 'name' setting to.
Returns:
The results of setting config of name to n.
</details>
"""
return await self.send_command("setconfig", parameters=f"{name},{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
<details>
<summary>Expand</summary>
Returns:
The stats of all USB devices except ztex.
</details>
"""
return await self.send_command("usbstats")
async def pgaset(self, n: int, opt: str, val: int = None) -> dict:
"""Set PGA option opt to val on PGA n.
<details>
<summary>Expand</summary>
Options:
```
MMQ -
opt: clock
val: 160 - 230 (multiple of 2)
CMR -
opt: clock
val: 100 - 220
```
Parameters:
n: The PGA to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting PGA n with opt[,val].
</details>
"""
if val is not None:
return await self.send_command("pgaset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("pgaset", parameters=f"{n},{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
<details>
<summary>Expand</summary>
Parameters:
which: Which device to zero. Setting this to 'all' zeros all devices. Setting this to 'bestshare' zeros only the bestshare values for each pool and global.
summary: Whether or not to show a full summary.
Returns:
The STATUS section with info on the zero and an optional summary.
</details>
"""
return await self.send_command("zero", parameters=f"{which},{summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
<details>
<summary>Expand</summary>
Parameters:
n: The device number to set hotplug on.
Returns:
Information on hotplug status.
</details>
"""
return await self.send_command("hotplug", parameters=n)
async def asc(self, n: int) -> dict:
"""Get data for ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to get data for.
Returns:
The data for ASC device n.
</details>
"""
return await self.send_command("asc", parameters=n)
async def ascenable(self, n: int) -> dict:
"""Enable ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to enable.
Returns:
Confirmation of enabling ASC device n.
</details>
"""
return await self.send_command("ascenable", parameters=n)
async def ascdisable(self, n: int) -> dict:
"""Disable ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to disable.
Returns:
Confirmation of disabling ASC device n.
</details>
"""
return await self.send_command("ascdisable", parameters=n)
async def ascidentify(self, n: int) -> dict:
"""Identify ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to identify.
Returns:
Confirmation of identifying ASC device n.
</details>
"""
return await self.send_command("ascidentify", parameters=n)
async def asccount(self) -> dict:
"""Get data on the number of ASC devices and their info.
<details>
<summary>Expand</summary>
Returns:
Data on all ASC devices.
</details>
"""
return await self.send_command("asccount")
async def ascset(self, n: int, opt: str, val: int = None) -> dict:
"""Set ASC n option opt to value val.
<details>
<summary>Expand</summary>
Sets an option on the ASC n to a value. Allowed options are:
```
AVA+BTB -
opt: freq
val: 256 - 1024 (chip frequency)
BTB -
opt: millivolts
val: 1000 - 1400 (core voltage)
MBA -
opt: reset
val: 0 - # of chips (reset a chip)
opt: freq
val: 0 - # of chips, 100 - 1400 (chip frequency)
opt: ledcount
val: 0 - 100 (chip count for LED)
opt: ledlimit
val: 0 - 200 (LED off below GH/s)
opt: spidelay
val: 0 - 9999 (SPI per I/O delay)
opt: spireset
val: i or s, 0 - 9999 (SPI regular reset)
opt: spisleep
val: 0 - 9999 (SPI reset sleep in ms)
BMA -
opt: volt
val: 0 - 9
opt: clock
val: 0 - 15
```
Parameters:
n: The ASC to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting option opt to value val.
</details>
"""
if val is not None:
return await self.send_command("ascset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("ascset", parameters=f"{n},{opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
An all-in-one status summary of the miner.
</details>
"""
return await self.send_command("lcd")
async def lockstats(self) -> dict:
"""Write lockstats to STDERR.
<details>
<summary>Expand</summary>
Returns:
The result of writing the lock stats to STDERR.
</details>
"""
return await self.send_command("lockstats")


@@ -1,7 +1,28 @@
from API import BaseMinerAPI
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyasic.API import BaseMinerAPI
class UnknownAPI(BaseMinerAPI):
"""An abstraction of an API for a miner which is unknown.
This class is designed to be an intersection of as many miner APIs
and API commands as possible, to ensure it can be used with as many
different miner backends as possible.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
@@ -65,7 +86,7 @@ class UnknownAPI(BaseMinerAPI):
async def addpool(self, url: str, username: str, password: str) -> dict:
# BOS has not implemented this yet, they will in the future
raise NotImplementedError
# return await self.send_command("addpool", parameters=f"{url}, {username}, {password}")
# return await self.send_command("addpool", parameters=f"{url},{username},{password}")
async def removepool(self, n: int) -> dict:
# BOS has not implemented this yet, they will in the future

13
pyasic/__init__.py Normal file

@@ -0,0 +1,13 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

410
pyasic/config/__init__.py Normal file

@@ -0,0 +1,410 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass, asdict
from typing import Literal, List
import random
import string
import toml
import yaml
import json
import time
@dataclass
class _Pool:
"""A dataclass for pool information.
Attributes:
url: URL of the pool.
username: Username on the pool.
password: Worker password on the pool.
"""
url: str = ""
username: str = ""
password: str = ""
def from_dict(self, data: dict):
"""Convert raw pool data as a dict to usable data and save it to this class.
Parameters:
data: The raw config data to convert.
"""
for key in data.keys():
if key == "url":
self.url = data[key]
if key in ["user", "username"]:
self.username = data[key]
if key in ["pass", "password"]:
self.password = data[key]
return self
def as_x19(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an X19 device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "pass": self.password}
return pool
def as_avalon(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a string usable by an Avalonminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = ",".join([self.url, username, self.password])
return pool
def as_bos(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an BOSMiner device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "password": self.password}
return pool
@dataclass
class _PoolGroup:
"""A dataclass for pool group information.
Attributes:
quota: The group quota.
group_name: The name of the pool group.
pools: A list of pools in this group.
"""
quota: int = 1
group_name: str = None
pools: List[_Pool] = None
def __post_init__(self):
if not self.group_name:
self.group_name = "".join(
random.choice(string.ascii_uppercase + string.digits) for _ in range(6)
) # generate random pool group name in case it isn't set
def from_dict(self, data: dict):
"""Convert raw pool group data as a dict to usable data and save it to this class.
Parameters:
data: The raw config data to convert.
"""
pools = []
for key in data.keys():
if key in ["name", "group_name"]:
self.group_name = data[key]
if key == "quota":
self.quota = data[key]
if key in ["pools", "pool"]:
for pool in data[key]:
pools.append(_Pool().from_dict(pool))
self.pools = pools
return self
def as_x19(self, user_suffix: str = None) -> List[dict]:
"""Convert the data in this class to a list usable by an X19 device.
Parameters:
user_suffix: The suffix to append to username.
"""
pools = []
for pool in self.pools[:3]:
pools.append(pool.as_x19(user_suffix=user_suffix))
return pools
def as_avalon(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a dict usable by an Avalonminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
pool = self.pools[0].as_avalon(user_suffix=user_suffix)
return pool
def as_bos(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an BOSMiner device.
Parameters:
user_suffix: The suffix to append to username.
"""
group = {
"name": self.group_name,
"quota": self.quota,
"pool": [pool.as_bos(user_suffix=user_suffix) for pool in self.pools],
}
return group
@dataclass
class MinerConfig:
"""A dataclass for miner configuration information.
Attributes:
pool_groups: A list of pool groups in this config.
temp_mode: The temperature control mode.
temp_target: The target temp.
temp_hot: The hot temp (100% fans).
temp_dangerous: The dangerous temp (shutdown).
minimum_fans: The minimum numbers of fans to run the miner.
fan_speed: Manual fan speed to run the fan at (only if temp_mode == "manual").
asicboost: Whether or not to enable asicboost.
autotuning_enabled: Whether or not to enable autotuning.
autotuning_wattage: The wattage to use when autotuning.
dps_enabled: Whether or not to enable dynamic power scaling.
dps_power_step: The amount of power to reduce autotuning by when the miner reaches dangerous temp.
dps_min_power: The minimum power to reduce autotuning to.
dps_shutdown_enabled: Whether or not to shutdown the miner when `dps_min_power` is reached.
dps_shutdown_duration: The amount of time to shutdown for (in hours).
"""
pool_groups: List[_PoolGroup] = None
temp_mode: Literal["auto", "manual", "disabled"] = "auto"
temp_target: float = 70.0
temp_hot: float = 80.0
temp_dangerous: float = 10.0
minimum_fans: int = None
fan_speed: Literal[tuple(range(101))] = None # noqa - Ignore weird Literal usage
asicboost: bool = None
autotuning_enabled: bool = True
autotuning_wattage: int = 900
dps_enabled: bool = None
dps_power_step: int = None
dps_min_power: int = None
dps_shutdown_enabled: bool = None
dps_shutdown_duration: float = None
def as_dict(self) -> dict:
"""Convert the data in this class to a dict."""
data_dict = asdict(self)
for key in asdict(self).keys():
if data_dict[key] is None:
del data_dict[key]
return data_dict
def as_toml(self) -> str:
"""Convert the data in this class to toml."""
return toml.dumps(self.as_dict())
def as_yaml(self) -> str:
"""Convert the data in this class to yaml."""
return yaml.dump(self.as_dict(), sort_keys=False)
def from_raw(self, data: dict):
"""Convert raw config data as a dict to usable data and save it to this class.
This should be able to handle any raw config file from any miner supported by pyasic.
Parameters:
data: The raw config data to convert.
"""
pool_groups = []
for key in data.keys():
if key == "pools":
pool_groups.append(_PoolGroup().from_dict({"pools": data[key]}))
elif key == "group":
for group in data[key]:
pool_groups.append(_PoolGroup().from_dict(group))
if key == "bitmain-fan-ctrl":
if data[key]:
self.temp_mode = "manual"
if data.get("bitmain-fan-pwm"):
self.fan_speed = int(data["bitmain-fan-pwm"])
elif key == "fan_control":
for _key in data[key].keys():
if _key == "min_fans":
self.minimum_fans = data[key][_key]
elif _key == "speed":
self.fan_speed = data[key][_key]
elif key == "temp_control":
for _key in data[key].keys():
if _key == "mode":
self.temp_mode = data[key][_key]
elif _key == "target_temp":
self.temp_target = data[key][_key]
elif _key == "hot_temp":
self.temp_hot = data[key][_key]
elif _key == "dangerous_temp":
self.temp_dangerous = data[key][_key]
if key == "hash_chain_global":
if data[key].get("asic_boost"):
self.asicboost = data[key]["asic_boost"]
if key == "autotuning":
for _key in data[key].keys():
if _key == "enabled":
self.autotuning_enabled = data[key][_key]
elif _key == "psu_power_limit":
self.autotuning_wattage = data[key][_key]
if key == "power_scaling":
for _key in data[key].keys():
if _key == "enabled":
self.dps_enabled = data[key][_key]
elif _key == "power_step":
self.dps_power_step = data[key][_key]
elif _key == "min_psu_power_limit":
self.dps_min_power = data[key][_key]
elif _key == "shutdown_enabled":
self.dps_shutdown_enabled = data[key][_key]
elif _key == "shutdown_duration":
self.dps_shutdown_duration = data[key][_key]
self.pool_groups = pool_groups
return self
def from_dict(self, data: dict):
"""Convert an output dict of this class back into usable data and save it to this class.
Parameters:
data: The dict config data to convert.
"""
pool_groups = []
for group in data["pool_groups"]:
pool_groups.append(_PoolGroup().from_dict(group))
for key in data.keys():
if hasattr(self, key) and not key == "pool_groups":
setattr(self, key, data[key])
self.pool_groups = pool_groups
return self
def from_toml(self, data: str):
"""Convert output toml of this class back into usable data and save it to this class.
Parameters:
data: The toml config data to convert.
"""
return self.from_dict(toml.loads(data))
def from_yaml(self, data: str):
"""Convert output yaml of this class back into usable data and save it to this class.
Parameters:
data: The yaml config data to convert.
"""
return self.from_dict(yaml.load(data, Loader=yaml.SafeLoader))
def as_x19(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a config usable by an X19 device.
Parameters:
user_suffix: The suffix to append to username.
"""
cfg = {
"pools": self.pool_groups[0].as_x19(user_suffix=user_suffix),
"bitmain-fan-ctrl": False,
"bitmain-fan-pwn": 100,
}
if not self.temp_mode == "auto":
cfg["bitmain-fan-ctrl"] = True
if self.fan_speed:
cfg["bitmain-fan-ctrl"] = str(self.fan_speed)
return json.dumps(cfg)
def as_avalon(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a config usable by an Avalonminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
cfg = self.pool_groups[0].as_avalon(user_suffix=user_suffix)
return cfg
def as_bos(self, model: str = "S9", user_suffix: str = None) -> str:
"""Convert the data in this class to a config usable by an BOSMiner device.
Parameters:
model: The model of the miner to be used in the format portion of the config.
user_suffix: The suffix to append to username.
"""
cfg = {
"format": {
"version": "1.2+",
"model": f"Antminer {model}",
"generator": "Upstream Config Utility",
"timestamp": int(time.time()),
},
"group": [
group.as_bos(user_suffix=user_suffix) for group in self.pool_groups
],
"temp_control": {
"mode": self.temp_mode,
"target_temp": self.temp_target,
"hot_temp": self.temp_hot,
"dangerous_temp": self.temp_dangerous,
},
}
if self.autotuning_enabled or self.autotuning_wattage:
cfg["autotuning"] = {}
if self.autotuning_enabled:
cfg["autotuning"]["enabled"] = self.autotuning_enabled
if self.autotuning_wattage:
cfg["autotuning"]["psu_power_limit"] = self.autotuning_wattage
if self.asicboost:
cfg["hash_chain_global"] = {}
cfg["hash_chain_global"]["asic_boost"] = self.asicboost
if any(
[
getattr(self, item)
for item in [
"dps_enabled",
"dps_power_step",
"dps_min_power",
"dps_shutdown_enabled",
"dps_shutdown_duration",
]
]
):
cfg["power_scaling"] = {}
if self.dps_enabled:
cfg["power_scaling"]["enabled"] = self.dps_enabled
if self.dps_power_step:
cfg["power_scaling"]["power_step"] = self.dps_power_step
if self.dps_min_power:
cfg["power_scaling"]["min_psu_power_limit"] = self.dps_min_power
if self.dps_shutdown_enabled:
cfg["power_scaling"]["shutdown_enabled"] = self.dps_shutdown_enabled
if self.dps_shutdown_duration:
cfg["power_scaling"]["shutdown_duration"] = self.dps_shutdown_duration
return toml.dumps(cfg)
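
A sketch of round-tripping `MinerConfig` through the converters above; the pool URL, worker name, and suffix are placeholders, and the raw dict shape follows what `from_raw` expects for a BOSMiner-style config.

```
from pyasic.config import MinerConfig

raw = {
    "group": [
        {
            "name": "main",
            "quota": 1,
            "pool": [
                {"url": "stratum+tcp://pool.example.com:3333", "user": "worker1", "pass": "x"}
            ],
        }
    ],
    "temp_control": {"mode": "auto", "target_temp": 70.0, "hot_temp": 80.0, "dangerous_temp": 100.0},
}

cfg = MinerConfig().from_raw(raw)
print(cfg.as_bos(model="S19", user_suffix=".rack1"))  # BOSMiner toml config
print(cfg.as_x19(user_suffix=".rack1"))               # X19 json config
print(cfg.as_avalon(user_suffix=".rack1"))            # Avalonminer pool string
```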

149
pyasic/data/__init__.py Normal file

@@ -0,0 +1,149 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
from dataclasses import dataclass, field, asdict
from datetime import datetime
@dataclass
class MinerData:
"""A Dataclass to standardize data returned from miners (specifically `AnyMiner().get_data()`)
Attributes:
ip: The IP of the miner as a str.
datetime: The time and date this data was generated.
mac: The MAC address of the miner as a str.
model: The model of the miner as a str.
hostname: The network hostname of the miner as a str.
hashrate: The hashrate of the miner in TH/s as a float.
left_board_hashrate: The hashrate of the left board of the miner in TH/s as a float.
center_board_hashrate: The hashrate of the center board of the miner in TH/s as a float.
right_board_hashrate: The hashrate of the right board of the miner in TH/s as a float.
temperature_avg: The average temperature across the boards. Calculated automatically.
env_temp: The environment temps as a float.
left_board_temp: The temp of the left PCB as an int.
left_board_chip_temp: The temp of the left board chips as an int.
center_board_temp: The temp of the center PCB as an int.
center_board_chip_temp: The temp of the center board chips as an int.
right_board_temp: The temp of the right PCB as an int.
right_board_chip_temp: The temp of the right board chips as an int.
wattage: Current power draw of the miner as an int.
wattage_limit: Power limit of the miner as an int.
fan_1: The speed of the first fan as an int.
fan_2: The speed of the second fan as an int.
fan_3: The speed of the third fan as an int.
fan_4: The speed of the fourth fan as an int.
left_chips: The number of chips online in the left board as an int.
center_chips: The number of chips online in the center board as an int.
right_chips: The number of chips online in the right board as an int.
total_chips: The total number of chips on all boards. Calculated automatically.
ideal_chips: The ideal number of chips in the miner as an int.
percent_ideal: The percent of total chips out of the ideal count. Calculated automatically.
nominal: Whether the total chip count matches the ideal chip count. Calculated automatically.
pool_split: The pool split as a str.
pool_1_url: The first pool url on the miner as a str.
pool_1_user: The first pool user on the miner as a str.
pool_2_url: The second pool url on the miner as a str.
pool_2_user: The second pool user on the miner as a str.
errors: A list of errors on the miner.
fault_light: Whether or not the fault light is on as a boolean.
"""
ip: str
datetime: datetime = None
mac: str = "00:00:00:00:00:00"
model: str = "Unknown"
hostname: str = "Unknown"
hashrate: float = 0
left_board_hashrate: float = 0
center_board_hashrate: float = 0
right_board_hashrate: float = 0
temperature_avg: int = field(init=False)
env_temp: float = 0
left_board_temp: int = 0
left_board_chip_temp: int = 0
center_board_temp: int = 0
center_board_chip_temp: int = 0
right_board_temp: int = 0
right_board_chip_temp: int = 0
wattage: int = 0
wattage_limit: int = 0
fan_1: int = -1
fan_2: int = -1
fan_3: int = -1
fan_4: int = -1
left_chips: int = 0
center_chips: int = 0
right_chips: int = 0
total_chips: int = field(init=False)
ideal_chips: int = 1
percent_ideal: float = field(init=False)
nominal: int = field(init=False)
pool_split: str = "0"
pool_1_url: str = "Unknown"
pool_1_user: str = "Unknown"
pool_2_url: str = ""
pool_2_user: str = ""
errors: list = field(default_factory=list)
fault_light: Union[bool, None] = None
def __post_init__(self):
self.datetime = datetime.now()
@property
def total_chips(self): # noqa - Skip PyCharm inspection
return self.right_chips + self.center_chips + self.left_chips
@total_chips.setter
def total_chips(self, val):
pass
@property
def nominal(self): # noqa - Skip PyCharm inspection
return self.ideal_chips == self.total_chips
@nominal.setter
def nominal(self, val):
pass
@property
def percent_ideal(self): # noqa - Skip PyCharm inspection
return round((self.total_chips / self.ideal_chips) * 100)
@percent_ideal.setter
def percent_ideal(self, val):
pass
@property
def temperature_avg(self): # noqa - Skip PyCharm inspection
total_temp = 0
temp_count = 0
for temp in [
self.left_board_chip_temp,
self.center_board_chip_temp,
self.right_board_chip_temp,
]:
if temp and not temp == 0:
total_temp += temp
temp_count += 1
if not temp_count > 0:
return 0
return round(total_temp / temp_count)
@temperature_avg.setter
def temperature_avg(self, val):
pass
def asdict(self):
return asdict(self)
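
The computed fields (`total_chips`, `percent_ideal`, `nominal`, `temperature_avg`) are derived from the per-board values, so a quick sketch with placeholder numbers behaves like this:

```
from pyasic.data import MinerData

data = MinerData(ip="192.168.1.30", ideal_chips=189)  # placeholder values
data.left_chips = 63
data.center_chips = 63
data.right_chips = 62
data.left_board_chip_temp = 75
data.right_board_chip_temp = 77

print(data.total_chips)      # 188, summed from the three boards
print(data.percent_ideal)    # 99, rounded percent of ideal_chips
print(data.nominal)          # False, one chip short of ideal
print(data.temperature_avg)  # 76, average of the non-zero chip temps
print(data.asdict()["hashrate"])  # 0, the default
```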


@@ -0,0 +1,16 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .whatsminer import WhatsminerError
from .bos import BraiinsOSError


@@ -0,0 +1,25 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass, asdict
@dataclass
class BraiinsOSError:
"""A Dataclass to handle error codes of BraiinsOS+ miners."""
error_message: str
def asdict(self):
return asdict(self)
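
For example, a handler could record a BraiinsOS error like this (the message text is illustrative, and the import path assumes this file lives in the `pyasic.data.error_codes` package alongside the `__init__.py` shown above):

```
from pyasic.data.error_codes import BraiinsOSError

err = BraiinsOSError(error_message="Hashboard 2 not found.")  # illustrative message
print(err.asdict())  # {'error_message': 'Hashboard 2 not found.'}
```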


@@ -0,0 +1,166 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass, field, asdict
@dataclass
class WhatsminerError:
"""A Dataclass to handle error codes of Whatsminers."""
error_code: int
error_message: str = field(init=False)
@property
def error_message(self): # noqa - Skip PyCharm inspection
if self.error_code in ERROR_CODES:
return ERROR_CODES[self.error_code]
return "Unknown error type."
@error_message.setter
def error_message(self, val):
pass
def asdict(self):
return asdict(self)
ERROR_CODES = {
110: "Intake fan speed error.",
111: "Exhaust fan speed error.",
120: "Intake fan speed error. Fan speed deviates by more than 2000.",
121: "Exhaust fan speed error. Fan speed deviates by more than 2000.",
130: "Intake fan speed error. Fan speed deviates by more than 3000.",
131: "Exhaust fan speed error. Fan speed deviates by more than 3000.",
140: "Fan speed too high.",
200: "Power probing error. No power found.",
201: "Power supply and configuration file don't match.",
202: "Power output voltage error.",
203: "Power protecting due to high environment temperature.",
204: "Power current protecting due to high environment temperature.",
205: "Power current error.",
206: "Power input low voltage error.",
207: "Power input current protecting due to bad power input.",
210: "Power error.",
213: "Power input voltage and current do not match power output.",
216: "Power remained unchanged for a long time.",
217: "Power set enable error.",
218: "Power input voltage is lower than 230V for high power mode.",
233: "Power output high temperature protection error.",
234: "Power output high temperature protection error.",
235: "Power output high temperature protection error.",
236: "Power output high current protection error.",
237: "Power output high current protection error.",
238: "Power output high current protection error.",
239: "Power output high voltage protection error.",
240: "Power output low voltage protection error.",
241: "Power output current imbalance error.",
243: "Power input high temperature protection error.",
244: "Power input high temperature protection error.",
245: "Power input high temperature protection error.",
246: "Power input high voltage protection error.",
247: "Power input high voltage protection error.",
248: "Power input high current protection error.",
249: "Power input high current protection error.",
250: "Power input low voltage protection error.",
251: "Power input low voltage protection error.",
253: "Power supply fan error.",
254: "Power supply fan error.",
255: "Power output high power protection error.",
256: "Power output high power protection error.",
257: "Input over current protection of power supply on primary side.",
263: "Power communication warning.",
264: "Power communication error.",
267: "Power watchdog protection.",
268: "Power output high current protection.",
269: "Power input high current protection.",
270: "Power input high voltage protection.",
271: "Power input low voltage protection.",
272: "Excessive power supply output warning.",
273: "Power input too high warning.",
274: "Power fan warning.",
275: "Power high temperature warning.",
300: "Right board temperature sensor detection error.",
301: "Center board temperature sensor detection error.",
302: "Left board temperature sensor detection error.",
320: "Right board temperature reading error.",
321: "Center board temperature reading error.",
322: "Left board temperature reading error.",
329: "Control board temperature sensor communication error.",
350: "Right board temperature protecting.",
351: "Center board temperature protecting.",
352: "Left board temperature protecting.",
360: "Hashboard high temperature error.",
410: "Right board eeprom detection error.",
411: "Center board eeprom detection error.",
412: "Left board eeprom detection error.",
420: "Right board eeprom parsing error.",
421: "Center board eeprom parsing error.",
422: "Left board eeprom parsing error.",
430: "Right board chip bin type error.",
431: "Center board chip bin type error.",
432: "Left board chip bin type error.",
440: "Right board eeprom chip number X error.",
441: "Center board eeprom chip number X error.",
442: "Left board eeprom chip number X error.",
450: "Right board eeprom xfer error.",
451: "Center board eeprom xfer error.",
452: "Left board eeprom xfer error.",
510: "Right board miner type error.",
511: "Center board miner type error.",
512: "Left board miner type error.",
520: "Right board bin type error.",
521: "Center board bin type error.",
522: "Left board bin type error.",
530: "Right board not found.",
531: "Center board not found.",
532: "Left board not found.",
540: "Right board error reading chip id.",
541: "Center board error reading chip id.",
542: "Left board error reading chip id.",
550: "Right board has bad chips.",
551: "Center board has bad chips.",
552: "Left board has bad chips.",
560: "Right board loss of balance error.",
561: "Center board loss of balance error.",
562: "Left board loss of balance error.",
600: "Environment temperature is too high.",
610: "Environment temperature is too high for high performance mode.",
701: "Control board no support chip.",
710: "Control board rebooted as an exception.",
712: "Control board rebooted as an exception.",
800: "CGMiner checksum error.",
801: "System monitor checksum error.",
802: "Remote daemon checksum error.",
2010: "All pools are disabled.",
2020: "Pool 0 connection failed.",
2021: "Pool 1 connection failed.",
2022: "Pool 2 connection failed.",
2030: "High rejection rate on pool.",
2040: "The pool does not support asicboost mode.",
2310: "Hashrate is too low.",
2320: "Hashrate is too low.",
2340: "Hashrate loss is too high.",
2350: "Hashrate loss is too high.",
5070: "Right hashboard water velocity is abnormal.",
5071: "Center hashboard water velocity is abnormal.",
5072: "Left hashboard water velocity is abnormal.",
5110: "Right hashboard frequency up timeout.",
5111: "Center hashboard frequency up timeout.",
5112: "Left hashboard frequency up timeout.",
8410: "Software version error.",
100001: "/antiv/signature illegal.",
100002: "/antiv/dig/init.d illegal.",
100003: "/antiv/dig/pf_partial.dig illegal.",
}
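
A short sketch of how an error code maps to its message (the import path again assumes the `pyasic.data.error_codes` package shown earlier):

```
from pyasic.data.error_codes import WhatsminerError

err = WhatsminerError(error_code=2020)
print(err.error_message)      # "Pool 0 connection failed."
print(err.asdict())           # {'error_code': 2020, 'error_message': 'Pool 0 connection failed.'}

unknown = WhatsminerError(error_code=99999)
print(unknown.error_message)  # "Unknown error type."
```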

45
pyasic/logger/__init__.py Normal file

@@ -0,0 +1,45 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from pyasic.settings import PyasicSettings
def init_logger():
if PyasicSettings().logfile:
logging.basicConfig(
filename="logfile.txt",
filemode="a",
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
else:
logging.basicConfig(
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
_logger = logging.getLogger()
if PyasicSettings().debug:
_logger.setLevel(logging.DEBUG)
logging.getLogger("asyncssh").setLevel(logging.DEBUG)
else:
_logger.setLevel(logging.WARNING)
logging.getLogger("asyncssh").setLevel(logging.WARNING)
return _logger
logger = init_logger()
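
Since the logger is configured once at import time from `PyasicSettings`, callers simply import the module-level `logger`; for instance:

```
from pyasic.logger import logger

logger.warning("scan finished with 3 unreachable miners")
logger.debug("only shown when PyasicSettings().debug is enabled")
```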

116
pyasic/miners/__init__.py Normal file

@@ -0,0 +1,116 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncssh
import logging
import ipaddress
from pyasic.data import MinerData
class BaseMiner:
def __init__(self, *args) -> None:
self.ip = None
self.uname = "root"
self.pwd = "admin"
self.api = None
self.api_type = None
self.model = None
self.light = None
self.hostname = None
self.nominal_chips = 1
self.version = None
self.fan_count = 2
self.config = None
def __repr__(self):
return f"{'' if not self.api_type else self.api_type} {'' if not self.model else self.model}: {str(self.ip)}"
def __lt__(self, other):
return ipaddress.ip_address(self.ip) < ipaddress.ip_address(other.ip)
def __gt__(self, other):
return ipaddress.ip_address(self.ip) > ipaddress.ip_address(other.ip)
def __eq__(self, other):
return ipaddress.ip_address(self.ip) == ipaddress.ip_address(other.ip)
async def _get_ssh_connection(self) -> asyncssh.SSHClientConnection:
"""Create a new asyncssh connection"""
try:
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=["ssh-rsa"],
)
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username="root",
password="admin",
server_host_key_algs=["ssh-rsa"],
)
return conn
except Exception as e:
raise e
except OSError as e:
logging.warning(f"Connection refused: {self}")
raise e
except Exception as e:
raise e
async def fault_light_on(self) -> bool:
return False
async def fault_light_off(self) -> bool:
return False
async def send_file(self, src, dest):
async with (await self._get_ssh_connection()) as conn:
await asyncssh.scp(src, (conn, dest))
async def check_light(self):
return self.light
async def get_board_info(self):
return None
async def get_config(self):
return None
async def get_hostname(self):
return None
async def get_model(self):
return None
async def reboot(self):
return False
async def restart_backend(self):
return False
async def send_config(self, *args, **kwargs):
return None
async def get_mac(self):
return None
async def get_data(self) -> MinerData:
return MinerData(ip=str(self.ip))
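
A minimal, illustrative subclass showing the pattern the concrete backend handlers follow when extending `BaseMiner`; this class is not part of the changeset.

```
from pyasic.data import MinerData
from pyasic.miners import BaseMiner


class DummyMiner(BaseMiner):
    """Illustrative handler that overrides only what it can answer."""

    def __init__(self, ip: str) -> None:
        super().__init__(ip)
        self.ip = ip
        self.api_type = "Dummy"
        self.model = "Example"

    async def get_hostname(self) -> str:
        return "dummy-miner"

    async def get_data(self) -> MinerData:
        data = MinerData(ip=str(self.ip))
        data.model = self.model
        data.hostname = await self.get_hostname()
        return data
```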


@@ -0,0 +1,19 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .bmminer import BMMiner
from .bosminer import BOSMiner
from .btminer import BTMiner
from .cgminer import CGMiner
from .hiveon import Hiveon


@@ -0,0 +1,308 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ipaddress
import logging
from typing import Union
from pyasic.API.bmminer import BMMinerAPI
from pyasic.miners import BaseMiner
from pyasic.data import MinerData
from pyasic.settings import PyasicSettings
class BMMiner(BaseMiner):
"""Base handler for BMMiner based miners."""
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.ip = ipaddress.ip_address(ip)
self.api = BMMinerAPI(ip)
self.api_type = "BMMiner"
self.uname = "root"
self.pwd = "admin"
async def get_model(self) -> Union[str, None]:
"""Get miner model.
Returns:
Miner model or None.
"""
# check if model is cached
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
# get devdetails data
version_data = await self.api.devdetails()
# if we get data back, parse it for model
if version_data:
# handle Antminer BMMiner as a base
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
# if we don't get devdetails, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_hostname(self) -> str:
"""Get miner hostname.
Returns:
The hostname of the miner as a string or "?"
"""
if self.hostname:
return self.hostname
try:
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# if we get the connection, check hostname
if conn is not None:
# get output of the hostname file
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
# return hostname data
logging.debug(f"Found hostname for {self.ip}: {host}")
self.hostname = host
return self.hostname
else:
# return ? if we fail to get hostname with no ssh connection
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception:
# return ? if we fail to get hostname with an exception
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def send_ssh_command(self, cmd: str) -> Union[str, None]:
"""Send a command to the miner over ssh.
Parameters:
cmd: The command to run.
Returns:
Result of the command or None.
"""
result = None
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# if the final attempt fails, return None
if i == 2:
return None
continue
# return the result, either command output or None
return result
async def get_config(self) -> Union[list, None]:
"""Get the pool configuration of the miner.
Returns:
Pool config data or None.
"""
# get pool data
pools = await self.api.pools()
pool_data = []
# ensure we got pool data
if not pools:
return
# parse all the pools
for pool in pools["POOLS"]:
pool_data.append({"url": pool["URL"], "user": pool["User"], "pwd": "123"})
return pool_data
async def reboot(self) -> bool:
"""Reboot the miner.
Returns:
The result of rebooting the miner.
"""
logging.debug(f"{self}: Sending reboot command.")
_ret = await self.send_ssh_command("reboot")
logging.debug(f"{self}: Reboot command completed.")
if isinstance(_ret, str):
return True
return False
async def get_data(self) -> MinerData:
"""Get data from the miner.
Returns:
A [`MinerData`][pyasic.data.MinerData] instance containing the miner's data.
"""
data = MinerData(ip=str(self.ip), ideal_chips=self.nominal_chips * 3)
board_offset = -1
fan_offset = -1
model = await self.get_model()
hostname = await self.get_hostname()
mac = await self.get_mac()
if model:
data.model = model
if hostname:
data.hostname = hostname
if mac:
data.mac = mac
data.fault_light = await self.check_light()
miner_data = None
for i in range(PyasicSettings().miner_get_data_retries):
miner_data = await self.api.multicommand(
"summary", "pools", "stats", ignore_x19_error=True
)
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")[0]
pools = miner_data.get("pools")[0]
stats = miner_data.get("stats")[0]
if summary:
hr = summary.get("SUMMARY")
if hr:
if len(hr) > 0:
hr = hr[0].get("GHS av")
if hr:
data.hashrate = round(hr / 1000, 2)
if stats:
boards = stats.get("STATS")
if boards:
if len(boards) > 0:
for board_num in range(1, 16, 5):
for _b_num in range(5):
b = boards[1].get(f"chain_acn{board_num + _b_num}")
if b and not b == 0 and board_offset == -1:
board_offset = board_num
if board_offset == -1:
board_offset = 1
data.left_chips = boards[1].get(f"chain_acn{board_offset}")
data.center_chips = boards[1].get(f"chain_acn{board_offset+1}")
data.right_chips = boards[1].get(f"chain_acn{board_offset+2}")
data.left_board_hashrate = round(
float(boards[1].get(f"chain_rate{board_offset}")) / 1000, 2
)
data.center_board_hashrate = round(
float(boards[1].get(f"chain_rate{board_offset+1}")) / 1000, 2
)
data.right_board_hashrate = round(
float(boards[1].get(f"chain_rate{board_offset+2}")) / 1000, 2
)
if stats:
temp = stats.get("STATS")
if temp:
if len(temp) > 1:
for fan_num in range(1, 8, 4):
for _f_num in range(4):
f = temp[1].get(f"fan{fan_num + _f_num}")
if f and not f == 0 and fan_offset == -1:
fan_offset = fan_num
if fan_offset == -1:
fan_offset = 1
for fan in range(self.fan_count):
setattr(
data, f"fan_{fan + 1}", temp[1].get(f"fan{fan_offset+fan}")
)
board_map = {0: "left_board", 1: "center_board", 2: "right_board"}
env_temp_list = []
for item in range(3):
board_temp = temp[1].get(f"temp{item + board_offset}")
chip_temp = temp[1].get(f"temp2_{item + board_offset}")
setattr(data, f"{board_map[item]}_chip_temp", chip_temp)
setattr(data, f"{board_map[item]}_temp", board_temp)
if f"temp_pcb{item}" in temp[1].keys():
env_temp = temp[1][f"temp_pcb{item}"].split("-")[0]
if not int(env_temp) == 0:
env_temp_list.append(int(env_temp))
# guard against dividing by zero when no PCB temps were reported
if env_temp_list:
data.env_temp = sum(env_temp_list) / len(env_temp_list)
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
pool_1 = pool_1.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_1_url = pool_1
if pool_1_user:
data.pool_1_user = pool_1_user
if pool_2:
pool_2 = pool_2.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_2_url = pool_2
if pool_2_user:
data.pool_2_user = pool_2_user
if quota:
data.pool_split = str(quota)
return data
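A minimal usage sketch (not part of the diff): driving the BMMiner handler directly with a made-up address. In pyasic these base handlers are normally subclassed per model, but the methods above only need the IP plus working API/SSH access; the import path is taken from the cross-references elsewhere in this diff.

import asyncio
from pyasic.miners._backends import BMMiner

async def main() -> None:
    miner = BMMiner("192.168.1.20")          # hypothetical address
    print(await miner.get_model())           # e.g. "S9", or None on failure
    for pool in (await miner.get_config()) or []:
        print(pool["url"], pool["user"])
    # await miner.reboot()                   # uncomment to actually reboot

asyncio.run(main())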

View File

@@ -0,0 +1,435 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ipaddress
import logging
import json
from typing import Union
import toml
from pyasic.miners import BaseMiner
from pyasic.API.bosminer import BOSMinerAPI
from pyasic.API import APIError
from pyasic.data.error_codes import BraiinsOSError
from pyasic.data import MinerData
from pyasic.config import MinerConfig
from pyasic.settings import PyasicSettings
class BOSMiner(BaseMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.ip = ipaddress.ip_address(ip)
self.api = BOSMinerAPI(ip)
self.api_type = "BOSMiner"
self.uname = "root"
self.pwd = "admin"
self.config = None
async def send_ssh_command(self, cmd: str) -> Union[str, None]:
"""Send a command to the miner over ssh.
Returns:
Result of the command or None.
"""
result = None
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# if the final attempt fails, return None
if i == 2:
return None
continue
# return the result, either command output or None
return str(result)
async def fault_light_on(self) -> bool:
"""Sends command to turn on fault light on the miner."""
logging.debug(f"{self}: Sending fault_light on command.")
self.light = True
_ret = await self.send_ssh_command("miner fault_light on")
logging.debug(f"{self}: fault_light on command completed.")
if isinstance(_ret, str):
return True
return False
async def fault_light_off(self) -> bool:
"""Sends command to turn off fault light on the miner."""
logging.debug(f"{self}: Sending fault_light off command.")
self.light = False
_ret = await self.send_ssh_command("miner fault_light off")
logging.debug(f"{self}: fault_light off command completed.")
if isinstance(_ret, str):
return True
return False
async def restart_backend(self) -> bool:
"""Restart bosminer hashing process. Wraps [`restart_bosminer`][pyasic.miners._backends.bosminer.BOSMiner.restart_bosminer] to standardize."""
return await self.restart_bosminer()
async def restart_bosminer(self) -> bool:
"""Restart bosminer hashing process."""
logging.debug(f"{self}: Sending bosminer restart command.")
_ret = await self.send_ssh_command("/etc/init.d/bosminer restart")
logging.debug(f"{self}: bosminer restart command completed.")
if isinstance(_ret, str):
return True
return False
async def reboot(self) -> bool:
"""Reboots power to the physical miner."""
logging.debug(f"{self}: Sending reboot command.")
_ret = await self.send_ssh_command("/sbin/reboot")
logging.debug(f"{self}: Reboot command completed.")
if isinstance(_ret, str):
return True
return False
async def get_config(self) -> MinerConfig:
"""Gets the config for the miner and sets it as `self.config`.
Returns:
The config from `self.config`.
"""
logging.debug(f"{self}: Getting config.")
async with (await self._get_ssh_connection()) as conn:
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
logging.debug(f"{self}: Reading config file.")
async with sftp.open("/etc/bosminer.toml") as file:
toml_data = toml.loads(await file.read())
logging.debug(f"{self}: Converting config file.")
cfg = MinerConfig().from_raw(toml_data)
self.config = cfg
return self.config
async def get_hostname(self) -> str:
"""Get miner hostname.
Returns:
The hostname of the miner as a string or "?"
"""
if self.hostname:
return self.hostname
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
logging.debug(f"Found hostname for {self.ip}: {host}")
self.hostname = host
return self.hostname
else:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
except Exception:
logging.warning(f"Failed to get hostname for miner: {self}")
return "?"
async def get_model(self) -> Union[str, None]:
"""Get miner model.
Returns:
Miner model or None.
"""
# check if model is cached
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
# get devdetails data
try:
version_data = await self.api.devdetails()
except APIError as e:
version_data = None
if e.message == "Not ready":
cfg = json.loads(await self.send_ssh_command("bosminer config --data"))
model = cfg.get("data").get("format").get("model")
if model:
model = model.replace("Antminer ", "")
self.model = model
return self.model + " (BOS)"
# if we get data back, parse it for model
if version_data:
if not version_data["DEVDETAILS"] == []:
# handle Antminer BOSMiner as a base
self.model = version_data["DEVDETAILS"][0]["Model"].replace(
"Antminer ", ""
)
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
# if we don't get devdetails, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_version(self) -> Union[str, None]:
"""Get miner firmware version.
Returns:
Miner firmware version or None.
"""
# check if version is cached
if self.version:
logging.debug(f"Found version for {self.ip}: {self.version}")
return self.version
# get output of bos version file
version_data = await self.send_ssh_command("cat /etc/bos_version")
# if we get the version data, parse it
if version_data:
self.version = version_data.split("-")[5]
logging.debug(f"Found version for {self.ip}: {self.version}")
return self.version
# if we fail to get version, log a failed attempt
logging.warning(f"Failed to get model for miner: {self}")
return None
async def send_config(self, yaml_config, ip_user: bool = False) -> None:
"""Configures miner with yaml config."""
logging.debug(f"{self}: Sending config.")
if ip_user:
suffix = str(self.ip).split(".")[-1]
toml_conf = (
MinerConfig()
.from_yaml(yaml_config)
.as_bos(model=self.model.replace(" (BOS)", ""), user_suffix=suffix)
)
else:
toml_conf = (
MinerConfig()
.from_yaml(yaml_config)
.as_bos(model=self.model.replace(" (BOS)", ""))
)
async with (await self._get_ssh_connection()) as conn:
await conn.run("/etc/init.d/bosminer stop")
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
logging.debug(f"{self}: Opening config file.")
async with sftp.open("/etc/bosminer.toml", "w+") as file:
await file.write(toml_conf)
logging.debug(f"{self}: Restarting BOSMiner")
await conn.run("/etc/init.d/bosminer start")
async def get_data(self) -> MinerData:
"""Get data from the miner.
Returns:
A [`MinerData`][pyasic.data.MinerData] instance containing the miner's data.
"""
data = MinerData(ip=str(self.ip), ideal_chips=self.nominal_chips * 3)
board_offset = -1
fan_offset = -1
model = await self.get_model()
hostname = await self.get_hostname()
mac = await self.get_mac()
if model:
data.model = model
if hostname:
data.hostname = hostname
if mac:
data.mac = mac
data.fault_light = await self.check_light()
miner_data = None
for i in range(PyasicSettings().miner_get_data_retries):
try:
miner_data = await self.api.multicommand(
"summary",
"temps",
"tunerstatus",
"pools",
"devdetails",
"fans",
"devs",
)
except APIError as e:
if str(e.message) == "Not ready":
miner_data = await self.api.multicommand(
"summary", "tunerstatus", "pools", "devs"
)
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")
temps = miner_data.get("temps")
tunerstatus = miner_data.get("tunerstatus")
pools = miner_data.get("pools")
devdetails = miner_data.get("devdetails")
devs = miner_data.get("devs")
fans = miner_data.get("fans")
if summary:
hr = summary[0].get("SUMMARY")
if hr:
if len(hr) > 0:
hr = hr[0].get("MHS 1m")
if hr:
data.hashrate = round(hr / 1000000, 2)
if temps:
temp = temps[0].get("TEMPS")
if temp:
if len(temp) > 0:
board_map = {0: "left_board", 1: "center_board", 2: "right_board"}
offset = 6 if temp[0]["ID"] in [6, 7, 8] else temp[0]["ID"]
for board in temp:
_id = board["ID"] - offset
chip_temp = round(board["Chip"])
board_temp = round(board["Board"])
setattr(data, f"{board_map[_id]}_chip_temp", chip_temp)
setattr(data, f"{board_map[_id]}_temp", board_temp)
if fans:
fan_data = fans[0].get("FANS")
if fan_data:
for fan in range(self.fan_count):
setattr(data, f"fan_{fan+1}", fan_data[fan]["RPM"])
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools[0].get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
pool_1 = pool_1.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_1_url = pool_1
if pool_1_user:
data.pool_1_user = pool_1_user
if pool_2:
pool_2 = pool_2.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_2_url = pool_2
if pool_2_user:
data.pool_2_user = pool_2_user
if quota:
data.pool_split = str(quota)
if tunerstatus:
tuner = tunerstatus[0].get("TUNERSTATUS")
if tuner:
if len(tuner) > 0:
wattage = tuner[0].get("ApproximateMinerPowerConsumption")
wattage_limit = tuner[0].get("PowerLimit")
if wattage_limit:
data.wattage_limit = wattage_limit
if wattage:
data.wattage = wattage
chain_status = tuner[0].get("TunerChainStatus")
if chain_status and len(chain_status) > 0:
board_map = {
0: "Left board",
1: "Center board",
2: "Right board",
}
offset = (
6
if chain_status[0]["HashchainIndex"] in [6, 7, 8]
else chain_status[0]["HashchainIndex"]
)
for board in chain_status:
_id = board["HashchainIndex"] - offset
if board["Status"] not in [
"Stable",
"Testing performance profile",
]:
_error = board["Status"]
_error = _error[0].lower() + _error[1:]
data.errors.append(
BraiinsOSError(f"{board_map[_id]} {_error}")
)
if devdetails:
boards = devdetails[0].get("DEVDETAILS")
if boards:
if len(boards) > 0:
board_map = {0: "left_chips", 1: "center_chips", 2: "right_chips"}
offset = 6 if boards[0]["ID"] in [6, 7, 8] else boards[0]["ID"]
for board in boards:
_id = board["ID"] - offset
chips = board["Chips"]
setattr(data, board_map[_id], chips)
if devs:
boards = devs[0].get("DEVS")
if boards:
if len(boards) > 0:
board_map = {
0: "left_board_hashrate",
1: "center_board_hashrate",
2: "right_board_hashrate",
}
offset = 6 if boards[0]["ID"] in [6, 7, 8] else boards[0]["ID"]
for board in boards:
_id = board["ID"] - offset
hashrate = round(board["MHS 1m"] / 1000000, 2)
setattr(data, board_map[_id], hashrate)
return data
async def get_mac(self):
result = await self.send_ssh_command("cat /sys/class/net/eth0/address")
return result.upper().strip()
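A hedged sketch of the control surface above: flash the fault light to locate a machine on the rack, then restart bosminer. The address and sleep are illustrative; the calls mirror the method signatures in this file.

import asyncio
from pyasic.miners._backends import BOSMiner

async def locate_and_restart(ip: str) -> None:
    miner = BOSMiner(ip)
    if await miner.fault_light_on():
        await asyncio.sleep(30)              # give a tech time to find it
        await miner.fault_light_off()
    await miner.restart_backend()            # wraps restart_bosminer()

asyncio.run(locate_and_restart("10.0.1.17"))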

View File

@@ -0,0 +1,63 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import ipaddress
from typing import Union
from pyasic.API.bosminer import BOSMinerAPI
from pyasic.miners import BaseMiner
class BOSMinerOld(BaseMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.ip = ipaddress.ip_address(ip)
self.api = BOSMinerAPI(ip)
self.api_type = "BOSMiner"
self.uname = "root"
self.pwd = "admin"
async def send_ssh_command(self, cmd: str) -> Union[str, None]:
"""Send a command to the miner over ssh.
Returns:
Result of the command or None.
"""
result = None
# open an ssh connection
async with (await self._get_ssh_connection()) as conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
if result.stdout:
result = result.stdout
except Exception as e:
if str(e) == "SSH connection closed":
return "Update completed."
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# if the final attempt fails, return None
if i == 2:
return None
continue
# return the result, either command output or None
return str(result)
async def update_to_plus(self):
result = await self.send_ssh_command("opkg update && opkg install bos_plus")
return result
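Sketch only: BOSMinerOld exists to migrate legacy BraiinsOS installs to BOS+. The module path in the import below is an assumption (this hunk does not show the file name); update_to_plus() simply runs the opkg commands above over SSH.

import asyncio
from pyasic.miners._backends.bosminer_old import BOSMinerOld  # module path assumed

async def upgrade(ip: str) -> None:
    print(await BOSMinerOld(ip).update_to_plus())

asyncio.run(upgrade("10.0.1.18"))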

View File

@@ -0,0 +1,273 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ipaddress
import logging
from typing import Union
from pyasic.API.btminer import BTMinerAPI
from pyasic.miners import BaseMiner
from pyasic.API import APIError
from pyasic.data import MinerData
from pyasic.data.error_codes import WhatsminerError
from pyasic.settings import PyasicSettings
class BTMiner(BaseMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.ip = ipaddress.ip_address(ip)
self.api = BTMinerAPI(ip)
self.api_type = "BTMiner"
async def get_model(self) -> Union[str, None]:
"""Get miner model.
Returns:
Miner model or None.
"""
if self.model:
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
version_data = await self.api.devdetails()
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].split("V")[0]
logging.debug(f"Found model for {self.ip}: {self.model}")
return self.model
logging.warning(f"Failed to get model for miner: {self}")
return None
async def get_hostname(self) -> Union[str, None]:
"""Get miner hostname.
Returns:
The hostname of the miner as a string or None.
"""
if self.hostname:
return self.hostname
try:
host_data = await self.api.get_miner_info()
if host_data:
host = host_data["Msg"]["hostname"]
logging.debug(f"Found hostname for {self.ip}: {host}")
self.hostname = host
return self.hostname
except APIError:
logging.info(f"Failed to get hostname for miner: {self}")
return None
except Exception:
logging.warning(f"Failed to get hostname for miner: {self}")
return None
async def get_mac(self) -> str:
"""Get the mac address of the miner.
Returns:
The mac address of the miner as a string.
"""
mac = ""
data = await self.api.summary()
if data:
if data.get("SUMMARY"):
if len(data["SUMMARY"]) > 0:
_mac = data["SUMMARY"][0].get("MAC")
if _mac:
mac = _mac
if mac == "":
try:
data = await self.api.get_miner_info()
if data:
if "Msg" in data.keys():
if "mac" in data["Msg"].keys():
mac = data["Msg"]["mac"]
except APIError:
pass
return str(mac).upper()
async def get_data(self) -> MinerData:
"""Get data from the miner.
Returns:
A [`MinerData`][pyasic.data.MinerData] instance containing the miner's data.
"""
data = MinerData(ip=str(self.ip), ideal_chips=self.nominal_chips * 3)
mac = None
try:
model = await self.get_model()
except APIError:
logging.info(f"Failed to get model: {self}")
model = None
data.model = "Whatsminer"
try:
hostname = await self.get_hostname()
except APIError:
logging.info(f"Failed to get hostname: {self}")
hostname = None
data.hostname = "Whatsminer"
if model:
data.model = model
if hostname:
data.hostname = hostname
data.fault_light = await self.check_light()
miner_data = None
for i in range(PyasicSettings().miner_get_data_retries):
try:
miner_data = await self.api.multicommand("summary", "devs", "pools")
if miner_data:
break
except APIError:
pass
if not miner_data:
return data
summary = miner_data.get("summary")[0]
devs = miner_data.get("devs")[0]
pools = miner_data.get("pools")[0]
if summary:
summary_data = summary.get("SUMMARY")
if summary_data:
if len(summary_data) > 0:
wattage_limit = None
if summary_data[0].get("MAC"):
mac = summary_data[0]["MAC"]
if summary_data[0].get("Env Temp"):
data.env_temp = summary_data[0]["Env Temp"]
if summary_data[0].get("Power Limit"):
wattage_limit = summary_data[0]["Power Limit"]
data.fan_1 = summary_data[0]["Fan Speed In"]
data.fan_2 = summary_data[0]["Fan Speed Out"]
hr = summary_data[0].get("MHS 1m")
if hr:
data.hashrate = round(hr / 1000000, 2)
wattage = summary_data[0].get("Power")
if wattage:
data.wattage = round(wattage)
if not wattage_limit:
wattage_limit = round(wattage)
data.wattage_limit = wattage_limit
if summary_data[0].get("Error Code Count"):
for i in range(summary_data[0]["Error Code Count"]):
if summary_data[0].get(f"Error Code {i}"):
data.errors.append(
WhatsminerError(
error_code=summary_data[0][f"Error Code {i}"]
)
)
if devs:
temp_data = devs.get("DEVS")
if temp_data:
board_map = {0: "left_board", 1: "center_board", 2: "right_board"}
for board in temp_data:
_id = board["ASC"]
chip_temp = round(board["Chip Temp Avg"])
board_temp = round(board["Temperature"])
hashrate = round(board["MHS 1m"] / 1000000, 2)
setattr(data, f"{board_map[_id]}_chip_temp", chip_temp)
setattr(data, f"{board_map[_id]}_temp", board_temp)
setattr(data, f"{board_map[_id]}_hashrate", hashrate)
if devs:
boards = devs.get("DEVS")
if boards:
if len(boards) > 0:
board_map = {0: "left_chips", 1: "center_chips", 2: "right_chips"}
if "ID" in boards[0].keys():
id_key = "ID"
else:
id_key = "ASC"
offset = boards[0][id_key]
for board in boards:
_id = board[id_key] - offset
chips = board["Effective Chips"]
setattr(data, board_map[_id], chips)
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
pool_1_quota = pool["Quota"]
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
pool_2_quota = pool["Quota"]
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
pool_1 = pool_1.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_1_url = pool_1
if pool_1_user:
data.pool_1_user = pool_1_user
if pool_2:
pool_2 = pool_2.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_2_url = pool_2
if pool_2_user:
data.pool_2_user = pool_2_user
if quota:
data.pool_split = str(quota)
if not mac:
try:
mac = await self.get_mac()
except APIError:
logging.info(f"Failed to get mac: {self}")
mac = None
if mac:
data.mac = mac
return data
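A minimal sketch with a made-up address: BTMiner reads the MAC from the summary command and falls back to get_miner_info() when the summary has no MAC field, so get_mac() and get_hostname() make cheap liveness checks.

import asyncio
from pyasic.miners._backends import BTMiner

async def main() -> None:
    miner = BTMiner("10.0.2.40")
    print(await miner.get_mac())        # e.g. "C6:11:22:33:44:0A", or "" on failure
    print(await miner.get_hostname())   # None if the API refuses the call

asyncio.run(main())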

View File

@@ -0,0 +1,308 @@
# Copyright 2022 Upstream Data Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ipaddress
import logging
from typing import Union
from pyasic.API.cgminer import CGMinerAPI
from pyasic.miners import BaseMiner
from pyasic.API import APIError
from pyasic.data import MinerData
from pyasic.settings import PyasicSettings
class CGMiner(BaseMiner):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.ip = ipaddress.ip_address(ip)
self.api = CGMinerAPI(ip)
self.api_type = "CGMiner"
self.uname = "root"
self.pwd = "admin"
self.config = None
async def get_model(self) -> Union[str, None]:
"""Get miner model.
Returns:
Miner model or None.
"""
if self.model:
return self.model
try:
version_data = await self.api.devdetails()
except APIError:
return None
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
return self.model
return None
async def get_hostname(self) -> Union[str, None]:
"""Get miner hostname.
Returns:
The hostname of the miner as a string or "?"
"""
if self.hostname:
return self.hostname
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
self.hostname = host
return self.hostname
else:
return None
except Exception:
return None
async def send_ssh_command(self, cmd: str) -> Union[str, None]:
"""Send a command to the miner over ssh.
Parameters:
cmd: The command to run.
Returns:
Result of the command or None.
"""
result = None
async with (await self._get_ssh_connection()) as conn:
for i in range(3):
try:
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
print(f"{cmd} error: {e}")
if i == 3:
return
continue
return result
async def restart_backend(self) -> bool:
"""Restart cgminer hashing process. Wraps [`restart_cgminer`][pyasic.miners._backends.cgminer.CGMiner.restart_cgminer] to standardize."""
return await self.restart_cgminer()
async def restart_cgminer(self) -> bool:
"""Restart cgminer hashing process."""
commands = ["cgminer-api restart", "/usr/bin/cgminer-monitor >/dev/null 2>&1"]
commands = ";".join(commands)
_ret = await self.send_ssh_command(commands)
if isinstance(_ret, str):
return True
return False
async def reboot(self) -> bool:
"""Reboots power to the physical miner."""
logging.debug(f"{self}: Sending reboot command.")
_ret = await self.send_ssh_command("reboot")
logging.debug(f"{self}: Reboot command completed.")
if isinstance(_ret, str):
return True
return False
async def start_cgminer(self) -> None:
"""Start cgminer hashing process."""
commands = [
"mkdir -p /etc/tmp/",
'echo "*/3 * * * * /usr/bin/cgminer-monitor" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"/usr/bin/cgminer-monitor >/dev/null 2>&1",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def stop_cgminer(self) -> None:
"""Restart cgminer hashing process."""
commands = [
"mkdir -p /etc/tmp/",
'echo "" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"killall cgminer",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def get_config(self) -> str:
"""Gets the config for the miner and sets it as `self.config`.
Returns:
The config from `self.config`.
"""
async with (await self._get_ssh_connection()) as conn:
command = "cat /etc/config/cgminer"
result = await conn.run(command, check=True)
self.config = result.stdout
return self.config
async def get_data(self) -> MinerData:
"""Get data from the miner.
Returns:
A [`MinerData`][pyasic.data.MinerData] instance containing the miner's data.
"""
data = MinerData(ip=str(self.ip), ideal_chips=self.nominal_chips * 3)
board_offset = -1
fan_offset = -1
model = await self.get_model()
hostname = await self.get_hostname()
mac = await self.get_mac()
if model:
data.model = model
if hostname:
data.hostname = hostname
if mac:
data.mac = mac
data.fault_light = await self.check_light()
miner_data = None
for i in range(PyasicSettings().miner_get_data_retries):
miner_data = await self.api.multicommand(
"summary", "pools", "stats", ignore_x19_error=True
)
if miner_data:
break
if not miner_data:
return data
summary = miner_data.get("summary")[0]
pools = miner_data.get("pools")[0]
stats = miner_data.get("stats")[0]
if summary:
hr = summary.get("SUMMARY")
if hr:
if len(hr) > 0:
hr = hr[0].get("GHS av")
if hr:
data.hashrate = round(hr / 1000, 2)
if stats:
boards = stats.get("STATS")
if boards:
if len(boards) > 0:
for board_num in range(1, 16, 5):
for _b_num in range(5):
b = boards[1].get(f"chain_acn{board_num + _b_num}")
if b and not b == 0 and board_offset == -1:
board_offset = board_num
if board_offset == -1:
board_offset = 1
data.left_chips = boards[1].get(f"chain_acn{board_offset}")
data.center_chips = boards[1].get(f"chain_acn{board_offset+1}")
data.right_chips = boards[1].get(f"chain_acn{board_offset+2}")
data.left_board_hashrate = round(
float(boards[1].get(f"chain_rate{board_offset}")) / 1000, 2
)
data.center_board_hashrate = round(
float(boards[1].get(f"chain_rate{board_offset+1}")) / 1000, 2
)
data.right_board_hashrate = round(
float(boards[1].get(f"chain_rate{board_offset+2}")) / 1000, 2
)
if stats:
temp = stats.get("STATS")
if temp:
if len(temp) > 1:
for fan_num in range(1, 8, 4):
for _f_num in range(4):
f = temp[1].get(f"fan{fan_num + _f_num}")
if f and not f == 0 and fan_offset == -1:
fan_offset = fan_num
if fan_offset == -1:
fan_offset = 1
for fan in range(self.fan_count):
setattr(
data, f"fan_{fan + 1}", temp[1].get(f"fan{fan_offset+fan}")
)
board_map = {0: "left_board", 1: "center_board", 2: "right_board"}
env_temp_list = []
for item in range(3):
board_temp = temp[1].get(f"temp{item + board_offset}")
chip_temp = temp[1].get(f"temp2_{item + board_offset}")
setattr(data, f"{board_map[item]}_chip_temp", chip_temp)
setattr(data, f"{board_map[item]}_temp", board_temp)
if f"temp_pcb{item}" in temp[1].keys():
env_temp = temp[1][f"temp_pcb{item}"].split("-")[0]
if not int(env_temp) == 0:
env_temp_list.append(int(env_temp))
# guard against dividing by zero when no PCB temps were reported
if env_temp_list:
data.env_temp = sum(env_temp_list) / len(env_temp_list)
if pools:
pool_1 = None
pool_2 = None
pool_1_user = None
pool_2_user = None
pool_1_quota = 1
pool_2_quota = 1
quota = 0
for pool in pools.get("POOLS"):
if not pool_1_user:
pool_1_user = pool.get("User")
pool_1 = pool["URL"]
if pool.get("Quota"):
pool_2_quota = pool.get("Quota")
elif not pool_2_user:
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
if pool.get("Quota"):
pool_2_quota = pool.get("Quota")
if not pool.get("User") == pool_1_user:
if not pool_2_user == pool.get("User"):
pool_2_user = pool.get("User")
pool_2 = pool["URL"]
if pool.get("Quota"):
pool_2_quota = pool.get("Quota")
if pool_2_user and not pool_2_user == pool_1_user:
quota = f"{pool_1_quota}/{pool_2_quota}"
if pool_1:
pool_1 = pool_1.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_1_url = pool_1
if pool_1_user:
data.pool_1_user = pool_1_user
if pool_2:
pool_2 = pool_2.replace("stratum+tcp://", "").replace(
"stratum2+tcp://", ""
)
data.pool_2_url = pool_2
if pool_2_user:
data.pool_2_user = pool_2_user
if quota:
data.pool_split = str(quota)
return data
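Sketch of the maintenance helpers above, again with an illustrative address: stop the hashing process, read back /etc/config/cgminer, then bring cgminer back up via its cron monitor.

import asyncio
from pyasic.miners._backends import CGMiner

async def maintenance(ip: str) -> None:
    miner = CGMiner(ip)
    await miner.stop_cgminer()
    print(await miner.get_config())      # raw contents of /etc/config/cgminer
    await miner.start_cgminer()

asyncio.run(maintenance("10.0.3.9"))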

Some files were not shown because too many files have changed in this diff.