Compare commits

..

300 Commits

Author SHA1 Message Date
b-rowan
4fd2199435 version: bump version number. 2024-01-24 18:39:50 -07:00
b-rowan
3226d47846 Merge branch 'dev_fluxos' 2024-01-24 18:39:12 -07:00
b-rowan
6c1931fe7e bug: fix some naming issues with auradine, and add chip count for AT1500. 2024-01-24 18:37:29 -07:00
b-rowan
1dd87ac102 feature: add expected chips for M50S++VK10 2024-01-24 18:32:50 -07:00
b-rowan
95d1e40b4f bug: fix auradine fan config parsing. 2024-01-24 18:28:10 -07:00
b-rowan
31682b7fae bug: fix auradine fan data and config parsing. 2024-01-24 18:22:28 -07:00
b-rowan
e6523fc7d5 bug: fix auradine wattage data. 2024-01-24 18:18:11 -07:00
b-rowan
91de12467b bug: add multicommand flag to auradine multicommand output. 2024-01-24 18:08:22 -07:00
b-rowan
d81e3e9f88 bug: fix auradine multicommand format for get_data. 2024-01-24 18:05:32 -07:00
b-rowan
49fc0f3c54 bug: fix auradine hashboards. 2024-01-24 17:55:47 -07:00
b-rowan
4b36044e56 bug: fix auradine web api token format. 2024-01-24 17:45:42 -07:00
b-rowan
90fb67f586 bug: fix auradine web api token. 2024-01-24 17:38:49 -07:00
b-rowan
edf31ae7df bug: fix auradine identification. 2024-01-24 17:33:41 -07:00
b-rowan
af354fd8e2 feature: add auradine to web selection options. 2024-01-24 17:17:03 -07:00
UpstreamData
6a2a3e836d bug: fix auradine selection. 2024-01-24 16:23:25 -07:00
b-rowan
41709e4706 feature: add auradine data functions. 2024-01-23 16:15:34 -07:00
b-rowan
b60c7a55d4 feature: add auradine control functions. 2024-01-23 15:28:37 -07:00
b-rowan
eed1973345 feature: add auradine models. 2024-01-23 14:23:57 -07:00
b-rowan
64774d2017 feature: add basic auradine miner framework. 2024-01-23 14:06:54 -07:00
b-rowan
e9751d6cd1 version: bump version number. 2024-01-22 19:54:52 -07:00
b-rowan
e2b0a76e67 bug: fix unneeded error handling when getting hostname fails. 2024-01-22 19:40:07 -07:00
b-rowan
1c5c39fa97 version: bump version number. 2024-01-22 18:43:34 -07:00
b-rowan
27c48764a8 refactor: remove miner factory cache. 2024-01-22 18:41:19 -07:00
UpstreamData
5e01f7517b version: bump version number. 2024-01-22 13:39:52 -07:00
b-rowan
569f659fac Merge pull request #96 from fdeh75/fix-vnish-wattage-dimension
Fix vnish wattage dimension
2024-01-22 13:38:27 -07:00
fdeh
dd9c6f1f63 Fix vnish wattage dimension
Update backend vnish.py
2024-01-22 23:17:27 +03:00
b-rowan
0958f47cfe version: bump version number. 2024-01-21 14:39:35 -07:00
b-rowan
3820b40f44 bug: Fix DataLocations defaulting to all get_config. 2024-01-21 14:39:01 -07:00
b-rowan
cce1917c00 version: bump version number. 2024-01-21 12:28:44 -07:00
b-rowan
2ee19f47e7 bug: fix failing configuration on BOSminer. 2024-01-21 12:28:11 -07:00
b-rowan
ff526a3273 version: bump version number. 2024-01-21 10:39:48 -07:00
b-rowan
7811245ec9 Merge pull request #95 from UpstreamData/dev_quality
Improve overall code quality, move ssh to `miner.ssh`, remove `pwd` for miners.
2024-01-21 10:39:01 -07:00
b-rowan
cbab76847a refactor: remove BBB check for BOSMiner. 2024-01-21 10:15:44 -07:00
upstreamdata
ce981d1787 refactor: reformat. 2024-01-18 15:47:52 -07:00
upstreamdata
4b5314a8f6 refactor: move ssh to miner.ssh 2024-01-18 15:32:09 -07:00
UpstreamData
3be3086a38 docs: fix issues with docs. 2024-01-16 15:55:01 -07:00
UpstreamData
a0c76fe24f refactor: remove unused imports. 2024-01-16 15:36:55 -07:00
UpstreamData
acdcfd04cd refactor: remove unneeded lambda and remove pass. 2024-01-16 15:34:06 -07:00
UpstreamData
91a5998b4e refactor: remove unneeded lambda. 2024-01-16 15:31:31 -07:00
UpstreamData
7292af450c refactor: improve RPC handlers. 2024-01-16 15:26:32 -07:00
UpstreamData
307926afbb refactor: use protocol for BaseMiner and update attributes to be part of the class rather than a __init__ method. 2024-01-16 14:47:43 -07:00
UpstreamData
10293ae24a refactor: add default values for data locations to reduce duplication. 2024-01-16 09:31:23 -07:00
UpstreamData
f820372532 refactor: remove some duplicated code in rpc APIs. 2024-01-16 08:57:46 -07:00
UpstreamData
22965ffefa refactor: fix not x is None 2024-01-16 08:40:47 -07:00
UpstreamData
34ca5ba68f refactor: shorten some lines. 2024-01-16 08:39:15 -07:00
UpstreamData
468134e754 refactor: fix some not x in y and not x not in y 2024-01-16 08:36:31 -07:00
UpstreamData
5327b3fe3d refactor: remove unused variables. 2024-01-16 08:16:21 -07:00
UpstreamData
68b85aa7da refactor: remove some useless logging statements, and remove some unused imports. 2024-01-16 08:15:24 -07:00
UpstreamData
b78652b279 refactor: remove some unused variables. 2024-01-15 16:23:54 -07:00
UpstreamData
832a276f4b refactor: remove some unused pass statements. 2024-01-15 16:16:08 -07:00
UpstreamData
2b82b29690 refactor: remove some duplicate classes, and rename UnknownMiner get methods. 2024-01-15 16:03:40 -07:00
UpstreamData
56dd1c80b5 refactor: remove and fix some hardcoded passwords. 2024-01-15 15:53:10 -07:00
UpstreamData
d686cdacc8 refactor: change some if statements to if is not None. 2024-01-15 15:43:31 -07:00
UpstreamData
aab8825997 refactor: rename API to rpc, and classes from {X}API to {X}RPCAPI to clarify naming. 2024-01-15 15:09:51 -07:00
UpstreamData
4ed49c2321 version: bump version number. 2024-01-15 14:59:29 -07:00
UpstreamData
c069468803 bug: fix some bugs with epic, update miner repr, and remove get_model from braiinsOS. 2024-01-15 14:58:54 -07:00
UpstreamData
707cf8b848 version: bump version number. 2024-01-15 14:29:04 -07:00
UpstreamData
170843aae7 bug: add handler for failed get_data calls to make errors more verbose. 2024-01-15 14:28:39 -07:00
UpstreamData
f5acf9ec62 Merge branch 'dev_boser'
# Conflicts:
#	pyasic/miners/antminer/hiveon/X9/T9.py
#	pyasic/miners/backends/bosminer_old.py
#	pyasic/miners/backends/braiins_os.py
#	pyasic/miners/backends/btminer.py
#	pyasic/miners/backends/cgminer_avalon.py
#	pyasic/miners/backends/epic.py
#	pyasic/miners/backends/hiveon.py
#	pyasic/miners/backends/innosilicon.py
#	pyasic/miners/base.py
#	tests/miners_tests/__init__.py
2024-01-15 14:25:02 -07:00
UpstreamData
edaf89c73a refactor: fix some formatting issues and bugs. 2024-01-15 14:18:41 -07:00
UpstreamData
ce34dfdde8 bug: fix fault_light check for boser. 2024-01-15 14:00:51 -07:00
UpstreamData
e45e51ce65 refactor: fix merge. 2024-01-15 13:09:23 -07:00
UpstreamData
f1501718a3 feature: finish get_data functions for bosminer 2024-01-15 10:48:03 -07:00
UpstreamData
831d6ee955 feature: add boser fault light functions. 2024-01-15 10:48:02 -07:00
UpstreamData
7be6596fdd refactor: swap except (KeyError, ValueError) to except LookupError. 2024-01-15 10:48:02 -07:00
b-rowan
928e0dd028 feature: start refactoring BOSer and BOSMiner into separate classes. 2024-01-15 10:48:00 -07:00
UpstreamData
672e753afb bug: add test to cross check function arguments, and fix some method implementations and naming. 2024-01-15 10:47:56 -07:00
UpstreamData
269e6aac14 bug: add more tests and finish renaming methods. 2024-01-15 10:47:40 -07:00
UpstreamData
1a4f3f7dc7 bug: make sure all data locations are accurate. 2024-01-15 10:47:40 -07:00
UpstreamData
b0337e8417 refactor: swap (KeyError, IndexError) for LookupError. 2024-01-15 10:47:40 -07:00
UpstreamData
60f3687d02 refactor: optimize imports. 2024-01-15 10:47:39 -07:00
UpstreamData
a8c45cb95d refactor: remove parameters from get_{x} functions and move them to _get_{x}(**params). Add miner.fw_str, and miner.raw_model. Remove model from get_data exclude. Swap fan_count to expected_fans. 2024-01-15 10:47:39 -07:00
UpstreamData
aa9ba66f8e bug: add test to cross check function arguments, and fix some method implementations and naming. 2024-01-15 10:47:39 -07:00
UpstreamData
06cc84f16d refactor: remove parameters from get_{x} functions and move them to _get_{x}(**params). Add miner.fw_str, and miner.raw_model. Remove model from get_data exclude. Swap fan_count to expected_fans. 2024-01-15 10:47:38 -07:00
fdeh
067d5c98f5 Fix VNish get_hashrate and get_fans errors
Update vnish.py. Fix data locations according to the method arguments
2024-01-15 10:47:38 -07:00
UpstreamData
b4b84c773f refactor: remove bad function. 2024-01-15 10:47:38 -07:00
UpstreamData
cd1768aae9 refactor: swap (KeyError, IndexError) for LookupError. 2024-01-15 10:47:37 -07:00
UpstreamData
2ef85d3868 refactor: optimize imports. 2024-01-15 10:47:36 -07:00
UpstreamData
6f64cc5e0d refactor: remove parameters from get_{x} functions and move them to _get_{x}(**params). Add miner.fw_str, and miner.raw_model. Remove model from get_data exclude. Swap fan_count to expected_fans. 2024-01-15 10:47:33 -07:00
b-rowan
d44907435c Merge pull request #91 from UpstreamData/dev_get_params
Move parameters to private methods for `get_{x}` methods
2024-01-15 10:43:08 -07:00
b-rowan
04ca75d00e Merge branch 'master' into dev_get_params 2024-01-15 10:42:37 -07:00
UpstreamData
b56e94ce8c bug: add more tests and finish renaming methods. 2024-01-15 10:35:15 -07:00
UpstreamData
e7d30aad84 bug: make sure all data locations are accurate. 2024-01-15 10:29:39 -07:00
UpstreamData
194fb539a1 refactor: swap (KeyError, IndexError) for LookupError. 2024-01-15 10:23:58 -07:00
UpstreamData
416ea2964b refactor: optimize imports. 2024-01-15 10:23:57 -07:00
UpstreamData
3234f7e06f refactor: remove parameters from get_{x} functions and move them to _get_{x}(**params). Add miner.fw_str, and miner.raw_model. Remove model from get_data exclude. Swap fan_count to expected_fans. 2024-01-15 10:23:57 -07:00
UpstreamData
8fb357544b bug: add test to cross check function arguments, and fix some method implementations and naming. 2024-01-15 10:23:55 -07:00
UpstreamData
34006941ad bug: add test to cross check function arguments, and fix some method implementations and naming. 2024-01-15 10:16:47 -07:00
UpstreamData
3c3c34c54b Merge branch 'master' into dev_get_params 2024-01-15 08:10:46 -07:00
b-rowan
5a61a87766 docs: update docs. 2024-01-14 12:59:13 -07:00
b-rowan
ef9a026ee8 docs: update docs. 2024-01-14 12:58:11 -07:00
b-rowan
71c85e0603 bug: fix a possible failed authentication when using gRPC. 2024-01-14 12:09:29 -07:00
UpstreamData
c5224b808e refactor: remove parameters from get_{x} functions and move them to _get_{x}(**params). Add miner.fw_str, and miner.raw_model. Remove model from get_data exclude. Swap fan_count to expected_fans. 2024-01-14 10:02:50 -07:00
b-rowan
e4c6d751a1 version: bump version number. 2024-01-14 10:02:38 -07:00
fdeh
ff4dfa124b Fix VNish get_hashrate and get_fans errors
Update vnish.py. Fix data locations according to the method arguments
2024-01-14 10:02:38 -07:00
b-rowan
d0eb5119aa version: bump version number. 2024-01-14 10:00:15 -07:00
fdeh
cfa51623c4 Fix VNish get_hashrate and get_fans errors
Update vnish.py. Fix data locations according to the method arguments
2024-01-14 10:00:15 -07:00
b-rowan
96bb56ebd1 version: bump version number. 2024-01-14 09:59:06 -07:00
b-rowan
cdd7beccbe Merge pull request #92 from fdeh75/fix-vnish-data-gathering
Fix VNish get_hashrate and get_fans errors
2024-01-14 09:58:16 -07:00
fdeh
1a544851df Fix VNish get_hashrate and get_fans errors
Update vnish.py. Fix data locations according to the method arguments
2024-01-14 19:53:47 +03:00
UpstreamData
aa2dc5a53d feature: update some gRPC functions, and add as_boser for some of the MinerConfig values. 2024-01-12 15:06:44 -07:00
UpstreamData
361d6e07cc feature: finish get_data functions for bosminer 2024-01-12 13:29:46 -07:00
UpstreamData
53a018f526 feature: add boser fault light functions. 2024-01-12 11:58:26 -07:00
UpstreamData
6c9a378eee feature: add boser config parsing. 2024-01-12 11:54:17 -07:00
UpstreamData
be67ef3471 refactor: remove bad function. 2024-01-11 15:29:29 -07:00
UpstreamData
a094d28a36 refactor: swap (KeyError, IndexError) for LookupError. 2024-01-11 15:20:33 -07:00
UpstreamData
4156f93c0d refactor: optimize imports. 2024-01-11 15:00:48 -07:00
UpstreamData
ed6eb11653 bug: fix being unable to get fw version as part of multicommand. 2024-01-11 13:57:48 -07:00
snyk-bot
39299f2cfa fix: docs/requirements.txt to reduce vulnerabilities
The following vulnerabilities are fixed by pinning transitive dependencies:
- https://snyk.io/vuln/SNYK-PYTHON-JINJA2-6150717
2024-01-11 11:37:01 -07:00
snyk-bot
c80ca1415a fix: docs/requirements.txt to reduce vulnerabilities
The following vulnerabilities are fixed by pinning transitive dependencies:
- https://snyk.io/vuln/SNYK-PYTHON-JINJA2-6150717
2024-01-11 11:36:05 -07:00
UpstreamData
a8428a2739 refactor: remove parameters from get_{x} functions and move them to _get_{x}(**params). Add miner.fw_str, and miner.raw_model. Remove model from get_data exclude. Swap fan_count to expected_fans. 2024-01-11 11:33:44 -07:00
UpstreamData
895fb1b43e refactor: swap except (KeyError, ValueError) to except LookupError. 2024-01-11 10:20:18 -07:00
UpstreamData
014896ae1b bug: fix data passed by get_version to BOSminer. 2024-01-11 09:53:06 -07:00
snyk-bot
84ac991685 fix: docs/requirements.txt to reduce vulnerabilities
The following vulnerabilities are fixed by pinning transitive dependencies:
- https://snyk.io/vuln/SNYK-PYTHON-JINJA2-6150717
2024-01-11 16:00:03 +00:00
b-rowan
bb481553fa bug: fix missing message in grpc command. 2024-01-10 22:46:58 -07:00
b-rowan
7ab3d8b54e feature: improve data gathering slightly on BOSMiner. 2024-01-10 22:26:28 -07:00
b-rowan
36494f2aca bug: remove boser check in miner_factory, and fix bad syntax on comparison. 2024-01-10 22:15:31 -07:00
b-rowan
bea44a72ea feature: start refactoring BOSer and BOSMiner into separate classes. 2024-01-10 22:12:27 -07:00
b-rowan
9da7b44177 feature: add vnish config parsing. 2024-01-06 11:31:12 -07:00
UpstreamData
e7f05f7a28 version: bump version number. 2024-01-05 16:22:03 -07:00
UpstreamData
2d229be9fd feature: add board serial numbers to whatsminers. 2024-01-05 16:18:03 -07:00
UpstreamData
de5038e57a feature: add AntminerModern serial numbers to Hashboard data. 2024-01-05 15:57:26 -07:00
UpstreamData
8ad1b3f72a refactor: fix formatting issue. 2024-01-05 08:49:44 -07:00
b-rowan
070fb26dbc version: bump version number. 2024-01-04 20:58:44 -07:00
b-rowan
80d9d7df1d bug: fix possible empty command when getting small data points. 2024-01-04 20:58:15 -07:00
UpstreamData
928c24f56f version: bump version number. 2024-01-04 13:07:13 -07:00
UpstreamData
6e7442f90d Update data locations to be typed with dataclasses and enums. (#82)
* feature: swap AntminerModern to new data location style.

* bug: fix a bunch of missed instances of `nominal_` naming.

* feature: add support for S19 Pro Hydro.

* version: bump version number.

* dependencies: bump httpx version

* version: bump version number.

* feature: implement data locations for all remaining miners.

* refactor: remove some unused docstrings.

* feature: swap AntminerModern to new data location style.

* feature: implement data locations for all remaining miners.

* refactor: remove some unused docstrings.

* bug: fix misnamed data locations, and update base miner get_data to use new data locations.

* bug: fix include/exclude implementation on get_data.

* bug: swap ePIC to BaseMiner subclass.

* feature: add DataOptions to __all__

* tests: update data tests with new data locations method.

* bug: remove bad command from bosminer commands.

* dependencies: update dependencies.

* bug: fix some typing issues with python 3.8, and remove useless semaphore and scan threads.

* bug: fix KeyError when pools rpc command returns broken data.
2024-01-04 13:03:45 -07:00
b-rowan
936474ed3b Merge pull request #84 from jpcomps/master 2023-12-23 13:07:47 -07:00
John-Paul Compagnone
2e28060e05 fixes, changes, and formatting 2023-12-23 15:01:42 -05:00
John-Paul Compagnone
07f92557c6 cover chiptune case 2023-12-22 23:35:13 -05:00
John-Paul Compagnone
6f6f5743cf add get_config to ePIC backend 2023-12-22 23:35:13 -05:00
Upstream Data
b89ea1fa92 version: bump version number. 2023-12-22 16:29:03 -07:00
Upstream Data
3588197741 dependencies: bump httpx version 2023-12-22 16:28:46 -07:00
Upstream Data
8adc3d2adf version: bump version number. 2023-12-22 15:47:25 -07:00
Upstream Data
040c0b6842 feature: add support for S19 Pro Hydro. 2023-12-22 15:40:23 -07:00
Upstream Data
550b4a97a1 bug: fix a bunch of missed instances of nominal_ naming. 2023-12-22 15:32:01 -07:00
UpstreamData
d84d95fe5f version: bump version number. 2023-12-21 15:25:57 -07:00
UpstreamData
0e5b811fb9 Add config attribute to data and refactor data naming (#81)
* feature: add config to MinerData.  Remove related attributes.

* feature: rename ideal and nominal to expected to make data naming consistent across files.

* refactor: run isort on all files.

* docs: update docstrings.
2023-12-21 15:20:50 -07:00
UpstreamData
3d31179562 feature: add more BOS+ supported types. 2023-12-19 08:40:31 -07:00
UpstreamData
69f39bef0c docs: update tagline 2023-12-19 08:18:06 -07:00
UpstreamData
1076dab7f5 Update README.md 2023-12-19 08:17:39 -07:00
UpstreamData
3ae1f700c2 docs: update README.md 2023-12-18 14:48:19 -07:00
UpstreamData
dc3f061b9b docs: update shields. 2023-12-18 14:44:43 -07:00
UpstreamData
52758dd8b3 docs: update README. 2023-12-18 14:33:42 -07:00
UpstreamData
0e492f1cfd tests: add more tests for miners. 2023-12-18 14:11:16 -07:00
UpstreamData
659dc55f3c bug: add missing key to epic data locations. 2023-12-18 14:07:46 -07:00
UpstreamData
eb9b29aca1 tests: add tests for config and update tests. 2023-12-18 14:00:40 -07:00
UpstreamData
b045abe76e bug: reorder config information and fix bad key. 2023-12-18 13:59:56 -07:00
Upstream Data
7a75818a20 version: bump version number. 2023-12-17 09:09:00 -07:00
Upstream Data
d2be68d35e bug: fix MinerConfig default values for 3.11+. Add MinerConfig.as_epic default implementation. 2023-12-17 09:08:14 -07:00
Upstream Data
c5c4bb10ee version: bump version number. 2023-12-16 10:59:23 -07:00
Upstream Data
c4dfdda448 Merge branch 'dev_bugs'
# Conflicts:
#	pyasic/miners/miner_factory.py
#	pyasic/miners/types/whatsminer/M6X/M60.py
#	pyasic/miners/types/whatsminer/M6X/M60S.py
#	pyasic/miners/types/whatsminer/M6X/M63.py
#	pyasic/miners/types/whatsminer/M6X/M63S.py
#	pyasic/miners/types/whatsminer/M6X/M66.py
#	pyasic/miners/types/whatsminer/M6X/M66S.py
#	pyasic/miners/types/whatsminer/M6X/__init__.py
#	pyasic/miners/whatsminer/btminer/M6X/M60.py
#	pyasic/miners/whatsminer/btminer/M6X/M60S.py
#	pyasic/miners/whatsminer/btminer/M6X/M66S.py
#	pyasic/miners/whatsminer/btminer/M6X/__init__.py
#	pyasic/miners/whatsminer/btminer/__init__.py
2023-12-16 10:55:27 -07:00
Upstream Data
4459de2260 feature: add support for S19kProNoPIC BOS. Reformat. 2023-12-16 10:54:51 -07:00
UpstreamData
201cfd7ef9 docs: update documentation to be more readable on the main page. 2023-12-13 11:15:03 -07:00
UpstreamData
4201905fdd bug: fix some tasks not being cancelled properly in miner factory. 2023-12-13 10:18:28 -07:00
checksum0
497ffb5bc0 Add all the currently known Whatsminer M6X machines (#77)
* Create new BTMiner M6X backend class to represent Whatsminer new M6X generation

* Add all new known types of Whatsminer M6X

* Ensure all new types are imported in their respective __init__.py

* Create all BTMiner API class for known types of new M6X generation

* Ensure all new BTMiner API class are imported in __init__.py

* Fix erroneous M6X models data

* Ensure M6X miners are imported and add them to their MinerTypes dictionary in miner_factory.py
2023-12-12 19:38:36 -07:00
checksum0
2f762c95db Add all the currently known Whatsminer M6X machines (#77)
* Create new BTMiner M6X backend class to represent Whatsminer new M6X generation

* Add all new known types of Whatsminer M6X

* Ensure all new types are imported in their respective __init__.py

* Create all BTMiner API class for known types of new M6X generation

* Ensure all new BTMiner API class are imported in __init__.py

* Fix erroneous M6X models data

* Ensure M6X miners are imported and add them to their MinerTypes dictionary in miner_factory.py
2023-12-12 19:32:12 -07:00
UpstreamData
67aed79330 bug: fix mode spec in bosminer config. 2023-12-12 13:21:50 -07:00
UpstreamData
073e048726 bug: fix bosminer config missing format information. 2023-12-12 13:11:49 -07:00
UpstreamData
02234f3d1e feature: improve dict merging speed 2023-12-12 09:25:43 -07:00
UpstreamData
dc22df0280 refactor: remove innosilicon pool comment, as it is correct. 2023-12-12 08:54:24 -07:00
UpstreamData
02056b8c88 refactor: remove config prints. 2023-12-11 15:36:02 -07:00
UpstreamData
3a43cd293c bug: Fix improper naming of fan mode. 2023-12-11 15:18:23 -07:00
UpstreamData
6941d9f349 bug: add default case for work mode when there is no work mode returned from bitmain. 2023-12-11 15:08:57 -07:00
UpstreamData
f6b0b64d86 bug: set default quota to 1. 2023-12-11 14:07:17 -07:00
UpstreamData
8d68dd9dac refactor: re-order config keys 2023-12-11 14:06:22 -07:00
UpstreamData
27368a9bd2 bug: fix some issues, and remove unused imports. 2023-12-11 13:48:26 -07:00
UpstreamData
c919b00312 feature: allow config conversion to and from dict. 2023-12-11 13:40:10 -07:00
UpstreamData
f162529883 feature: allow dps conversion for bos grpc. 2023-12-11 11:40:46 -07:00
Upstream Data
bb182bb22d bug: fix some issues with return types and missing return statements. 2023-12-10 20:28:06 -07:00
Upstream Data
af15c4fbd1 bug: pin working betterproto version. 2023-12-10 20:25:27 -07:00
Upstream Data
47c2eb9f0e feature: use betterproto + grpclib. 2023-12-10 20:10:11 -07:00
Upstream Data
1ab39f5873 bug: fix bosminer config parsing. 2023-12-10 17:40:39 -07:00
Upstream Data
43200a7354 feature: Add bosminer.toml parser. 2023-12-10 13:20:03 -07:00
Upstream Data
4fc57832e1 feature: Finish fixing get and send config handlers for miners. 2023-12-10 10:14:57 -07:00
Upstream Data
9ee63cc3ab feature: Update get and send config methods for most miners, and add as_inno. 2023-12-10 10:10:55 -07:00
Upstream Data
b22b506d55 feature: Add whatsminer send_config. 2023-12-10 09:55:05 -07:00
Upstream Data
468fba3465 feature: Add whatsminer set mode commands. 2023-12-10 09:49:24 -07:00
Upstream Data
0399094197 feature: add AM old and goldshell configs. 2023-12-10 09:45:34 -07:00
Upstream Data
bfdfa8a6ab feature: Add AM modern send and get config. 2023-12-10 09:30:31 -07:00
Upstream Data
83d0d09b0d feature: Add whatsminer get_config. 2023-12-09 17:35:47 -07:00
Upstream Data
f892c3a0fd feature: Add from am_modern to config. 2023-12-09 16:59:39 -07:00
Upstream Data
81b974f565 bug: fix bad indentation. 2023-12-09 15:12:36 -07:00
UpstreamData
5eaf876c6d feature: add bos to config miner types. 2023-12-09 13:27:23 -07:00
Upstream Data
d7d1b845a7 feature: add MinerConfig.from_api(). 2023-12-09 13:06:52 -07:00
UpstreamData
242517a36a feature: add inno to config miner types. 2023-12-08 11:03:36 -07:00
UpstreamData
791249bf3d feature: add avalon and goldshell to miner config types. 2023-12-08 10:57:57 -07:00
UpstreamData
5a70a27f07 reformat: remove some useless files. 2023-12-08 10:11:43 -07:00
UpstreamData
bca81f3bca feature: add AM old and modern, and WM config implementation. 2023-12-08 10:10:21 -07:00
UpstreamData
6d75565baf feature: start adding new config implementation. 2023-12-08 09:16:04 -07:00
JP Compagnone
9f42e6a3be add new Antminer models (S19jPro+ and S19k Pro) (#75)
* Add S19jPro+ and S19K Pro

* typo
2023-12-08 08:34:30 -07:00
Upstream Data
362b204c91 version: bump version number. 2023-11-29 20:45:46 -07:00
Upstream Data
952b660c05 bug: re-add missing socket check during scan. 2023-11-29 20:45:25 -07:00
UpstreamData
fbd73881d4 version: bump version number. 2023-11-28 16:42:15 -07:00
JP Compagnone
68c4dadb63 hotfix: fix epic api error handling (#74)
* hotfix: fix epic api error handling

* much cleaner way to handle the retry
2023-11-28 16:39:57 -07:00
UpstreamData
87016670d4 version: bump version number. 2023-11-28 10:50:56 -07:00
JP Compagnone
8701bbe4e2 Feature - Add initial ePIC UMC support (#71)
* prelim support of ePIC UMC

* slowly adding things

* add most api calls

* add some guards

* fix post commands

* remove print

* catch when API returns error

* missing guard

* remove semicolon

* recommended changes

* add docs and changes

* respect ignore_errors
2023-11-28 10:49:49 -07:00
UpstreamData
7d1f125b0b docs: update settings docs to list. 2023-11-27 11:07:43 -07:00
UpstreamData
e433902bd5 docs: update settings docs. 2023-11-27 10:57:13 -07:00
UpstreamData
a653772968 docs: update network docs. 2023-11-27 10:53:40 -07:00
UpstreamData
d8b938cd5b version: bump version number. 2023-11-27 10:39:10 -07:00
UpstreamData
47d76e325a docs: update docs to include new information. 2023-11-27 10:37:38 -07:00
UpstreamData
7ee7868094 feature: change so_linger_time to socket_linger_time. 2023-11-27 10:19:30 -07:00
UpstreamData
3f1183a4f9 feature: add so_linger option to settings. 2023-11-27 09:24:28 -07:00
Upstream Data
2b443497ea bug: improve handling of whatsminers in get_miner. 2023-11-25 12:48:45 -07:00
Upstream Data
c3972f9524 feature: add default ssl ctx to all httpx clients to speed up initialization. 2023-11-25 01:08:04 -07:00
Upstream Data
92bbb21c11 bug: handle OSError as ConnectionError, and handle Vnish Msg bug because of missing id key. 2023-11-25 00:05:03 -07:00
Upstream Data
1980ff2563 bug: do additional checks on refused connection when scanning. 2023-11-24 23:32:22 -07:00
Upstream Data
93d09a1483 bug: handle unhandled error in pings. 2023-11-24 23:28:16 -07:00
Upstream Data
690d0d99df feature: added new setting for api command timeouts. 2023-11-24 23:19:14 -07:00
Upstream Data
78f689eb2c feature: update scan method to use port 80 when possible, and add .scan() method. 2023-11-24 23:13:56 -07:00
Upstream Data
e68f188e8f bug: fix timeout references in MinerFactory and fix MinerNetwork instantiation. 2023-11-24 23:00:06 -07:00
Upstream Data
7eda611fe9 bug: fix scanning size being too large. 2023-11-24 22:50:43 -07:00
Upstream Data
1d12817942 feature: improve network parsing and implementation. 2023-11-24 22:38:43 -07:00
UpstreamData
b24efd4c69 bug: fix miner network not working with lists. 2023-11-24 13:27:09 -07:00
UpstreamData
5533135b05 docs: update supported miners. 2023-11-23 11:54:01 -07:00
UpstreamData
475054fbe0 feature: finish support for most whatsminer subtypes. 2023-11-23 11:52:44 -07:00
Daniel Sokil
06bad1bbe0 Add More Whatsminer Models, and Additional Config For Existing Models (#70) 2023-11-23 11:35:18 -07:00
UpstreamData
f3746ff756 version: bump version number. 2023-11-20 11:19:45 -07:00
UpstreamData
9f16d37c8b feature: hide GRPC and GQL if BOSer is not found. 2023-11-20 11:19:13 -07:00
UpstreamData
8a13c7940a docs: update pyproject.toml description. 2023-11-20 10:33:25 -07:00
UpstreamData
8bea76ff67 feature: add chip count for M30S+VG50. 2023-11-20 10:32:51 -07:00
Upstream Data
1504bd744c version: bump version number. 2023-11-18 22:45:38 -07:00
Upstream Data
6449f10615 feature: implement GPRC set commands properly. 2023-11-18 22:45:09 -07:00
UpstreamData
d79509bda7 version: bump version number. Pin httpx to 0.25.0 min. 2023-11-12 18:36:45 -07:00
UpstreamData
630b847466 version: bump version number. Pin httpx to 0.25.0 min. 2023-11-12 18:35:52 -07:00
Colin Crossman
ed11611919 Bump version number
Note: some issues with HTTPX may be resolved by using 1.0.0b, but I did not bump the requirement at this time to the beta.
2023-11-11 13:59:14 -07:00
Colin Crossman
e2431c938d Address unknown password issue on Whatsminers
When a whatsminer had an unknown password (not the default one), it would result in a timeout error. By moving the password check to before the data pull step, the timeout issue can be caught and addressed efficiently.
2023-11-11 13:52:04 -07:00
Colin Crossman
60f4b4a5ed Address a situation which causes many asyncio errors 2023-11-11 13:49:51 -07:00
UpstreamData
d41097af20 version: bump vbersion number. 2023-11-08 11:13:24 -07:00
UpstreamData
8a5d505731 bug: fix anyio stream error on some linux distros when getting miner. 2023-11-08 11:12:46 -07:00
UpstreamData
36e76c6f41 Add support for the grpc set_performance_mode command. 2023-11-07 08:54:06 -07:00
UpstreamData
717b9421dd Merge branch 'dev_grpc'
# Conflicts:
#	pyasic/web/bosminer.py
#	pyproject.toml
2023-10-30 16:36:06 -06:00
UpstreamData
d2f71e8c94 version: bump version number. 2023-10-30 16:34:05 -06:00
UpstreamData
697991f28f bug: fix some cases where a warning could still be passed when it was unexpected. 2023-10-30 16:33:01 -06:00
UpstreamData
b0e18ab766 feature: implement most of the GRPC functions for BOS, except for some configuration options which will require complex enums. 2023-10-27 16:35:09 -06:00
UpstreamData
e39a6921d0 refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-26 10:28:59 -06:00
UpstreamData
aac1be0565 feature: refactor BOS web class into multiple classes. 2023-10-26 10:28:57 -06:00
UpstreamData
683fcb2138 version: bump version number. 2023-10-26 10:28:56 -06:00
UpstreamData
9fbbef9b18 bug: fix an issue with bosminer not responding correctly on older models with fans. 2023-10-26 10:28:56 -06:00
UpstreamData
6e0b9a0a7b version: bump version number. 2023-10-26 10:28:56 -06:00
UpstreamData
7f472f6f4f bug: fix possible missing value for bitmain work mode when checking is_mining. 2023-10-26 10:28:55 -06:00
UpstreamData
b7d7b33ab9 bug: round hashrate data in MinerData, and remove some unused imports. 2023-10-26 10:28:55 -06:00
UpstreamData
da11c0bb1f version: bump version number. 2023-10-26 10:28:55 -06:00
UpstreamData
eae433d2bd bug: update get_miner to work with latest whatsminer version. 2023-10-26 10:28:54 -06:00
UpstreamData
c16bc37aff refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-26 10:28:53 -06:00
UpstreamData
255b06ac9e version: bump version number. 2023-10-23 13:01:44 -06:00
UpstreamData
29ec619126 bug: fix an issue with bosminer not responding correctly on older models with fans. 2023-10-23 12:59:52 -06:00
UpstreamData
247def04ff version: bump version number. 2023-10-12 13:19:38 -06:00
UpstreamData
4600e7d953 bug: fix possible missing value for bitmain work mode when checking is_mining. 2023-10-12 13:19:11 -06:00
UpstreamData
850c266555 bug: round hashrate data in MinerData, and remove some unused imports. 2023-10-10 13:59:28 -06:00
UpstreamData
ad374fe2fb version: bump version number. 2023-10-05 10:18:10 -06:00
UpstreamData
5ca39b6fe7 bug: update get_miner to work with latest whatsminer version. 2023-10-05 10:17:45 -06:00
UpstreamData
b50dd26e6f feature: refactor BOS web class into multiple classes. 2023-10-03 15:07:39 -06:00
UpstreamData
53eaccaa9b docs: update documentation. 2023-10-03 15:07:39 -06:00
UpstreamData
91f207316a version: bump version number. 2023-10-03 15:07:39 -06:00
UpstreamData
1e37418909 bug: fix some issues with early version of whatsminers, and handle some possible errors with BOS. 2023-10-03 15:07:38 -06:00
UpstreamData
4c09ba5529 version: bump version number. 2023-10-03 15:07:38 -06:00
UpstreamData
7bab4747ad refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-03 15:07:37 -06:00
UpstreamData
fd8cc7378c version: bump version number. 2023-10-03 15:07:33 -06:00
UpstreamData
8aeef4d5e7 feature: add support for M20P, and add chips for M20SV30. 2023-10-03 15:07:33 -06:00
UpstreamData
4bafde9da7 docs: update documentation. 2023-10-03 14:59:25 -06:00
UpstreamData
5a3107aecf version: bump version number. 2023-10-03 11:12:11 -06:00
UpstreamData
7e758720f0 bug: fix some issues with early version of whatsminers, and handle some possible errors with BOS. 2023-10-03 11:11:32 -06:00
UpstreamData
39e3e249f8 version: bump version number. 2023-10-02 13:14:21 -06:00
UpstreamData
118c5b056e refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-02 13:13:31 -06:00
UpstreamData
2c3b5599fe version: bump version number. 2023-10-02 09:20:24 -06:00
UpstreamData
e421eaa324 feature: add support for M20P, and add chips for M20SV30. 2023-10-02 09:20:01 -06:00
UpstreamData
75d6bc6808 version: bump version number. 2023-09-28 15:49:23 -06:00
UpstreamData
98c547e416 bug: fAdd new commands added in whatsminer API 2.0.5. 2023-09-28 15:49:23 -06:00
UpstreamData
45250e36e4 bug: fix whatsminer identification to work with backwards incompatible changes in API 2.0.5. 2023-09-28 15:49:23 -06:00
UpstreamData
fa7544d052 Update README.md 2023-09-28 15:49:22 -06:00
UpstreamData
53f3fc5ee9 version: bump version number. 2023-09-28 15:47:49 -06:00
UpstreamData
1b36de4131 bug: fAdd new commands added in whatsminer API 2.0.5. 2023-09-28 15:47:20 -06:00
UpstreamData
6f0c6f6284 bug: fix whatsminer identification to work with backwards incompatible changes in API 2.0.5. 2023-09-28 15:42:12 -06:00
UpstreamData
b7dda5bf87 Update README.md 2023-09-26 11:50:56 -06:00
UpstreamData
14f33a40c3 feature: add grpc BOS class and add grpc requests to requirements. 2023-09-22 09:44:25 -06:00
UpstreamData
5c904aced0 feature: refactor BOS web class into multiple classes. 2023-09-22 09:32:59 -06:00
UpstreamData
53a3bbf531 version: bump version number. 2023-09-19 13:59:56 -06:00
UpstreamData
50586f1ce7 feature: add S19+. 2023-09-19 13:59:03 -06:00
UpstreamData
9f6235a0fc feature: add S19i. 2023-09-19 13:56:40 -06:00
UpstreamData
4d21f150ce version: bump version number. 2023-09-18 09:35:38 -06:00
UpstreamData
7c0dfc49dd bug: fix wrong fault light setting when setting fault light to off. 2023-09-18 09:35:19 -06:00
UpstreamData
269b13f6c1 version: bump version number. 2023-09-15 08:57:56 -06:00
Elias Kunnas
a9bb7d2e5a Fix btminer pre_power_on (#62) 2023-09-15 08:56:29 -06:00
Upstream Data
11295f27a7 version: bump version number. 2023-09-12 19:21:04 -06:00
Upstream Data
55aa3dd85b bug: handle edge cases where a missed get_config on bosminer can cause an empty config to be applied to a miner. 2023-09-12 19:20:48 -06:00
UpstreamData
20272d4360 version: bump version number. 2023-09-11 13:45:52 -06:00
UpstreamData
623dc92ef2 feature: Add MinerData.as_dict(). 2023-09-11 13:45:23 -06:00
Upstream Data
2d59394b1e version: bump version number. 2023-09-07 19:07:11 -06:00
Upstream Data
26c2095ff1 bug: fix uncaught error in get_hashboards with BMMiner if a key doesnt exist. 2023-09-07 19:06:51 -06:00
Upstream Data
ec7d241caa version: bump version number. 2023-09-05 17:22:23 -06:00
Upstream Data
d0432ed1aa bug: handle for some weird edge cases with boards plugged into the wrong slots on X19. 2023-09-05 17:22:02 -06:00
Upstream Data
8c5503d002 version: bump version number. 2023-08-30 17:47:20 -06:00
Upstream Data
6d6f950c95 bug: add modified changed from [Issue 57](https://github.com/UpstreamData/pyasic/issues/57#issuecomment-1699984187) 2023-08-30 17:46:23 -06:00
UpstreamData
30745e54ba feature: add chip count for M30S+VE50 2023-08-30 11:18:25 -06:00
UpstreamData
c3fd94e79e version: bump version number. 2023-08-28 08:53:59 -06:00
UpstreamData
2924a8d67b feature: add more whatsminer error codes. 2023-08-28 08:53:27 -06:00
UpstreamData
9f4c4bb9cf feature: add exclude to get_data, and change data_to_get to include. 2023-08-28 08:32:29 -06:00
UpstreamData
3d6eebf06e bug: fix a bug with hostname gathering on some Avalons. 2023-08-28 08:31:54 -06:00
277 changed files with 14121 additions and 9712 deletions

README.md

@@ -1,142 +1,249 @@
# pyasic
*A set of modules for interfacing with many common types of ASIC bitcoin miners, using both their API and SSH.*
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![pypi](https://img.shields.io/pypi/v/pyasic.svg)](https://pypi.org/project/pyasic/)
[![python](https://img.shields.io/pypi/pyversions/pyasic.svg)](https://pypi.org/project/pyasic/)
[![Read the Docs](https://img.shields.io/readthedocs/pyasic)](https://pyasic.readthedocs.io/en/latest/)
[![GitHub](https://img.shields.io/github/license/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/blob/master/LICENSE.txt)
[![CodeFactor Grade](https://img.shields.io/codefactor/grade/github/UpstreamData/pyasic)](https://www.codefactor.io/repository/github/upstreamdata/pyasic)
## Documentation and Supported Miners
Documentation is located on Read the Docs as [pyasic](https://pyasic.readthedocs.io/en/latest/).
Supported miners are listed in the docs, [here](https://pyasic.readthedocs.io/en/latest/miners/supported_types/).
## Installation
You can install pyasic directly from pip with the command `pip install pyasic`.
For those of you who aren't comfortable with code and developer tools, there are Windows builds of GUI applications that use this library [here](https://drive.google.com/drive/folders/1DjR8UOS_g0ehfiJcgmrV0FFoqFvE9akW?usp=sharing).
## Developer Setup
It is highly recommended that you contribute to this project through [`pyasic-super`](https://github.com/UpstreamData/pyasic-super) using its submodules. This allows testing in conjunction with other `pyasic`-related programs.
This repo uses poetry for dependencies, which can be installed by following the guide on their website [here](https://python-poetry.org/docs/#installation).
After you have poetry installed, run `poetry install --with dev`, or `poetry install --with dev,docs` if you want to include packages required for documentation.
Finally, initialize pre-commit hooks with `poetry run pre-commit install`.
### Documentation Testing
Testing the documentation can be done by running `poetry run mkdocs serve`, which will serve the documentation locally on port 8000.
*A simplified and standardized interface for Bitcoin ASICs.*
[![PyPI - Version](https://img.shields.io/pypi/v/pyasic.svg)](https://pypi.org/project/pyasic/)
[![PyPI - Downloads](https://img.shields.io/pypi/dm/pyasic)](https://pypi.org/project/pyasic/)
[![Python - Supported Versions](https://img.shields.io/pypi/pyversions/pyasic.svg)](https://pypi.org/project/pyasic/)
[![CodeFactor - Grade](https://img.shields.io/codefactor/grade/github/UpstreamData/pyasic)](https://www.codefactor.io/repository/github/upstreamdata/pyasic)
[![Commit Activity - master](https://img.shields.io/github/commit-activity/y/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/commits/master/)
[![Code Style - Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Read The Docs - Docs](https://img.shields.io/readthedocs/pyasic)](https://pyasic.readthedocs.io/en/latest/)
[![License - Apache 2.0](https://img.shields.io/github/license/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/blob/master/LICENSE.txt)
---
## Intro
Welcome to `pyasic`! `pyasic` uses an asynchronous method of communicating with ASIC miners on your network, which makes it super fast.
<br>
[Click here to view supported miner types](miners/supported_types.md)
---
## Getting started
Getting started with `pyasic` is easy. First, find your miner (or miners) on the network by scanning for them or getting the correct class automatically for them if you know the IP.
## Interfacing with miners programmatically
There are 2 main ways to get a miner (and the functions attached to it): via scanning or via the `MinerFactory()`.
#### Scanning for miners
##### Scanning for miners
To scan for miners in `pyasic`, we use the class `MinerNetwork`, which abstracts the search, communication, identification, setup, and return of a miner to 1 command.
The command `MinerNetwork.scan()` returns a list that contains any miners found.
```python
import asyncio
from pyasic.network import MinerNetwork


# define asynchronous function to scan for miners
async def scan_and_get_data():
    # Define network range to be used for scanning
    # This can take a list of IPs, a constructor string, or an IP and subnet mask
    # The standard mask is /24 (x.x.x.0-255), and you can pass any IP address in the subnet
    net = MinerNetwork("192.168.1.69", mask=24)
    # Scan the network for miners
    # This function returns a list of miners of the correct type as a class
    miners: list = await net.scan_network_for_miners()

    # We can now get data from any of these miners
    # To do them all we have to create a list of tasks and gather them
    tasks = [miner.get_data() for miner in miners]
    # Gather all tasks asynchronously and run them
    data = await asyncio.gather(*tasks)
    # Data is now a list of MinerData, and we can reference any part of that
    # Print out all data for now
    for item in data:
        print(item)


if __name__ == "__main__":
    asyncio.run(scan_and_get_data())
```

```python
import asyncio  # asyncio for handling the async part
from pyasic.network import MinerNetwork  # miner network handles the scanning


async def scan_miners():  # define async scan function to allow awaiting
    # create a miner network
    # you can pass in any IP and it will use that in a subnet with a /24 mask (255 IPs).
    network = MinerNetwork.from_subnet("192.168.1.50/24")  # this uses the 192.168.1.0-255 network
    # scan for miners asynchronously
    # this will return the correct type of miners if they are supported with all functionality.
    miners = await network.scan()
    print(miners)


if __name__ == "__main__":
    asyncio.run(scan_miners())  # run the scan asynchronously with asyncio.run()
```
---
#### Getting a miner if you know the IP
##### Creating miners based on IP
If you already know the IP address of your miner or miners, you can use the `MinerFactory` to communicate and identify the miners, or an abstraction of its functionality, `get_miner()`.
The function `get_miner()` will return any miner it found at the IP address specified, or an `UnknownMiner` if it cannot identify the miner.
```python
import asyncio  # asyncio for handling the async part
from pyasic import get_miner  # handles miner creation


async def get_miners():  # define async scan function to allow awaiting
    # get the miner with the miner factory
    # the miner factory is a singleton, and will always use the same object and cache
    # this means you can always call it as MinerFactory().get_miner(), or just get_miner()
    miner_1 = await get_miner("192.168.1.75")
    miner_2 = await get_miner("192.168.1.76")
    print(miner_1, miner_2)

    # can also gather these, since they are async
    # gathering them will get them both at the same time
    # this makes it much faster to get a lot of miners at a time
    tasks = [get_miner("192.168.1.75"), get_miner("192.168.1.76")]
    miners = await asyncio.gather(*tasks)
    print(miners)


if __name__ == "__main__":
    asyncio.run(get_miners())  # get the miners asynchronously with asyncio.run()
```
---
## Data gathering
Once you have your miner(s) identified, you will likely want to get data from the miner(s). You can do this using a built-in function in each miner called `get_data()`.
This function will return an instance of the dataclass `MinerData` with all data it can gather from the miner.
Each piece of data in a `MinerData` instance can be referenced by getting it as an attribute, such as `MinerData().hashrate`.
##### One miner
```python
import asyncio
from pyasic import get_miner


async def gather_miner_data():
    miner = await get_miner("192.168.1.75")
    if miner is not None:
        miner_data = await miner.get_data()
        print(miner_data)  # all data from the dataclass
        print(miner_data.hashrate)  # hashrate of the miner in TH/s


if __name__ == "__main__":
    asyncio.run(gather_miner_data())
```
---
##### Multiple miners
You can do something similar with multiple miners; you only need to make a small change to get all the data at once.
```python
import asyncio  # asyncio for handling the async part
from pyasic.network import MinerNetwork  # miner network handles the scanning


async def gather_miner_data():  # define async scan function to allow awaiting
    network = MinerNetwork.from_subnet("192.168.1.50/24")
    miners = await network.scan()

    # we need to asyncio.gather() all the miners get_data() functions to make them run together
    all_miner_data = await asyncio.gather(*[miner.get_data() for miner in miners])

    for miner_data in all_miner_data:
        print(miner_data)  # print out all the data one by one


if __name__ == "__main__":
    asyncio.run(gather_miner_data())
```
---
## Miner control
`pyasic` exposes a standard interface for each miner using control functions.
Every miner class in `pyasic` must implement all the control functions defined in `BaseMiner`.
These functions are
`check_light`,
`fault_light_off`,
`fault_light_on`,
`get_config`,
`get_data`,
`get_errors`,
`get_hostname`,
`get_model`,
`reboot`,
`restart_backend`,
`stop_mining`,
`resume_mining`,
`is_mining`,
`send_config`, and
`set_power_limit`.
##### Usage
```python
import asyncio
from pyasic import get_miner


# define asynchronous function to get miner and data
async def get_miner_data(miner_ip: str):
    # Use MinerFactory to get miner
    # MinerFactory is a singleton, so we can just get the instance in place
    miner = await get_miner(miner_ip)

    # Get data from the miner
    data = await miner.get_data()
    print(data)


if __name__ == "__main__":
    asyncio.run(get_miner_data("192.168.1.69"))
```

```python
import asyncio
from pyasic import get_miner


async def set_fault_light():
    miner = await get_miner("192.168.1.20")

    # call control function
    await miner.fault_light_on()


if __name__ == "__main__":
    asyncio.run(set_fault_light())
```
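The same pattern extends to the rest of the control interface. The sketch below strings together a few of the other functions from the list above (`check_light`, `stop_mining`, `resume_mining`, `reboot`); the IP address is only a placeholder, and exact return values depend on the miner's backend.
```python
import asyncio
from pyasic import get_miner


async def control_miner():
    miner = await get_miner("192.168.1.20")  # placeholder IP
    if miner is None:
        return

    # check whether the fault light is currently on
    print(await miner.check_light())

    # pause mining, then start it back up using the standard control interface
    await miner.stop_mining()
    await miner.resume_mining()

    # reboot the miner once we're done
    await miner.reboot()


if __name__ == "__main__":
    asyncio.run(control_miner())
```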
### Advanced data gathering
If needed, this library exposes a wrapper for the miner API that can be used for advanced data gathering.
You can see more information on basic usage of the APIs past this example in the docs [here](https://pyasic.readthedocs.io/en/latest/API/api/).
Please see the appropriate API documentation page (pyasic docs -> Advanced -> Miner APIs -> your API type) for a link to that specific miner's API documentation page and more information.
---
## Helper dataclasses
##### `MinerConfig` and `MinerData`
`pyasic` implements a few dataclasses as helpers to make data return types consistent across different miners and miner APIs. The different fields of these dataclasses can all be viewed with the classmethod `cls.fields()`.
---
##### MinerData
`MinerData` is a return from the [`get_data()`](#get-data) function, and is used to have a consistent dataset across all returns.
You can call `MinerData.as_dict()` to get the dataclass as a dictionary, and there are many other helper functions contained in the class to convert to different data formats.
`MinerData` instances can also be added to each other to combine their data and can be divided by a number to divide all their data, allowing you to get average data from many miners by doing -
```python
from pyasic import MinerData
# examples of miner data
d1 = MinerData("192.168.1.1")
d2 = MinerData("192.168.1.2")
list_of_miner_data = [d1, d2]
average_data = sum(list_of_miner_data, start=MinerData("0.0.0.0"))/len(list_of_miner_data)
```
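Building on the averaging example above, `as_dict()` can then flatten the combined result into a plain dictionary. A minimal sketch:
```python
from pyasic import MinerData

# examples of miner data, as above
d1 = MinerData("192.168.1.1")
d2 = MinerData("192.168.1.2")
list_of_miner_data = [d1, d2]

average_data = sum(list_of_miner_data, start=MinerData("0.0.0.0")) / len(list_of_miner_data)

# convert the averaged result into a plain dictionary, e.g. for JSON serialization or logging
print(average_data.as_dict())
```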
#### List available API commands
```python
import asyncio
from pyasic import get_miner


async def get_api_commands(miner_ip: str):
    # Get the miner
    miner = await get_miner(miner_ip)
    # List all available commands
    # Can also be called explicitly with the function miner.api.get_commands()
    print(miner.api.commands)


if __name__ == "__main__":
    asyncio.run(get_api_commands("192.168.1.69"))
```
---
##### MinerConfig
`MinerConfig` is `pyasic`'s way to represent a configuration file from a miner.
It is designed to unionize the configuration of all supported miner types, and is the return from [`get_config()`](#get-config).
Each miner has a unique way to convert the `MinerConfig` to its specific type; there are helper functions in the class for this.
In most cases these helper functions should not be used, as [`send_config()`](#send-config) takes a `MinerConfig` and will do the conversion to the right type for you.
You can use the `MinerConfig` as follows:
```python
import asyncio
from pyasic import get_miner


async def set_fault_light():
    miner = await get_miner("192.168.1.20")

    # get config
    cfg = await miner.get_config()
    # send config
    await miner.send_config(cfg)


if __name__ == "__main__":
    asyncio.run(set_fault_light())
```
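Because `send_config()` accepts a `MinerConfig` directly, one practical pattern is to read the config from a single reference miner and push it out to a group of others. This is only a sketch, assuming the placeholder IPs all resolve to supported, reachable miners:
```python
import asyncio
from pyasic import get_miner


async def clone_config():
    # read the config from a reference miner (placeholder IP)
    source = await get_miner("192.168.1.20")
    cfg = await source.get_config()

    # identify the target miners concurrently (placeholder IPs)
    target_ips = ["192.168.1.21", "192.168.1.22"]
    targets = await asyncio.gather(*[get_miner(ip) for ip in target_ips])

    # push the same config to every target at once
    await asyncio.gather(*[miner.send_config(cfg) for miner in targets])


if __name__ == "__main__":
    asyncio.run(clone_config())
```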
#### Use miner API commands to gather data
The miner API commands will raise an `APIError` if they fail with a bad status code. To bypass this, you must send them manually by using `miner.api.send_command(command, ignore_errors=True)`.
```python
import asyncio
from pyasic import get_miner


async def get_api_commands(miner_ip: str):
    # Get the miner
    miner = await get_miner(miner_ip)
    # Run the devdetails command
    # This is equivalent to await miner.api.send_command("devdetails")
    devdetails: dict = await miner.api.devdetails()
    print(devdetails)


if __name__ == "__main__":
    asyncio.run(get_api_commands("192.168.1.69"))
```
---
## Settings
`pyasic` has settings designed to make using large groups of miners easier. You can set the default password for all types of miners using the `pyasic.settings` module, used as follows:
```python
from pyasic import settings

settings.update("default_antminer_password", "my_pwd")
```
##### Default values:
```
"network_ping_retries": 1,
"network_ping_timeout": 3,
"network_scan_threads": 300,
"factory_get_retries": 1,
"factory_get_timeout": 3,
"get_data_retries": 1,
"api_function_timeout": 5,
"default_whatsminer_password": "admin",
"default_innosilicon_password": "admin",
"default_antminer_password": "root",
"default_bosminer_password": "root",
"default_vnish_password": "admin",
"default_goldshell_password": "123456789",
# ADVANCED
# Only use this if you know what you are doing
"socket_linger_time": 1000,
```
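Any of these defaults can be overridden at runtime with the same `settings.update()` call shown above; for example (the values here are purely illustrative):
```python
from pyasic import settings

# use a site-specific Whatsminer password instead of the stock default
settings.update("default_whatsminer_password", "my_site_pwd")

# be more patient on a slow or congested network
settings.update("factory_get_timeout", 5)
settings.update("get_data_retries", 2)
```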


@@ -1,27 +0,0 @@
# pyasic
## Miner APIs
Each miner has a unique API that is used to communicate with it.
Each of these API types has commands that differ between them, and some commands have data that others do not.
Each miner that is a subclass of [`BaseMiner`][pyasic.miners.BaseMiner] should have an API linked to it as `Miner.api`.
All API implementations inherit from [`BaseMinerAPI`][pyasic.API.BaseMinerAPI], which implements the basic communications protocols.
[`BaseMinerAPI`][pyasic.API.BaseMinerAPI] should never be used unless inheriting to create a new miner API class for a new type of miner (which should be exceedingly rare).
[`BaseMinerAPI`][pyasic.API.BaseMinerAPI] cannot be instantiated directly; it will raise a `TypeError`.
Use these instead -
#### [BFGMiner API][pyasic.API.bfgminer.BFGMinerAPI]
#### [BMMiner API][pyasic.API.bmminer.BMMinerAPI]
#### [BOSMiner API][pyasic.API.bosminer.BOSMinerAPI]
#### [BTMiner API][pyasic.API.btminer.BTMinerAPI]
#### [CGMiner API][pyasic.API.cgminer.CGMinerAPI]
#### [LUXMiner API][pyasic.API.luxminer.LUXMinerAPI]
#### [Unknown API][pyasic.API.unknown.UnknownAPI]
<br>
## BaseMinerAPI
::: pyasic.API.BaseMinerAPI
    handler: python
    options:
        heading_level: 4


@@ -6,19 +6,3 @@
    options:
        show_root_heading: false
        heading_level: 4
## Pool Groups
::: pyasic.config._PoolGroup
    handler: python
    options:
        show_root_heading: false
        heading_level: 4
## Pools
::: pyasic.config._Pool
    handler: python
    options:
        show_root_heading: false
        heading_level: 4


@@ -1,6 +1,6 @@
# pyasic
## Miner Data
## Miner Data
::: pyasic.data.MinerData
    handler: python
    options:
@@ -13,3 +13,10 @@
    options:
        show_root_heading: false
        heading_level: 4
## Fan Data
::: pyasic.data.Fan
    handler: python
    options:
        show_root_heading: false
        heading_level: 4


@@ -31,6 +31,8 @@ def backend_str(backend: MinerTypes) -> str:
return "Stock Firmware Avalonminers"
case MinerTypes.VNISH:
return "Vnish Firmware Miners"
case MinerTypes.EPIC:
return "ePIC Firmware Miners"
case MinerTypes.BRAIINS_OS:
return "BOS+ Firmware Miners"
case MinerTypes.HIVEON:


@@ -1,25 +1,31 @@
# pyasic
*A set of modules for interfacing with many common types of ASIC bitcoin miners, using both their API and SSH.*
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![pypi](https://img.shields.io/pypi/v/pyasic.svg)](https://pypi.org/project/pyasic/)
[![python](https://img.shields.io/pypi/pyversions/pyasic.svg)](https://pypi.org/project/pyasic/)
[![Read the Docs](https://img.shields.io/readthedocs/pyasic)](https://pyasic.readthedocs.io/en/latest/)
[![GitHub](https://img.shields.io/github/license/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/blob/master/LICENSE.txt)
[![CodeFactor Grade](https://img.shields.io/codefactor/grade/github/UpstreamData/pyasic)](https://www.codefactor.io/repository/github/upstreamdata/pyasic)
Welcome to pyasic! Pyasic uses an asynchronous method of communicating with asic miners on your network, which makes it super fast.
[Supported Miner Types](miners/supported_types.md)
Getting started with pyasic is easy. First, find your miner (or miners) on the network by scanning for them or getting the correct class automatically for them if you know the IP.
<br>
## Scanning for miners
To scan for miners in pyasic, we use the class [`MinerNetwork`][pyasic.network.MinerNetwork], which abstracts the search, communication, identification, setup, and return of a miner to 1 command.
The command [`MinerNetwork().scan_network_for_miners()`][pyasic.network.MinerNetwork.scan_network_for_miners] returns a list that contains any miners found.
*A simplified and standardized interface for Bitcoin ASICs.*
[![PyPI - Version](https://img.shields.io/pypi/v/pyasic.svg)](https://pypi.org/project/pyasic/)
[![PyPI - Downloads](https://img.shields.io/pypi/dm/pyasic)](https://pypi.org/project/pyasic/)
[![Python - Supported Versions](https://img.shields.io/pypi/pyversions/pyasic.svg)](https://pypi.org/project/pyasic/)
[![CodeFactor - Grade](https://img.shields.io/codefactor/grade/github/UpstreamData/pyasic)](https://www.codefactor.io/repository/github/upstreamdata/pyasic)
[![Commit Activity - master](https://img.shields.io/github/commit-activity/y/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/commits/master/)
[![Code Style - Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Read The Docs - Docs](https://img.shields.io/readthedocs/pyasic)](https://pyasic.readthedocs.io/en/latest/)
[![License - Apache 2.0](https://img.shields.io/github/license/UpstreamData/pyasic)](https://github.com/UpstreamData/pyasic/blob/master/LICENSE.txt)
---
## Intro
---
Welcome to `pyasic`! `pyasic` uses an asynchronous method of communicating with ASIC miners on your network, which makes it super fast.
[Click here to view supported miner types](miners/supported_types.md)
---
## Getting started
---
Getting started with `pyasic` is easy. First, find your miner (or miners) on the network by scanning for them or getting the correct class automatically for them if you know the IP.
##### Scanning for miners
To scan for miners in `pyasic`, we use the class [`MinerNetwork`][pyasic.network.MinerNetwork], which abstracts the search, communication, identification, setup, and return of a miner to 1 command.
The command [`MinerNetwork.scan()`][pyasic.network.MinerNetwork.scan] returns a list that contains any miners found.
```python
import asyncio # asyncio for handling the async part
from pyasic.network import MinerNetwork # miner network handles the scanning
@@ -28,20 +34,19 @@ from pyasic.network import MinerNetwork # miner network handles the scanning
async def scan_miners(): # define async scan function to allow awaiting
# create a miner network
# you can pass in any IP and it will use that in a subnet with a /24 mask (255 IPs).
network = MinerNetwork("192.168.1.50") # this uses the 192.168.1.0-255 network
network = MinerNetwork.from_subnet("192.168.1.50/24") # this uses the 192.168.1.0-255 network
# scan for miners asynchronously
# this will return the correct type of miners if they are supported with all functionality.
miners = await network.scan_network_for_miners()
miners = await network.scan()
print(miners)
if __name__ == "__main__":
asyncio.run(scan_miners()) # run the scan asynchronously with asyncio.run()
```
<br>
## Creating miners based on IP
---
##### Creating miners based on IP
If you already know the IP address of your miner or miners, you can use the [`MinerFactory`][pyasic.miners.miner_factory.MinerFactory] to communicate and identify the miners, or an abstraction of its functionality, [`get_miner()`][pyasic.miners.get_miner].
The function [`get_miner()`][pyasic.miners.get_miner] will return any miner it found at the IP address specified, or an `UnknownMiner` if it cannot identify the miner.
```python
@@ -58,6 +63,8 @@ async def get_miners(): # define async scan function to allow awaiting
print(miner_1, miner_2)
# can also gather these, since they are async
# gathering them will get them both at the same time
# this makes it much faster to get a lot of miners at a time
tasks = [get_miner("192.168.1.75"), get_miner("192.168.1.76")]
miners = await asyncio.gather(*tasks)
print(miners)
@@ -67,13 +74,14 @@ if __name__ == "__main__":
asyncio.run(get_miners()) # get the miners asynchronously with asyncio.run()
```
<br>
## Getting data from miners
Once you have your miner(s) identified, you will likely want to get data from the miner(s). You can do this using a built in function in each miner called `get_data()`.
---
## Data gathering
---
Once you have your miner(s) identified, you will likely want to get data from the miner(s). You can do this using a built-in function in each miner called `get_data()`.
This function will return an instance of the dataclass [`MinerData`][pyasic.data.MinerData] with all data it can gather from the miner.
Each piece of data in a [`MinerData`][pyasic.data.MinerData] instance can be referenced by getting it as an attribute, such as [`MinerData().hashrate`][pyasic.data.MinerData].
##### One miner
```python
import asyncio
from pyasic import get_miner
@@ -88,7 +96,8 @@ async def gather_miner_data():
if __name__ == "__main__":
asyncio.run(gather_miner_data())
```
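As a compact sketch, gathering data from one miner and reading a couple of [`MinerData`][pyasic.data.MinerData] fields looks like this; the IP is a placeholder.
```python
import asyncio

from pyasic import get_miner


async def gather_one():
    miner = await get_miner("192.168.1.75")  # placeholder IP
    data = await miner.get_data()  # returns a MinerData instance
    print(data.hashrate)   # individual fields are attributes
    print(data.as_dict())  # or dump the whole dataclass as a dictionary


if __name__ == "__main__":
    asyncio.run(gather_one())
```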
---
##### Multiple miners
You can do something similar with multiple miners; only a small change is needed to get all the data at once.
```python
import asyncio # asyncio for handling the async part
@@ -96,8 +105,8 @@ from pyasic.network import MinerNetwork # miner network handles the scanning
async def gather_miner_data(): # define async scan function to allow awaiting
network = MinerNetwork("192.168.1.50")
miners = await network.scan_network_for_miners()
network = MinerNetwork.from_subnet("192.168.1.50/24")
miners = await network.scan()
# we need to asyncio.gather() all the miners get_data() functions to make them run together
all_miner_data = await asyncio.gather(*[miner.get_data() for miner in miners])
@@ -109,161 +118,60 @@ if __name__ == "__main__":
asyncio.run(gather_miner_data())
```
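A short sketch of reporting on every miner found by a scan; the subnet is a placeholder, `hashrate` is a documented [`MinerData`][pyasic.data.MinerData] field, and the `ip` field name is assumed from the `MinerData` constructor shown later.
```python
import asyncio

from pyasic.network import MinerNetwork


async def report_hashrates():
    network = MinerNetwork.from_subnet("192.168.1.0/24")  # placeholder subnet
    miners = await network.scan()

    # run every get_data() call concurrently
    all_data = await asyncio.gather(*[miner.get_data() for miner in miners])
    for data in all_data:
        print(data.ip, data.hashrate)  # MinerData fields are plain attributes


if __name__ == "__main__":
    asyncio.run(report_hashrates())
```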
<br>
## Controlling miners via pyasic
Every miner class in pyasic must implement all the control functions defined in [`BaseMiner`][pyasic.miners.BaseMiner].
---
## Miner control
---
`pyasic` exposes a standard interface for each miner using control functions.
Every miner class in `pyasic` must implement all the control functions defined in [`MinerProtocol`][pyasic.miners.base.MinerProtocol].
These functions are
[`check_light`](#check-light),
[`fault_light_off`](#fault-light-off),
[`fault_light_on`](#fault-light-on),
[`get_config`](#get-config),
[`get_data`](#get-data),
[`get_errors`](#get-errors),
[`get_hostname`](#get-hostname),
[`get_model`](#get-model),
[`reboot`](#reboot),
[`restart_backend`](#restart-backend),
[`stop_mining`](#stop-mining),
[`resume_mining`](#resume-mining),
[`is_mining`](#is-mining),
[`send_config`](#send-config), and
[`set_power_limit`](#set-power-limit).
[`check_light`][pyasic.miners.base.MinerProtocol.check_light],
[`fault_light_off`][pyasic.miners.base.MinerProtocol.fault_light_off],
[`fault_light_on`][pyasic.miners.base.MinerProtocol.fault_light_on],
[`get_config`][pyasic.miners.base.MinerProtocol.get_config],
[`get_data`][pyasic.miners.base.MinerProtocol.get_data],
[`get_errors`][pyasic.miners.base.MinerProtocol.get_errors],
[`get_hostname`][pyasic.miners.base.MinerProtocol.get_hostname],
[`get_model`][pyasic.miners.base.MinerProtocol.get_model],
[`reboot`][pyasic.miners.base.MinerProtocol.reboot],
[`restart_backend`][pyasic.miners.base.MinerProtocol.restart_backend],
[`stop_mining`][pyasic.miners.base.MinerProtocol.stop_mining],
[`resume_mining`][pyasic.miners.base.MinerProtocol.resume_mining],
[`is_mining`][pyasic.miners.base.MinerProtocol.is_mining],
[`send_config`][pyasic.miners.base.MinerProtocol.send_config], and
[`set_power_limit`][pyasic.miners.base.MinerProtocol.set_power_limit].
<br>
##### Usage
```python
import asyncio
from pyasic import get_miner
### Check Light
::: pyasic.miners.BaseMiner.check_light
handler: python
options:
heading_level: 4
<br>
async def set_fault_light():
miner = await get_miner("192.168.1.20")
### Fault Light Off
::: pyasic.miners.BaseMiner.fault_light_off
handler: python
options:
heading_level: 4
# call control function
await miner.fault_light_on()
<br>
if __name__ == "__main__":
asyncio.run(set_fault_light())
```
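The same pattern works for every other control function. For example, a sketch of pausing and resuming hashing; the IP is a placeholder.
```python
import asyncio

from pyasic import get_miner


async def pause_and_resume():
    miner = await get_miner("192.168.1.20")  # placeholder IP

    if await miner.is_mining():
        await miner.stop_mining()  # pause hashing

    # ... later, bring the miner back online
    await miner.resume_mining()


if __name__ == "__main__":
    asyncio.run(pause_and_resume())
```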
### Fault Light On
::: pyasic.miners.BaseMiner.fault_light_on
handler: python
options:
heading_level: 4
---
## Helper dataclasses
---
<br>
##### [`MinerConfig`][pyasic.config.MinerConfig] and [`MinerData`][pyasic.data.MinerData]
### Get Config
::: pyasic.miners.BaseMiner.get_config
handler: python
options:
heading_level: 4
`pyasic` implements a few dataclasses as helpers to make data return types consistent across different miners and miner APIs. The different fields of these dataclasses can all be viewed with the classmethod `cls.fields()`.
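For example, a quick sketch of listing those fields, assuming both classes expose `fields()` as described above:
```python
from pyasic.config import MinerConfig
from pyasic.data import MinerData

# list the field names each helper dataclass provides
print(MinerData.fields())
print(MinerConfig.fields())
```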
<br>
---
### Get Data
::: pyasic.miners.BaseMiner.get_data
handler: python
options:
heading_level: 4
<br>
### Get Errors
::: pyasic.miners.BaseMiner.get_errors
handler: python
options:
heading_level: 4
<br>
### Get Hostname
::: pyasic.miners.BaseMiner.get_hostname
handler: python
options:
heading_level: 4
<br>
### Get Model
::: pyasic.miners.BaseMiner.get_model
handler: python
options:
heading_level: 4
<br>
### Reboot
::: pyasic.miners.BaseMiner.reboot
handler: python
options:
heading_level: 4
<br>
### Restart Backend
::: pyasic.miners.BaseMiner.restart_backend
handler: python
options:
heading_level: 4
<br>
### Stop Mining
::: pyasic.miners.BaseMiner.stop_mining
handler: python
options:
heading_level: 4
<br>
### Resume Mining
::: pyasic.miners.BaseMiner.resume_mining
handler: python
options:
heading_level: 4
<br>
### Is Mining
::: pyasic.miners.BaseMiner.is_mining
handler: python
options:
heading_level: 4
<br>
### Send Config
::: pyasic.miners.BaseMiner.send_config
handler: python
options:
heading_level: 4
<br>
### Set Power Limit
::: pyasic.miners.BaseMiner.set_power_limit
handler: python
options:
heading_level: 4
<br>
## [`MinerConfig`][pyasic.config.MinerConfig] and [`MinerData`][pyasic.data.MinerData]
Pyasic implements a few dataclasses as helpers to make data return types consistent across different miners and miner APIs. The different fields of these dataclasses can all be viewed with the classmethod `cls.fields()`.
<br>
### [`MinerData`][pyasic.data.MinerData]
##### [`MinerData`][pyasic.data.MinerData]
[`MinerData`][pyasic.data.MinerData] is the return type of the [`get_data()`](#get-data) function, and is used to provide a consistent dataset across all miners.
You can call [`MinerData.asdict()`][pyasic.data.MinerData.asdict] to get the dataclass as a dictionary, and there are many other helper functions contained in the class to convert to different data formats.
You can call [`MinerData.as_dict()`][pyasic.data.MinerData.as_dict] to get the dataclass as a dictionary, and there are many other helper functions contained in the class to convert to different data formats.
[`MinerData`][pyasic.data.MinerData] instances can also be added to each other to combine their data, and divided by a number to scale all of their fields, allowing you to get average data from many miners by doing -
```python
@@ -278,13 +186,64 @@ list_of_miner_data = [d1, d2]
average_data = sum(list_of_miner_data, start=MinerData("0.0.0.0"))/len(list_of_miner_data)
```
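A fuller sketch of where `d1` and `d2` might come from; the IPs are placeholders.
```python
import asyncio

from pyasic import get_miner
from pyasic.data import MinerData


async def average_two_miners():
    # placeholder IPs
    miner_1 = await get_miner("192.168.1.75")
    miner_2 = await get_miner("192.168.1.76")

    d1, d2 = await asyncio.gather(miner_1.get_data(), miner_2.get_data())

    list_of_miner_data = [d1, d2]
    average_data = sum(list_of_miner_data, start=MinerData("0.0.0.0")) / len(list_of_miner_data)
    print(average_data)


if __name__ == "__main__":
    asyncio.run(average_two_miners())
```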
---
<br>
##### [`MinerConfig`][pyasic.config.MinerConfig]
### [`MinerConfig`][pyasic.config.MinerConfig]
[`MinerConfig`][pyasic.config.MinerConfig] is pyasic's way to represent a configuration file from a miner.
It is the return from [`get_config()`](#get-config).
[`MinerConfig`][pyasic.config.MinerConfig] is `pyasic`'s way to represent a configuration file from a miner.
It is designed to unionize the configuration of all supported miner types, and is the return from [`get_config()`](#get-config).
Each miner has a unique way to convert the [`MinerConfig`][pyasic.config.MinerConfig] to its specific format, and there are helper functions in the class for this.
In most cases these helper functions should not be used, as [`send_config()`](#send-config) takes a [`MinerConfig`][pyasic.config.MinerConfig] and will do the conversion to the right type for you.
You can use the [`MinerConfig`][pyasic.config.MinerConfig] as follows:
```python
import asyncio
from pyasic import get_miner
async def update_config():
miner = await get_miner("192.168.1.20")
# get config
cfg = await miner.get_config()
# send config
await miner.send_config(cfg)
if __name__ == "__main__":
asyncio.run(update_config())
```
---
## Settings
---
`pyasic` has settings designed to make using large groups of miners easier. You can set the default password for all types of miners using the `pyasic.settings` module, used as follows:
```python
from pyasic import settings
settings.update("default_antminer_password", "my_pwd")
```
##### Default values:
```
"network_ping_retries": 1,
"network_ping_timeout": 3,
"network_scan_threads": 300,
"factory_get_retries": 1,
"factory_get_timeout": 3,
"get_data_retries": 1,
"api_function_timeout": 5,
"default_whatsminer_password": "admin",
"default_innosilicon_password": "admin",
"default_antminer_password": "root",
"default_bosminer_password": "root",
"default_vnish_password": "admin",
"default_goldshell_password": "123456789",
# ADVANCED
# Only use this if you know what you are doing
"socket_linger_time": 1000,
```
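Any of the keys above can be tuned the same way, and `pyasic.settings.get` reads a value back; a small sketch (the exact `get()` signature may also accept a fallback value):
```python
from pyasic import settings

# tune scan behaviour for a larger network (keys taken from the defaults above)
settings.update("network_scan_threads", 500)
settings.update("factory_get_timeout", 5)

# read a value back to confirm the change
print(settings.get("network_scan_threads"))
```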

View File

@@ -29,6 +29,20 @@
show_root_heading: false
heading_level: 4
## S19i
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19i
handler: python
options:
show_root_heading: false
heading_level: 4
## S19+
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19Plus
handler: python
options:
show_root_heading: false
heading_level: 4
## S19j No PIC
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19jNoPIC
handler: python
@@ -113,6 +127,13 @@
show_root_heading: false
heading_level: 4
## S19j Pro (BOS)
::: pyasic.miners.antminer.bosminer.X19.S19.BOSMinerS19jPro
handler: python
options:
show_root_heading: false
heading_level: 4
## T19 (BOS)
::: pyasic.miners.antminer.bosminer.X19.T19.BOSMinerT19
handler: python
@@ -176,3 +197,38 @@
show_root_heading: false
heading_level: 4
## S19 (ePIC)
::: pyasic.miners.antminer.epic.X19.S19.ePICS19
handler: python
options:
show_root_heading: false
heading_level: 4
## S19 Pro (ePIC)
::: pyasic.miners.antminer.epic.X19.S19.ePICS19Pro
handler: python
options:
show_root_heading: false
heading_level: 4
## S19j (ePIC)
::: pyasic.miners.antminer.epic.X19.S19.ePICS19j
handler: python
options:
show_root_heading: false
heading_level: 4
## S19j Pro (ePIC)
::: pyasic.miners.antminer.epic.X19.S19.ePICS19jPro
handler: python
options:
show_root_heading: false
heading_level: 4
## S19 XP (ePIC)
::: pyasic.miners.antminer.epic.X19.S19.ePICS19XP
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -1,7 +1,15 @@
# pyasic
## BOSMiner Backend
::: pyasic.miners.backends.bosminer.BOSMiner
::: pyasic.miners.backends.braiins_os.BOSMiner
handler: python
options:
show_root_heading: false
heading_level: 4
## BOSer Backend
::: pyasic.miners.backends.braiins_os.BOSer
handler: python
options:
show_root_heading: false

View File

@@ -1,6 +1,7 @@
# pyasic
## BFGMinerAPI
::: pyasic.API.bfgminer.BFGMinerAPI
## ePIC Backend
::: pyasic.miners.backends.epic.ePIC
handler: python
options:
show_root_heading: false

View File

@@ -1,10 +1,17 @@
# pyasic
## Base Miner
[`BaseMiner`][pyasic.miners.BaseMiner] is the basis for all miner classes, they all subclass (usually indirectly) from this class.
[`BaseMiner`][pyasic.miners.base.BaseMiner] is the basis for all miner classes; they all subclass (usually indirectly) from this class.
You may not instantiate this class on its own, only subclass from it. Trying to instantiate an instance of this class will raise `TypeError`.
This class inherits from the [`MinerProtocol`][pyasic.miners.base.MinerProtocol], which outlines functionality for miners.
::: pyasic.miners.BaseMiner
You may not instantiate this class on its own, only subclass from it.
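As a quick illustration of the note above, constructing [`BaseMiner`][pyasic.miners.base.BaseMiner] directly fails; the constructor argument used here is an assumption based on how concrete miners are created.
```python
from pyasic.miners.base import BaseMiner

try:
    BaseMiner("192.168.1.10")  # abstract base class; the IP argument is assumed
except TypeError as err:
    print(err)  # instantiating the base class raises TypeError
```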
::: pyasic.miners.base.BaseMiner
handler: python
options:
heading_level: 4
::: pyasic.miners.base.MinerProtocol
handler: python
options:
heading_level: 4

91
docs/miners/functions.md Normal file
View File

@@ -0,0 +1,91 @@
## Control functionality
### Check Light
::: pyasic.miners.base.MinerProtocol.check_light
handler: python
options:
heading_level: 4
### Fault Light Off
::: pyasic.miners.base.MinerProtocol.fault_light_off
handler: python
options:
heading_level: 4
### Fault Light On
::: pyasic.miners.base.MinerProtocol.fault_light_on
handler: python
options:
heading_level: 4
### Get Config
::: pyasic.miners.base.MinerProtocol.get_config
handler: python
options:
heading_level: 4
### Get Data
::: pyasic.miners.base.MinerProtocol.get_data
handler: python
options:
heading_level: 4
### Get Errors
::: pyasic.miners.base.MinerProtocol.get_errors
handler: python
options:
heading_level: 4
### Get Hostname
::: pyasic.miners.base.MinerProtocol.get_hostname
handler: python
options:
heading_level: 4
### Get Model
::: pyasic.miners.base.MinerProtocol.get_model
handler: python
options:
heading_level: 4
### Reboot
::: pyasic.miners.base.MinerProtocol.reboot
handler: python
options:
heading_level: 4
### Restart Backend
::: pyasic.miners.base.MinerProtocol.restart_backend
handler: python
options:
heading_level: 4
### Stop Mining
::: pyasic.miners.base.MinerProtocol.stop_mining
handler: python
options:
heading_level: 4
### Resume Mining
::: pyasic.miners.base.MinerProtocol.resume_mining
handler: python
options:
heading_level: 4
### Is Mining
::: pyasic.miners.base.MinerProtocol.is_mining
handler: python
options:
heading_level: 4
### Send Config
::: pyasic.miners.base.MinerProtocol.send_config
handler: python
options:
heading_level: 4
### Set Power Limit
::: pyasic.miners.base.MinerProtocol.set_power_limit
handler: python
options:
heading_level: 4

View File

@@ -2,23 +2,22 @@
## X5 Models
## CK5
::: pyasic.miners.goldshell.bfgminer.X5.CK5.BFGMinerCK5
::: pyasic.miners.goldshell.bfgminer.X5.CK5.GoldshellCK5
handler: python
options:
show_root_heading: false
heading_level: 4
## HS5
::: pyasic.miners.goldshell.bfgminer.X5.HS5.BFGMinerHS5
::: pyasic.miners.goldshell.bfgminer.X5.HS5.GoldshellHS5
handler: python
options:
show_root_heading: false
heading_level: 4
## KD5
::: pyasic.miners.goldshell.bfgminer.X5.KD5.BFGMinerKD5
::: pyasic.miners.goldshell.bfgminer.X5.KD5.GoldshellKD5
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -2,9 +2,8 @@
## XMax Models
## KD Max
::: pyasic.miners.goldshell.bfgminer.XMax.KDMax.BFGMinerKDMax
::: pyasic.miners.goldshell.bfgminer.XMax.KDMax.KDMax
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -2,9 +2,8 @@
## A10X Models
## A10X
::: pyasic.miners.innosilicon.cgminer.A10X.A10X.CGMinerA10X
::: pyasic.miners.innosilicon.cgminer.A10X.A10X.InnosiliconA10X
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -2,9 +2,8 @@
## T3X Models
## T3H+
::: pyasic.miners.innosilicon.cgminer.T3X.T3H.CGMinerT3HPlus
::: pyasic.miners.innosilicon.cgminer.T3X.T3H.InnosiliconT3HPlus
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -1,6 +1,14 @@
# pyasic
## Miner Factory
[`MinerFactory`][pyasic.miners.miner_factory.MinerFactory] is the way to create miner types in `pyasic`. The most important method is [`get_miner()`][pyasic.get_miner], which is mapped to [`pyasic.get_miner()`][pyasic.get_miner], and should be used from there.
The instance used for [`pyasic.get_miner()`][pyasic.get_miner] is `pyasic.miner_factory`.
[`MinerFactory`][pyasic.MinerFactory] also keeps a cache, which can be cleared if needed with `pyasic.miner_factory.clear_cached_miners()`.
Finally, there is functionality to get multiple miners without using `asyncio.gather()` explicitly. Use `pyasic.miner_factory.get_multiple_miners()` with a list of IPs as strings to get a list of miner instances. You can also get multiple miners with an `AsyncGenerator` by using `pyasic.miner_factory.get_miner_generator()`.
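A hedged sketch of loading a small fleet through the factory; the IPs are placeholders, and `get_multiple_miners()` is assumed to be awaitable like the other factory methods.
```python
import asyncio

import pyasic


async def load_fleet():
    ips = ["192.168.1.75", "192.168.1.76"]  # placeholder IPs

    # fetch several miners at once without writing asyncio.gather() yourself;
    # pyasic.miner_factory.get_miner_generator() exposes the same thing as an async generator
    miners = await pyasic.miner_factory.get_multiple_miners(ips)
    print(miners)


if __name__ == "__main__":
    asyncio.run(load_fleet())
```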
::: pyasic.miners.miner_factory.MinerFactory
handler: python
options:

View File

@@ -10,9 +10,6 @@ details {
padding-top:0px;
padding-bottom:0px;
}
ul {
margin:0px;
}
</style>
<details>
@@ -73,6 +70,8 @@ ul {
<li><a href="../antminer/X19#s19l">S19L</a></li>
<li><a href="../antminer/X19#s19-pro">S19 Pro</a></li>
<li><a href="../antminer/X19#s19j">S19j</a></li>
<li><a href="../antminer/X19#s19i">S19i</a></li>
<li><a href="../antminer/X19#s19_1">S19+</a></li>
<li><a href="../antminer/X19#s19j-no-pic">S19j No PIC</a></li>
<li><a href="../antminer/X19#s19-pro_1">S19 Pro+</a></li>
<li><a href="../antminer/X19#s19j-pro">S19j Pro</a></li>
@@ -94,6 +93,8 @@ ul {
<li><a href="../whatsminer/M2X#m20s-v10">M20S V10</a></li>
<li><a href="../whatsminer/M2X#m20s-v20">M20S V20</a></li>
<li><a href="../whatsminer/M2X#m20s-v30">M20S V30</a></li>
<li><a href="../whatsminer/M2X#m20p-v10">M20P V10</a></li>
<li><a href="../whatsminer/M2X#m20p-v30">M20P V30</a></li>
<li><a href="../whatsminer/M2X#m20s_1-v30">M20S+ V30</a></li>
<li><a href="../whatsminer/M2X#m21-v10">M21 V10</a></li>
<li><a href="../whatsminer/M2X#m21s-v20">M21S V20</a></li>
@@ -108,6 +109,8 @@ ul {
<ul>
<li><a href="../whatsminer/M3X#m30-v10">M30 V10</a></li>
<li><a href="../whatsminer/M3X#m30-v20">M30 V20</a></li>
<li><a href="../whatsminer/M3X#m30k-v10">M30K V10</a></li>
<li><a href="../whatsminer/M3X#m30l-v10">M30L V10</a></li>
<li><a href="../whatsminer/M3X#m30s-v10">M30S V10</a></li>
<li><a href="../whatsminer/M3X#m30s-v20">M30S V20</a></li>
<li><a href="../whatsminer/M3X#m30s-v30">M30S V30</a></li>
@@ -157,6 +160,7 @@ ul {
<li><a href="../whatsminer/M3X#m30s_1-ve100">M30S+ VE100</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vf20">M30S+ VF20</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vf30">M30S+ VF30</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg20">M30S+ VG20</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg30">M30S+ VG30</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg40">M30S+ VG40</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg50">M30S+ VG50</a></li>
@@ -190,6 +194,9 @@ ul {
<li><a href="../whatsminer/M3X#m30s_1_1-vj30">M30S++ VJ30</a></li>
<li><a href="../whatsminer/M3X#m31-v10">M31 V10</a></li>
<li><a href="../whatsminer/M3X#m31-v20">M31 V20</a></li>
<li><a href="../whatsminer/M3X#m31h-v10">M31H V10</a></li>
<li><a href="../whatsminer/M3X#m31h-v40">M31H V40</a></li>
<li><a href="../whatsminer/M3X#m30l-v10">M30L V10</a></li>
<li><a href="../whatsminer/M3X#m31s-v10">M31S V10</a></li>
<li><a href="../whatsminer/M3X#m31s-v20">M31S V20</a></li>
<li><a href="../whatsminer/M3X#m31s-v30">M31S V30</a></li>
@@ -205,7 +212,6 @@ ul {
<li><a href="../whatsminer/M3X#m31se-v10">M31SE V10</a></li>
<li><a href="../whatsminer/M3X#m31se-v20">M31SE V20</a></li>
<li><a href="../whatsminer/M3X#m31se-v30">M31SE V30</a></li>
<li><a href="../whatsminer/M3X#m31h-v40">M31H V40</a></li>
<li><a href="../whatsminer/M3X#m31s_1-v10">M31S+ V10</a></li>
<li><a href="../whatsminer/M3X#m31s_1-v20">M31S+ V20</a></li>
<li><a href="../whatsminer/M3X#m31s_1-v30">M31S+ V30</a></li>
@@ -232,6 +238,7 @@ ul {
<li><a href="../whatsminer/M3X#m33-v20">M33 V20</a></li>
<li><a href="../whatsminer/M3X#m33-v30">M33 V30</a></li>
<li><a href="../whatsminer/M3X#m33s-vg30">M33S VG30</a></li>
<li><a href="../whatsminer/M3X#m33s_1-vg20">M33S+ VG20</a></li>
<li><a href="../whatsminer/M3X#m33s_1-vh20">M33S+ VH20</a></li>
<li><a href="../whatsminer/M3X#m33s_1-vh30">M33S+ VH30</a></li>
<li><a href="../whatsminer/M3X#m33s_1_1-vh20">M33S++ VH20</a></li>
@@ -241,12 +248,15 @@ ul {
<li><a href="../whatsminer/M3X#m36s-ve10">M36S VE10</a></li>
<li><a href="../whatsminer/M3X#m36s_1-vg30">M36S+ VG30</a></li>
<li><a href="../whatsminer/M3X#m36s_1_1-vh30">M36S++ VH30</a></li>
<li><a href="../whatsminer/M3X#m39-v10">M39 V10</a></li>
<li><a href="../whatsminer/M3X#m39-v20">M39 V20</a></li>
<li><a href="../whatsminer/M3X#m39-v30">M39 V30</a></li>
</ul>
</details>
<details>
<summary>M5X Series:</summary>
<ul>
<li><a href="../whatsminer/M5X#m50-ve30">M50 VE30</a></li>
<li><a href="../whatsminer/M5X#m50-vg30">M50 VG30</a></li>
<li><a href="../whatsminer/M5X#m50-vh10">M50 VH10</a></li>
<li><a href="../whatsminer/M5X#m50-vh20">M50 VH20</a></li>
@@ -397,6 +407,7 @@ ul {
<li><a href="../antminer/X19#s19j-bos">S19j (BOS)</a></li>
<li><a href="../antminer/X19#s19j-no-pic-bos">S19j No PIC (BOS)</a></li>
<li><a href="../antminer/X19#s19j-pro-bos">S19j Pro (BOS)</a></li>
<li><a href="../antminer/X19#s19j-pro-bos">S19j Pro (BOS)</a></li>
<li><a href="../antminer/X19#t19-bos">T19 (BOS)</a></li>
</ul>
</details>
@@ -434,6 +445,21 @@ ul {
</ul>
</details>
<details>
<summary>ePIC Firmware Miners:</summary>
<ul>
<details>
<summary>X19 Series:</summary>
<ul>
<li><a href="../antminer/X19#s19-epic">S19 (ePIC)</a></li>
<li><a href="../antminer/X19#s19-pro-epic">S19 Pro (ePIC)</a></li>
<li><a href="../antminer/X19#s19j-epic">S19j (ePIC)</a></li>
<li><a href="../antminer/X19#s19j-pro-epic">S19j Pro (ePIC)</a></li>
<li><a href="../antminer/X19#s19-xp-epic">S19 XP (ePIC)</a></li>
</ul>
</details>
</ul>
</details>
<details>
<summary>HiveOS Firmware Miners:</summary>
<ul>
<details>

View File

@@ -29,6 +29,20 @@
show_root_heading: false
heading_level: 4
## M20P V10
::: pyasic.miners.whatsminer.btminer.M2X.M20P.BTMinerM20PV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M20P V30
::: pyasic.miners.whatsminer.btminer.M2X.M20P.BTMinerM20PV30
handler: python
options:
show_root_heading: false
heading_level: 4
## M20S+ V30
::: pyasic.miners.whatsminer.btminer.M2X.M20S_Plus.BTMinerM20SPlusV30
handler: python

View File

@@ -15,6 +15,20 @@
show_root_heading: false
heading_level: 4
## M30K V10
::: pyasic.miners.whatsminer.btminer.M3X.M30K.BTMinerM30KV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M30L V10
::: pyasic.miners.whatsminer.btminer.M3X.M30L.BTMinerM30LV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S V10
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SV10
handler: python
@@ -358,6 +372,13 @@
show_root_heading: false
heading_level: 4
## M30S+ VG20
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVG20
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+ VG30
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVG30
handler: python
@@ -589,6 +610,27 @@
show_root_heading: false
heading_level: 4
## M31H V10
::: pyasic.miners.whatsminer.btminer.M3X.M31H.BTMinerM31HV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M31H V40
::: pyasic.miners.whatsminer.btminer.M3X.M31H.BTMinerM31HV40
handler: python
options:
show_root_heading: false
heading_level: 4
## M31L V10
::: pyasic.miners.whatsminer.btminer.M3X.M31L.BTMinerM31LV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S V10
::: pyasic.miners.whatsminer.btminer.M3X.M31S.BTMinerM31SV10
handler: python
@@ -694,13 +736,6 @@
show_root_heading: false
heading_level: 4
## M31H V40
::: pyasic.miners.whatsminer.btminer.M3X.M31H.BTMinerM31HV40
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S+ V10
::: pyasic.miners.whatsminer.btminer.M3X.M31S_Plus.BTMinerM31SPlusV10
handler: python
@@ -883,6 +918,13 @@
show_root_heading: false
heading_level: 4
## M33S+ VG20
::: pyasic.miners.whatsminer.btminer.M3X.M33S_Plus.BTMinerM33SPlusVG20
handler: python
options:
show_root_heading: false
heading_level: 4
## M33S+ VH20
::: pyasic.miners.whatsminer.btminer.M3X.M33S_Plus.BTMinerM33SPlusVH20
handler: python
@@ -946,6 +988,13 @@
show_root_heading: false
heading_level: 4
## M39 V10
::: pyasic.miners.whatsminer.btminer.M3X.M39.BTMinerM39V10
handler: python
options:
show_root_heading: false
heading_level: 4
## M39 V20
::: pyasic.miners.whatsminer.btminer.M3X.M39.BTMinerM39V20
handler: python
@@ -953,3 +1002,10 @@
show_root_heading: false
heading_level: 4
## M39 V30
::: pyasic.miners.whatsminer.btminer.M3X.M39.BTMinerM39V30
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -1,6 +1,13 @@
# pyasic
## M5X Models
## M50 VE30
::: pyasic.miners.whatsminer.btminer.M5X.M50.BTMinerM50VE30
handler: python
options:
show_root_heading: false
heading_level: 4
## M50 VG30
::: pyasic.miners.whatsminer.btminer.M5X.M50.BTMinerM50VG30
handler: python

View File

@@ -1,12 +0,0 @@
# pyasic
## Miner Network Range
[`MinerNetworkRange`][pyasic.network.net_range.MinerNetworkRange] is a class used by [`MinerNetwork`][pyasic.network.MinerNetwork] to handle any constructor strings.
The goal is to emulate what is produced by `ipaddress.ip_network`, allowing [`MinerNetwork`][pyasic.network.MinerNetwork] to get a list of hosts.
This lets the class serve as [`MinerNetwork.network`][pyasic.network.MinerNetwork] and hence be used for scanning.
::: pyasic.network.net_range.MinerNetworkRange
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -1,3 +1,3 @@
jinja2<3.1.0
jinja2<3.1.3
mkdocs
mkdocstrings[python]

27
docs/rpc/api.md Normal file
View File

@@ -0,0 +1,27 @@
# pyasic
## Miner APIs
Each miner has a unique API that is used to communicate with it.
Each of these API types has commands that differ between them, and some commands have data that others do not.
Each miner that is a subclass of [`BaseMiner`][pyasic.miners.BaseMiner] should have an API linked to it as `Miner.api`.
All API implementations inherit from [`BaseMinerRPCAPI`][pyasic.rpc.BaseMinerRPCAPI], which implements the basic communications protocols.
[`BaseMinerRPCAPI`][pyasic.rpc.BaseMinerRPCAPI] should never be used unless inheriting to create a new miner API class for a new type of miner (which should be exceedingly rare).
[`BaseMinerRPCAPI`][pyasic.rpc.BaseMinerRPCAPI] cannot be instantiated directly; attempting to do so will raise a `TypeError`.
Use these instead -
#### [BFGMiner API][pyasic.rpc.bfgminer.BFGMinerRPCAPI]
#### [BMMiner API][pyasic.rpc.bmminer.BMMinerRPCAPI]
#### [BOSMiner API][pyasic.rpc.bosminer.BOSMinerRPCAPI]
#### [BTMiner API][pyasic.rpc.btminer.BTMinerRPCAPI]
#### [CGMiner API][pyasic.rpc.cgminer.CGMinerRPCAPI]
#### [LUXMiner API][pyasic.rpc.luxminer.LUXMinerRPCAPI]
#### [Unknown API][pyasic.rpc.unknown.UnknownRPCAPI]
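A minimal sketch of reaching one of these APIs through a miner instance; `Miner.api` comes from the description above, while the IP and the use of the common `summary` command are assumptions.
```python
import asyncio

from pyasic import get_miner


async def query_rpc():
    miner = await get_miner("192.168.1.20")  # placeholder IP

    # each miner carries its RPC API handler; `summary` is assumed here as the
    # shared status command of the cgminer-derived APIs
    summary = await miner.api.summary()
    print(summary)


if __name__ == "__main__":
    asyncio.run(query_rpc())
```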
<br>
## BaseMinerRPCAPI
::: pyasic.rpc.BaseMinerRPCAPI
handler: python
options:
heading_level: 4

7
docs/rpc/bfgminer.md Normal file
View File

@@ -0,0 +1,7 @@
# pyasic
## BFGMinerRPCAPI
::: pyasic.rpc.bfgminer.BFGMinerRPCAPI
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -1,6 +1,6 @@
# pyasic
## BMMinerAPI
::: pyasic.API.bmminer.BMMinerAPI
## BMMinerRPCAPI
::: pyasic.rpc.bmminer.BMMinerRPCAPI
handler: python
options:
show_root_heading: false

View File

@@ -1,6 +1,6 @@
# pyasic
## BOSMinerAPI
::: pyasic.API.bosminer.BOSMinerAPI
## BOSMinerRPCAPI
::: pyasic.rpc.bosminer.BOSMinerRPCAPI
handler: python
options:
show_root_heading: false

View File

@@ -1,6 +1,6 @@
# pyasic
## BTMinerAPI
::: pyasic.API.btminer.BTMinerAPI
## BTMinerRPCAPI
::: pyasic.rpc.btminer.BTMinerRPCAPI
handler: python
options:
show_root_heading: false

View File

@@ -1,6 +1,6 @@
# pyasic
## CGMinerAPI
::: pyasic.API.cgminer.CGMinerAPI
## CGMinerRPCAPI
::: pyasic.rpc.cgminer.CGMinerRPCAPI
handler: python
options:
show_root_heading: false

View File

@@ -1,6 +1,6 @@
# pyasic
## LUXMinerAPI
::: pyasic.API.luxminer.LUXMinerAPI
## LUXMinerRPCAPI
::: pyasic.rpc.luxminer.LUXMinerRPCAPI
handler: python
options:
show_root_heading: false

View File

@@ -1,6 +1,6 @@
# pyasic
## UnknownAPI
::: pyasic.API.unknown.UnknownAPI
## UnknownRPCAPI
::: pyasic.rpc.unknown.UnknownRPCAPI
handler: python
options:
show_root_heading: false

36
docs/settings/settings.md Normal file
View File

@@ -0,0 +1,36 @@
# pyasic
## settings
All settings here are global settings for all of pyasic. Set these settings with `update(key, value)`.
Settings options:
- `network_ping_retries`
- `network_ping_timeout`
- `network_scan_threads`
- `factory_get_retries`
- `factory_get_timeout`
- `get_data_retries`
- `api_function_timeout`
- `default_whatsminer_password`
- `default_innosilicon_password`
- `default_antminer_password`
- `default_bosminer_password`
- `default_vnish_password`
- `default_goldshell_password`
- `socket_linger_time`
### get
::: pyasic.settings.get
handler: python
options:
show_root_heading: false
heading_level: 4
### update
::: pyasic.settings.update
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -4,24 +4,24 @@ nav:
- Introduction: "index.md"
- Miners:
- Supported Miners: "miners/supported_types.md"
- Standard Functionality: "miners/functions.md"
- Miner Factory: "miners/miner_factory.md"
- Network:
- Miner Network: "network/miner_network.md"
- Miner Network Range: "network/miner_network_range.md"
- Dataclasses:
- Miner Data: "data/miner_data.md"
- Error Codes: "data/error_codes.md"
- Miner Config: "config/miner_config.md"
- Advanced:
- Miner APIs:
- Intro: "API/api.md"
- BFGMiner: "API/bfgminer.md"
- BMMiner: "API/bmminer.md"
- BOSMiner: "API/bosminer.md"
- BTMiner: "API/btminer.md"
- CGMiner: "API/cgminer.md"
- LUXMiner: "API/luxminer.md"
- Unknown: "API/unknown.md"
- RPC APIs:
- Intro: "rpc/api.md"
- BFGMiner: "rpc/bfgminer.md"
- BMMiner: "rpc/bmminer.md"
- BOSMiner: "rpc/bosminer.md"
- BTMiner: "rpc/btminer.md"
- CGMiner: "rpc/cgminer.md"
- LUXMiner: "rpc/luxminer.md"
- Unknown: "rpc/unknown.md"
- Backends:
- BMMiner: "miners/backends/bmminer.md"
- BOSMiner: "miners/backends/bosminer.md"
@@ -30,6 +30,7 @@ nav:
- CGMiner: "miners/backends/cgminer.md"
- LUXMiner: "miners/backends/luxminer.md"
- VNish: "miners/backends/vnish.md"
- ePIC: "miners/backends/epic.md"
- Hiveon: "miners/backends/hiveon.md"
- Classes:
- Antminer X3: "miners/antminer/X3.md"
@@ -53,7 +54,8 @@ nav:
- Goldshell X5: "miners/goldshell/X5.md"
- Goldshell XMax: "miners/goldshell/XMax.md"
- Base Miner: "miners/base_miner.md"
- Settings:
- Settings: "settings/settings.md"
plugins:
- mkdocstrings

View File

@@ -1,674 +0,0 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import asyncio
import logging
from pyasic.API import APIError, BaseMinerAPI
class BFGMinerAPI(BaseMinerAPI):
"""An abstraction of the BFGMiner API.
Each method corresponds to an API command in BFGMiner.
[BFGMiner API documentation](https://github.com/luke-jr/bfgminer/blob/bfgminer/README.RPC)
This class abstracts use of the BFGMiner API, as well as the
methods for sending commands to it. The self.send_command()
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
Parameters:
ip: The IP of the miner to reference the API on.
port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, api_ver: str = "0.0.0", port: int = 4028):
super().__init__(ip, port)
self.api_ver = api_ver
async def multicommand(self, *commands: str, allow_warning: bool = True) -> dict:
# make sure we can actually run each command, otherwise they will fail
commands = self._check_commands(*commands)
# standard multicommand format is "command1+command2"
# doesn't work for S19 which uses the backup _x19_multicommand
command = "+".join(commands)
try:
data = await self.send_command(command, allow_warning=allow_warning)
except APIError:
logging.debug(f"{self} - (Multicommand) - Handling X19 multicommand.")
data = await self._x19_multicommand(*command.split("+"))
data["multicommand"] = True
return data
async def _x19_multicommand(self, *commands) -> dict:
tasks = []
# send all commands individually
for cmd in commands:
tasks.append(
asyncio.create_task(self._handle_multicommand(cmd, allow_warning=True))
)
all_data = await asyncio.gather(*tasks)
data = {}
for item in all_data:
data.update(item)
return data
async def version(self) -> dict:
"""Get miner version info.
<details>
<summary>Expand</summary>
Returns:
Miner version information.
</details>
"""
return await self.send_command("version")
async def config(self) -> dict:
"""Get some basic configuration info.
<details>
<summary>Expand</summary>
Returns:
## Some miner configuration information:
* ASC Count <- the number of ASCs
* PGA Count <- the number of PGAs
* Pool Count <- the number of Pools
* Strategy <- the current pool strategy
* Log Interval <- the interval of logging
* Device Code <- list of compiled device drivers
* OS <- the current operating system
* Failover-Only <- failover-only setting
* Scan Time <- scan-time setting
* Queue <- queue setting
* Expiry <- expiry setting
</details>
"""
return await self.send_command("config")
async def summary(self) -> dict:
"""Get the status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
The status summary of the miner.
</details>
"""
return await self.send_command("summary")
async def pools(self) -> dict:
"""Get pool information.
<details>
<summary>Expand</summary>
Returns:
Miner pool information.
</details>
"""
return await self.send_command("pools")
async def devs(self) -> dict:
"""Get data on each PGA/ASC with their details.
<details>
<summary>Expand</summary>
Returns:
Data on each PGA/ASC with their details.
</details>
"""
return await self.send_command("devs")
async def procs(self) -> dict:
"""Get data on each processor with their details.
<details>
<summary>Expand</summary>
Returns:
Data on each processor with their details.
</details>
"""
return await self.send_command("procs")
async def devscan(self, info: str = "") -> dict:
"""Get data on each processor with their details.
<details>
<summary>Expand</summary>
Parameters:
info: Info to scan for device by.
Returns:
Data on each processor with their details.
</details>
"""
return await self.send_command("devscan", parameters=info)
async def pga(self, n: int) -> dict:
"""Get data from PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA number to get data from.
Returns:
Data on the PGA n.
</details>
"""
return await self.send_command("pga", parameters=n)
async def proc(self, n: int = 0) -> dict:
"""Get data processor n.
<details>
<summary>Expand</summary>
Parameters:
n: The processor to get data on.
Returns:
Data on processor n.
</details>
"""
return await self.send_command("proc", parameters=n)
async def pgacount(self) -> dict:
"""Get data fon all PGAs.
<details>
<summary>Expand</summary>
Returns:
Data on the PGAs connected.
</details>
"""
return await self.send_command("pgacount")
async def proccount(self) -> dict:
"""Get data fon all processors.
<details>
<summary>Expand</summary>
Returns:
Data on the processors connected.
</details>
"""
return await self.send_command("proccount")
async def switchpool(self, n: int) -> dict:
"""Switch pools to pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to switch to.
Returns:
A confirmation of switching to pool n.
</details>
"""
return await self.send_command("switchpool", parameters=n)
async def enablepool(self, n: int) -> dict:
"""Enable pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to enable.
Returns:
A confirmation of enabling pool n.
</details>
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
<details>
<summary>Expand</summary>
Parameters:
url: The URL of the new pool to add.
username: The users username on the new pool.
password: The worker password on the new pool.
Returns:
A confirmation of adding the pool.
</details>
"""
return await self.send_command(
"addpool", parameters=f"{url},{username},{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
<details>
<summary>Expand</summary>
Parameters:
*n: Pools in order of priority.
Returns:
A confirmation of setting pool priority.
</details>
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
<details>
<summary>Expand</summary>
Parameters:
n: Pool number to set quota on.
q: Quota to set the pool to.
Returns:
A confirmation of setting pool quota.
</details>
"""
return await self.send_command("poolquota", parameters=f"{n},{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to disable.
Returns:
A confirmation of disabling the pool.
</details>
"""
return await self.send_command("disablepool", parameters=n)
async def removepool(self, n: int) -> dict:
"""Remove a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to remove.
Returns:
A confirmation of removing the pool.
</details>
"""
return await self.send_command("removepool", parameters=n)
async def save(self, filename: str = None) -> dict:
"""Save the config.
<details>
<summary>Expand</summary>
Parameters:
filename: Filename to save the config as.
Returns:
A confirmation of saving the config.
</details>
"""
if filename:
return await self.send_command("save", parameters=filename)
else:
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit CGMiner.
<details>
<summary>Expand</summary>
Returns:
A single "BYE" before CGMiner quits.
</details>
"""
return await self.send_command("quit")
async def notify(self) -> dict:
"""Notify the user of past errors.
<details>
<summary>Expand</summary>
Returns:
The last status and count of each devices problem(s).
</details>
"""
return await self.send_command("notify")
async def privileged(self) -> dict:
"""Check if you have privileged access.
<details>
<summary>Expand</summary>
Returns:
The STATUS section with an error if you have no privileged access, or success if you have privileged access.
</details>
"""
return await self.send_command("privileged")
async def pgaenable(self, n: int) -> dict:
"""Enable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to enable.
Returns:
A confirmation of enabling PGA n.
</details>
"""
return await self.send_command("pgaenable", parameters=n)
async def pgadisable(self, n: int) -> dict:
"""Disable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to disable.
Returns:
A confirmation of disabling PGA n.
</details>
"""
return await self.send_command("pgadisable", parameters=n)
async def pgarestart(self, n: int) -> dict:
"""Restart PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to restart.
Returns:
A confirmation of restarting PGA n.
</details>
"""
return await self.send_command("pgadisable", parameters=n)
async def pgaidentify(self, n: int) -> dict:
"""Identify PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to identify.
Returns:
A confirmation of identifying PGA n.
</details>
"""
return await self.send_command("pgaidentify", parameters=n)
async def procenable(self, n: int) -> dict:
"""Enable processor n.
<details>
<summary>Expand</summary>
Parameters:
n: The processor to enable.
Returns:
A confirmation of enabling processor n.
</details>
"""
return await self.send_command("procenable", parameters=n)
async def procdisable(self, n: int) -> dict:
"""Disable processor n.
<details>
<summary>Expand</summary>
Parameters:
n: The processor to disable.
Returns:
A confirmation of disabling processor n.
</details>
"""
return await self.send_command("procdisable", parameters=n)
async def procrestart(self, n: int) -> dict:
"""Restart processor n.
<details>
<summary>Expand</summary>
Parameters:
n: The processor to restart.
Returns:
A confirmation of restarting processor n.
</details>
"""
return await self.send_command("procdisable", parameters=n)
async def procidentify(self, n: int) -> dict:
"""Identify processor n.
<details>
<summary>Expand</summary>
Parameters:
n: The processor to identify.
Returns:
A confirmation of identifying processor n.
</details>
"""
return await self.send_command("procidentify", parameters=n)
async def devdetails(self) -> dict:
"""Get data on all devices with their static details.
<details>
<summary>Expand</summary>
Returns:
Data on all devices with their static details.
</details>
"""
return await self.send_command("devdetails")
async def restart(self) -> dict:
"""Restart CGMiner using the API.
<details>
<summary>Expand</summary>
Returns:
A reply informing of the restart.
</details>
"""
return await self.send_command("restart")
async def stats(self) -> dict:
"""Get stats of each device/pool with more than 1 getwork.
<details>
<summary>Expand</summary>
Returns:
Stats of each device/pool with more than 1 getwork.
</details>
"""
return await self.send_command("stats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in CGMiner.
<details>
<summary>Expand</summary>
Parameters:
command: The command to check.
Returns:
## Information about a command:
* Exists (Y/N) <- the command exists in this version
* Access (Y/N) <- you have access to use the command
</details>
"""
return await self.send_command("check", parameters=command)
async def failover_only(self, failover: bool) -> dict:
"""Set failover-only.
<details>
<summary>Expand</summary>
Parameters:
failover: What to set failover-only to.
Returns:
Confirmation of setting failover-only.
</details>
"""
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
<details>
<summary>Expand</summary>
Returns:
## Information about the current coin being mined:
* Hash Method <- the hashing algorithm
* Current Block Time <- blocktime as a float, 0 means none
* Current Block Hash <- the hash of the current block, blank means none
* LP <- whether LP is in use on at least 1 pool
* Network Difficulty: the current network difficulty
</details>
"""
return await self.send_command("coin")
async def debug(self, setting: str) -> dict:
"""Set a debug setting.
<details>
<summary>Expand</summary>
Parameters:
setting: Which setting to switch to.
## Options are:
* Silent
* Quiet
* Verbose
* Debug
* RPCProto
* PerDevice
* WorkTime
* Normal
Returns:
Data on which debug setting was enabled or disabled.
</details>
"""
return await self.send_command("debug", parameters=setting)
async def setconfig(self, name: str, n: int) -> dict:
"""Set config of name to value n.
<details>
<summary>Expand</summary>
Parameters:
name: The name of the config setting to set.
## Options are:
* queue
* scantime
* expiry
n: The value to set the 'name' setting to.
Returns:
The results of setting config of name to n.
</details>
"""
return await self.send_command("setconfig", parameters=f"{name},{n}")
async def pgaset(self, n: int, opt: str, val: int = None) -> dict:
"""Set PGA option opt to val on PGA n.
<details>
<summary>Expand</summary>
Options:
```
MMQ -
opt: clock
val: 2 - 250 (multiple of 2)
XBS -
opt: clock
val: 2 - 250 (multiple of 2)
```
Parameters:
n: The PGA to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting PGA n with opt[,val].
</details>
"""
if val:
return await self.send_command("pgaset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("pgaset", parameters=f"{n},{opt}")
async def pprocset(self, n: int, opt: str, val: int = None) -> dict:
"""Set processor option opt to val on processor n.
<details>
<summary>Expand</summary>
Options:
```
MMQ -
opt: clock
val: 2 - 250 (multiple of 2)
XBS -
opt: clock
val: 2 - 250 (multiple of 2)
```
Parameters:
n: The PGA to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting PGA n with opt[,val].
</details>
"""
if val:
return await self.send_command("pgaset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("pgaset", parameters=f"{n},{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
<details>
<summary>Expand</summary>
Parameters:
which: Which device to zero. Setting this to 'all' zeros all devices. Setting this to 'bestshare' zeros only the bestshare values for each pool and global.
summary: Whether or not to show a full summary.
Returns:
the STATUS section with info on the zero and optional summary.
</details>
"""
return await self.send_command("zero", parameters=f"{which},{summary}")

View File

@@ -1,731 +0,0 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import asyncio
import logging
from pyasic.API import APIError, BaseMinerAPI
class BMMinerAPI(BaseMinerAPI):
"""An abstraction of the BMMiner API.
Each method corresponds to an API command in BMMiner.
[BMMiner API documentation](https://github.com/jameshilliard/bmminer/blob/master/API-README)
This class abstracts use of the BMMiner API, as well as the
methods for sending commands to it. The `self.send_command()`
function handles sending a command to the miner asynchronously, and
as such is the base for many of the functions in this class, which
rely on it to send the command for them.
Parameters:
ip: The IP of the miner to reference the API on.
port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, api_ver: str = "0.0.0", port: int = 4028) -> None:
super().__init__(ip, port=port)
self.api_ver = api_ver
async def multicommand(self, *commands: str, allow_warning: bool = True) -> dict:
# make sure we can actually run each command, otherwise they will fail
commands = self._check_commands(*commands)
# standard multicommand format is "command1+command2"
# doesn't work for S19 which uses the backup _x19_multicommand
command = "+".join(commands)
try:
data = await self.send_command(command, allow_warning=allow_warning)
except APIError:
logging.debug(f"{self} - (Multicommand) - Handling X19 multicommand.")
data = await self._x19_multicommand(
*command.split("+"), allow_warning=allow_warning
)
data["multicommand"] = True
return data
async def _x19_multicommand(self, *commands, allow_warning: bool = True) -> dict:
tasks = []
# send all commands individually
for cmd in commands:
tasks.append(
asyncio.create_task(self._handle_multicommand(cmd, allow_warning=True))
)
all_data = await asyncio.gather(*tasks)
data = {}
for item in all_data:
data.update(item)
return data
async def version(self) -> dict:
"""Get miner version info.
<details>
<summary>Expand</summary>
Returns:
Miner version information.
</details>
"""
return await self.send_command("version")
async def config(self) -> dict:
"""Get some basic configuration info.
<details>
<summary>Expand</summary>
Returns:
## Some miner configuration information:
* ASC Count <- the number of ASCs
* PGA Count <- the number of PGAs
* Pool Count <- the number of Pools
* Strategy <- the current pool strategy
* Log Interval <- the interval of logging
* Device Code <- list of compiled device drivers
* OS <- the current operating system
* Failover-Only <- failover-only setting
* Scan Time <- scan-time setting
* Queue <- queue setting
* Expiry <- expiry setting
</details>
"""
return await self.send_command("config")
async def summary(self) -> dict:
"""Get the status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
The status summary of the miner.
</details>
"""
return await self.send_command("summary")
async def pools(self) -> dict:
"""Get pool information.
<details>
<summary>Expand</summary>
Returns:
Miner pool information.
</details>
"""
return await self.send_command("pools")
async def devs(self) -> dict:
"""Get data on each PGA/ASC with their details.
<details>
<summary>Expand</summary>
Returns:
Data on each PGA/ASC with their details.
</details>
"""
return await self.send_command("devs")
async def edevs(self, old: bool = False) -> dict:
"""Get data on each PGA/ASC with their details, ignoring blacklisted and zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago
Returns:
Data on each PGA/ASC with their details.
</details>
"""
if old:
return await self.send_command("edevs", parameters=old)
else:
return await self.send_command("edevs")
async def pga(self, n: int) -> dict:
"""Get data from PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA number to get data from.
Returns:
Data on the PGA n.
</details>
"""
return await self.send_command("pga", parameters=n)
async def pgacount(self) -> dict:
"""Get data fon all PGAs.
<details>
<summary>Expand</summary>
Returns:
Data on the PGAs connected.
</details>
"""
return await self.send_command("pgacount")
async def switchpool(self, n: int) -> dict:
"""Switch pools to pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to switch to.
Returns:
A confirmation of switching to pool n.
</details>
"""
return await self.send_command("switchpool", parameters=n)
async def enablepool(self, n: int) -> dict:
"""Enable pool n.
<details>
<summary>Expand</summary>
Parameters:
n: The pool to enable.
Returns:
A confirmation of enabling pool n.
</details>
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
<details>
<summary>Expand</summary>
Parameters:
url: The URL of the new pool to add.
username: The users username on the new pool.
password: The worker password on the new pool.
Returns:
A confirmation of adding the pool.
</details>
"""
return await self.send_command(
"addpool", parameters=f"{url},{username},{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
<details>
<summary>Expand</summary>
Parameters:
*n: Pools in order of priority.
Returns:
A confirmation of setting pool priority.
</details>
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
<details>
<summary>Expand</summary>
Parameters:
n: Pool number to set quota on.
q: Quota to set the pool to.
Returns:
A confirmation of setting pool quota.
</details>
"""
return await self.send_command("poolquota", parameters=f"{n},{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to disable.
Returns:
A confirmation of disabling the pool.
</details>
"""
return await self.send_command("disablepool", parameters=n)
async def removepool(self, n: int) -> dict:
"""Remove a pool.
<details>
<summary>Expand</summary>
Parameters:
n: Pool to remove.
Returns:
A confirmation of removing the pool.
</details>
"""
return await self.send_command("removepool", parameters=n)
async def save(self, filename: str = None) -> dict:
"""Save the config.
<details>
<summary>Expand</summary>
Parameters:
filename: Filename to save the config as.
Returns:
A confirmation of saving the config.
</details>
"""
if filename:
return await self.send_command("save", parameters=filename)
else:
return await self.send_command("save")
async def quit(self) -> dict:
"""Quit BMMiner.
<details>
<summary>Expand</summary>
Returns:
A single "BYE" before BMMiner quits.
</details>
"""
return await self.send_command("quit")
async def notify(self) -> dict:
"""Notify the user of past errors.
<details>
<summary>Expand</summary>
Returns:
The last status and count of each devices problem(s).
</details>
"""
return await self.send_command("notify")
async def privileged(self) -> dict:
"""Check if you have privileged access.
<details>
<summary>Expand</summary>
Returns:
The STATUS section with an error if you have no privileged access, or success if you have privileged access.
</details>
"""
return await self.send_command("privileged")
async def pgaenable(self, n: int) -> dict:
"""Enable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to enable.
Returns:
A confirmation of enabling PGA n.
</details>
"""
return await self.send_command("pgaenable", parameters=n)
async def pgadisable(self, n: int) -> dict:
"""Disable PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to disable.
Returns:
A confirmation of disabling PGA n.
</details>
"""
return await self.send_command("pgadisable", parameters=n)
async def pgaidentify(self, n: int) -> dict:
"""Identify PGA n.
<details>
<summary>Expand</summary>
Parameters:
n: The PGA to identify.
Returns:
A confirmation of identifying PGA n.
</details>
"""
return await self.send_command("pgaidentify", parameters=n)
async def devdetails(self) -> dict:
"""Get data on all devices with their static details.
<details>
<summary>Expand</summary>
Returns:
Data on all devices with their static details.
</details>
"""
return await self.send_command("devdetails")
async def restart(self) -> dict:
"""Restart BMMiner using the API.
<details>
<summary>Expand</summary>
Returns:
A reply informing of the restart.
</details>
"""
return await self.send_command("restart")
async def stats(self) -> dict:
"""Get stats of each device/pool with more than 1 getwork.
<details>
<summary>Expand</summary>
Returns:
Stats of each device/pool with more than 1 getwork.
</details>
"""
return await self.send_command("stats")
async def estats(self, old: bool = False) -> dict:
"""Get stats of each device/pool with more than 1 getwork, ignoring zombie devices.
<details>
<summary>Expand</summary>
Parameters:
old: Include zombie devices that became zombies less than 'old' seconds ago.
Returns:
Stats of each device/pool with more than 1 getwork, ignoring zombie devices.
</details>
"""
if old:
return await self.send_command("estats", parameters=old)
else:
return await self.send_command("estats")
async def check(self, command: str) -> dict:
"""Check if the command command exists in BMMiner.
<details>
<summary>Expand</summary>
Parameters:
command: The command to check.
Returns:
## Information about a command:
* Exists (Y/N) <- the command exists in this version
* Access (Y/N) <- you have access to use the command
</details>
"""
return await self.send_command("check", parameters=command)
async def failover_only(self, failover: bool) -> dict:
"""Set failover-only.
<details>
<summary>Expand</summary>
Parameters:
failover: What to set failover-only to.
Returns:
Confirmation of setting failover-only.
</details>
"""
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
<details>
<summary>Expand</summary>
Returns:
## Information about the current coin being mined:
* Hash Method <- the hashing algorithm
* Current Block Time <- blocktime as a float, 0 means none
* Current Block Hash <- the hash of the current block, blank means none
* LP <- whether LP is in use on at least 1 pool
* Network Difficulty: the current network difficulty
</details>
"""
return await self.send_command("coin")
async def debug(self, setting: str) -> dict:
"""Set a debug setting.
<details>
<summary>Expand</summary>
Parameters:
setting: Which setting to switch to.
## Options are:
* Silent
* Quiet
* Verbose
* Debug
* RPCProto
* PerDevice
* WorkTime
* Normal
Returns:
Data on which debug setting was enabled or disabled.
</details>
"""
return await self.send_command("debug", parameters=setting)
async def setconfig(self, name: str, n: int) -> dict:
"""Set config of name to value n.
<details>
<summary>Expand</summary>
Parameters:
name: The name of the config setting to set.
## Options are:
* queue
* scantime
* expiry
n: The value to set the 'name' setting to.
Returns:
The results of setting config of name to n.
</details>
"""
return await self.send_command("setconfig", parameters=f"{name},{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
<details>
<summary>Expand</summary>
Returns:
The stats of all USB devices except ztex.
</details>
"""
return await self.send_command("usbstats")
async def pgaset(self, n: int, opt: str, val: int = None) -> dict:
"""Set PGA option opt to val on PGA n.
<details>
<summary>Expand</summary>
Options:
```
MMQ -
opt: clock
val: 160 - 230 (multiple of 2)
CMR -
opt: clock
val: 100 - 220
```
Parameters:
n: The PGA to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting PGA n with opt[,val].
</details>
"""
if val:
return await self.send_command("pgaset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("pgaset", parameters=f"{n},{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
<details>
<summary>Expand</summary>
Parameters:
which: Which device to zero. Setting this to 'all' zeros all devices. Setting this to 'bestshare' zeros only the bestshare values for each pool and global.
summary: Whether or not to show a full summary.
Returns:
The STATUS section with info on the zero and an optional summary.
</details>
"""
return await self.send_command("zero", parameters=f"{which},{summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
<details>
<summary>Expand</summary>
Parameters:
n: The interval in seconds between hotplug checks (0 disables hotplug checking).
Returns:
Information on hotplug status.
</details>
"""
return await self.send_command("hotplug", parameters=n)
async def asc(self, n: int) -> dict:
"""Get data for ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to get data for.
Returns:
The data for ASC device n.
</details>
"""
return await self.send_command("asc", parameters=n)
async def ascenable(self, n: int) -> dict:
"""Enable ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to enable.
Returns:
Confirmation of enabling ASC device n.
</details>
"""
return await self.send_command("ascenable", parameters=n)
async def ascdisable(self, n: int) -> dict:
"""Disable ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to disable.
Returns:
Confirmation of disabling ASC device n.
</details>
"""
return await self.send_command("ascdisable", parameters=n)
async def ascidentify(self, n: int) -> dict:
"""Identify ASC device n.
<details>
<summary>Expand</summary>
Parameters:
n: The device to identify.
Returns:
Confirmation of identifying ASC device n.
</details>
"""
return await self.send_command("ascidentify", parameters=n)
async def asccount(self) -> dict:
"""Get data on the number of ASC devices and their info.
<details>
<summary>Expand</summary>
Returns:
Data on all ASC devices.
</details>
"""
return await self.send_command("asccount")
async def ascset(self, n: int, opt: str, val: int = None) -> dict:
"""Set ASC n option opt to value val.
<details>
<summary>Expand</summary>
Sets an option on the ASC n to a value. Allowed options are:
```
AVA+BTB -
opt: freq
val: 256 - 1024 (chip frequency)
BTB -
opt: millivolts
val: 1000 - 1400 (core voltage)
MBA -
opt: reset
val: 0 - # of chips (reset a chip)
opt: freq
val: 0 - # of chips, 100 - 1400 (chip frequency)
opt: ledcount
val: 0 - 100 (chip count for LED)
opt: ledlimit
val: 0 - 200 (LED off below GH/s)
opt: spidelay
val: 0 - 9999 (SPI per I/O delay)
opt: spireset
val: i or s, 0 - 9999 (SPI regular reset)
opt: spisleep
val: 0 - 9999 (SPI reset sleep in ms)
BMA -
opt: volt
val: 0 - 9
opt: clock
val: 0 - 15
```
Parameters:
n: The ASC to set the options on.
opt: The option to set. Setting this to 'help' returns a help message.
val: The value to set the option to.
Returns:
Confirmation of setting option opt to value val.
</details>
"""
if val:
return await self.send_command("ascset", parameters=f"{n},{opt},{val}")
else:
return await self.send_command("ascset", parameters=f"{n},{opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.
<details>
<summary>Expand</summary>
Returns:
An all-in-one status summary of the miner.
</details>
"""
return await self.send_command("lcd")
async def lockstats(self) -> dict:
"""Write lockstats to STDERR.
<details>
<summary>Expand</summary>
Returns:
The result of writing the lock stats to STDERR.
</details>
"""
return await self.send_command("lockstats")


@@ -13,11 +13,7 @@
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.API.bmminer import BMMinerAPI
from pyasic.API.bosminer import BOSMinerAPI
from pyasic.API.btminer import BTMinerAPI
from pyasic.API.cgminer import CGMinerAPI
from pyasic.API.unknown import UnknownAPI
from pyasic import settings
from pyasic.config import MinerConfig
from pyasic.data import (
BraiinsOSError,
@@ -28,18 +24,22 @@ from pyasic.data import (
)
from pyasic.errors import APIError, APIWarning
from pyasic.miners import get_miner
from pyasic.miners.base import AnyMiner
from pyasic.miners.miner_factory import MinerFactory
from pyasic.miners.base import AnyMiner, DataOptions
from pyasic.miners.miner_factory import MinerFactory, miner_factory
from pyasic.miners.miner_listener import MinerListener
from pyasic.network import MinerNetwork
from pyasic.settings import PyasicSettings
from pyasic.rpc.bmminer import BMMinerRPCAPI
from pyasic.rpc.bosminer import BOSMinerRPCAPI
from pyasic.rpc.btminer import BTMinerRPCAPI
from pyasic.rpc.cgminer import CGMinerRPCAPI
from pyasic.rpc.unknown import UnknownRPCAPI
__all__ = [
"BMMinerAPI",
"BOSMinerAPI",
"BTMinerAPI",
"CGMinerAPI",
"UnknownAPI",
"BMMinerRPCAPI",
"BOSMinerRPCAPI",
"BTMinerRPCAPI",
"CGMinerRPCAPI",
"UnknownRPCAPI",
"MinerConfig",
"MinerData",
"BraiinsOSError",
@@ -50,8 +50,10 @@ __all__ = [
"APIWarning",
"get_miner",
"AnyMiner",
"DataOptions",
"MinerFactory",
"miner_factory",
"MinerListener",
"MinerNetwork",
"PyasicSettings",
"settings",
]
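Reading the interleaved import and `__all__` changes, the package-level exports swap the old `pyasic.API.*` classes for the renamed `pyasic.rpc.*` ones and add `DataOptions` plus the `miner_factory` singleton. A one-line import sketch of the new surface (a best-effort reading, since the +/- markers are not rendered in this view):

```python
from pyasic import BMMinerRPCAPI, DataOptions, MinerConfig, miner_factory
```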


@@ -13,664 +13,204 @@
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from copy import deepcopy
from dataclasses import asdict, dataclass, field
import logging
import random
import string
import time
from dataclasses import asdict, dataclass, fields
from enum import IntEnum
from typing import List, Literal
import toml
import yaml
class X19PowerMode(IntEnum):
Normal = 0
Sleep = 1
LPM = 3
@dataclass
class _Pool:
"""A dataclass for pool information.
Attributes:
url: URL of the pool.
username: Username on the pool.
password: Worker password on the pool.
"""
url: str = ""
username: str = ""
password: str = ""
@classmethod
def fields(cls):
return fields(cls)
def from_dict(self, data: dict):
"""Convert raw pool data as a dict to usable data and save it to this class.
Parameters:
data: The raw config data to convert.
"""
for key in data.keys():
if key == "url":
self.url = data[key]
if key in ["user", "username"]:
self.username = data[key]
if key in ["pass", "password"]:
self.password = data[key]
return self
def as_wm(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an Whatsminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "pass": self.password}
return pool
def as_x19(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an X19 device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "pass": self.password}
return pool
def as_x17(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an X5 device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "pass": self.password}
return pool
def as_goldshell(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by a goldshell device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "pass": self.password}
return pool
def as_inno(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an Innosilicon device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {
f"Pool": self.url,
f"UserName": username,
f"Password": self.password,
}
return pool
def as_avalon(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a string usable by an Avalonminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = ",".join([self.url, username, self.password])
return pool
def as_bos(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an BOSMiner device.
Parameters:
user_suffix: The suffix to append to username.
"""
username = self.username
if user_suffix:
username = f"{username}{user_suffix}"
pool = {"url": self.url, "user": username, "password": self.password}
return pool
@dataclass
class _PoolGroup:
"""A dataclass for pool group information.
Attributes:
quota: The group quota.
group_name: The name of the pool group.
pools: A list of pools in this group.
"""
quota: int = 1
group_name: str = None
pools: List[_Pool] = None
@classmethod
def fields(cls):
return fields(cls)
def __post_init__(self):
if not self.group_name:
self.group_name = "".join(
random.choice(string.ascii_uppercase + string.digits) for _ in range(6)
) # generate random pool group name in case it isn't set
def from_dict(self, data: dict):
"""Convert raw pool group data as a dict to usable data and save it to this class.
Parameters:
data: The raw config data to convert.
"""
pools = []
for key in data.keys():
if key in ["name", "group_name"]:
self.group_name = data[key]
if key == "quota":
self.quota = data[key]
if key in ["pools", "pool"]:
for pool in data[key]:
pools.append(_Pool().from_dict(pool))
self.pools = pools
return self
def as_x19(self, user_suffix: str = None) -> List[dict]:
"""Convert the data in this class to a list usable by an X19 device.
Parameters:
user_suffix: The suffix to append to username.
"""
pools = []
for pool in self.pools[:3]:
pools.append(pool.as_x19(user_suffix=user_suffix))
return pools
def as_x17(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a list usable by an X17 device.
Parameters:
user_suffix: The suffix to append to username.
"""
pools = {
"_ant_pool1url": "",
"_ant_pool1user": "",
"_ant_pool1pw": "",
"_ant_pool2url": "",
"_ant_pool2user": "",
"_ant_pool2pw": "",
"_ant_pool3url": "",
"_ant_pool3user": "",
"_ant_pool3pw": "",
}
for idx, pool in enumerate(self.pools[:3]):
pools[f"_ant_pool{idx+1}url"] = pool.as_x17(user_suffix=user_suffix)["url"]
pools[f"_ant_pool{idx+1}user"] = pool.as_x17(user_suffix=user_suffix)[
"user"
]
pools[f"_ant_pool{idx+1}pw"] = pool.as_x17(user_suffix=user_suffix)["pass"]
return pools
def as_goldshell(self, user_suffix: str = None) -> list:
"""Convert the data in this class to a list usable by a goldshell device.
Parameters:
user_suffix: The suffix to append to username.
"""
return [pool.as_goldshell(user_suffix=user_suffix) for pool in self.pools[:3]]
def as_inno(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a list usable by an Innosilicon device.
Parameters:
user_suffix: The suffix to append to username.
"""
pools = {
"Pool1": None,
"UserName1": None,
"Password1": None,
"Pool2": None,
"UserName2": None,
"Password2": None,
"Pool3": None,
"UserName3": None,
"Password3": None,
}
for idx, pool in enumerate(self.pools[:3]):
pool_data = pool.as_inno(user_suffix=user_suffix)
for key in pool_data:
pools[f"{key}{idx+1}"] = pool_data[key]
return pools
def as_wm(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a list usable by a Whatsminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
pools = {}
for i in range(1, 4):
if i <= len(self.pools):
pool_wm = self.pools[i - 1].as_wm(user_suffix)
pools[f"pool_{i}"] = pool_wm["url"]
pools[f"worker_{i}"] = pool_wm["user"]
pools[f"passwd_{i}"] = pool_wm["pass"]
else:
pools[f"pool_{i}"] = ""
pools[f"worker_{i}"] = ""
pools[f"passwd_{i}"] = ""
return pools
def as_avalon(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a dict usable by an Avalonminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
pool = self.pools[0].as_avalon(user_suffix=user_suffix)
return pool
def as_bos(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a dict usable by an BOSMiner device.
Parameters:
user_suffix: The suffix to append to username.
"""
group = {
"name": self.group_name,
"quota": self.quota,
"pool": [pool.as_bos(user_suffix=user_suffix) for pool in self.pools],
}
return group
from pyasic.config.fans import FanModeConfig
from pyasic.config.mining import MiningModeConfig
from pyasic.config.pools import PoolConfig
from pyasic.config.power_scaling import PowerScalingConfig
from pyasic.config.temperature import TemperatureConfig
@dataclass
class MinerConfig:
"""A dataclass for miner configuration information.
Attributes:
pool_groups: A list of pool groups in this config.
temp_mode: The temperature control mode.
temp_target: The target temp.
temp_hot: The hot temp (100% fans).
temp_dangerous: The dangerous temp (shutdown).
minimum_fans: The minimum number of fans required to run the miner.
fan_speed: Manual fan speed to run the fan at (only if temp_mode == "manual").
asicboost: Whether or not to enable asicboost.
autotuning_enabled: Whether or not to enable autotuning.
autotuning_mode: Autotuning mode, either "power" or "hashrate".
autotuning_wattage: The wattage to use when autotuning.
autotuning_hashrate: The hashrate to use when autotuning.
dps_enabled: Whether or not to enable dynamic power scaling.
dps_power_step: The amount of power to reduce autotuning by when the miner reaches dangerous temp.
dps_min_power: The minimum power to reduce autotuning to.
dps_shutdown_enabled: Whether or not to shutdown the miner when `dps_min_power` is reached.
dps_shutdown_duration: The amount of time to shutdown for (in hours).
"""
pool_groups: List[_PoolGroup] = None
temp_mode: Literal["auto", "manual", "disabled"] = "auto"
temp_target: float = 70.0
temp_hot: float = 80.0
temp_dangerous: float = 100.0
minimum_fans: int = None
fan_speed: Literal[tuple(range(101))] = None # noqa - Ignore weird Literal usage
asicboost: bool = None
miner_mode: IntEnum = X19PowerMode.Normal
autotuning_enabled: bool = True
autotuning_mode: Literal["power", "hashrate"] = None
autotuning_wattage: int = None
autotuning_hashrate: int = None
dps_enabled: bool = None
dps_power_step: int = None
dps_min_power: int = None
dps_shutdown_enabled: bool = None
dps_shutdown_duration: float = None
@classmethod
def fields(cls):
return fields(cls)
pools: PoolConfig = field(default_factory=PoolConfig.default)
fan_mode: FanModeConfig = field(default_factory=FanModeConfig.default)
temperature: TemperatureConfig = field(default_factory=TemperatureConfig.default)
mining_mode: MiningModeConfig = field(default_factory=MiningModeConfig.default)
power_scaling: PowerScalingConfig = field(
default_factory=PowerScalingConfig.default
)
def as_dict(self) -> dict:
"""Convert the data in this class to a dict."""
logging.debug(f"MinerConfig - (To Dict) - Dumping Dict config")
data_dict = asdict(self)
for key in asdict(self).keys():
if isinstance(data_dict[key], IntEnum):
data_dict[key] = data_dict[key].value
if data_dict[key] is None:
del data_dict[key]
return data_dict
return asdict(self)
def as_toml(self) -> str:
"""Convert the data in this class to toml."""
logging.debug(f"MinerConfig - (To TOML) - Dumping TOML config")
return toml.dumps(self.as_dict())
def as_yaml(self) -> str:
"""Convert the data in this class to yaml."""
logging.debug(f"MinerConfig - (To YAML) - Dumping YAML config")
return yaml.dump(self.as_dict(), sort_keys=False)
def from_raw(self, data: dict):
"""Convert raw config data as a dict to usable data and save it to this class.
This should be able to handle any raw config file from any miner supported by pyasic.
Parameters:
data: The raw config data to convert.
"""
logging.debug(f"MinerConfig - (From Raw) - Loading raw config")
pool_groups = []
if isinstance(data, list):
# goldshell config list
data = {"pools": data}
for key in data.keys():
if key == "pools":
pool_groups.append(_PoolGroup().from_dict({"pools": data[key]}))
elif key == "group":
for group in data[key]:
pool_groups.append(_PoolGroup().from_dict(group))
if key == "bitmain-fan-ctrl":
if data[key]:
self.temp_mode = "manual"
if data.get("bitmain-fan-pwm"):
self.fan_speed = int(data["bitmain-fan-pwm"])
elif key == "bitmain-work-mode":
if data[key]:
self.miner_mode = X19PowerMode(int(data[key]))
elif key == "fan_control":
for _key in data[key]:
if _key == "min_fans":
self.minimum_fans = data[key][_key]
elif _key == "speed":
self.fan_speed = data[key][_key]
elif key == "temp_control":
for _key in data[key]:
if _key == "mode":
self.temp_mode = data[key][_key]
elif _key == "target_temp":
self.temp_target = data[key][_key]
elif _key == "hot_temp":
self.temp_hot = data[key][_key]
elif _key == "dangerous_temp":
self.temp_dangerous = data[key][_key]
if key == "hash_chain_global":
if data[key].get("asic_boost"):
self.asicboost = data[key]["asic_boost"]
if key == "autotuning":
for _key in data[key]:
if _key == "enabled":
self.autotuning_enabled = data[key][_key]
elif _key == "psu_power_limit":
self.autotuning_wattage = data[key][_key]
elif _key == "power_target":
self.autotuning_wattage = data[key][_key]
elif _key == "hashrate_target":
self.autotuning_hashrate = data[key][_key]
elif _key == "mode":
self.autotuning_mode = data[key][_key].replace("_target", "")
if key in ["power_scaling", "performance_scaling"]:
for _key in data[key]:
if _key == "enabled":
self.dps_enabled = data[key][_key]
elif _key == "power_step":
self.dps_power_step = data[key][_key]
elif _key in ["min_psu_power_limit", "min_power_target"]:
self.dps_min_power = data[key][_key]
elif _key == "shutdown_enabled":
self.dps_shutdown_enabled = data[key][_key]
elif _key == "shutdown_duration":
self.dps_shutdown_duration = data[key][_key]
self.pool_groups = pool_groups
return self
def from_api(self, pools: list):
"""Convert list output from the `AnyMiner.api.pools()` command into a usable data and save it to this class.
Parameters:
pools: The list of pool data to convert.
"""
logging.debug(f"MinerConfig - (From API) - Loading API config")
_pools = []
for pool in pools:
url = pool.get("URL")
user = pool.get("User")
_pools.append({"url": url, "user": user, "pass": "123"})
self.pool_groups = [_PoolGroup().from_dict({"pools": _pools})]
return self
def from_dict(self, data: dict):
"""Convert an output dict of this class back into usable data and save it to this class.
Parameters:
data: The dict config data to convert.
"""
logging.debug(f"MinerConfig - (From Dict) - Loading Dict config")
pool_groups = []
for group in data["pool_groups"]:
pool_groups.append(_PoolGroup().from_dict(group))
for key in data:
if (
hasattr(self, key)
and not key == "pool_groups"
and not key == "miner_mode"
):
setattr(self, key, data[key])
if key == "miner_mode":
self.miner_mode = X19PowerMode(data[key])
self.pool_groups = pool_groups
return self
def from_toml(self, data: str):
"""Convert output toml of this class back into usable data and save it to this class.
Parameters:
data: The toml config data to convert.
"""
logging.debug(f"MinerConfig - (From TOML) - Loading TOML config")
return self.from_dict(toml.loads(data))
def from_yaml(self, data: str):
"""Convert output yaml of this class back into usable data and save it to this class.
Parameters:
data: The yaml config data to convert.
"""
logging.debug(f"MinerConfig - (From YAML) - Loading YAML config")
return self.from_dict(yaml.load(data, Loader=yaml.SafeLoader))
def as_am_modern(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_am_modern(),
"freq-level": "100",
**self.mining_mode.as_am_modern(),
**self.pools.as_am_modern(user_suffix=user_suffix),
**self.temperature.as_am_modern(),
**self.power_scaling.as_am_modern(),
}
def as_wm(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a config usable by a Whatsminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
logging.debug(f"MinerConfig - (As Whatsminer) - Generating Whatsminer config")
return {
"pools": self.pool_groups[0].as_wm(user_suffix=user_suffix),
"wattage": self.autotuning_wattage,
**self.fan_mode.as_wm(),
**self.mining_mode.as_wm(),
**self.pools.as_wm(user_suffix=user_suffix),
**self.temperature.as_wm(),
**self.power_scaling.as_wm(),
}
def as_am_old(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_am_old(),
**self.mining_mode.as_am_old(),
**self.pools.as_am_old(user_suffix=user_suffix),
**self.temperature.as_am_old(),
**self.power_scaling.as_am_old(),
}
def as_goldshell(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_goldshell(),
**self.mining_mode.as_goldshell(),
**self.pools.as_goldshell(user_suffix=user_suffix),
**self.temperature.as_goldshell(),
**self.power_scaling.as_goldshell(),
}
def as_avalon(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_avalon(),
**self.mining_mode.as_avalon(),
**self.pools.as_avalon(user_suffix=user_suffix),
**self.temperature.as_avalon(),
**self.power_scaling.as_avalon(),
}
def as_inno(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a config usable by an Innosilicon device.
Parameters:
user_suffix: The suffix to append to username.
"""
logging.debug(f"MinerConfig - (As Inno) - Generating Innosilicon config")
return self.pool_groups[0].as_inno(user_suffix=user_suffix)
def as_x19(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a config usable by an X19 device.
Parameters:
user_suffix: The suffix to append to username.
"""
logging.debug(f"MinerConfig - (As X19) - Generating X19 config")
cfg = {
"bitmain-fan-ctrl": False,
"bitmain-fan-pwn": "100",
"freq-level": "100",
"miner-mode": str(self.miner_mode.value),
"pools": self.pool_groups[0].as_x19(user_suffix=user_suffix),
return {
**self.fan_mode.as_inno(),
**self.mining_mode.as_inno(),
**self.pools.as_inno(user_suffix=user_suffix),
**self.temperature.as_inno(),
**self.power_scaling.as_inno(),
}
if not self.temp_mode == "auto":
cfg["bitmain-fan-ctrl"] = True
if self.fan_speed:
cfg["bitmain-fan-pwn"] = str(self.fan_speed)
return cfg
def as_x17(self, user_suffix: str = None) -> dict:
"""Convert the data in this class to a config usable by an X5 device.
Parameters:
user_suffix: The suffix to append to username.
"""
cfg = self.pool_groups[0].as_x17(user_suffix=user_suffix)
return cfg
def as_goldshell(self, user_suffix: str = None) -> list:
"""Convert the data in this class to a config usable by a goldshell device.
Parameters:
user_suffix: The suffix to append to username.
"""
cfg = self.pool_groups[0].as_goldshell(user_suffix=user_suffix)
return cfg
def as_avalon(self, user_suffix: str = None) -> str:
"""Convert the data in this class to a config usable by an Avalonminer device.
Parameters:
user_suffix: The suffix to append to username.
"""
logging.debug(f"MinerConfig - (As Avalon) - Generating AvalonMiner config")
cfg = self.pool_groups[0].as_avalon(user_suffix=user_suffix)
return cfg
def as_bos(self, model: str = "S9", user_suffix: str = None) -> str:
"""Convert the data in this class to a config usable by an BOSMiner device.
Parameters:
model: The model of the miner to be used in the format portion of the config.
user_suffix: The suffix to append to username.
"""
logging.debug(f"MinerConfig - (As BOS) - Generating BOSMiner config")
cfg = {
"format": {
"version": "1.2+",
"model": f"Antminer {model.replace('j', 'J')}",
"generator": "pyasic",
"timestamp": int(time.time()),
},
"group": [
group.as_bos(user_suffix=user_suffix) for group in self.pool_groups
],
"temp_control": {
"mode": self.temp_mode,
"target_temp": self.temp_target,
"hot_temp": self.temp_hot,
"dangerous_temp": self.temp_dangerous,
},
def as_bosminer(self, user_suffix: str = None) -> dict:
return {
**merge(self.fan_mode.as_bosminer(), self.temperature.as_bosminer()),
**self.mining_mode.as_bosminer(),
**self.pools.as_bosminer(user_suffix=user_suffix),
**self.power_scaling.as_bosminer(),
}
if self.autotuning_enabled or self.autotuning_wattage:
cfg["autotuning"] = {}
if self.autotuning_enabled:
cfg["autotuning"]["enabled"] = True
else:
cfg["autotuning"]["enabled"] = False
if self.autotuning_mode:
cfg["format"]["version"] = "2.0"
cfg["autotuning"]["mode"] = self.autotuning_mode + "_target"
if self.autotuning_wattage:
cfg["autotuning"]["power_target"] = self.autotuning_wattage
elif self.autotuning_hashrate:
cfg["autotuning"]["hashrate_target"] = self.autotuning_hashrate
else:
if self.autotuning_wattage:
cfg["autotuning"]["psu_power_limit"] = self.autotuning_wattage
def as_boser(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_boser(),
**self.temperature.as_boser(),
**self.mining_mode.as_boser(),
**self.pools.as_boser(user_suffix=user_suffix),
**self.power_scaling.as_boser(),
}
if self.asicboost:
cfg["hash_chain_global"] = {}
cfg["hash_chain_global"]["asic_boost"] = self.asicboost
def as_epic(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_epic(),
**self.temperature.as_epic(),
**self.mining_mode.as_epic(),
**self.pools.as_epic(),
**self.power_scaling.as_epic(),
}
if self.minimum_fans is not None or self.fan_speed is not None:
cfg["fan_control"] = {}
if self.minimum_fans is not None:
cfg["fan_control"]["min_fans"] = self.minimum_fans
if self.fan_speed is not None:
cfg["fan_control"]["speed"] = self.fan_speed
def as_auradine(self, user_suffix: str = None) -> dict:
return {
**self.fan_mode.as_auradine(),
**self.mining_mode.as_auradine(),
**self.pools.as_auradine(user_suffix=user_suffix),
}
if any(
[
getattr(self, item)
for item in [
"dps_enabled",
"dps_power_step",
"dps_min_power",
"dps_shutdown_enabled",
"dps_shutdown_duration",
]
]
):
cfg["power_scaling"] = {}
if self.dps_enabled:
cfg["power_scaling"]["enabled"] = self.dps_enabled
if self.dps_power_step:
cfg["power_scaling"]["power_step"] = self.dps_power_step
if self.dps_min_power:
if cfg["format"]["version"] == "2.0":
cfg["power_scaling"]["min_power_target"] = self.dps_min_power
else:
cfg["power_scaling"]["min_psu_power_limit"] = self.dps_min_power
if self.dps_shutdown_enabled:
cfg["power_scaling"]["shutdown_enabled"] = self.dps_shutdown_enabled
if self.dps_shutdown_duration:
cfg["power_scaling"]["shutdown_duration"] = self.dps_shutdown_duration
@classmethod
def from_dict(cls, dict_conf: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_dict(dict_conf.get("pools")),
mining_mode=MiningModeConfig.from_dict(dict_conf.get("mining_mode")),
fan_mode=FanModeConfig.from_dict(dict_conf.get("fan_mode")),
temperature=TemperatureConfig.from_dict(dict_conf.get("temperature")),
power_scaling=PowerScalingConfig.from_dict(dict_conf.get("power_scaling")),
)
return toml.dumps(cfg)
@classmethod
def from_api(cls, api_pools: dict) -> "MinerConfig":
return cls(pools=PoolConfig.from_api(api_pools))
@classmethod
def from_am_modern(cls, web_conf: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_am_modern(web_conf),
mining_mode=MiningModeConfig.from_am_modern(web_conf),
fan_mode=FanModeConfig.from_am_modern(web_conf),
)
@classmethod
def from_am_old(cls, web_conf: dict) -> "MinerConfig":
return cls.from_am_modern(web_conf)
@classmethod
def from_goldshell(cls, web_conf: dict) -> "MinerConfig":
return cls(pools=PoolConfig.from_am_modern(web_conf))
@classmethod
def from_inno(cls, web_pools: list) -> "MinerConfig":
return cls(pools=PoolConfig.from_inno(web_pools))
@classmethod
def from_bosminer(cls, toml_conf: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_bosminer(toml_conf),
mining_mode=MiningModeConfig.from_bosminer(toml_conf),
fan_mode=FanModeConfig.from_bosminer(toml_conf),
temperature=TemperatureConfig.from_bosminer(toml_conf),
power_scaling=PowerScalingConfig.from_bosminer(toml_conf),
)
@classmethod
def from_boser(cls, grpc_miner_conf: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_boser(grpc_miner_conf),
mining_mode=MiningModeConfig.from_boser(grpc_miner_conf),
fan_mode=FanModeConfig.from_boser(grpc_miner_conf),
temperature=TemperatureConfig.from_boser(grpc_miner_conf),
power_scaling=PowerScalingConfig.from_boser(grpc_miner_conf),
)
@classmethod
def from_epic(cls, web_conf: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_epic(web_conf),
fan_mode=FanModeConfig.from_epic(web_conf),
temperature=TemperatureConfig.from_epic(web_conf),
mining_mode=MiningModeConfig.from_epic(web_conf),
)
@classmethod
def from_vnish(cls, web_settings: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_vnish(web_settings),
fan_mode=FanModeConfig.from_vnish(web_settings),
temperature=TemperatureConfig.from_vnish(web_settings),
mining_mode=MiningModeConfig.from_vnish(web_settings),
)
@classmethod
def from_auradine(cls, web_conf: dict) -> "MinerConfig":
return cls(
pools=PoolConfig.from_api(web_conf["pools"]),
fan_mode=FanModeConfig.from_auradine(web_conf["fan"]),
mining_mode=MiningModeConfig.from_auradine(web_conf["mode"]),
)
def merge(a: dict, b: dict) -> dict:
result = deepcopy(a)
for b_key, b_val in b.items():
a_val = result.get(b_key)
if isinstance(a_val, dict) and isinstance(b_val, dict):
result[b_key] = merge(a_val, b_val)
else:
result[b_key] = deepcopy(b_val)
return result
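The net effect of this rewrite is that `MinerConfig` becomes a composition of per-feature sections (`pools`, `fan_mode`, `temperature`, `mining_mode`, `power_scaling`), each serializing itself per backend, with `merge()` used only where two sections write into the same table (fan and temperature settings both target BOSMiner's `temp_control`). A rough round-trip sketch using only classes from this diff; the pool values are illustrative:

```python
from pyasic.config import MinerConfig
from pyasic.config.pools import PoolConfig

# One pool group; unspecified sections fall back to their defaults.
cfg = MinerConfig(
    pools=PoolConfig.simple(
        [{"url": "stratum+tcp://pool.example.com:3333", "user": "wallet.worker", "password": "x"}]
    )
)

as_dict = cfg.as_dict()                # plain dict via dataclasses.asdict
restored = MinerConfig.from_dict(as_dict)
bos_sections = cfg.as_bosminer()       # {"group": [...], "temp_control": {...}, ...}
```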

pyasic/config/base.py Normal file

@@ -0,0 +1,107 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from dataclasses import asdict, dataclass
from enum import Enum
from typing import Union
class MinerConfigOption(Enum):
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]):
return cls.default()
def as_am_modern(self) -> dict:
return self.value.as_am_modern()
def as_am_old(self) -> dict:
return self.value.as_am_old()
def as_wm(self) -> dict:
return self.value.as_wm()
def as_inno(self) -> dict:
return self.value.as_inno()
def as_goldshell(self) -> dict:
return self.value.as_goldshell()
def as_avalon(self) -> dict:
return self.value.as_avalon()
def as_bosminer(self) -> dict:
return self.value.as_bosminer()
def as_boser(self) -> dict:
return self.value.as_boser()
def as_epic(self) -> dict:
return self.value.as_epic()
def as_vnish(self) -> dict:
return self.value.as_vnish()
def as_auradine(self) -> dict:
return self.value.as_auradine()
def __call__(self, *args, **kwargs):
return self.value(*args, **kwargs)
@classmethod
def default(cls):
pass
@dataclass
class MinerConfigValue:
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]):
return cls()
def as_dict(self):
return asdict(self)
def as_am_modern(self) -> dict:
return {}
def as_am_old(self) -> dict:
return {}
def as_wm(self) -> dict:
return {}
def as_inno(self) -> dict:
return {}
def as_goldshell(self) -> dict:
return {}
def as_avalon(self) -> dict:
return {}
def as_bosminer(self) -> dict:
return {}
def as_boser(self) -> dict:
return {}
def as_epic(self) -> dict:
return {}
def as_vnish(self) -> dict:
return {}
def as_auradine(self) -> dict:
return {}
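`MinerConfigValue` and `MinerConfigOption` give every config section the same surface: a `from_dict` constructor plus one `as_<backend>` serializer per firmware, defaulting to an empty dict so backends simply ignore sections they do not understand. A hypothetical subclass sketch following that pattern (`ExampleSetting` and its web key are invented for illustration and are not part of pyasic):

```python
from dataclasses import dataclass, field

from pyasic.config.base import MinerConfigValue


@dataclass
class ExampleSetting(MinerConfigValue):
    mode: str = field(init=False, default="example")  # mirrors how shipped sections tag themselves
    level: int = 1

    def as_am_modern(self) -> dict:
        # hypothetical Antminer web config key; every other as_<backend>()
        # falls through to the {} default inherited above
        return {"example-level": str(self.level)}
```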

pyasic/config/fans.py Normal file

@@ -0,0 +1,220 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from dataclasses import dataclass, field
from typing import Union
from pyasic.config.base import MinerConfigOption, MinerConfigValue
@dataclass
class FanModeNormal(MinerConfigValue):
mode: str = field(init=False, default="normal")
minimum_fans: int = 1
minimum_speed: int = 0
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "FanModeNormal":
cls_conf = {}
if dict_conf.get("minimum_fans") is not None:
cls_conf["minimum_fans"] = dict_conf["minimum_fans"]
if dict_conf.get("minimum_speed") is not None:
cls_conf["minimum_speed"] = dict_conf["minimum_speed"]
return cls(**cls_conf)
@classmethod
def from_vnish(cls, web_cooling_settings: dict):
cls_conf = {}
if web_cooling_settings.get("fan_min_count") is not None:
cls_conf["minimum_fans"] = web_cooling_settings["fan_min_count"]
if web_cooling_settings.get("fan_min_duty") is not None:
cls_conf["minimum_speed"] = web_cooling_settings["fan_min_duty"]
return cls(**cls_conf)
def as_am_modern(self) -> dict:
return {"bitmain-fan-ctrl": False, "bitmain-fan-pwn": "100"}
def as_bosminer(self) -> dict:
return {"temp_control": {"mode": "auto"}}
@dataclass
class FanModeManual(MinerConfigValue):
mode: str = field(init=False, default="manual")
speed: int = 100
minimum_fans: int = 1
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "FanModeManual":
cls_conf = {}
if dict_conf.get("speed") is not None:
cls_conf["speed"] = dict_conf["speed"]
if dict_conf.get("minimum_fans") is not None:
cls_conf["minimum_fans"] = dict_conf["minimum_fans"]
return cls(**cls_conf)
@classmethod
def from_bosminer(cls, toml_fan_conf: dict) -> "FanModeManual":
cls_conf = {}
if toml_fan_conf.get("min_fans") is not None:
cls_conf["minimum_fans"] = toml_fan_conf["min_fans"]
if toml_fan_conf.get("speed") is not None:
cls_conf["speed"] = toml_fan_conf["speed"]
return cls(**cls_conf)
@classmethod
def from_vnish(cls, web_cooling_settings: dict) -> "FanModeManual":
cls_conf = {}
if web_cooling_settings.get("fan_min_count") is not None:
cls_conf["minimum_fans"] = web_cooling_settings["fan_min_count"]
if web_cooling_settings["mode"].get("param") is not None:
cls_conf["speed"] = web_cooling_settings["mode"]["param"]
return cls(**cls_conf)
def as_am_modern(self) -> dict:
return {"bitmain-fan-ctrl": True, "bitmain-fan-pwn": str(self.speed)}
def as_bosminer(self) -> dict:
return {
"temp_control": {"mode": "manual"},
"fan_control": {"min_fans": self.minimum_fans, "speed": self.speed},
}
def as_auradine(self) -> dict:
return {"fan": {"percentage": self.speed}}
@dataclass
class FanModeImmersion(MinerConfigValue):
mode: str = field(init=False, default="immersion")
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "FanModeImmersion":
return cls()
def as_am_modern(self) -> dict:
return {"bitmain-fan-ctrl": True, "bitmain-fan-pwn": "0"}
def as_bosminer(self) -> dict:
return {"temp_control": {"mode": "disabled"}}
def as_auradine(self) -> dict:
return {"fan": {"percentage": 0}}
class FanModeConfig(MinerConfigOption):
normal = FanModeNormal
manual = FanModeManual
immersion = FanModeImmersion
@classmethod
def default(cls):
return cls.normal()
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]):
if dict_conf is None:
return cls.default()
mode = dict_conf.get("mode")
if mode is None:
return cls.default()
clsattr = getattr(cls, mode)
if clsattr is not None:
return clsattr().from_dict(dict_conf)
@classmethod
def from_am_modern(cls, web_conf: dict):
if web_conf.get("bitmain-fan-ctrl") is not None:
fan_manual = web_conf["bitmain-fan-ctrl"]
if fan_manual:
return cls.manual(speed=web_conf["bitmain-fan-pwm"])
else:
return cls.normal()
else:
return cls.default()
@classmethod
def from_epic(cls, web_conf: dict):
try:
fan_mode = web_conf["Fans"]["Fan Mode"]
if fan_mode.get("Manual") is not None:
return cls.manual(speed=fan_mode.get("Manual"))
else:
return cls.normal()
except KeyError:
return cls.default()
@classmethod
def from_bosminer(cls, toml_conf: dict):
if toml_conf.get("temp_control") is None:
return cls.default()
if toml_conf["temp_control"].get("mode") is None:
return cls.default()
mode = toml_conf["temp_control"]["mode"]
if mode == "auto":
return cls.normal()
elif mode == "manual":
if toml_conf.get("fan_control"):
return cls.manual().from_bosminer(toml_conf["fan_control"])
return cls.manual()
elif mode == "disabled":
return cls.immersion()
@classmethod
def from_vnish(cls, web_settings: dict):
try:
mode = web_settings["miner"]["cooling"]["mode"]["name"]
except LookupError:
return cls.default()
if mode == "auto":
return cls.normal().from_vnish(web_settings["miner"]["cooling"])
elif mode == "manual":
return cls.manual().from_vnish(web_settings["miner"]["cooling"])
elif mode == "immers":
return cls.immersion()
@classmethod
def from_boser(cls, grpc_miner_conf: dict):
try:
temperature_conf = grpc_miner_conf["temperature"]
except LookupError:
return cls.default()
keys = temperature_conf.keys()
if "auto" in keys:
if "minimumRequiredFans" in keys:
return cls.normal(temperature_conf["minimumRequiredFans"])
return cls.normal()
if "manual" in keys:
conf = {}
if "fanSpeedRatio" in temperature_conf["manual"].keys():
conf["speed"] = int(temperature_conf["manual"]["fanSpeedRatio"])
if "minimumRequiredFans" in keys:
conf["minimum_fans"] = int(temperature_conf["minimumRequiredFans"])
return cls.manual(**conf)
@classmethod
def from_auradine(cls, web_fan: dict):
try:
fan_data = web_fan["Fan"][0]
fan_1_max = fan_data["Max"]
fan_1_target = fan_data["Target"]
return cls.manual(speed=round((fan_1_target / fan_1_max) * 100))
except LookupError:
return cls.default()
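Because each fan mode carries its own backend mappings, selecting a mode is just constructing the matching option; a short sketch using only the classes above (expected outputs follow directly from their `as_*` methods):

```python
from pyasic.config.fans import FanModeConfig

manual = FanModeConfig.manual(speed=80, minimum_fans=2)
manual.as_bosminer()
# {'temp_control': {'mode': 'manual'}, 'fan_control': {'min_fans': 2, 'speed': 80}}

immersion = FanModeConfig.from_dict({"mode": "immersion"})
immersion.as_am_modern()
# {'bitmain-fan-ctrl': True, 'bitmain-fan-pwn': '0'}
```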

pyasic/config/mining.py Normal file

@@ -0,0 +1,369 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from dataclasses import dataclass, field
from typing import Dict, Union
from pyasic.config.base import MinerConfigOption, MinerConfigValue
from pyasic.web.braiins_os.proto.braiins.bos.v1 import (
HashrateTargetMode,
PerformanceMode,
Power,
PowerTargetMode,
SaveAction,
SetPerformanceModeRequest,
TeraHashrate,
TunerPerformanceMode,
)
@dataclass
class MiningModeNormal(MinerConfigValue):
mode: str = field(init=False, default="normal")
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModeNormal":
return cls()
def as_am_modern(self) -> dict:
return {"miner-mode": "0"}
def as_wm(self) -> dict:
return {"mode": self.mode}
def as_auradine(self) -> dict:
return {"mode": {"mode": self.mode}}
@dataclass
class MiningModeSleep(MinerConfigValue):
mode: str = field(init=False, default="sleep")
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModeSleep":
return cls()
def as_am_modern(self) -> dict:
return {"miner-mode": "1"}
def as_wm(self) -> dict:
return {"mode": self.mode}
def as_auradine(self) -> dict:
return {"mode": {"sleep": "on"}}
@dataclass
class MiningModeLPM(MinerConfigValue):
mode: str = field(init=False, default="low")
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModeLPM":
return cls()
def as_am_modern(self) -> dict:
return {"miner-mode": "3"}
def as_wm(self) -> dict:
return {"mode": self.mode}
def as_auradine(self) -> dict:
return {"mode": {"mode": "eco"}}
@dataclass
class MiningModeHPM(MinerConfigValue):
mode: str = field(init=False, default="high")
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModeHPM":
return cls()
def as_am_modern(self):
return {"miner-mode": "0"}
def as_wm(self) -> dict:
return {"mode": self.mode}
def as_auradine(self) -> dict:
return {"mode": {"mode": "turbo"}}
@dataclass
class MiningModePowerTune(MinerConfigValue):
mode: str = field(init=False, default="power_tuning")
power: int = None
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModePowerTune":
return cls(dict_conf.get("power"))
def as_am_modern(self) -> dict:
return {"miner-mode": "0"}
def as_wm(self) -> dict:
if self.power is not None:
return {"mode": self.mode, self.mode: {"wattage": self.power}}
return {}
def as_bosminer(self) -> dict:
return {"autotuning": {"enabled": True, "psu_power_limit": self.power}}
def as_boser(self) -> dict:
return {
"set_performance_mode": SetPerformanceModeRequest(
save_action=SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
mode=PerformanceMode(
tuner_mode=TunerPerformanceMode(
power_target=PowerTargetMode(
power_target=Power(watt=self.power)
)
)
),
),
}
def as_auradine(self) -> dict:
return {"mode": {"mode": "custom", "tune": "power", "power": self.power}}
@dataclass
class MiningModeHashrateTune(MinerConfigValue):
mode: str = field(init=False, default="hashrate_tuning")
hashrate: int = None
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModeHashrateTune":
return cls(dict_conf.get("hashrate"))
def as_am_modern(self) -> dict:
return {"miner-mode": "0"}
def as_boser(self) -> dict:
return {
"set_performance_mode": SetPerformanceModeRequest(
save_action=SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
mode=PerformanceMode(
tuner_mode=TunerPerformanceMode(
hashrate_target=HashrateTargetMode(
hashrate_target=TeraHashrate(
terahash_per_second=self.hashrate
)
)
)
),
)
}
def as_auradine(self) -> dict:
return {"mode": {"mode": "custom", "tune": "ths", "ths": self.hashrate}}
@dataclass
class ManualBoardSettings(MinerConfigValue):
freq: float
volt: float
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "ManualBoardSettings":
return cls(freq=dict_conf["freq"], volt=dict_conf["volt"])
def as_am_modern(self) -> dict:
return {"miner-mode": "0"}
@dataclass
class MiningModeManual(MinerConfigValue):
mode: str = field(init=False, default="manual")
global_freq: float
global_volt: float
boards: Dict[int, ManualBoardSettings] = field(default_factory=dict)
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "MiningModeManual":
return cls(
global_freq=dict_conf["global_freq"],
global_volt=dict_conf["global_volt"],
boards={i: ManualBoardSettings.from_dict(dict_conf[i]) for i in dict_conf},
)
def as_am_modern(self) -> dict:
return {"miner-mode": "0"}
@classmethod
def from_vnish(cls, web_overclock_settings: dict) -> "MiningModeManual":
# will raise KeyError if it can't find the settings; values cannot be empty
voltage = web_overclock_settings["globals"]["volt"]
freq = web_overclock_settings["globals"]["freq"]
boards = {
idx: ManualBoardSettings(
freq=board["freq"],
volt=voltage if not board["freq"] == 0 else 0,
)
for idx, board in enumerate(web_overclock_settings["chains"])
}
return cls(global_freq=freq, global_volt=voltage, boards=boards)
class MiningModeConfig(MinerConfigOption):
normal = MiningModeNormal
low = MiningModeLPM
high = MiningModeHPM
sleep = MiningModeSleep
power_tuning = MiningModePowerTune
hashrate_tuning = MiningModeHashrateTune
manual = MiningModeManual
@classmethod
def default(cls):
return cls.normal()
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]):
if dict_conf is None:
return cls.default()
mode = dict_conf.get("mode")
if mode is None:
return cls.default()
clsattr = getattr(cls, mode)
if clsattr is not None:
return clsattr().from_dict(dict_conf)
@classmethod
def from_am_modern(cls, web_conf: dict):
if web_conf.get("bitmain-work-mode") is not None:
work_mode = web_conf["bitmain-work-mode"]
if work_mode == "":
return cls.default()
if int(work_mode) == 0:
return cls.normal()
elif int(work_mode) == 1:
return cls.sleep()
elif int(work_mode) == 3:
return cls.low()
return cls.default()
@classmethod
def from_epic(cls, web_conf: dict):
try:
work_mode = web_conf["PerpetualTune"]["Running"]
if work_mode:
if (
web_conf["PerpetualTune"]["Algorithm"].get("VoltageOptimizer")
is not None
):
return cls.hashrate_tuning(
web_conf["PerpetualTune"]["Algorithm"]["VoltageOptimizer"][
"Target"
]
)
else:
return cls.hashrate_tuning(
web_conf["PerpetualTune"]["Algorithm"]["ChipTune"]["Target"]
)
else:
return cls.normal()
except KeyError:
return cls.default()
@classmethod
def from_bosminer(cls, toml_conf: dict):
if toml_conf.get("autotuning") is None:
return cls.default()
autotuning_conf = toml_conf["autotuning"]
if autotuning_conf.get("enabled") is None:
return cls.default()
if not autotuning_conf["enabled"]:
return cls.default()
if autotuning_conf.get("psu_power_limit") is not None:
# old autotuning conf
return cls.power_tuning(autotuning_conf["psu_power_limit"])
if autotuning_conf.get("mode") is not None:
# new autotuning conf
mode = autotuning_conf["mode"]
if mode == "power_target":
if autotuning_conf.get("power_target") is not None:
return cls.power_tuning(autotuning_conf["power_target"])
return cls.power_tuning()
if mode == "hashrate_target":
if autotuning_conf.get("hashrate_target") is not None:
return cls.hashrate_tuning(autotuning_conf["hashrate_target"])
return cls.hashrate_tuning()
@classmethod
def from_vnish(cls, web_settings: dict):
try:
mode_settings = web_settings["miner"]["overclock"]
except KeyError:
return cls.default()
if mode_settings["preset"] == "disabled":
return MiningModeManual.from_vnish(mode_settings)
else:
return cls.power_tuning(int(mode_settings["preset"]))
@classmethod
def from_boser(cls, grpc_miner_conf: dict):
try:
tuner_conf = grpc_miner_conf["tuner"]
if not tuner_conf.get("enabled", False):
return cls.default()
except LookupError:
return cls.default()
if tuner_conf.get("tunerMode") is not None:
if tuner_conf["tunerMode"] == 1:
if tuner_conf.get("powerTarget") is not None:
return cls.power_tuning(tuner_conf["powerTarget"]["watt"])
return cls.power_tuning()
if tuner_conf["tunerMode"] == 2:
if tuner_conf.get("hashrateTarget") is not None:
return cls.hashrate_tuning(
int(tuner_conf["hashrateTarget"]["terahashPerSecond"])
)
return cls.hashrate_tuning()
if tuner_conf.get("powerTarget") is not None:
return cls.power_tuning(tuner_conf["powerTarget"]["watt"])
if tuner_conf.get("hashrateTarget") is not None:
return cls.hashrate_tuning(
int(tuner_conf["hashrateTarget"]["terahashPerSecond"])
)
@classmethod
def from_auradine(cls, web_mode: dict):
try:
mode_data = web_mode["Mode"][0]
if mode_data.get("Sleep") == "on":
return cls.sleep()
if mode_data.get("Mode") == "normal":
return cls.normal()
if mode_data.get("Mode") == "eco":
return cls.low()
if mode_data.get("Mode") == "turbo":
return cls.high()
if mode_data.get("Ths") is not None:
return cls.hashrate_tuning(mode_data["Ths"])
if mode_data.get("Power") is not None:
return cls.power_tuning(mode_data["Power"])
except LookupError:
return cls.default()
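The mining-mode options follow the same pattern, with the tuning variants carrying a power or hashrate target; a brief sketch (the wattage is illustrative):

```python
from pyasic.config.mining import MiningModeConfig

power = MiningModeConfig.power_tuning(power=3200)
power.as_bosminer()
# {'autotuning': {'enabled': True, 'psu_power_limit': 3200}}

parsed = MiningModeConfig.from_am_modern({"bitmain-work-mode": "1"})
parsed.mode  # 'sleep'
```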

pyasic/config/pools.py Normal file

@@ -0,0 +1,450 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import random
import string
from dataclasses import dataclass, field
from typing import Dict, List, Union
from pyasic.config.base import MinerConfigValue
@dataclass
class Pool(MinerConfigValue):
url: str
user: str
password: str
def as_am_modern(self, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
"url": self.url,
"user": f"{self.user}{user_suffix}",
"pass": self.password,
}
return {"url": self.url, "user": self.user, "pass": self.password}
def as_wm(self, idx: int = 1, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
f"pool_{idx}": self.url,
f"worker_{idx}": f"{self.user}{user_suffix}",
f"passwd_{idx}": self.password,
}
return {
f"pool_{idx}": self.url,
f"worker_{idx}": self.user,
f"passwd_{idx}": self.password,
}
def as_am_old(self, idx: int = 1, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
f"_ant_pool{idx}url": self.url,
f"_ant_pool{idx}user": f"{self.user}{user_suffix}",
f"_ant_pool{idx}pw": self.password,
}
return {
f"_ant_pool{idx}url": self.url,
f"_ant_pool{idx}user": self.user,
f"_ant_pool{idx}pw": self.password,
}
def as_goldshell(self, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
"url": self.url,
"user": f"{self.user}{user_suffix}",
"pass": self.password,
}
return {"url": self.url, "user": self.user, "pass": self.password}
def as_avalon(self, user_suffix: str = None) -> str:
if user_suffix is not None:
return ",".join([self.url, f"{self.user}{user_suffix}", self.password])
return ",".join([self.url, self.user, self.password])
def as_inno(self, idx: int = 1, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
f"Pool{idx}": self.url,
f"UserName{idx}": f"{self.user}{user_suffix}",
f"Password{idx}": self.password,
}
return {
f"Pool{idx}": self.url,
f"UserName{idx}": self.user,
f"Password{idx}": self.password,
}
def as_bosminer(self, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
"url": self.url,
"user": f"{self.user}{user_suffix}",
"password": self.password,
}
return {"url": self.url, "user": self.user, "password": self.password}
def as_auradine(self, user_suffix: str = None) -> dict:
if user_suffix is not None:
return {
"url": self.url,
"user": f"{self.user}{user_suffix}",
"pass": self.password,
}
return {"url": self.url, "user": self.user, "pass": self.password}
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "Pool":
return cls(
url=dict_conf["url"], user=dict_conf["user"], password=dict_conf["password"]
)
@classmethod
def from_api(cls, api_pool: dict) -> "Pool":
return cls(url=api_pool["URL"], user=api_pool["User"], password="x")
@classmethod
def from_epic(cls, api_pool: dict) -> "Pool":
return cls(
url=api_pool["pool"], user=api_pool["login"], password=api_pool["password"]
)
@classmethod
def from_am_modern(cls, web_pool: dict) -> "Pool":
return cls(
url=web_pool["url"], user=web_pool["user"], password=web_pool["pass"]
)
# TODO: check if this is accurate, user/username, pass/password
@classmethod
def from_goldshell(cls, web_pool: dict) -> "Pool":
return cls(
url=web_pool["url"], user=web_pool["user"], password=web_pool["pass"]
)
@classmethod
def from_inno(cls, web_pool: dict) -> "Pool":
return cls(
url=web_pool["url"], user=web_pool["user"], password=web_pool["pass"]
)
@classmethod
def from_bosminer(cls, toml_pool_conf: dict) -> "Pool":
return cls(
url=toml_pool_conf["url"],
user=toml_pool_conf["user"],
password=toml_pool_conf["password"],
)
@classmethod
def from_vnish(cls, web_pool: dict) -> "Pool":
return cls(
url=web_pool["url"],
user=web_pool["user"],
password=web_pool["pass"],
)
@classmethod
def from_boser(cls, grpc_pool: dict) -> "Pool":
return cls(
url=grpc_pool["url"],
user=grpc_pool["user"],
password=grpc_pool["password"],
)
@dataclass
class PoolGroup(MinerConfigValue):
pools: List[Pool] = field(default_factory=list)
quota: int = 1
name: str = None
def __post_init__(self):
if self.name is None:
self.name = "".join(
random.choice(string.ascii_uppercase + string.digits) for _ in range(6)
) # generate random pool group name in case it isn't set
def as_am_modern(self, user_suffix: str = None) -> list:
pools = []
idx = 0
while idx < 3:
if len(self.pools) > idx:
pools.append(self.pools[idx].as_am_modern(user_suffix=user_suffix))
else:
pools.append(Pool("", "", "").as_am_modern())
idx += 1
return pools
def as_wm(self, user_suffix: str = None) -> dict:
pools = {}
idx = 0
while idx < 3:
if len(self.pools) > idx:
pools.update(
**self.pools[idx].as_wm(idx=idx + 1, user_suffix=user_suffix)
)
else:
pools.update(**Pool("", "", "").as_wm(idx=idx + 1))
idx += 1
return pools
def as_am_old(self, user_suffix: str = None) -> dict:
pools = {}
idx = 0
while idx < 3:
if len(self.pools) > idx:
pools.update(
**self.pools[idx].as_am_old(idx=idx + 1, user_suffix=user_suffix)
)
else:
pools.update(**Pool("", "", "").as_am_old(idx=idx + 1))
idx += 1
return pools
def as_goldshell(self, user_suffix: str = None) -> list:
return [pool.as_goldshell(user_suffix) for pool in self.pools]
def as_avalon(self, user_suffix: str = None) -> dict:
if len(self.pools) > 0:
return self.pools[0].as_avalon(user_suffix=user_suffix)
return Pool("", "", "").as_avalon()
def as_inno(self, user_suffix: str = None) -> dict:
pools = {}
idx = 0
while idx < 3:
if len(self.pools) > idx:
pools.update(
**self.pools[idx].as_inno(idx=idx + 1, user_suffix=user_suffix)
)
else:
pools.update(**Pool("", "", "").as_inno(idx=idx + 1))
idx += 1
return pools
def as_bosminer(self, user_suffix: str = None) -> dict:
if len(self.pools) > 0:
conf = {
"name": self.name,
"pool": [
pool.as_bosminer(user_suffix=user_suffix) for pool in self.pools
],
}
if self.quota is not None:
conf["quota"] = self.quota
return conf
return {"name": "Group", "pool": []}
def as_auradine(self, user_suffix: str = None) -> list:
return [p.as_auradine(user_suffix=user_suffix) for p in self.pools]
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "PoolGroup":
cls_conf = {}
if dict_conf.get("quota") is not None:
cls_conf["quota"] = dict_conf["quota"]
if dict_conf.get("name") is not None:
cls_conf["name"] = dict_conf["name"]
cls_conf["pools"] = [Pool.from_dict(p) for p in dict_conf["pools"]]
return cls(**cls_conf)
@classmethod
def from_api(cls, api_pool_list: list) -> "PoolGroup":
pools = []
for pool in api_pool_list:
pools.append(Pool.from_api(pool))
return cls(pools=pools)
@classmethod
def from_epic(cls, api_pool_list: list) -> "PoolGroup":
pools = []
for pool in api_pool_list:
pools.append(Pool.from_epic(pool))
return cls(pools=pools)
@classmethod
def from_am_modern(cls, web_pool_list: list) -> "PoolGroup":
pools = []
for pool in web_pool_list:
pools.append(Pool.from_am_modern(pool))
return cls(pools=pools)
@classmethod
def from_goldshell(cls, web_pools: list) -> "PoolGroup":
return cls([Pool.from_goldshell(p) for p in web_pools])
@classmethod
def from_inno(cls, web_pools: list) -> "PoolGroup":
return cls([Pool.from_inno(p) for p in web_pools])
@classmethod
def from_bosminer(cls, toml_group_conf: dict) -> "PoolGroup":
if toml_group_conf.get("pool") is not None:
return cls(
name=toml_group_conf["name"],
quota=toml_group_conf.get("quota"),
pools=[Pool.from_bosminer(p) for p in toml_group_conf["pool"]],
)
return cls()
@classmethod
def from_vnish(cls, web_settings_pools: dict) -> "PoolGroup":
return cls([Pool.from_vnish(p) for p in web_settings_pools])
@classmethod
def from_boser(cls, grpc_pool_group: dict) -> "PoolGroup":
try:
return cls(
pools=[Pool.from_boser(p) for p in grpc_pool_group["pools"]],
name=grpc_pool_group["name"],
quota=grpc_pool_group["quota"]["value"]
if grpc_pool_group.get("quota") is not None
else 1,
)
except LookupError:
return cls()
@dataclass
class PoolConfig(MinerConfigValue):
groups: List[PoolGroup] = field(default_factory=list)
@classmethod
def default(cls) -> "PoolConfig":
return cls()
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "PoolConfig":
if dict_conf is None:
return cls.default()
return cls(groups=[PoolGroup.from_dict(g) for g in dict_conf["groups"]])
@classmethod
def simple(cls, pools: List[Union[Pool, Dict[str, str]]]) -> "PoolConfig":
group_pools = []
for pool in pools:
if isinstance(pool, dict):
pool = Pool(**pool)
group_pools.append(pool)
return cls(groups=[PoolGroup(pools=group_pools)])
def as_am_modern(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return {"pools": self.groups[0].as_am_modern(user_suffix=user_suffix)}
return {"pools": PoolGroup().as_am_modern()}
def as_wm(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return {"pools": self.groups[0].as_wm(user_suffix=user_suffix)}
return {"pools": PoolGroup().as_wm()}
def as_am_old(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return self.groups[0].as_am_old(user_suffix=user_suffix)
return PoolGroup().as_am_old()
def as_goldshell(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return {"pools": self.groups[0].as_goldshell(user_suffix=user_suffix)}
return {"pools": PoolGroup().as_goldshell()}
def as_avalon(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return {"pools": self.groups[0].as_avalon(user_suffix=user_suffix)}
return {"pools": PoolGroup().as_avalon()}
def as_inno(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return self.groups[0].as_inno(user_suffix=user_suffix)
return PoolGroup().as_inno()
def as_bosminer(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return {
"group": [g.as_bosminer(user_suffix=user_suffix) for g in self.groups]
}
return {"group": [PoolGroup().as_bosminer()]}
def as_boser(self, user_suffix: str = None) -> dict:
return {}
def as_auradine(self, user_suffix: str = None) -> dict:
if len(self.groups) > 0:
return {
"updatepools": {
"pools": self.groups[0].as_auradine(user_suffix=user_suffix)
}
}
return {"updatepools": {"pools": PoolGroup().as_auradine()}}
@classmethod
def from_api(cls, api_pools: dict) -> "PoolConfig":
try:
pool_data = api_pools["POOLS"]
except KeyError:
return PoolConfig.default()
pool_data = sorted(pool_data, key=lambda x: int(x["POOL"]))
return cls([PoolGroup.from_api(pool_data)])
@classmethod
def from_epic(cls, web_conf: dict) -> "PoolConfig":
pool_data = web_conf["StratumConfigs"]
return cls([PoolGroup.from_epic(pool_data)])
@classmethod
def from_am_modern(cls, web_conf: dict) -> "PoolConfig":
pool_data = web_conf["pools"]
return cls([PoolGroup.from_am_modern(pool_data)])
@classmethod
def from_goldshell(cls, web_pools: list) -> "PoolConfig":
return cls([PoolGroup.from_goldshell(web_pools)])
@classmethod
def from_inno(cls, web_pools: list) -> "PoolConfig":
return cls([PoolGroup.from_inno(web_pools)])
@classmethod
def from_bosminer(cls, toml_conf: dict) -> "PoolConfig":
if toml_conf.get("group") is None:
return cls()
return cls([PoolGroup.from_bosminer(g) for g in toml_conf["group"]])
@classmethod
def from_vnish(cls, web_settings: dict) -> "PoolConfig":
try:
return cls([PoolGroup.from_vnish(web_settings["miner"]["pools"])])
except LookupError:
return cls()
@classmethod
def from_boser(cls, grpc_miner_conf: dict) -> "PoolConfig":
try:
return cls(
groups=[
PoolGroup.from_boser(group)
for group in grpc_miner_conf["poolGroups"]
]
)
except LookupError:
return cls()
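A quick illustrative check of the empty-config fallbacks above (not part of this changeset); it assumes PoolGroup() defaults to an empty pool list, as its use as a fallback in the methods above suggests.
# Illustrative only: exercising the no-pool fallback paths defined above.
cfg = PoolConfig.default()
print(cfg.as_bosminer())   # {'group': [{'name': 'Group', 'pool': []}]}
print(cfg.as_auradine())   # {'updatepools': {'pools': []}}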

View File

@@ -0,0 +1,218 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from dataclasses import dataclass, field
from typing import Union
from pyasic.config.base import MinerConfigOption, MinerConfigValue
from pyasic.web.braiins_os.proto.braiins.bos.v1 import (
DpsPowerTarget,
DpsTarget,
Power,
SetDpsRequest,
)
@dataclass
class PowerScalingShutdownEnabled(MinerConfigValue):
mode: str = field(init=False, default="enabled")
duration: int = None
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "PowerScalingShutdownEnabled":
return cls(duration=dict_conf.get("duration"))
def as_bosminer(self) -> dict:
cfg = {"shutdown_enabled": True}
if self.duration is not None:
cfg["shutdown_duration"] = self.duration
return cfg
def as_boser(self) -> dict:
return {"enable_shutdown": True, "shutdown_duration": self.duration}
@dataclass
class PowerScalingShutdownDisabled(MinerConfigValue):
mode: str = field(init=False, default="disabled")
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "PowerScalingShutdownDisabled":
return cls()
def as_bosminer(self) -> dict:
return {"shutdown_enabled": False}
def as_boser(self) -> dict:
return {"enable_shutdown ": False}
class PowerScalingShutdown(MinerConfigOption):
enabled = PowerScalingShutdownEnabled
disabled = PowerScalingShutdownDisabled
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]):
if dict_conf is None:
return cls.default()
mode = dict_conf.get("mode")
if mode is None:
return cls.default()
clsattr = getattr(cls, mode)
if clsattr is not None:
return clsattr().from_dict(dict_conf)
@classmethod
def from_bosminer(cls, power_scaling_conf: dict):
sd_enabled = power_scaling_conf.get("shutdown_enabled")
if sd_enabled is not None:
if sd_enabled:
return cls.enabled(power_scaling_conf.get("shutdown_duration"))
else:
return cls.disabled()
return None
@classmethod
def from_boser(cls, power_scaling_conf: dict):
sd_enabled = power_scaling_conf.get("shutdownEnabled")
if sd_enabled is not None:
if sd_enabled:
try:
return cls.enabled(power_scaling_conf["shutdownDuration"]["hours"])
except KeyError:
return cls.enabled()
else:
return cls.disabled()
return None
@dataclass
class PowerScalingEnabled(MinerConfigValue):
mode: str = field(init=False, default="enabled")
power_step: int = None
minimum_power: int = None
shutdown_enabled: Union[
PowerScalingShutdownEnabled, PowerScalingShutdownDisabled
] = None
@classmethod
def from_bosminer(cls, power_scaling_conf: dict) -> "PowerScalingEnabled":
power_step = power_scaling_conf.get("power_step")
min_power = power_scaling_conf.get("min_psu_power_limit")
sd_mode = PowerScalingShutdown.from_bosminer(power_scaling_conf)
return cls(
power_step=power_step, minimum_power=min_power, shutdown_enabled=sd_mode
)
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "PowerScalingEnabled":
cls_conf = {
"power_step": dict_conf.get("power_step"),
"minimum_power": dict_conf.get("minimum_power"),
}
shutdown_enabled = dict_conf.get("shutdown_enabled")
if shutdown_enabled is not None:
cls_conf["shutdown_enabled"] = PowerScalingShutdown.from_dict(
shutdown_enabled
)
return cls(**cls_conf)
def as_bosminer(self) -> dict:
cfg = {"enabled": True}
if self.power_step is not None:
cfg["power_step"] = self.power_step
if self.minimum_power is not None:
cfg["min_psu_power_limit"] = self.minimum_power
if self.shutdown_enabled is not None:
cfg = {**cfg, **self.shutdown_enabled.as_bosminer()}
return {"power_scaling": cfg}
def as_boser(self) -> dict:
return {
"set_dps": SetDpsRequest(
enable=True,
**self.shutdown_enabled.as_boser(),
target=DpsTarget(
power_target=DpsPowerTarget(
power_step=Power(self.power_step),
min_power_target=Power(self.minimum_power),
)
),
),
}
@dataclass
class PowerScalingDisabled(MinerConfigValue):
mode: str = field(init=False, default="disabled")
class PowerScalingConfig(MinerConfigOption):
enabled = PowerScalingEnabled
disabled = PowerScalingDisabled
@classmethod
def default(cls):
return cls.disabled()
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]):
if dict_conf is None:
return cls.default()
mode = dict_conf.get("mode")
if mode is None:
return cls.default()
clsattr = getattr(cls, mode)
if clsattr is not None:
return clsattr().from_dict(dict_conf)
@classmethod
def from_bosminer(cls, toml_conf: dict):
power_scaling = toml_conf.get("power_scaling")
if power_scaling is not None:
enabled = power_scaling.get("enabled")
if enabled is not None:
if enabled:
return cls.enabled().from_bosminer(power_scaling)
else:
return cls.disabled()
return cls.default()
@classmethod
def from_boser(cls, grpc_miner_conf: dict):
try:
dps_conf = grpc_miner_conf["dps"]
if not dps_conf.get("enabled", False):
return cls.disabled()
except LookupError:
return cls.default()
conf = {"shutdown_enabled": PowerScalingShutdown.from_boser(dps_conf)}
if dps_conf.get("minPowerTarget") is not None:
conf["minimum_power"] = dps_conf["minPowerTarget"]["watt"]
if dps_conf.get("powerStep") is not None:
conf["power_step"] = dps_conf["powerStep"]["watt"]
return cls.enabled(**conf)
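As a small illustration of the bosminer round trip defined above (not part of the changeset); the keys below mirror the getters used in from_bosminer() and as_bosminer().
# Illustrative sketch using PowerScalingConfig as defined above.
section = {
    "power_scaling": {
        "enabled": True,
        "power_step": 100,
        "min_psu_power_limit": 2800,
        "shutdown_enabled": True,
        "shutdown_duration": 3,
    }
}
cfg = PowerScalingConfig.from_bosminer(section)
print(cfg.as_bosminer())
# {'power_scaling': {'enabled': True, 'power_step': 100, 'min_psu_power_limit': 2800,
#  'shutdown_enabled': True, 'shutdown_duration': 3}}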

View File

@@ -0,0 +1,112 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from dataclasses import dataclass
from typing import Union
from pyasic.config.base import MinerConfigValue
@dataclass
class TemperatureConfig(MinerConfigValue):
target: int = None
hot: int = None
danger: int = None
@classmethod
def default(cls):
return cls()
def as_bosminer(self) -> dict:
temp_cfg = {}
if self.target is not None:
temp_cfg["target_temp"] = self.target
if self.hot is not None:
temp_cfg["hot_temp"] = self.hot
if self.danger is not None:
temp_cfg["dangerous_temp"] = self.danger
return {"temp_control": temp_cfg}
@classmethod
def from_dict(cls, dict_conf: Union[dict, None]) -> "TemperatureConfig":
return cls(
target=dict_conf.get("target"),
hot=dict_conf.get("hot"),
danger=dict_conf.get("danger"),
)
@classmethod
def from_bosminer(cls, toml_conf: dict) -> "TemperatureConfig":
temp_control = toml_conf.get("temp_control")
if temp_control is not None:
return cls(
target=temp_control.get("target_temp"),
hot=temp_control.get("hot_temp"),
danger=temp_control.get("dangerous_temp"),
)
@classmethod
def from_epic(cls, web_conf: dict) -> "TemperatureConfig":
try:
dangerous_temp = web_conf["Misc"]["Shutdown Temp"]
except KeyError:
dangerous_temp = None
# Need to do this in two blocks to avoid KeyError if one is missing
try:
target_temp = web_conf["Fans"]["Fan Mode"]["Auto"]["Target Temperature"]
except KeyError:
target_temp = None
return cls(target=target_temp, danger=dangerous_temp)
@classmethod
def from_vnish(cls, web_settings: dict):
try:
if web_settings["miner"]["cooling"]["mode"]["name"] == "auto":
return cls(target=web_settings["miner"]["cooling"]["mode"]["param"])
except KeyError:
pass
return cls()
@classmethod
def from_boser(cls, grpc_miner_conf: dict):
try:
temperature_conf = grpc_miner_conf["temperature"]
except KeyError:
return cls.default()
root_key = None
for key in ["auto", "manual", "disabled"]:
if key in temperature_conf.keys():
root_key = key
break
if root_key is None:
return cls.default()
conf = {}
keys = temperature_conf[root_key].keys()
if "targetTemperature" in keys:
conf["target"] = int(
temperature_conf[root_key]["targetTemperature"]["degreeC"]
)
if "hotTemperature" in keys:
conf["hot"] = int(temperature_conf[root_key]["hotTemperature"]["degreeC"])
if "dangerousTemperature" in keys:
conf["danger"] = int(
temperature_conf[root_key]["dangerousTemperature"]["degreeC"]
)
return cls(**conf)
return cls.default()
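A minimal illustrative round trip through the dict and bosminer representations above (not part of the changeset):
# Illustrative only, using TemperatureConfig as defined above.
temps = TemperatureConfig.from_dict({"target": 75, "hot": 85, "danger": 95})
print(temps.as_bosminer())
# {'temp_control': {'target_temp': 75, 'hot_temp': 85, 'dangerous_temp': 95}}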

View File

@@ -16,12 +16,14 @@
import copy
import json
import logging
import time
from dataclasses import asdict, dataclass, field, fields
from datetime import datetime, timezone
from typing import Any, List, Union
from pyasic.config import MinerConfig
from pyasic.config.mining import MiningModePowerTune
from .error_codes import BraiinsOSError, InnosiliconError, WhatsminerError, X19Error
@@ -35,7 +37,8 @@ class HashBoard:
temp: The temperature of the PCB as an int.
chip_temp: The temperature of the chips as an int.
chips: The chip count of the board as an int.
expected_chips: The ideal chip count of the board as an int.
expected_chips: The expected chip count of the board as an int.
serial_number: The serial number of the board.
missing: Whether the board is missing from the miner's reported data, as a bool.
"""
@@ -45,6 +48,7 @@ class HashBoard:
chip_temp: int = None
chips: int = None
expected_chips: int = None
serial_number: str = None
missing: bool = True
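# Illustrative example (not part of the source): a fully reported board would
# look roughly like
#   HashBoard(slot=0, hashrate=28.5, temp=62, chip_temp=75,
#             chips=114, expected_chips=114, missing=False)
# while a board absent from the miner's reported data keeps missing=True.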
def get(self, __key: str, default: Any = None):
@@ -105,8 +109,8 @@ class MinerData:
hostname: The network hostname of the miner as a str.
hashrate: The hashrate of the miner in TH/s as a float. Calculated automatically.
_hashrate: Backup for hashrate found via API instead of hashboards.
nominal_hashrate: The factory nominal hashrate of the miner in TH/s as a float.
hashboards: A list of hashboards on the miner with their statistics.
expected_hashrate: The factory nominal hashrate of the miner in TH/s as a float.
hashboards: A list of [`HashBoard`][pyasic.data.HashBoard]s on the miner with their statistics.
temperature_avg: The average temperature across the boards. Calculated automatically.
env_temp: The environment temperature as a float.
wattage: Current power draw of the miner as an int.
@@ -114,16 +118,12 @@ class MinerData:
fans: A list of fans on the miner with their speeds.
fan_psu: The speed of the PSU fan, if the miner reports it.
total_chips: The total number of chips on all boards. Calculated automatically.
ideal_chips: The ideal number of chips in the miner as an int.
percent_ideal_chips: The percent of total chips out of the ideal count. Calculated automatically.
percent_ideal_hashrate: The percent of total hashrate out of the ideal hashrate. Calculated automatically.
percent_ideal_wattage: The percent of total wattage out of the ideal wattage. Calculated automatically.
expected_chips: The expected number of chips in the miner as an int.
percent_expected_chips: The percent of total chips out of the expected count. Calculated automatically.
percent_expected_hashrate: The percent of total hashrate out of the expected hashrate. Calculated automatically.
percent_expected_wattage: The percent of total wattage out of the expected wattage. Calculated automatically.
nominal: Whether the number of chips in the miner is nominal. Calculated automatically.
pool_split: The pool split as a str.
pool_1_url: The first pool url on the miner as a str.
pool_1_user: The first pool user on the miner as a str.
pool_2_url: The second pool url on the miner as a str.
pool_2_user: The second pool user on the miner as a str.
config: The parsed config of the miner, using [`MinerConfig`][pyasic.config.MinerConfig].
errors: A list of errors on the miner.
fault_light: Whether the fault light is on as a boolean.
efficiency: Efficiency of the miner in J/TH (Watts per TH/s). Calculated automatically.
@@ -140,27 +140,24 @@ class MinerData:
fw_ver: str = None
hostname: str = None
hashrate: float = field(init=False)
_hashrate: float = None
nominal_hashrate: float = None
_hashrate: float = field(repr=False, default=None)
expected_hashrate: float = None
hashboards: List[HashBoard] = field(default_factory=list)
ideal_hashboards: int = None
expected_hashboards: int = None
temperature_avg: int = field(init=False)
env_temp: float = None
wattage: int = None
wattage_limit: int = None
wattage_limit: int = field(init=False)
_wattage_limit: int = field(repr=False, default=None)
fans: List[Fan] = field(default_factory=list)
fan_psu: int = None
total_chips: int = field(init=False)
ideal_chips: int = None
percent_ideal_chips: float = field(init=False)
percent_ideal_hashrate: float = field(init=False)
percent_ideal_wattage: float = field(init=False)
expected_chips: int = None
percent_expected_chips: float = field(init=False)
percent_expected_hashrate: float = field(init=False)
percent_expected_wattage: float = field(init=False)
nominal: bool = field(init=False)
pool_split: str = "0"
pool_1_url: str = "Unknown"
pool_1_user: str = "Unknown"
pool_2_url: str = ""
pool_2_user: str = ""
config: MinerConfig = None
errors: List[
Union[WhatsminerError, BraiinsOSError, X19Error, InnosiliconError]
] = field(default_factory=list)
@@ -170,7 +167,11 @@ class MinerData:
@classmethod
def fields(cls):
return [f.name for f in fields(cls)]
return [f.name for f in fields(cls) if not f.name.startswith("_")]
@staticmethod
def dict_factory(x):
return {k: v for (k, v) in x if not k.startswith("_")}
def __post_init__(self):
self.datetime = datetime.now(timezone.utc).astimezone()
@@ -241,13 +242,24 @@ class MinerData:
if item.hashrate is not None:
hr_data.append(item.hashrate)
if len(hr_data) > 0:
return sum(hr_data)
return round(sum(hr_data), 2)
return self._hashrate
@hashrate.setter
def hashrate(self, val):
self._hashrate = val
@property
def wattage_limit(self): # noqa - Skip PyCharm inspection
if self.config is not None:
if isinstance(self.config.mining_mode, MiningModePowerTune):
return self.config.mining_mode.power
return self._wattage_limit
@wattage_limit.setter
def wattage_limit(self, val: int):
self._wattage_limit = val
@property
def total_chips(self): # noqa - Skip PyCharm inspection
if len(self.hashboards) > 0:
@@ -265,48 +277,48 @@ class MinerData:
@property
def nominal(self): # noqa - Skip PyCharm inspection
if self.total_chips is None or self.ideal_chips is None:
if self.total_chips is None or self.expected_chips is None:
return None
return self.ideal_chips == self.total_chips
return self.expected_chips == self.total_chips
@nominal.setter
def nominal(self, val):
pass
@property
def percent_ideal_chips(self): # noqa - Skip PyCharm inspection
if self.total_chips is None or self.ideal_chips is None:
def percent_expected_chips(self): # noqa - Skip PyCharm inspection
if self.total_chips is None or self.expected_chips is None:
return None
if self.total_chips == 0 or self.ideal_chips == 0:
if self.total_chips == 0 or self.expected_chips == 0:
return 0
return round((self.total_chips / self.ideal_chips) * 100)
return round((self.total_chips / self.expected_chips) * 100)
@percent_ideal_chips.setter
def percent_ideal_chips(self, val):
@percent_expected_chips.setter
def percent_expected_chips(self, val):
pass
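# Worked example (illustrative): total_chips=110 with expected_chips=114
# gives percent_expected_chips == round((110 / 114) * 100) == 96.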
@property
def percent_ideal_hashrate(self): # noqa - Skip PyCharm inspection
if self.hashrate is None or self.nominal_hashrate is None:
def percent_expected_hashrate(self): # noqa - Skip PyCharm inspection
if self.hashrate is None or self.expected_hashrate is None:
return None
if self.hashrate == 0 or self.nominal_hashrate == 0:
if self.hashrate == 0 or self.expected_hashrate == 0:
return 0
return round((self.hashrate / self.nominal_hashrate) * 100)
return round((self.hashrate / self.expected_hashrate) * 100)
@percent_ideal_hashrate.setter
def percent_ideal_hashrate(self, val):
@percent_expected_hashrate.setter
def percent_expected_hashrate(self, val):
pass
@property
def percent_ideal_wattage(self): # noqa - Skip PyCharm inspection
def percent_expected_wattage(self): # noqa - Skip PyCharm inspection
if self.wattage_limit is None or self.wattage is None:
return None
if self.wattage_limit == 0 or self.wattage == 0:
return 0
return round((self.wattage / self.wattage_limit) * 100)
@percent_ideal_wattage.setter
def percent_ideal_wattage(self, val):
@percent_expected_wattage.setter
def percent_expected_wattage(self, val):
pass
@property
@@ -338,13 +350,15 @@ class MinerData:
pass
def asdict(self) -> dict:
return asdict(self, dict_factory=self.dict_factory)
def as_dict(self) -> dict:
"""Get this dataclass as a dictionary.
Returns:
A dictionary version of this class.
"""
logging.debug(f"MinerData - (To Dict) - Dumping Dict data")
return asdict(self)
return self.asdict()
def as_json(self) -> str:
"""Get this dataclass as JSON.
@@ -352,7 +366,6 @@ class MinerData:
Returns:
A JSON version of this class.
"""
logging.debug(f"MinerData - (To JSON) - Dumping JSON data")
data = self.asdict()
data["datetime"] = str(int(time.mktime(data["datetime"].timetuple())))
return json.dumps(data)
@@ -363,7 +376,6 @@ class MinerData:
Returns:
A CSV version of this class with no headers.
"""
logging.debug(f"MinerData - (To CSV) - Dumping CSV data")
data = self.asdict()
data["datetime"] = str(int(time.mktime(data["datetime"].timetuple())))
errs = []
@@ -382,7 +394,6 @@ class MinerData:
Returns:
A influxdb line protocol version of this class.
"""
logging.debug(f"MinerData - (To InfluxDB) - Dumping InfluxDB data")
tag_data = [measurement_name]
field_data = []

View File

@@ -16,8 +16,6 @@
from dataclasses import asdict, dataclass, field, fields
C_N_CODES = ["52", "53", "54", "55", "56"]
@dataclass
class WhatsminerError:
@@ -37,10 +35,8 @@ class WhatsminerError:
@property
def error_message(self): # noqa - Skip PyCharm inspection
if len(str(self.error_code)) > 3 and str(self.error_code)[:2] in C_N_CODES:
# 55 error code base has chip numbers, so the format is
# 55 -> board num len 1 -> chip num len 3
err_type = 55
if len(str(self.error_code)) == 6 and not str(self.error_code)[:1] == "1":
err_type = int(str(self.error_code)[:2])
err_subtype = int(str(self.error_code)[2:3])
err_value = int(str(self.error_code)[3:])
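# e.g. (illustrative): error_code 530102 splits into err_type=53,
# err_subtype=0 (board number) and err_value=102 (chip number).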
else:
@@ -88,7 +84,9 @@ class WhatsminerError:
ERROR_CODES = {
1: { # Fan error
0: {0: "Fan unknown."},
0: {
0: "Fan unknown.",
},
1: { # Fan speed error of 1000+
0: "Intake fan speed error.",
1: "Exhaust fan speed error.",
@@ -101,7 +99,9 @@ ERROR_CODES = {
0: "Intake fan speed error. Fan speed deviates by more than 3000.",
1: "Exhaust fan speed error. Fan speed deviates by more than 3000.",
},
4: {0: "Fan speed too high."}, # High speed
4: {
0: "Fan speed too high.",
}, # High speed
},
2: { # Power error
0: {
@@ -126,6 +126,7 @@ ERROR_CODES = {
6: "Power remained unchanged for a long time.",
7: "Power set enable error.",
8: "Power input voltage is lower than 230V for high power mode.",
9: "Power input current is incorrect.",
},
3: {
3: "Power output high temperature protection error.",
@@ -159,6 +160,8 @@ ERROR_CODES = {
6: {
3: "Power communication warning.",
4: "Power communication error.",
5: "Power unknown error.",
6: "Power unknown error.",
7: "Power watchdog protection.",
8: "Power output high current protection.",
9: "Power input high current protection.",
@@ -170,57 +173,134 @@ ERROR_CODES = {
3: "Power input too high warning.",
4: "Power fan warning.",
5: "Power high temperature warning.",
6: "Power unknown error.",
7: "Power unknown error.",
8: "Power unknown error.",
9: "Power unknown error.",
},
8: {
0: "Power unknown error.",
1: "Power vendor status 1 bit 0 error.",
2: "Power vendor status 1 bit 1 error.",
3: "Power vendor status 1 bit 2 error.",
4: "Power vendor status 1 bit 3 error.",
5: "Power vendor status 1 bit 4 error.",
6: "Power vendor status 1 bit 5 error.",
7: "Power vendor status 1 bit 6 error.",
8: "Power vendor status 1 bit 7 error.",
9: "Power vendor status 2 bit 0 error.",
},
9: {
0: "Power vendor status 2 bit 1 error.",
1: "Power vendor status 2 bit 2 error.",
2: "Power vendor status 2 bit 3 error.",
3: "Power vendor status 2 bit 4 error.",
4: "Power vendor status 2 bit 5 error.",
5: "Power vendor status 2 bit 6 error.",
6: "Power vendor status 2 bit 7 error.",
},
},
3: { # temperature error
0: { # sensor detection error
"n": "Slot {n} temperature sensor detection error."
"n": "Slot {n} temperature sensor detection error.",
},
2: { # temperature reading error
"n": "Slot {n} temperature reading error.",
9: "Control board temperature sensor communication error.",
},
5: {"n": "Slot {n} temperature protecting."}, # temperature protection
6: {0: "Hashboard high temperature error."}, # high temp
5: {
"n": "Slot {n} temperature protecting.",
}, # temperature protection
6: {
0: "Hashboard high temperature error.",
1: "Hashboard high temperature error.",
2: "Hashboard high temperature error.",
3: "Hashboard high temperature error.",
}, # high temp
7: {
0: "The environment temperature fluctuates too much.",
}, # env temp
8: {
0: "Humidity sensor not found.",
1: "Humidity sensor read error.",
2: "Humidity sensor read error.",
3: "Humidity sensor protecting.",
},
}, # humidity
},
4: { # EEPROM error
0: {0: "Eeprom unknown error."},
1: {"n": "Slot {n} eeprom detection error."}, # EEPROM detection error
2: {"n": "Slot {n} eeprom parsing error."}, # EEPROM parsing error
3: {"n": "Slot {n} chip bin type error."}, # chip bin error
4: {"n": "Slot {n} eeprom chip number X error."}, # EEPROM chip number error
5: {"n": "Slot {n} eeprom xfer error."}, # EEPROM xfer error
0: {
0: "Eeprom unknown error.",
},
1: {
"n": "Slot {n} eeprom detection error.",
}, # EEPROM detection error
2: {
"n": "Slot {n} eeprom parsing error.",
}, # EEPROM parsing error
3: {
"n": "Slot {n} chip bin type error.",
}, # chip bin error
4: {
"n": "Slot {n} eeprom chip number X error.",
}, # EEPROM chip number error
5: {
"n": "Slot {n} eeprom xfer error.",
}, # EEPROM xfer error
},
5: { # hashboard error
0: {0: "Board unknown error."},
1: {"n": "Slot {n} miner type error."}, # board miner type error
2: {"n": "Slot {n} bin type error."}, # chip bin type error
3: {"n": "Slot {n} not found."}, # board not found error
4: {"n": "Slot {n} error reading chip id."}, # reading chip id error
5: {"n": "Slot {n} has bad chips."}, # board has bad chips error
6: {"n": "Slot {n} loss of balance error."}, # loss of balance error
7: {"n": "Slot {n} xfer error chip."}, # xfer error
8: {"n": "Slot {n} reset error."}, # reset error
9: {"n": "Slot {n} frequency too low."}, # freq error
0: {
0: "Board unknown error.",
},
1: {
"n": "Slot {n} miner type error.",
}, # board miner type error
2: {
"n": "Slot {n} bin type error.",
}, # chip bin type error
3: {
"n": "Slot {n} not found.",
}, # board not found error
4: {
"n": "Slot {n} error reading chip id.",
}, # reading chip id error
5: {
"n": "Slot {n} has bad chips.",
}, # board has bad chips error
6: {
"n": "Slot {n} loss of balance error.",
}, # loss of balance error
7: {
"n": "Slot {n} xfer error chip.",
}, # xfer error
8: {
"n": "Slot {n} reset error.",
}, # reset error
9: {
"n": "Slot {n} frequency too low.",
}, # freq error
},
6: { # env temp error
0: {0: "Environment temperature is too high."}, # normal env temp error
0: {
0: "Environment temperature is too high.",
}, # normal env temp error
1: { # high power env temp error
0: "Environment temperature is too high for high performance mode."
0: "Environment temperature is too high for high performance mode.",
},
},
7: { # control board error
0: {0: "MAC address invalid", 1: "Control board no support chip."},
0: {
0: "MAC address invalid",
1: "Control board no support chip.",
},
1: {
0: "Control board rebooted as an exception.",
1: "Control board rebooted as exception and cpufreq reduced, please upgrade the firmware",
2: "Control board rebooted as an exception.",
3: "The network is unstable, change time.",
4: "Unknown error.",
},
2: {
"n": "Control board slot {n} frame error.",
},
},
8: { # checksum error
@@ -228,63 +308,152 @@ ERROR_CODES = {
0: "CGMiner checksum error.",
1: "System monitor checksum error.",
2: "Remote daemon checksum error.",
}
},
1: {0: "Air to liquid PCB serial # does not match."},
},
9: {0: {1: "Power rate error."}}, # power rate error
9: {
0: {0: "Unknown error.", 1: "Power rate error.", 2: "Unknown error."}
}, # power rate error
20: { # pool error
1: {0: "All pools are disabled."}, # all disabled error
2: {"n": "Pool {n} connection failed."}, # pool connection failed error
3: {0: "High rejection rate on pool."}, # rejection rate error
0: {
0: "No pool information configured.",
},
1: {
0: "All pools are disabled.",
}, # all disabled error
2: {
"n": "Pool {n} connection failed.",
}, # pool connection failed error
3: {
0: "High rejection rate on pool.",
}, # rejection rate error
4: { # asicboost not supported error
0: "The pool does not support asicboost mode."
0: "The pool does not support asicboost mode.",
},
},
21: {1: {"n": "Slot {n} factory test step failed."}},
21: {
1: {
"n": "Slot {n} factory test step failed.",
}
},
23: { # hashrate error
1: {0: "Hashrate is too low."},
2: {0: "Hashrate is too low."},
3: {0: "Hashrate loss is too high."},
4: {0: "Hashrate loss is too high."},
5: {0: "Hashrate loss."},
1: {
0: "Hashrate is too low.",
},
2: {
0: "Hashrate is too low.",
},
3: {
0: "Hashrate loss is too high.",
},
4: {
0: "Hashrate loss is too high.",
},
5: {
0: "Hashrate loss.",
},
},
50: { # water velocity error/voltage error
1: {"n": "Slot {n} chip voltage too low."},
2: {"n": "Slot {n} chip voltage changed."},
3: {"n": "Slot {n} chip temperature difference is too large."},
4: {"n": "Slot {n} chip hottest temperature difference is too large."},
7: {"n": "Slot {n} water velocity is abnormal."}, # abnormal water velocity
8: {0: "Chip temp calibration failed, please restore factory settings."},
9: {"n": "Slot {n} chip temp calibration check no balance."},
1: {
"n": "Slot {n} chip voltage too low.",
},
2: {
"n": "Slot {n} chip voltage changed.",
},
3: {
"n": "Slot {n} chip temperature difference is too large.",
},
4: {
"n": "Slot {n} chip hottest temperature difference is too large.",
},
5: {"n": "Slot {n} stopped hashing, chips temperature protecting."},
7: {
"n": "Slot {n} water velocity is abnormal.",
}, # abnormal water velocity
8: {
0: "Chip temp calibration failed, please restore factory settings.",
},
9: {
"n": "Slot {n} chip temp calibration check no balance.",
},
},
51: { # frequency error
1: {"n": "Slot {n} frequency up timeout."}, # frequency up timeout
7: {"n": "Slot {n} frequency up timeout."}, # frequency up timeout
1: {
"n": "Slot {n} frequency up timeout.",
}, # frequency up timeout
2: {"n": "Slot {n} too many CRC errors."},
3: {"n": "Slot {n} unstable."},
7: {
"n": "Slot {n} frequency up timeout.",
}, # frequency up timeout
},
52: {
"n": {
"c": "Slot {n} chip {c} error nonce.",
},
},
53: {
"n": {
"c": "Slot {n} chip {c} too few nonce.",
},
},
54: {
"n": {
"c": "Slot {n} chip {c} temp protected.",
},
},
55: {
"n": {
"c": "Slot {n} chip {c} has been reset.",
},
},
56: {
"n": {
"c": "Slot {n} chip {c} zero nonce.",
},
},
52: {"n": {"c": "Slot {n} chip {c} error nonce."}},
53: {"n": {"c": "Slot {n} chip {c} too few nonce."}},
54: {"n": {"c": "Slot {n} chip {c} temp protected."}},
55: {"n": {"c": "Slot {n} chip {c} has been reset."}},
56: {"n": {"c": "Slot {n} chip {c} does not return to the nonce."}},
80: {
0: {0: "The tool version is too low, please update."},
1: {0: "Low freq."},
2: {0: "Low hashrate."},
3: {5: "High env temp."},
0: {
0: "The tool version is too low, please update.",
},
1: {
0: "Low freq.",
},
2: {
0: "Low hashrate.",
},
3: {
5: "High env temp.",
},
},
81: {
0: {0: "Chip data error."},
0: {
0: "Chip data error.",
},
},
82: {
0: {0: "Power version error."},
1: {0: "Miner type error."},
2: {0: "Version info error."},
0: {
0: "Power version error.",
},
1: {
0: "Miner type error.",
},
2: {
0: "Version info error.",
},
},
83: {
0: {0: "Empty level error."},
0: {
0: "Empty level error.",
},
},
84: {
0: {0: "Old firmware."},
1: {0: "Software version error."},
0: {
0: "Old firmware.",
},
1: {
0: "Software version error.",
},
},
85: {
"n": {
@@ -296,8 +465,12 @@ ERROR_CODES = {
},
},
86: {
0: {0: "Missing product serial #."},
1: {0: "Missing product type."},
0: {
0: "Missing product serial #.",
},
1: {
0: "Missing product type.",
},
2: {
0: "Missing miner serial #.",
1: "Wrong miner serial # length.",
@@ -314,12 +487,34 @@ ERROR_CODES = {
3: "Wrong power model rate.",
4: "Wrong power model format.",
},
5: {0: "Wrong hash board struct."},
6: {0: "Wrong miner cooling type."},
7: {0: "Missing PCB serial #."},
5: {
0: "Wrong hash board struct.",
},
6: {
0: "Wrong miner cooling type.",
},
7: {
0: "Missing PCB serial #.",
},
},
87: {
0: {
0: "Miner power mismatch.",
},
},
90: {
0: {
0: "Process error, exited with signal: 3.",
},
1: {
0: "Process error, exited with signal: 3.",
},
},
99: {
9: {
9: "Miner unknown error.",
},
},
87: {0: {0: "Miner power mismatch."}},
99: {9: {9: "Miner unknown error."}},
1000: {
0: {
0: "Security library error, please upgrade firmware",
@@ -328,7 +523,11 @@ ERROR_CODES = {
3: "/antiv/dig/pf_partial.dig illegal.",
},
},
1001: {0: {0: "Security BTMiner removed, please upgrade firmware."}},
1001: {
0: {
0: "Security BTMiner removed, please upgrade firmware.",
},
},
1100: {
0: {
0: "Security illegal file, please upgrade firmware.",

View File

@@ -66,14 +66,14 @@ class _MinerPhaseBalancer:
str(miner.ip): {
"miner": miner,
"set": 0,
"min": miner.fan_count * FAN_USAGE,
"min": miner.expected_fans * FAN_USAGE,
}
for miner in miners
}
for miner in miners:
if (
isinstance(miner, BTMiner)
and not (miner.model.startswith("M2") if miner.model else True)
and not (miner.raw_model.startswith("M2") if miner.raw_model else True)
) or isinstance(miner, BOSMiner):
if isinstance(miner, S9):
self.miners[str(miner.ip)]["tune"] = True
@@ -98,8 +98,8 @@ class _MinerPhaseBalancer:
self.miners[str(miner.ip)]["tune"] = False
self.miners[str(miner.ip)]["shutdown"] = True
self.miners[str(miner.ip)]["max"] = 3600
if miner.model:
if miner.model.startswith("M2"):
if miner.raw_model:
if miner.raw_model.startswith("M2"):
self.miners[str(miner.ip)]["tune"] = False
self.miners[str(miner.ip)]["shutdown"] = True
self.miners[str(miner.ip)]["max"] = 2400
@@ -137,10 +137,10 @@ class _MinerPhaseBalancer:
for miner in self.miners
]
)
pct_ideal_list = [d.percent_ideal for d in data]
pct_expected_list = [d.percent_ideal for d in data]
pct_ideal = 0
if len(pct_ideal_list) > 0:
pct_ideal = sum(pct_ideal_list) / len(pct_ideal_list)
if len(pct_expected_list) > 0:
pct_ideal = sum(pct_expected_list) / len(pct_expected_list)
wattage = round(wattage * 1 / (pct_ideal / 100))
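# Worked example (illustrative): a 10000 W setpoint with an average
# pct_ideal of 95 scales to round(10000 / 0.95) == 10526 W.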
@@ -149,10 +149,10 @@ class _MinerPhaseBalancer:
not self.miners[data_point.ip]["shutdown"]
):
# can't do anything with it, so we need to find a semi-accurate power limit
if not data_point.wattage_limit == None:
if data_point.wattage_limit is not None:
self.miners[data_point.ip]["max"] = int(data_point.wattage_limit)
self.miners[data_point.ip]["min"] = int(data_point.wattage_limit)
elif not data_point.wattage == None:
elif data_point.wattage is not None:
self.miners[data_point.ip]["max"] = int(data_point.wattage)
self.miners[data_point.ip]["min"] = int(data_point.wattage)
@@ -183,13 +183,19 @@ class _MinerPhaseBalancer:
if (not miner["tune"]) and (miner["shutdown"])
]
)
# min_other_wattage = sum([miner["min"] for miner in self.miners.values() if (not miner["tune"]) and (not miner["shutdown"])])
# min_other_wattage = sum(
# [
# miner["min"]
# for miner in self.miners.values()
# if (not miner["tune"]) and (not miner["shutdown"])
# ]
# )
# make sure wattage isn't set too high
if wattage > (max_tune_wattage + max_shutdown_wattage + max_other_wattage):
raise APIError(
f"Wattage setpoint is too high, setpoint: {wattage}W, max: {max_tune_wattage + max_shutdown_wattage + max_other_wattage}W"
) # PhaseBalancingError(f"Wattage setpoint is too high, setpoint: {wattage}W, max: {max_tune_wattage + max_shutdown_wattage + max_other_wattage}W")
)
# should now know wattage limits and which can be tuned/shutdown
# check if 1/2 max of the miners which can be tuned is low enough

View File

@@ -16,31 +16,29 @@
import logging
from pyasic.settings import PyasicSettings
def init_logger():
if PyasicSettings().logfile:
logging.basicConfig(
filename="logfile.txt",
filemode="a",
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
else:
logging.basicConfig(
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
# if PyasicSettings().logfile:
# logging.basicConfig(
# filename="logfile.txt",
# filemode="a",
# format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
# datefmt="%x %X",
# )
# else:
logging.basicConfig(
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
_logger = logging.getLogger()
if PyasicSettings().debug:
_logger.setLevel(logging.DEBUG)
logging.getLogger("asyncssh").setLevel(logging.DEBUG)
else:
_logger.setLevel(logging.WARNING)
logging.getLogger("asyncssh").setLevel(logging.WARNING)
# if PyasicSettings().debug:
# _logger.setLevel(logging.DEBUG)
# logging.getLogger("asyncssh").setLevel(logging.DEBUG)
# else:
_logger.setLevel(logging.WARNING)
logging.getLogger("asyncssh").setLevel(logging.WARNING)
return _logger

View File

@@ -17,6 +17,7 @@
from .bmminer import *
from .bosminer import *
from .cgminer import *
from .epic import *
from .hiveon import *
from .luxos import *
from .vnish import *

View File

@@ -21,10 +21,13 @@ from pyasic.miners.types import (
S19XP,
S19a,
S19aPro,
S19i,
S19j,
S19jNoPIC,
S19jPro,
S19Plus,
S19Pro,
S19ProHydro,
S19ProPlus,
)
@@ -33,6 +36,14 @@ class BMMinerS19(AntminerModern, S19):
pass
class BMMinerS19Plus(AntminerModern, S19Plus):
pass
class BMMinerS19i(AntminerModern, S19i):
pass
class BMMinerS19Pro(AntminerModern, S19Pro):
pass
@@ -67,3 +78,7 @@ class BMMinerS19jPro(AntminerModern, S19jPro):
class BMMinerS19L(AntminerModern, S19L):
pass
class BMMinerS19ProHydro(AntminerModern, S19ProHydro):
pass

View File

@@ -17,8 +17,6 @@
from pyasic.miners.backends import AntminerModern
from pyasic.miners.types import T19
# noqa - Ignore access to _module
class BMMinerT19(AntminerModern, T19):
pass

View File

@@ -18,11 +18,14 @@ from .S19 import (
BMMinerS19,
BMMinerS19a,
BMMinerS19aPro,
BMMinerS19i,
BMMinerS19j,
BMMinerS19jNoPIC,
BMMinerS19jPro,
BMMinerS19L,
BMMinerS19Plus,
BMMinerS19Pro,
BMMinerS19ProHydro,
BMMinerS19ProPlus,
BMMinerS19XP,
)

View File

@@ -19,6 +19,4 @@ from pyasic.miners.types import HS3
class BMMinerHS3(AntminerModern, HS3):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.supports_shutdown = False
supports_shutdown = False

View File

@@ -18,6 +18,4 @@ from pyasic.miners.types import L7
class BMMinerL7(AntminerModern, L7):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.supports_shutdown = False
supports_shutdown = False

View File

@@ -19,6 +19,4 @@ from pyasic.miners.types import E9Pro
class BMMinerE9Pro(AntminerModern, E9Pro):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.supports_shutdown = False
supports_shutdown = False

View File

@@ -14,21 +14,21 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import BOSMiner
from pyasic.miners.backends import BOSer
from pyasic.miners.types import S17, S17e, S17Plus, S17Pro
class BOSMinerS17(BOSMiner, S17):
class BOSMinerS17(BOSer, S17):
pass
class BOSMinerS17Plus(BOSMiner, S17Plus):
class BOSMinerS17Plus(BOSer, S17Plus):
pass
class BOSMinerS17Pro(BOSMiner, S17Pro):
class BOSMinerS17Pro(BOSer, S17Pro):
pass
class BOSMinerS17e(BOSMiner, S17e):
class BOSMinerS17e(BOSer, S17e):
pass

View File

@@ -14,17 +14,17 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import BOSMiner
from pyasic.miners.backends import BOSer
from pyasic.miners.types import T17, T17e, T17Plus
class BOSMinerT17(BOSMiner, T17):
class BOSMinerT17(BOSer, T17):
pass
class BOSMinerT17Plus(BOSMiner, T17Plus):
class BOSMinerT17Plus(BOSer, T17Plus):
pass
class BOSMinerT17e(BOSMiner, T17e):
class BOSMinerT17e(BOSer, T17e):
pass

View File

@@ -14,25 +14,61 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import BOSMiner
from pyasic.miners.types import S19, S19j, S19jNoPIC, S19jPro, S19Pro
from pyasic.miners.backends import BOSer
from pyasic.miners.types import (
S19,
S19XP,
S19a,
S19aPro,
S19j,
S19jNoPIC,
S19jPro,
S19jProPlus,
S19kProNoPIC,
S19Plus,
S19Pro,
)
class BOSMinerS19(BOSMiner, S19):
class BOSMinerS19(BOSer, S19):
pass
class BOSMinerS19Pro(BOSMiner, S19Pro):
class BOSMinerS19Plus(BOSer, S19Plus):
pass
class BOSMinerS19j(BOSMiner, S19j):
class BOSMinerS19Pro(BOSer, S19Pro):
pass
class BOSMinerS19jNoPIC(BOSMiner, S19jNoPIC):
class BOSMinerS19a(BOSer, S19a):
pass
class BOSMinerS19jPro(BOSMiner, S19jPro):
class BOSMinerS19j(BOSer, S19j):
pass
class BOSMinerS19jNoPIC(BOSer, S19jNoPIC):
pass
class BOSMinerS19jPro(BOSer, S19jPro):
pass
class BOSMinerS19kProNoPIC(BOSer, S19kProNoPIC):
pass
class BOSMinerS19aPro(BOSer, S19aPro):
pass
class BOSMinerS19jProPlus(BOSer, S19jProPlus):
pass
class BOSMinerS19XP(BOSer, S19XP):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import BOSMiner
from pyasic.miners.backends import BOSer
from pyasic.miners.types import T19
class BOSMinerT19(BOSMiner, T19):
class BOSMinerT19(BOSer, T19):
pass

View File

@@ -16,9 +16,15 @@
from .S19 import (
BOSMinerS19,
BOSMinerS19a,
BOSMinerS19aPro,
BOSMinerS19j,
BOSMinerS19jNoPIC,
BOSMinerS19jPro,
BOSMinerS19jProPlus,
BOSMinerS19kProNoPIC,
BOSMinerS19Plus,
BOSMinerS19Pro,
BOSMinerS19XP,
)
from .T19 import BOSMinerT19

View File

@@ -19,6 +19,4 @@ from pyasic.miners.types import Z15
class CGMinerZ15(AntminerOld, Z15):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.supports_shutdown = False
supports_shutdown = False

View File

@@ -18,6 +18,4 @@ from pyasic.miners.types import D3
class CGMinerD3(AntminerOld, D3):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.supports_shutdown = False
supports_shutdown = False

View File

@@ -19,6 +19,4 @@ from pyasic.miners.types import DR5
class CGMinerDR5(AntminerOld, DR5):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.supports_shutdown = False
supports_shutdown = False

View File

@@ -0,0 +1,46 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import ePIC
from pyasic.miners.types import S19, S19XP, S19j, S19jPro, S19jProPlus, S19kPro, S19Pro
class ePICS19(ePIC, S19):
pass
class ePICS19Pro(ePIC, S19Pro):
pass
class ePICS19j(ePIC, S19j):
pass
class ePICS19jPro(ePIC, S19jPro):
pass
class ePICS19jProPlus(ePIC, S19jProPlus):
pass
class ePICS19kPro(ePIC, S19kPro):
pass
class ePICS19XP(ePIC, S19XP):
pass

View File

@@ -0,0 +1,25 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from .S19 import (
ePICS19,
ePICS19j,
ePICS19jPro,
ePICS19jProPlus,
ePICS19kPro,
ePICS19Pro,
ePICS19XP,
)

View File

@@ -0,0 +1,17 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from .X19 import *

View File

@@ -21,14 +21,53 @@ import asyncssh
from pyasic.data import HashBoard
from pyasic.errors import APIError
from pyasic.miners.backends import Hiveon
from pyasic.miners.base import DataFunction, DataLocations, DataOptions, RPCAPICommand
from pyasic.miners.types import T9
HIVEON_T9_DATA_LOC = DataLocations(
**{
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.ENVIRONMENT_TEMP): DataFunction(
"_get_env_temp",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.WATTAGE): DataFunction(
"_get_wattage",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_stats", "stats")],
),
}
)
class HiveonT9(Hiveon, T9):
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip, api_ver=api_ver)
self.ip = ip
self.pwd = "admin"
data_locations = HIVEON_T9_DATA_LOC
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
@@ -45,39 +84,49 @@ class HiveonT9(Hiveon, T9):
except (TypeError, ValueError, asyncssh.Error, OSError, AttributeError):
pass
async def get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
async def _get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
hashboards = [
HashBoard(slot=board, expected_chips=self.expected_chips)
for board in range(self.expected_hashboards)
]
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
return []
board_map = {
0: [2, 9, 10],
1: [3, 11, 12],
2: [4, 13, 14],
}
hashboards = []
for board in board_map:
hashboard = HashBoard(slot=board, expected_chips=self.nominal_chips)
hashrate = 0
chips = 0
for chipset in board_map[board]:
if hashboard.chip_temp == None:
if hashboards[board].chip_temp is None:
try:
hashboard.board_temp = api_stats["STATS"][1][f"temp{chipset}"]
hashboard.chip_temp = api_stats["STATS"][1][f"temp2_{chipset}"]
hashboards[board].temp = api_stats["STATS"][1][f"temp{chipset}"]
hashboards[board].chip_temp = api_stats["STATS"][1][
f"temp2_{chipset}"
]
except (KeyError, IndexError):
pass
else:
hashboard.missing = False
hashboards[board].missing = False
try:
hashrate += api_stats["STATS"][1][f"chain_rate{chipset}"]
chips += api_stats["STATS"][1][f"chain_acn{chipset}"]
except (KeyError, IndexError):
pass
hashboard.hashrate = round(hashrate / 1000, 2)
hashboard.chips = chips
hashboards.append(hashboard)
hashboards[board].hashrate = round(hashrate / 1000, 2)
hashboards[board].chips = chips
return hashboards
async def get_wattage(self, api_stats: dict = None) -> Optional[int]:
async def _get_wattage(self, api_stats: dict = None) -> Optional[int]:
if not api_stats:
try:
api_stats = await self.api.stats()
@@ -94,7 +143,7 @@ class HiveonT9(Hiveon, T9):
# parse wattage position out of raw data
return round(float(wattage_raw.split(" ")[0]))
async def get_env_temp(self, api_stats: dict = None) -> Optional[float]:
async def _get_env_temp(self, api_stats: dict = None) -> Optional[float]:
env_temp_list = []
board_map = {
0: [2, 9, 10],

View File

@@ -19,5 +19,4 @@ from pyasic.miners.types import L3Plus
class VnishL3Plus(VNish, L3Plus):
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
pass

View File

@@ -0,0 +1 @@
from .flux import *

View File

@@ -0,0 +1,6 @@
from pyasic.miners.backends import Auradine
from pyasic.miners.types import AuradineAT1500
class AuradineFluxAT1500(AuradineAT1500, Auradine):
pass

View File

@@ -0,0 +1,10 @@
from pyasic.miners.backends import Auradine
from pyasic.miners.types import AuradineAT2860, AuradineAT2880
class AuradineFluxAT2860(AuradineAT2860, Auradine):
pass
class AuradineFluxAT2880(AuradineAT2880, Auradine):
pass

View File

@@ -0,0 +1,2 @@
from .AT1 import AuradineFluxAT1500
from .AT2 import AuradineFluxAT2860, AuradineFluxAT2880

View File

@@ -0,0 +1,6 @@
from pyasic.miners.backends import Auradine
from pyasic.miners.types import AuradineAI2500
class AuradineFluxAI2500(AuradineAI2500, Auradine):
pass

View File

@@ -0,0 +1,6 @@
from pyasic.miners.backends import Auradine
from pyasic.miners.types import AuradineAI3680
class AuradineFluxAI3680(AuradineAI3680, Auradine):
pass

View File

@@ -0,0 +1,2 @@
from .AI2 import AuradineFluxAI2500
from .AI3 import AuradineFluxAI3680

View File

@@ -0,0 +1,6 @@
from pyasic.miners.backends import Auradine
from pyasic.miners.types import AuradineAD2500
class AuradineFluxAD2500(AuradineAD2500, Auradine):
pass

View File

@@ -0,0 +1,6 @@
from pyasic.miners.backends import Auradine
from pyasic.miners.types import AuradineAD3500
class AuradineFluxAD3500(AuradineAD3500, Auradine):
pass

View File

@@ -0,0 +1,2 @@
from .AD2 import AuradineFluxAD2500
from .AD3 import AuradineFluxAD3500

View File

@@ -0,0 +1,3 @@
from .AD import *
from .AI import *
from .AT import *

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon1026
class CGMinerAvalon1026(CGMinerAvalon, Avalon1026):
class CGMinerAvalon1026(AvalonMiner, Avalon1026):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon1047
class CGMinerAvalon1047(CGMinerAvalon, Avalon1047):
class CGMinerAvalon1047(AvalonMiner, Avalon1047):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon1066
class CGMinerAvalon1066(CGMinerAvalon, Avalon1066):
class CGMinerAvalon1066(AvalonMiner, Avalon1066):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon1166Pro
class CGMinerAvalon1166Pro(CGMinerAvalon, Avalon1166Pro):
class CGMinerAvalon1166Pro(AvalonMiner, Avalon1166Pro):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon1246
class CGMinerAvalon1246(CGMinerAvalon, Avalon1246):
class CGMinerAvalon1246(AvalonMiner, Avalon1246):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon721
class CGMinerAvalon721(CGMinerAvalon, Avalon721):
class CGMinerAvalon721(AvalonMiner, Avalon721):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon741
class CGMinerAvalon741(CGMinerAvalon, Avalon741):
class CGMinerAvalon741(AvalonMiner, Avalon741):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon761
class CGMinerAvalon761(CGMinerAvalon, Avalon761):
class CGMinerAvalon761(AvalonMiner, Avalon761):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon821
class CGMinerAvalon821(CGMinerAvalon, Avalon821):
class CGMinerAvalon821(AvalonMiner, Avalon821):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon841
class CGMinerAvalon841(CGMinerAvalon, Avalon841):
class CGMinerAvalon841(AvalonMiner, Avalon841):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon851
class CGMinerAvalon851(CGMinerAvalon, Avalon851):
class CGMinerAvalon851(AvalonMiner, Avalon851):
pass

View File

@@ -14,9 +14,9 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import CGMinerAvalon
from pyasic.miners.backends import AvalonMiner
from pyasic.miners.types import Avalon921
class CGMinerAvalon921(CGMinerAvalon, Avalon921):
class CGMinerAvalon921(AvalonMiner, Avalon921):
pass

View File

@@ -14,14 +14,17 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from .antminer import AntminerModern, AntminerOld
from .auradine import Auradine
from .avalonminer import AvalonMiner
from .bfgminer import BFGMiner
from .bfgminer_goldshell import BFGMinerGoldshell
from .bmminer import BMMiner
from .bosminer import BOSMiner
from .braiins_os import BOSer, BOSMiner
from .btminer import BTMiner
from .cgminer import CGMiner
from .cgminer_avalon import CGMinerAvalon
from .epic import ePIC
from .goldshell import GoldshellMiner
from .hiveon import Hiveon
from .innosilicon import Innosilicon
from .luxminer import LUXMiner
from .vnish import VNish
from .whatsminer import M2X, M3X, M5X
from .whatsminer import M2X, M3X, M5X, M6X

View File

@@ -14,89 +14,105 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
import asyncio
from typing import List, Optional, Union
from pyasic.API import APIError
from pyasic.config import MinerConfig, X19PowerMode
from pyasic.config import MinerConfig, MiningModeConfig
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import MinerErrorData, X19Error
from pyasic.miners.backends.bmminer import BMMiner
from pyasic.miners.backends.cgminer import CGMiner
from pyasic.miners.base import (
DataFunction,
DataLocations,
DataOptions,
RPCAPICommand,
WebAPICommand,
)
from pyasic.rpc import APIError
from pyasic.ssh.antminer import AntminerModernSSH
from pyasic.web.antminer import AntminerModernWebAPI, AntminerOldWebAPI
ANTMINER_MODERN_DATA_LOC = {
"mac": {
"cmd": "get_mac",
"kwargs": {"web_get_system_info": {"web": "get_system_info"}},
},
"model": {"cmd": "get_model", "kwargs": {}},
"api_ver": {"cmd": "get_api_ver", "kwargs": {"api_version": {"api": "version"}}},
"fw_ver": {"cmd": "get_fw_ver", "kwargs": {"api_version": {"api": "version"}}},
"hostname": {
"cmd": "get_hostname",
"kwargs": {"web_get_system_info": {"web": "get_system_info"}},
},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_summary": {"api": "summary"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_stats": {"api": "stats"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_stats": {"api": "stats"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {}},
"wattage": {"cmd": "get_wattage", "kwargs": {}},
"wattage_limit": {"cmd": "get_wattage_limit", "kwargs": {}},
"fans": {"cmd": "get_fans", "kwargs": {"api_stats": {"api": "stats"}}},
"fan_psu": {"cmd": "get_fan_psu", "kwargs": {}},
"errors": {"cmd": "get_errors", "kwargs": {"web_summary": {"web": "summary"}}},
"fault_light": {
"cmd": "get_fault_light",
"kwargs": {"web_get_blink_status": {"web": "get_blink_status"}},
},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {
"cmd": "is_mining",
"kwargs": {"web_get_conf": {"web": "get_miner_conf"}},
},
"uptime": {
"cmd": "get_uptime",
"kwargs": {"api_stats": {"api": "stats"}},
},
}
ANTMINER_MODERN_DATA_LOC = DataLocations(
**{
str(DataOptions.MAC): DataFunction(
"_get_mac",
[WebAPICommand("web_get_system_info", "get_system_info")],
),
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HOSTNAME): DataFunction(
"_get_hostname",
[WebAPICommand("web_get_system_info", "get_system_info")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.ERRORS): DataFunction(
"_get_errors",
[WebAPICommand("web_summary", "summary")],
),
str(DataOptions.FAULT_LIGHT): DataFunction(
"_get_fault_light",
[WebAPICommand("web_get_blink_status", "get_blink_status")],
),
str(DataOptions.IS_MINING): DataFunction(
"_is_mining",
[WebAPICommand("web_get_conf", "get_miner_conf")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_stats", "stats")],
),
}
)
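
The DataLocations object replaces the old string-keyed dict: each DataOptions key names a private getter plus the RPC/web commands whose results are passed to it as keyword arguments. A simplified, self-contained stand-in for that dispatch pattern — the class and helper names below are illustrative only, not pyasic's own:

from dataclasses import dataclass, field
from typing import Any, Awaitable, Callable, List


@dataclass
class Command:
    name: str  # keyword argument the getter expects, e.g. "api_summary"
    cmd: str   # backing RPC/web command to run, e.g. "summary"


@dataclass
class Getter:
    method: str                        # private method on the handler, e.g. "_get_hashrate"
    commands: List[Command] = field(default_factory=list)


async def gather(handler: Any, getter: Getter, send: Callable[[str], Awaitable[dict]]) -> Any:
    # Run each backing command once, then hand the results to the private getter.
    kwargs = {c.name: await send(c.cmd) for c in getter.commands}
    return await getattr(handler, getter.method)(**kwargs)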
class AntminerModern(BMMiner):
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip, api_ver)
# interfaces
self.web = AntminerModernWebAPI(ip)
"""Handler for AntMiners with the modern web interface, such as S19"""
# static data
# data gathering locations
self.data_locations = ANTMINER_MODERN_DATA_LOC
# autotuning/shutdown support
self.supports_shutdown = True
_web_cls = AntminerModernWebAPI
web: AntminerModernWebAPI
_ssh_cls = AntminerModernSSH
ssh: AntminerModernSSH
data_locations = ANTMINER_MODERN_DATA_LOC
supports_shutdown = True
async def get_config(self) -> MinerConfig:
data = await self.web.get_miner_conf()
if data:
self.config = MinerConfig().from_raw(data)
self.config = MinerConfig.from_am_modern(data)
return self.config
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
self.config = config
conf = config.as_x19(user_suffix=user_suffix)
data = await self.web.set_miner_conf(conf)
if data:
if data.get("code") == "M000":
return
for i in range(7):
data = await self.get_config()
if data.as_x19() == conf:
break
await asyncio.sleep(1)
await self.web.set_miner_conf(config.as_am_modern(user_suffix=user_suffix))
# if data:
# if data.get("code") == "M000":
# return
#
# for i in range(7):
# data = await self.get_config()
# if data == self.config:
# break
# await asyncio.sleep(1)
async def fault_light_on(self) -> bool:
data = await self.web.blink(blink=True)
@@ -109,7 +125,7 @@ class AntminerModern(BMMiner):
data = await self.web.blink(blink=False)
if data:
if data.get("code") == "B100":
self.light = True
self.light = False
return self.light
async def reboot(self) -> bool:
@@ -120,37 +136,37 @@ class AntminerModern(BMMiner):
async def stop_mining(self) -> bool:
cfg = await self.get_config()
cfg.miner_mode = X19PowerMode.Sleep
cfg.miner_mode = MiningModeConfig.sleep
await self.send_config(cfg)
return True
async def resume_mining(self) -> bool:
cfg = await self.get_config()
cfg.miner_mode = X19PowerMode.Normal
cfg.miner_mode = MiningModeConfig.normal
await self.send_config(cfg)
return True
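
stop_mining and resume_mining now round-trip the config with the new MiningModeConfig values instead of X19PowerMode. A hedged usage sketch — the address and constructor signature are assumptions — of putting a modern AntMiner to sleep and waking it again:

import asyncio

from pyasic.miners.backends import AntminerModern


async def toggle(ip: str) -> None:
    miner = AntminerModern(ip)
    await miner.stop_mining()    # sends a config with MiningModeConfig.sleep
    await asyncio.sleep(60)
    await miner.resume_mining()  # sends a config with MiningModeConfig.normal


asyncio.run(toggle("10.0.0.5"))  # hypothetical address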
async def get_hostname(self, web_get_system_info: dict = None) -> Union[str, None]:
if not web_get_system_info:
async def _get_hostname(self, web_get_system_info: dict = None) -> Union[str, None]:
if web_get_system_info is None:
try:
web_get_system_info = await self.web.get_system_info()
except APIError:
pass
if web_get_system_info:
if web_get_system_info is not None:
try:
return web_get_system_info["hostname"]
except KeyError:
pass
async def get_mac(self, web_get_system_info: dict = None) -> Union[str, None]:
if not web_get_system_info:
async def _get_mac(self, web_get_system_info: dict = None) -> Union[str, None]:
if web_get_system_info is None:
try:
web_get_system_info = await self.web.get_system_info()
except APIError:
pass
if web_get_system_info:
if web_get_system_info is not None:
try:
return web_get_system_info["macaddr"]
except KeyError:
@@ -163,15 +179,15 @@ class AntminerModern(BMMiner):
except KeyError:
pass
async def get_errors(self, web_summary: dict = None) -> List[MinerErrorData]:
if not web_summary:
async def _get_errors(self, web_summary: dict = None) -> List[MinerErrorData]:
if web_summary is None:
try:
web_summary = await self.web.summary()
except APIError:
pass
errors = []
if web_summary:
if web_summary is not None:
try:
for item in web_summary["SUMMARY"][0]["status"]:
try:
@@ -179,48 +195,86 @@ class AntminerModern(BMMiner):
errors.append(X19Error(item["msg"]))
except KeyError:
continue
except (KeyError, IndexError):
except LookupError:
pass
return errors
async def get_fault_light(self, web_get_blink_status: dict = None) -> bool:
async def _get_hashboards(self) -> List[HashBoard]:
hashboards = [
HashBoard(idx, expected_chips=self.expected_chips)
for idx in range(self.expected_hashboards)
]
try:
api_stats = await self.api.send_command("stats", new_api=True)
except APIError:
return hashboards
if api_stats is not None:
try:
for board in api_stats["STATS"][0]["chain"]:
hashboards[board["index"]].hashrate = round(
board["rate_real"] / 1000, 2
)
hashboards[board["index"]].chips = board["asic_num"]
board_temp_data = list(
filter(lambda x: not x == 0, board["temp_pcb"])
)
hashboards[board["index"]].temp = sum(board_temp_data) / len(
board_temp_data
)
chip_temp_data = list(
filter(lambda x: not x == 0, board["temp_chip"])
)
hashboards[board["index"]].chip_temp = sum(chip_temp_data) / len(
chip_temp_data
)
hashboards[board["index"]].serial_number = board["sn"]
hashboards[board["index"]].missing = False
except LookupError:
pass
return hashboards
async def _get_fault_light(
self, web_get_blink_status: dict = None
) -> Optional[bool]:
if self.light:
return self.light
if not web_get_blink_status:
if web_get_blink_status is None:
try:
web_get_blink_status = await self.web.get_blink_status()
except APIError:
pass
if web_get_blink_status:
if web_get_blink_status is not None:
try:
self.light = web_get_blink_status["blink"]
except KeyError:
pass
return self.light
async def get_nominal_hashrate(self, api_stats: dict = None) -> Optional[float]:
if not api_stats:
async def _get_expected_hashrate(self, api_stats: dict = None) -> Optional[float]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
ideal_rate = api_stats["STATS"][1]["total_rateideal"]
expected_rate = api_stats["STATS"][1]["total_rateideal"]
try:
rate_unit = api_stats["STATS"][1]["rate_unit"]
except KeyError:
rate_unit = "GH"
if rate_unit == "GH":
return round(ideal_rate / 1000, 2)
return round(expected_rate / 1000, 2)
if rate_unit == "MH":
return round(ideal_rate / 1000000, 2)
return round(expected_rate / 1000000, 2)
else:
return round(ideal_rate, 2)
except (KeyError, IndexError):
return round(expected_rate, 2)
except LookupError:
pass
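
The branch above converts total_rateideal into the handler's reporting unit (presumably TH/s) based on rate_unit. The same logic restated as a stand-alone helper for clarity; the function name is illustrative only:

def normalize_expected_rate(expected_rate: float, rate_unit: str = "GH") -> float:
    # Mirrors the branch logic above: GH/s and MH/s values are scaled down,
    # anything else is assumed to already be in the target unit.
    if rate_unit == "GH":
        return round(expected_rate / 1_000, 2)
    if rate_unit == "MH":
        return round(expected_rate / 1_000_000, 2)
    return round(expected_rate, 2)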
async def set_static_ip(
@@ -265,93 +319,98 @@ class AntminerModern(BMMiner):
protocol=protocol,
)
async def is_mining(self, web_get_conf: dict = None) -> Optional[bool]:
if not web_get_conf:
async def _is_mining(self, web_get_conf: dict = None) -> Optional[bool]:
if web_get_conf is None:
try:
web_get_conf = await self.web.get_miner_conf()
except APIError:
pass
if web_get_conf:
if web_get_conf is not None:
try:
return False if int(web_get_conf["bitmain-work-mode"]) == 1 else True
if web_get_conf["bitmain-work-mode"].isdigit():
return (
False if int(web_get_conf["bitmain-work-mode"]) == 1 else True
)
return False
except LookupError:
pass
async def get_uptime(self, api_stats: dict = None) -> Optional[int]:
if not api_stats:
async def _get_uptime(self, api_stats: dict = None) -> Optional[int]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
return int(api_stats["STATS"][1]["Elapsed"])
except LookupError:
pass
ANTMINER_OLD_DATA_LOC = {
"mac": {"cmd": "get_mac", "kwargs": {}},
"model": {
"cmd": "get_model",
"kwargs": {},
},
"api_ver": {"cmd": "get_api_ver", "kwargs": {"api_version": {"api": "version"}}},
"fw_ver": {"cmd": "get_fw_ver", "kwargs": {"api_version": {"api": "version"}}},
"hostname": {
"cmd": "get_hostname",
"kwargs": {"web_get_system_info": {"web": "get_system_info"}},
},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_summary": {"api": "summary"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_stats": {"api": "stats"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_stats": {"api": "stats"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {}},
"wattage": {"cmd": "get_wattage", "kwargs": {}},
"wattage_limit": {"cmd": "get_wattage_limit", "kwargs": {}},
"fans": {"cmd": "get_fans", "kwargs": {"api_stats": {"api": "stats"}}},
"fan_psu": {"cmd": "get_fan_psu", "kwargs": {}},
"errors": {"cmd": "get_errors", "kwargs": {}},
"fault_light": {
"cmd": "get_fault_light",
"kwargs": {"web_get_blink_status": {"web": "get_blink_status"}},
},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {
"cmd": "is_mining",
"kwargs": {"web_get_conf": {"web": "get_miner_conf"}},
},
"uptime": {
"cmd": "get_uptime",
"kwargs": {"api_stats": {"api": "stats"}},
},
}
ANTMINER_OLD_DATA_LOC = DataLocations(
**{
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HOSTNAME): DataFunction(
"_get_hostname",
[WebAPICommand("web_get_system_info", "get_system_info")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FAULT_LIGHT): DataFunction(
"_get_fault_light",
[WebAPICommand("web_get_blink_status", "get_blink_status")],
),
str(DataOptions.IS_MINING): DataFunction(
"_is_mining",
[WebAPICommand("web_get_conf", "get_miner_conf")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_stats", "stats")],
),
}
)
class AntminerOld(CGMiner):
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip, api_ver)
# interfaces
self.web = AntminerOldWebAPI(ip)
"""Handler for AntMiners with the old web interface, such as S17"""
# static data
# data gathering locations
self.data_locations = ANTMINER_OLD_DATA_LOC
_web_cls = AntminerOldWebAPI
web: AntminerOldWebAPI
data_locations = ANTMINER_OLD_DATA_LOC
async def get_config(self) -> MinerConfig:
data = await self.web.get_miner_conf()
if data:
self.config = MinerConfig().from_raw(data)
self.config = MinerConfig.from_am_old(data)
return self.config
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
await self.web.set_miner_conf(config.as_x17(user_suffix=user_suffix))
self.config = config
await self.web.set_miner_conf(config.as_am_old(user_suffix=user_suffix))
async def get_mac(self) -> Union[str, None]:
async def _get_mac(self) -> Union[str, None]:
try:
data = await self.web.get_system_info()
if data:
@@ -388,45 +447,47 @@ class AntminerOld(CGMiner):
return True
return False
async def get_fault_light(self, web_get_blink_status: dict = None) -> bool:
async def _get_fault_light(
self, web_get_blink_status: dict = None
) -> Optional[bool]:
if self.light:
return self.light
if not web_get_blink_status:
if web_get_blink_status is None:
try:
web_get_blink_status = await self.web.get_blink_status()
except APIError:
pass
if web_get_blink_status:
if web_get_blink_status is not None:
try:
self.light = web_get_blink_status["isBlinking"]
except KeyError:
pass
return self.light
async def get_hostname(self, web_get_system_info: dict = None) -> Optional[str]:
if not web_get_system_info:
async def _get_hostname(self, web_get_system_info: dict = None) -> Optional[str]:
if web_get_system_info is None:
try:
web_get_system_info = await self.web.get_system_info()
except APIError:
pass
if web_get_system_info:
if web_get_system_info is not None:
try:
return web_get_system_info["hostname"]
except KeyError:
pass
async def get_fans(self, api_stats: dict = None) -> List[Fan]:
if not api_stats:
async def _get_fans(self, api_stats: dict = None) -> List[Fan]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
fans_data = [Fan() for _ in range(self.fan_count)]
if api_stats:
fans_data = [Fan() for _ in range(self.expected_fans)]
if api_stats is not None:
try:
fan_offset = -1
@@ -438,24 +499,24 @@ class AntminerOld(CGMiner):
if fan_offset == -1:
fan_offset = 3
for fan in range(self.fan_count):
for fan in range(self.expected_fans):
fans_data[fan].speed = api_stats["STATS"][1].get(
f"fan{fan_offset+fan}", 0
)
except (KeyError, IndexError):
except LookupError:
pass
return fans_data
async def get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
async def _get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
hashboards = []
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
board_offset = -1
boards = api_stats["STATS"]
@@ -469,9 +530,11 @@ class AntminerOld(CGMiner):
if board_offset == -1:
board_offset = 1
for i in range(board_offset, board_offset + self.ideal_hashboards):
for i in range(
board_offset, board_offset + self.expected_hashboards
):
hashboard = HashBoard(
slot=i - board_offset, expected_chips=self.nominal_chips
slot=i - board_offset, expected_chips=self.expected_chips
)
chip_temp = boards[1].get(f"temp{i}")
@@ -493,19 +556,19 @@ class AntminerOld(CGMiner):
if (not chips) or (not chips > 0):
hashboard.missing = True
hashboards.append(hashboard)
except (IndexError, KeyError, ValueError, TypeError):
except (LookupError, ValueError, TypeError):
pass
return hashboards
async def is_mining(self, web_get_conf: dict = None) -> Optional[bool]:
if not web_get_conf:
async def _is_mining(self, web_get_conf: dict = None) -> Optional[bool]:
if web_get_conf is None:
try:
web_get_conf = await self.web.get_miner_conf()
except APIError:
pass
if web_get_conf:
if web_get_conf is not None:
try:
return False if int(web_get_conf["bitmain-work-mode"]) == 1 else True
except LookupError:
@@ -523,14 +586,14 @@ class AntminerOld(CGMiner):
else:
return False
async def get_uptime(self, api_stats: dict = None) -> Optional[int]:
if not api_stats:
async def _get_uptime(self, api_stats: dict = None) -> Optional[int]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
return int(api_stats["STATS"][1]["Elapsed"])
except LookupError:

View File

@@ -0,0 +1,378 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import logging
from enum import Enum
from typing import List, Optional
from pyasic import APIError, MinerConfig
from pyasic.data import Fan, HashBoard
from pyasic.miners.base import (
BaseMiner,
DataFunction,
DataLocations,
DataOptions,
RPCAPICommand,
WebAPICommand,
)
from pyasic.rpc.gcminer import GCMinerRPCAPI
from pyasic.web.auradine import FluxWebAPI
AURADINE_DATA_LOC = DataLocations(
**{
str(DataOptions.MAC): DataFunction(
"_get_mac",
[WebAPICommand("web_ipreport", "ipreport")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[WebAPICommand("web_ipreport", "ipreport")],
),
str(DataOptions.HOSTNAME): DataFunction(
"_get_hostname",
[WebAPICommand("web_ipreport", "ipreport")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[
RPCAPICommand("api_devs", "devs"),
WebAPICommand("web_ipreport", "ipreport"),
],
),
str(DataOptions.WATTAGE): DataFunction(
"_get_wattage",
[WebAPICommand("web_psu", "psu")],
),
str(DataOptions.WATTAGE_LIMIT): DataFunction(
"_get_wattage_limit",
[WebAPICommand("web_mode", "mode"), WebAPICommand("web_psu", "psu")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[WebAPICommand("web_fan", "fan")],
),
str(DataOptions.FAULT_LIGHT): DataFunction(
"_get_fault_light",
[WebAPICommand("web_led", "led")],
),
str(DataOptions.IS_MINING): DataFunction(
"_is_mining",
[WebAPICommand("web_mode", "mode")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_summary", "summary")],
),
}
)
class AuradineLEDColors(Enum):
OFF = 0
GREEN = 1
RED = 2
YELLOW = 3
GREEN_FLASHING = 4
RED_FLASHING = 5
YELLOW_FLASHING = 6
def __int__(self):
return self.value
class AuradineLEDCodes(Enum):
NO_POWER = 1
NORMAL = 2
LOCATE_MINER = 3
TEMPERATURE = 4
POOL_CONFIG = 5
NETWORK = 6
CONTROL_BOARD = 7
HASH_RATE_LOW = 8
CUSTOM1 = 101
CUSTOM2 = 102
def __int__(self):
return self.value
class Auradine(BaseMiner):
"""Base handler for Auradine miners"""
_api_cls = GCMinerRPCAPI
api: GCMinerRPCAPI
_web_cls = FluxWebAPI
web: FluxWebAPI
data_locations = AURADINE_DATA_LOC
supports_shutdown = True
supports_autotuning = True
async def fault_light_on(self) -> bool:
return await self.web.set_led(code=int(AuradineLEDCodes.LOCATE_MINER))
async def fault_light_off(self) -> bool:
return await self.web.set_led(code=int(AuradineLEDCodes.NORMAL))
async def reboot(self) -> bool:
try:
await self.web.reboot()
except APIError:
return False
return True
async def restart_backend(self) -> bool:
try:
await self.web.restart_gcminer()
except APIError:
return False
return True
async def stop_mining(self) -> bool:
try:
await self.web.set_mode(sleep="on")
except APIError:
return False
return True
async def resume_mining(self) -> bool:
try:
await self.web.set_mode(sleep="off")
except APIError:
return False
return True
async def set_power_limit(self, wattage: int) -> bool:
try:
await self.web.set_mode(mode="custom", tune="power", power=wattage)
except APIError:
return False
return True
async def get_config(self) -> MinerConfig:
try:
web_conf = await self.web.multicommand("pools", "mode", "fan")
return MinerConfig.from_auradine(web_conf=web_conf)
except APIError as e:
logging.warning(e)
except LookupError:
pass
return MinerConfig()
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
self.config = config
conf = config.as_auradine(user_suffix=user_suffix)
for key in conf.keys():
await self.web.send_command(command=key, **conf[key])
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def _get_mac(self, web_ipreport: dict = None) -> Optional[str]:
if web_ipreport is None:
try:
web_ipreport = await self.web.ipreport()
except APIError:
pass
if web_ipreport is not None:
try:
return web_ipreport["IPReport"][0]["mac"].upper()
except (LookupError, AttributeError):
pass
async def _get_fw_ver(self, web_ipreport: dict = None) -> Optional[str]:
if web_ipreport is None:
try:
web_ipreport = await self.web.ipreport()
except APIError:
pass
if web_ipreport is not None:
try:
return web_ipreport["IPReport"][0]["version"]
except LookupError:
pass
async def _get_hostname(self, web_ipreport: dict = None) -> Optional[str]:
if web_ipreport is None:
try:
web_ipreport = await self.web.ipreport()
except APIError:
pass
if web_ipreport is not None:
try:
return web_ipreport["IPReport"][0]["hostname"]
except LookupError:
pass
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary is not None:
try:
return round(
float(float(api_summary["SUMMARY"][0]["MHS 5s"]) / 1000000), 2
)
except (LookupError, ValueError, TypeError):
pass
async def _get_hashboards(
self, api_devs: dict = None, web_ipreport: dict = None
) -> List[HashBoard]:
hashboards = [
HashBoard(slot=i, expected_chips=self.expected_chips)
for i in range(self.expected_hashboards)
]
if api_devs is None:
try:
api_devs = await self.api.devs()
except APIError:
pass
if web_ipreport is None:
try:
web_ipreport = await self.web.ipreport()
except APIError:
pass
if api_devs is not None:
try:
for board in api_devs["DEVS"]:
b_id = board["ID"] - 1
hashboards[b_id].hashrate = round(
float(float(board["MHS 5s"]) / 1000000), 2
)
hashboards[b_id].temp = round(float(float(board["Temperature"])), 2)
hashboards[b_id].missing = False
except LookupError:
pass
if web_ipreport is not None:
try:
for board, sn in enumerate(web_ipreport["IPReport"][0]["HBSerialNo"]):
hashboards[board].serial_number = sn
hashboards[board].missing = False
except LookupError:
pass
return hashboards
async def _get_wattage(self, web_psu: dict = None) -> Optional[int]:
if web_psu is None:
try:
web_psu = await self.web.get_psu()
except APIError:
pass
if web_psu is not None:
try:
return int(float(web_psu["PSU"][0]["PowerIn"].replace("W", "")))
except (LookupError, TypeError, ValueError):
pass
async def _get_wattage_limit(
self, web_mode: dict = None, web_psu: dict = None
) -> Optional[int]:
if web_mode is None:
try:
web_mode = await self.web.get_mode()
except APIError:
pass
if web_mode is not None:
try:
return web_mode["Mode"][0]["Power"]
except (LookupError, TypeError, ValueError):
pass
if web_psu is None:
try:
web_psu = await self.web.get_psu()
except APIError:
pass
if web_psu is not None:
try:
return int(float(web_psu["PSU"][0]["PoutMax"].replace("W", "")))
except (LookupError, TypeError, ValueError):
pass
async def _get_fans(self, web_fan: dict = None) -> List[Fan]:
if web_fan is None:
try:
web_fan = await self.web.get_fan()
except APIError:
pass
fans = []
if web_fan is not None:
try:
for fan in web_fan["Fan"]:
fans.append(Fan(round(fan["Speed"])))
except LookupError:
pass
return fans
async def _get_fault_light(self, web_led: dict = None) -> Optional[bool]:
if web_led is None:
try:
web_led = await self.web.get_led()
except APIError:
pass
if web_led is not None:
try:
return web_led["LED"][0]["Code"] == int(AuradineLEDCodes.LOCATE_MINER)
except LookupError:
pass
async def _is_mining(self, web_mode: dict = None) -> Optional[bool]:
if web_mode is None:
try:
web_mode = await self.web.get_mode()
except APIError:
pass
if web_mode is not None:
try:
return web_mode["Mode"][0]["Sleep"] == "off"
except (LookupError, TypeError, ValueError):
pass
async def _get_uptime(self, api_summary: dict = None) -> Optional[int]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary is not None:
try:
return api_summary["SUMMARY"][0]["Elapsed"]
except LookupError:
pass
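
Taken together, the new Auradine base handler exposes the usual control surface (locate LED, sleep mode, power target) on top of the Flux web API and GCMiner RPC. A hedged end-to-end sketch — the address and wattage are hypothetical, and construction follows the assumed BaseMiner pattern:

import asyncio

from pyasic.miners.backends import Auradine


async def main() -> None:
    miner = Auradine("10.0.0.42")       # concrete Auradine models subclass this handler
    await miner.fault_light_on()        # LED code 3 (LOCATE_MINER)
    await miner.set_power_limit(3200)   # custom mode, power tuning target in watts
    await miner.stop_mining()           # sleep="on"


asyncio.run(main())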

View File

@@ -14,53 +14,68 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
import logging
import re
from typing import List, Optional
from pyasic.config import MinerConfig
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import MinerErrorData
from pyasic.errors import APIError
from pyasic.miners.backends import CGMiner
from pyasic.miners.backends.cgminer import CGMiner
from pyasic.miners.base import DataFunction, DataLocations, DataOptions, RPCAPICommand
AVALON_DATA_LOC = {
"mac": {"cmd": "get_mac", "kwargs": {"api_version": {"api": "version"}}},
"model": {"cmd": "get_model", "kwargs": {}},
"api_ver": {"cmd": "get_api_ver", "kwargs": {"api_version": {"api": "version"}}},
"fw_ver": {"cmd": "get_fw_ver", "kwargs": {"api_version": {"api": "version"}}},
"hostname": {"cmd": "get_hostname", "kwargs": {"mac": {"api": "version"}}},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_devs": {"api": "devs"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_stats": {"api": "stats"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_stats": {"api": "stats"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {"api_stats": {"api": "stats"}}},
"wattage": {"cmd": "get_wattage", "kwargs": {}},
"wattage_limit": {
"cmd": "get_wattage_limit",
"kwargs": {"api_stats": {"api": "stats"}},
},
"fans": {"cmd": "get_fans", "kwargs": {"api_stats": {"api": "stats"}}},
"fan_psu": {"cmd": "get_fan_psu", "kwargs": {}},
"errors": {"cmd": "get_errors", "kwargs": {}},
"fault_light": {
"cmd": "get_fault_light",
"kwargs": {"api_stats": {"api": "stats"}},
},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {"cmd": "is_mining", "kwargs": {}},
"uptime": {"cmd": "get_uptime", "kwargs": {}},
}
AVALON_DATA_LOC = DataLocations(
**{
str(DataOptions.MAC): DataFunction(
"_get_mac",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_devs", "devs")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.ENVIRONMENT_TEMP): DataFunction(
"_get_env_temp",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.WATTAGE_LIMIT): DataFunction(
"_get_wattage_limit",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FAULT_LIGHT): DataFunction(
"_get_fault_light",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_stats", "stats")],
),
}
)
class CGMinerAvalon(CGMiner):
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip, api_ver)
class AvalonMiner(CGMiner):
"""Handler for Avalon Miners"""
# data gathering locations
self.data_locations = AVALON_DATA_LOC
data_locations = AVALON_DATA_LOC
async def fault_light_on(self) -> bool:
try:
@@ -93,29 +108,9 @@ class CGMinerAvalon(CGMiner):
return False
return False
async def stop_mining(self) -> bool:
return False
async def resume_mining(self) -> bool:
return False
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
"""Configures miner with yaml config."""
self.config = config
return None
logging.debug(f"{self}: Sending config.") # noqa - This doesnt work...
conf = config.as_avalon(user_suffix=user_suffix)
try:
data = await self.api.ascset( # noqa
0, "setpool", f"root,root,{conf}"
) # this should work but doesn't
except APIError:
pass
# return data
@staticmethod
def parse_stats(stats):
_stats_items = re.findall(".+?\[*?]", stats)
_stats_items = re.findall(".+?\\[*?]", stats)
stats_items = []
stats_dict = {}
for item in _stats_items:
@@ -130,9 +125,9 @@ class CGMinerAvalon(CGMiner):
# --avalon args
for arg_item in data_list:
item_data = arg_item[0].split(" ")
for idx in range(len(item_data)):
for idx, val in enumerate(item_data):
if idx % 2 == 0 or idx == 0:
data_dict[item_data[idx]] = item_data[idx + 1]
data_dict[val] = item_data[idx + 1]
raw_data = [data[0].strip(), data_dict]
else:
@@ -160,14 +155,14 @@ class CGMinerAvalon(CGMiner):
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def get_mac(self, api_version: dict = None) -> Optional[str]:
if not api_version:
async def _get_mac(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
base_mac = api_version["VERSION"][0]["MAC"]
base_mac = base_mac.upper()
@@ -178,46 +173,39 @@ class CGMinerAvalon(CGMiner):
except (KeyError, ValueError):
pass
async def get_hostname(self, mac: str = None) -> Optional[str]:
if not mac:
mac = await self.get_mac()
if mac:
return f"Avalon{mac.replace(':', '')[-6:]}"
async def get_hashrate(self, api_devs: dict = None) -> Optional[float]:
if not api_devs:
async def _get_hashrate(self, api_devs: dict = None) -> Optional[float]:
if api_devs is None:
try:
api_devs = await self.api.devs()
except APIError:
pass
if api_devs:
if api_devs is not None:
try:
return round(float(api_devs["DEVS"][0]["MHS 1m"] / 1000000), 2)
except (KeyError, IndexError, ValueError, TypeError):
pass
async def get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
async def _get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
hashboards = [
HashBoard(slot=i, expected_chips=self.nominal_chips)
for i in range(self.ideal_hashboards)
HashBoard(slot=i, expected_chips=self.expected_chips)
for i in range(self.expected_hashboards)
]
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
unparsed_stats = api_stats["STATS"][0]["MM ID0"]
parsed_stats = self.parse_stats(unparsed_stats)
except (IndexError, KeyError, ValueError, TypeError):
return hashboards
for board in range(self.ideal_hashboards):
for board in range(self.expected_hashboards):
try:
hashboards[board].chip_temp = int(parsed_stats["MTmax"][board])
except LookupError:
@@ -246,14 +234,14 @@ class CGMinerAvalon(CGMiner):
return hashboards
async def get_nominal_hashrate(self, api_stats: dict = None) -> Optional[float]:
if not api_stats:
async def _get_expected_hashrate(self, api_stats: dict = None) -> Optional[float]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
unparsed_stats = api_stats["STATS"][0]["MM ID0"]
parsed_stats = self.parse_stats(unparsed_stats)
@@ -261,14 +249,14 @@ class CGMinerAvalon(CGMiner):
except (IndexError, KeyError, ValueError, TypeError):
pass
async def get_env_temp(self, api_stats: dict = None) -> Optional[float]:
if not api_stats:
async def _get_env_temp(self, api_stats: dict = None) -> Optional[float]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
unparsed_stats = api_stats["STATS"][0]["MM ID0"]
parsed_stats = self.parse_stats(unparsed_stats)
@@ -276,17 +264,14 @@ class CGMinerAvalon(CGMiner):
except (IndexError, KeyError, ValueError, TypeError):
pass
async def get_wattage(self) -> Optional[int]:
return None
async def get_wattage_limit(self, api_stats: dict = None) -> Optional[int]:
if not api_stats:
async def _get_wattage_limit(self, api_stats: dict = None) -> Optional[int]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
unparsed_stats = api_stats["STATS"][0]["MM ID0"]
parsed_stats = self.parse_stats(unparsed_stats)
@@ -294,67 +279,38 @@ class CGMinerAvalon(CGMiner):
except (IndexError, KeyError, ValueError, TypeError):
pass
async def get_fans(self, api_stats: dict = None) -> List[Fan]:
if not api_stats:
async def _get_fans(self, api_stats: dict = None) -> List[Fan]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
fans_data = [Fan() for _ in range(self.fan_count)]
if api_stats:
fans_data = [Fan() for _ in range(self.expected_fans)]
if api_stats is not None:
try:
unparsed_stats = api_stats["STATS"][0]["MM ID0"]
parsed_stats = self.parse_stats(unparsed_stats)
except LookupError:
return fans_data
for fan in range(self.fan_count):
for fan in range(self.expected_fans):
try:
fans_data[fan].speed = int(parsed_stats[f"Fan{fan + 1}"])
except (IndexError, KeyError, ValueError, TypeError):
pass
return fans_data
async def get_pools(self, api_pools: dict = None) -> List[dict]:
groups = []
if not api_pools:
try:
api_pools = await self.api.pools()
except APIError:
pass
if api_pools:
try:
pools = {}
for i, pool in enumerate(api_pools["POOLS"]):
pools[f"pool_{i + 1}_url"] = (
pool["URL"]
.replace("stratum+tcp://", "")
.replace("stratum2+tcp://", "")
)
pools[f"pool_{i + 1}_user"] = pool["User"]
pools["quota"] = pool["Quota"] if pool.get("Quota") else "0"
groups.append(pools)
except KeyError:
pass
return groups
async def get_errors(self) -> List[MinerErrorData]:
return []
async def get_fault_light(self, api_stats: dict = None) -> bool: # noqa
async def _get_fault_light(self, api_stats: dict = None) -> Optional[bool]:
if self.light:
return self.light
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
unparsed_stats = api_stats["STATS"][0]["MM ID0"]
parsed_stats = self.parse_stats(unparsed_stats)
@@ -373,6 +329,3 @@ class CGMinerAvalon(CGMiner):
except LookupError:
pass
return False
async def is_mining(self, *args, **kwargs) -> Optional[bool]:
return None
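
With the rename to AvalonMiner and the switch to class-level data_locations, the per-field getters are reached through the shared get_data pipeline rather than called directly. A hedged read-only sketch — the address and the MinerData field names are assumptions:

import asyncio

from pyasic.miners.backends import AvalonMiner


async def main() -> None:
    miner = AvalonMiner("10.0.0.7")
    data = await miner.get_data()
    print(data.hashrate, [fan.speed for fan in data.fans])


asyncio.run(main())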

View File

@@ -14,56 +14,57 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
from collections import namedtuple
from typing import List, Optional, Tuple
from typing import List, Optional
from pyasic.API.bfgminer import BFGMinerAPI
from pyasic.config import MinerConfig
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import MinerErrorData
from pyasic.errors import APIError
from pyasic.miners.base import BaseMiner
from pyasic.miners.base import (
BaseMiner,
DataFunction,
DataLocations,
DataOptions,
RPCAPICommand,
)
from pyasic.rpc.bfgminer import BFGMinerRPCAPI
BFGMINER_DATA_LOC = {
"mac": {"cmd": "get_mac", "kwargs": {}},
"model": {"cmd": "get_model", "kwargs": {}},
"api_ver": {"cmd": "get_api_ver", "kwargs": {"api_version": {"api": "version"}}},
"fw_ver": {"cmd": "get_fw_ver", "kwargs": {"api_version": {"api": "version"}}},
"hostname": {"cmd": "get_hostname", "kwargs": {}},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_summary": {"api": "summary"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_stats": {"api": "stats"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_stats": {"api": "stats"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {}},
"wattage": {"cmd": "get_wattage", "kwargs": {}},
"wattage_limit": {"cmd": "get_wattage_limit", "kwargs": {}},
"fans": {"cmd": "get_fans", "kwargs": {"api_stats": {"api": "stats"}}},
"fan_psu": {"cmd": "get_fan_psu", "kwargs": {}},
"errors": {"cmd": "get_errors", "kwargs": {}},
"fault_light": {"cmd": "get_fault_light", "kwargs": {}},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {"cmd": "is_mining", "kwargs": {}},
"uptime": {"cmd": "get_uptime", "kwargs": {}},
}
BFGMINER_DATA_LOC = DataLocations(
**{
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
}
)
class BFGMiner(BaseMiner):
"""Base handler for BFGMiner based miners."""
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip)
# interfaces
self.api = BFGMinerAPI(ip, api_ver)
_api_cls = BFGMinerRPCAPI
api: BFGMinerRPCAPI
# static data
self.api_type = "BFGMiner"
# data gathering locations
self.data_locations = BFGMINER_DATA_LOC
# data storage
self.api_ver = api_ver
data_locations = BFGMINER_DATA_LOC
async def get_config(self) -> MinerConfig:
# get pool data
@@ -72,118 +73,67 @@ class BFGMiner(BaseMiner):
except APIError:
return self.config
self.config = MinerConfig().from_api(pools["POOLS"])
self.config = MinerConfig.from_api(pools)
return self.config
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
return None
async def fault_light_off(self) -> bool:
return False
async def fault_light_on(self) -> bool:
return False
async def restart_backend(self) -> bool:
return False
async def stop_mining(self) -> bool:
return False
async def resume_mining(self) -> bool:
return False
async def set_power_limit(self, wattage: int) -> bool:
return False
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def get_mac(self) -> str:
return "00:00:00:00:00:00"
async def get_api_ver(self, api_version: dict = None) -> Optional[str]:
# Check to see if the version info is already cached
if self.api_ver:
return self.api_ver
if not api_version:
async def _get_api_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
self.api_ver = api_version["VERSION"][0]["API"]
except (KeyError, IndexError):
except LookupError:
pass
return self.api_ver
async def get_fw_ver(self, api_version: dict = None) -> Optional[str]:
# Check to see if the version info is already cached
if self.fw_ver:
return self.fw_ver
if not api_version:
async def _get_fw_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
self.fw_ver = api_version["VERSION"][0]["CompileTime"]
except (KeyError, IndexError):
except LookupError:
pass
return self.fw_ver
async def get_version(
self, api_version: dict = None
) -> Tuple[Optional[str], Optional[str]]:
# check if version is cached
miner_version = namedtuple("MinerVersion", "api_ver fw_ver")
return miner_version(
api_ver=await self.get_api_ver(api_version),
fw_ver=await self.get_fw_ver(api_version=api_version),
)
async def reboot(self) -> bool:
return False
async def get_fan_psu(self):
return None
async def get_hostname(self) -> Optional[str]:
return None
async def get_hashrate(self, api_summary: dict = None) -> Optional[float]:
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
# get hr from API
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return round(float(api_summary["SUMMARY"][0]["MHS 20s"] / 1000000), 2)
except (IndexError, KeyError, ValueError, TypeError):
except (LookupError, ValueError, TypeError):
pass
async def get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
async def _get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
hashboards = []
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
board_offset = -1
boards = api_stats["STATS"]
@@ -197,9 +147,11 @@ class BFGMiner(BaseMiner):
if board_offset == -1:
board_offset = 1
for i in range(board_offset, board_offset + self.ideal_hashboards):
for i in range(
board_offset, board_offset + self.expected_hashboards
):
hashboard = HashBoard(
slot=i - board_offset, expected_chips=self.nominal_chips
slot=i - board_offset, expected_chips=self.expected_chips
)
chip_temp = boards[1].get(f"temp{i}")
@@ -221,29 +173,20 @@ class BFGMiner(BaseMiner):
if (not chips) or (not chips > 0):
hashboard.missing = True
hashboards.append(hashboard)
except (IndexError, KeyError, ValueError, TypeError):
except (LookupError, ValueError, TypeError):
pass
return hashboards
async def get_env_temp(self) -> Optional[float]:
return None
async def get_wattage(self) -> Optional[int]:
return None
async def get_wattage_limit(self) -> Optional[int]:
return None
async def get_fans(self, api_stats: dict = None) -> List[Fan]:
if not api_stats:
async def _get_fans(self, api_stats: dict = None) -> List[Fan]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
fans_data = [None, None, None, None]
if api_stats:
if api_stats is not None:
try:
fan_offset = -1
@@ -255,74 +198,36 @@ class BFGMiner(BaseMiner):
if fan_offset == -1:
fan_offset = 1
for fan in range(self.fan_count):
for fan in range(self.expected_fans):
fans_data[fan] = api_stats["STATS"][1].get(
f"fan{fan_offset+fan}", 0
)
except (KeyError, IndexError):
except LookupError:
pass
fans = [Fan(speed=d) if d else Fan() for d in fans_data]
return fans
async def get_pools(self, api_pools: dict = None) -> List[dict]:
groups = []
if not api_pools:
try:
api_pools = await self.api.pools()
except APIError:
pass
if api_pools:
try:
pools = {}
for i, pool in enumerate(api_pools["POOLS"]):
pools[f"pool_{i + 1}_url"] = (
pool["URL"]
.replace("stratum+tcp://", "")
.replace("stratum2+tcp://", "")
)
pools[f"pool_{i + 1}_user"] = pool["User"]
pools["quota"] = pool["Quota"] if pool.get("Quota") else "0"
groups.append(pools)
except KeyError:
pass
return groups
async def get_errors(self) -> List[MinerErrorData]:
return []
async def get_fault_light(self) -> bool:
return False
async def get_nominal_hashrate(self, api_stats: dict = None) -> Optional[float]:
async def _get_expected_hashrate(self, api_stats: dict = None) -> Optional[float]:
# X19 method, not sure compatibility
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
ideal_rate = api_stats["STATS"][1]["total_rateideal"]
expected_rate = api_stats["STATS"][1]["total_rateideal"]
try:
rate_unit = api_stats["STATS"][1]["rate_unit"]
except KeyError:
rate_unit = "GH"
if rate_unit == "GH":
return round(ideal_rate / 1000, 2)
return round(expected_rate / 1000, 2)
if rate_unit == "MH":
return round(ideal_rate / 1000000, 2)
return round(expected_rate / 1000000, 2)
else:
return round(ideal_rate, 2)
except (KeyError, IndexError):
return round(expected_rate, 2)
except LookupError:
pass
async def is_mining(self, *args, **kwargs) -> Optional[bool]:
return None
async def get_uptime(self, *args, **kwargs) -> Optional[int]:
return None

View File

@@ -14,88 +14,61 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
import logging
from collections import namedtuple
from typing import List, Optional, Tuple
from typing import List, Optional
from pyasic.API.bmminer import BMMinerAPI
from pyasic.config import MinerConfig
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import MinerErrorData
from pyasic.errors import APIError
from pyasic.miners.base import BaseMiner
from pyasic.miners.base import (
BaseMiner,
DataFunction,
DataLocations,
DataOptions,
RPCAPICommand,
)
from pyasic.rpc.bmminer import BMMinerRPCAPI
BMMINER_DATA_LOC = {
"mac": {"cmd": "get_mac", "kwargs": {}},
"model": {"cmd": "get_model", "kwargs": {}},
"api_ver": {"cmd": "get_api_ver", "kwargs": {"api_version": {"api": "version"}}},
"fw_ver": {"cmd": "get_fw_ver", "kwargs": {"api_version": {"api": "version"}}},
"hostname": {"cmd": "get_hostname", "kwargs": {}},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_summary": {"api": "summary"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_stats": {"api": "stats"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_stats": {"api": "stats"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {}},
"wattage": {"cmd": "get_wattage", "kwargs": {}},
"wattage_limit": {"cmd": "get_wattage_limit", "kwargs": {}},
"fans": {"cmd": "get_fans", "kwargs": {"api_stats": {"api": "stats"}}},
"fan_psu": {"cmd": "get_fan_psu", "kwargs": {}},
"errors": {"cmd": "get_errors", "kwargs": {}},
"fault_light": {"cmd": "get_fault_light", "kwargs": {}},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {"cmd": "is_mining", "kwargs": {}},
"uptime": {
"cmd": "get_uptime",
"kwargs": {"api_stats": {"api": "stats"}},
},
}
BMMINER_DATA_LOC = DataLocations(
**{
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_stats", "stats")],
),
}
)
class BMMiner(BaseMiner):
"""Base handler for BMMiner based miners."""
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip)
# interfaces
self.api = BMMinerAPI(ip, api_ver)
_api_cls = BMMinerRPCAPI
api: BMMinerRPCAPI
# static data
self.api_type = "BMMiner"
# data gathering locations
self.data_locations = BMMINER_DATA_LOC
# data storage
self.api_ver = api_ver
async def send_ssh_command(self, cmd: str) -> Optional[str]:
result = None
try:
conn = await self._get_ssh_connection()
except ConnectionError:
return None
# open an ssh connection
async with conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# return the result, either command output or None
return result
data_locations = BMMINER_DATA_LOC
async def get_config(self) -> MinerConfig:
# get pool data
@@ -104,124 +77,67 @@ class BMMiner(BaseMiner):
except APIError:
return self.config
self.config = MinerConfig().from_api(pools["POOLS"])
self.config = MinerConfig.from_api(pools)
return self.config
async def reboot(self) -> bool:
logging.debug(f"{self}: Sending reboot command.")
ret = await self.send_ssh_command("reboot")
logging.debug(f"{self}: Reboot command completed.")
if ret is None:
return False
return True
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
return None
async def fault_light_off(self) -> bool:
return False
async def fault_light_on(self) -> bool:
return False
async def restart_backend(self) -> bool:
return False
async def stop_mining(self) -> bool:
return False
async def resume_mining(self) -> bool:
return False
async def set_power_limit(self, wattage: int) -> bool:
return False
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def get_mac(self) -> str:
return "00:00:00:00:00:00"
async def get_api_ver(self, api_version: dict = None) -> Optional[str]:
# Check to see if the version info is already cached
if self.api_ver:
return self.api_ver
if not api_version:
async def _get_api_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
self.api_ver = api_version["VERSION"][0]["API"]
except (KeyError, IndexError):
except LookupError:
pass
return self.api_ver
async def get_fw_ver(self, api_version: dict = None) -> Optional[str]:
# Check to see if the version info is already cached
if self.fw_ver:
return self.fw_ver
if not api_version:
async def _get_fw_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
self.fw_ver = api_version["VERSION"][0]["CompileTime"]
except (KeyError, IndexError):
except LookupError:
pass
return self.fw_ver
async def get_version(
self, api_version: dict = None
) -> Tuple[Optional[str], Optional[str]]:
# check if version is cached
miner_version = namedtuple("MinerVersion", "api_ver fw_ver")
return miner_version(
api_ver=await self.get_api_ver(api_version),
fw_ver=await self.get_fw_ver(api_version=api_version),
)
async def get_fan_psu(self):
return None
async def get_hostname(self) -> Optional[str]:
hn = await self.send_ssh_command("cat /proc/sys/kernel/hostname")
return hn
async def get_hashrate(self, api_summary: dict = None) -> Optional[float]:
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
# get hr from API
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return round(float(api_summary["SUMMARY"][0]["GHS 5s"] / 1000), 2)
except (IndexError, KeyError, ValueError, TypeError):
except (LookupError, ValueError, TypeError):
pass
async def get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
async def _get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
hashboards = []
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
board_offset = -1
boards = api_stats["STATS"]
@@ -235,9 +151,24 @@ class BMMiner(BaseMiner):
if board_offset == -1:
board_offset = 1
for i in range(board_offset, board_offset + self.ideal_hashboards):
real_slots = []
for i in range(board_offset, board_offset + 4):
try:
key = f"chain_acs{i}"
if boards[1].get(key, "") != "":
real_slots.append(i)
except LookupError:
pass
if len(real_slots) < 3:
real_slots = list(
range(board_offset, board_offset + self.expected_hashboards)
)
for i in real_slots:
hashboard = HashBoard(
slot=i - board_offset, expected_chips=self.nominal_chips
slot=i - board_offset, expected_chips=self.expected_chips
)
chip_temp = boards[1].get(f"temp{i}")
@@ -259,29 +190,20 @@ class BMMiner(BaseMiner):
if (not chips) or (not chips > 0):
hashboard.missing = True
hashboards.append(hashboard)
except (IndexError, KeyError, ValueError, TypeError):
except (LookupError, ValueError, TypeError):
pass
return hashboards
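
The new slot detection above only counts chains that report a non-empty chain_acs{i} string, and falls back to the expected board range when fewer than three are found. The same logic as a stand-alone helper; the name and signature are illustrative only:

from typing import List


def detect_real_slots(stats_block: dict, board_offset: int, expected_hashboards: int) -> List[int]:
    # Only chains that actually report a chain_acs{i} entry count as real slots.
    real_slots = [
        i
        for i in range(board_offset, board_offset + 4)
        if stats_block.get(f"chain_acs{i}", "") != ""
    ]
    if len(real_slots) < 3:
        # Fall back to the expected board range if detection comes up short.
        real_slots = list(range(board_offset, board_offset + expected_hashboards))
    return real_slots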
async def get_env_temp(self) -> Optional[float]:
return None
async def get_wattage(self) -> Optional[int]:
return None
async def get_wattage_limit(self) -> Optional[int]:
return None
async def get_fans(self, api_stats: dict = None) -> List[Fan]:
if not api_stats:
async def _get_fans(self, api_stats: dict = None) -> List[Fan]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
fans = [Fan() for _ in range(self.fan_count)]
if api_stats:
fans = [Fan() for _ in range(self.expected_fans)]
if api_stats is not None:
try:
fan_offset = -1
@@ -293,82 +215,47 @@ class BMMiner(BaseMiner):
if fan_offset == -1:
fan_offset = 1
for fan in range(self.fan_count):
for fan in range(self.expected_fans):
fans[fan].speed = api_stats["STATS"][1].get(
f"fan{fan_offset+fan}", 0
)
except (KeyError, IndexError):
except LookupError:
pass
return fans
async def get_pools(self, api_pools: dict = None) -> List[dict]:
groups = []
if not api_pools:
try:
api_pools = await self.api.pools()
except APIError:
pass
if api_pools:
try:
pools = {}
for i, pool in enumerate(api_pools["POOLS"]):
pools[f"pool_{i + 1}_url"] = (
pool["URL"]
.replace("stratum+tcp://", "")
.replace("stratum2+tcp://", "")
)
pools[f"pool_{i + 1}_user"] = pool["User"]
pools["quota"] = pool["Quota"] if pool.get("Quota") else "0"
groups.append(pools)
except KeyError:
pass
return groups
async def get_errors(self) -> List[MinerErrorData]:
return []
async def get_fault_light(self) -> bool:
return False
async def get_nominal_hashrate(self, api_stats: dict = None) -> Optional[float]:
async def _get_expected_hashrate(self, api_stats: dict = None) -> Optional[float]:
# X19 method, not sure compatibility
if not api_stats:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
ideal_rate = api_stats["STATS"][1]["total_rateideal"]
expected_rate = api_stats["STATS"][1]["total_rateideal"]
try:
rate_unit = api_stats["STATS"][1]["rate_unit"]
except KeyError:
rate_unit = "GH"
if rate_unit == "GH":
return round(ideal_rate / 1000, 2)
return round(expected_rate / 1000, 2)
if rate_unit == "MH":
return round(ideal_rate / 1000000, 2)
return round(expected_rate / 1000000, 2)
else:
return round(ideal_rate, 2)
except (KeyError, IndexError):
return round(expected_rate, 2)
except LookupError:
pass
async def is_mining(self, *args, **kwargs) -> Optional[bool]:
return None
async def get_uptime(self, api_stats: dict = None) -> Optional[int]:
if not api_stats:
async def _get_uptime(self, api_stats: dict = None) -> Optional[int]:
if api_stats is None:
try:
api_stats = await self.web.get_miner_conf()
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
return int(api_stats["STATS"][1]["Elapsed"])
except LookupError:

File diff suppressed because it is too large

View File

@@ -1,158 +0,0 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import logging
from typing import List, Optional, Tuple
from pyasic.config import MinerConfig
from pyasic.data import Fan, HashBoard, MinerData
from pyasic.data.error_codes import MinerErrorData
from pyasic.miners.backends import BOSMiner
class BOSMinerOld(BOSMiner):
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
super().__init__(ip, api_ver)
async def send_ssh_command(self, cmd: str) -> Optional[str]:
result = None
try:
conn = await self._get_ssh_connection()
except ConnectionError:
return None
# open an ssh connection
async with conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# return the result, either command output or None
return result
async def update_to_plus(self):
result = await self.send_ssh_command("opkg update && opkg install bos_plus")
return result
async def check_light(self) -> bool:
return False
async def fault_light_on(self) -> bool:
return False
async def fault_light_off(self) -> bool:
return False
async def get_config(self) -> None:
return None
async def reboot(self) -> bool:
return False
async def restart_backend(self) -> bool:
return False
async def stop_mining(self) -> bool:
return False
async def resume_mining(self) -> bool:
return False
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
return None
async def set_power_limit(self, wattage: int) -> bool:
return False
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def get_mac(self, *args, **kwargs) -> Optional[str]:
return None
async def get_model(self, *args, **kwargs) -> str:
return "S9"
async def get_version(self, *args, **kwargs) -> Tuple[Optional[str], Optional[str]]:
return None, None
async def get_hostname(self, *args, **kwargs) -> Optional[str]:
return None
async def get_hashrate(self, *args, **kwargs) -> Optional[float]:
return None
async def get_hashboards(self, *args, **kwargs) -> List[HashBoard]:
return []
async def get_env_temp(self, *args, **kwargs) -> Optional[float]:
return None
async def get_wattage(self, *args, **kwargs) -> Optional[int]:
return None
async def get_wattage_limit(self, *args, **kwargs) -> Optional[int]:
return None
async def get_fans(
self,
*args,
**kwargs,
) -> List[Fan]:
return [Fan(), Fan(), Fan(), Fan()]
async def get_fan_psu(self, *args, **kwargs) -> Optional[int]:
return None
async def get_api_ver(self, *args, **kwargs) -> Optional[str]:
return None
async def get_fw_ver(self, *args, **kwargs) -> Optional[str]:
return None
async def get_pools(self, *args, **kwargs) -> List[dict]:
return []
async def get_errors(self, *args, **kwargs) -> List[MinerErrorData]:
return []
async def get_fault_light(self, *args, **kwargs) -> bool:
return False
async def get_nominal_hashrate(self, *args, **kwargs) -> Optional[float]:
return None
async def get_data(self, allow_warning: bool = False, **kwargs) -> MinerData:
return MinerData(ip=str(self.ip))
async def is_mining(self, *args, **kwargs) -> Optional[bool]:
return None
async def get_uptime(self, *args, **kwargs) -> Optional[int]:
return None
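
The deleted `BOSMinerOld.send_ssh_command` above retried a command up to three times over a shared SSH connection (an asyncssh-style object whose `run()` returns a result with `.stdout`). A standalone sketch of that retry pattern, assuming the same connection interface; since `range(3)` never yields 3, the bail-out here checks the final attempt index instead:

import logging
from typing import Optional


async def run_with_retries(conn, cmd: str, attempts: int = 3) -> Optional[str]:
    # Sketch of the retry loop from BOSMinerOld.send_ssh_command, assuming an
    # asyncssh-style connection whose .run() returns an object with .stdout.
    for attempt in range(attempts):
        try:
            result = await conn.run(cmd)
            return result.stdout
        except Exception as e:  # mirror the broad catch in the original
            logging.warning(f"command {cmd!r} error on attempt {attempt + 1}: {e}")
            # range(attempts) stops at attempts - 1, so check the last index here
            if attempt == attempts - 1:
                return None
    return None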


@@ -0,0 +1,965 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import logging
import time
from typing import List, Optional, Union
import toml
from pyasic.config import MinerConfig
from pyasic.config.mining import MiningModePowerTune
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import BraiinsOSError, MinerErrorData
from pyasic.errors import APIError
from pyasic.miners.base import (
BaseMiner,
DataFunction,
DataLocations,
DataOptions,
GRPCCommand,
RPCAPICommand,
WebAPICommand,
)
from pyasic.rpc.bosminer import BOSMinerRPCAPI
from pyasic.ssh.braiins_os import BOSMinerSSH
from pyasic.web.braiins_os import BOSerWebAPI, BOSMinerWebAPI
BOSMINER_DATA_LOC = DataLocations(
**{
str(DataOptions.MAC): DataFunction(
"_get_mac",
[WebAPICommand("web_net_conf", "admin/network/iface_status/lan")],
),
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[WebAPICommand("web_bos_info", "bos/info")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_devs", "devs")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[
RPCAPICommand("api_temps", "temps"),
RPCAPICommand("api_devdetails", "devdetails"),
RPCAPICommand("api_devs", "devs"),
],
),
str(DataOptions.WATTAGE): DataFunction(
"_get_wattage",
[RPCAPICommand("api_tunerstatus", "tunerstatus")],
),
str(DataOptions.WATTAGE_LIMIT): DataFunction(
"_get_wattage_limit",
[RPCAPICommand("api_tunerstatus", "tunerstatus")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_fans", "fans")],
),
str(DataOptions.ERRORS): DataFunction(
"_get_errors",
[RPCAPICommand("api_tunerstatus", "tunerstatus")],
),
str(DataOptions.IS_MINING): DataFunction(
"_is_mining",
[RPCAPICommand("api_devdetails", "devdetails")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_summary", "summary")],
),
}
)
class BOSMiner(BaseMiner):
"""Handler for old versions of BraiinsOS+ (pre-gRPC)"""
_api_cls = BOSMinerRPCAPI
api: BOSMinerRPCAPI
_web_cls = BOSMinerWebAPI
web: BOSMinerWebAPI
_ssh_cls = BOSMinerSSH
ssh: BOSMinerSSH
firmware = "BOS+"
data_locations = BOSMINER_DATA_LOC
supports_shutdown = True
supports_autotuning = True
async def fault_light_on(self) -> bool:
ret = await self.ssh.fault_light_on()
if isinstance(ret, str):
self.light = True
return self.light
return False
async def fault_light_off(self) -> bool:
ret = await self.ssh.fault_light_off()
if isinstance(ret, str):
self.light = False
return True
return False
async def restart_backend(self) -> bool:
return await self.restart_bosminer()
async def restart_bosminer(self) -> bool:
ret = await self.ssh.restart_bosminer()
if isinstance(ret, str):
return True
return False
async def stop_mining(self) -> bool:
try:
data = await self.api.pause()
except APIError:
return False
if data.get("PAUSE"):
if data["PAUSE"][0]:
return True
return False
async def resume_mining(self) -> bool:
try:
data = await self.api.resume()
except APIError:
return False
if data.get("RESUME"):
if data["RESUME"][0]:
return True
return False
async def reboot(self) -> bool:
ret = await self.ssh.reboot()
if isinstance(ret, str):
return True
return False
async def get_config(self) -> MinerConfig:
raw_data = await self.ssh.get_config_file()
try:
toml_data = toml.loads(raw_data)
cfg = MinerConfig.from_bosminer(toml_data)
self.config = cfg
except toml.TomlDecodeError as e:
raise APIError("Failed to decode toml when getting config.") from e
return self.config
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
self.config = config
toml_conf = toml.dumps(
{
"format": {
"version": "1.2+",
"generator": "pyasic",
"model": f"{self.make.replace('Miner', 'miner')} {self.raw_model.replace('j', 'J')}",
"timestamp": int(time.time()),
},
**config.as_bosminer(user_suffix=user_suffix),
}
)
try:
conn = await self.ssh._get_connection()
except ConnectionError as e:
raise APIError("SSH connection failed when sending config.") from e
async with conn:
await conn.run("/etc/init.d/bosminer stop")
async with conn.start_sftp_client() as sftp:
async with sftp.open("/etc/bosminer.toml", "w+") as file:
await file.write(toml_conf)
await conn.run("/etc/init.d/bosminer start")
async def set_power_limit(self, wattage: int) -> bool:
try:
cfg = await self.get_config()
if cfg is None:
return False
cfg.mining_mode = MiningModePowerTune(wattage)
await self.send_config(cfg)
except APIError:
raise
except Exception as e:
logging.warning(f"{self} - Failed to set power limit: {e}")
return False
else:
return True
async def set_static_ip(
self,
ip: str,
dns: str,
gateway: str,
subnet_mask: str = "255.255.255.0",
):
cfg_data_lan = "\n\t".join(
[
"config interface 'lan'",
"option type 'bridge'",
"option ifname 'eth0'",
"option proto 'static'",
f"option ipaddr '{ip}'",
f"option netmask '{subnet_mask}'",
f"option gateway '{gateway}'",
f"option dns '{dns}'",
]
)
data = await self.ssh.get_network_config()
split_data = data.split("\n\n")
for idx, val in enumerate(split_data):
if "config interface 'lan'" in val:
split_data[idx] = cfg_data_lan
config = "\n\n".join(split_data)
await self.ssh.send_command("echo '" + config + "' > /etc/config/network")
async def set_dhcp(self):
cfg_data_lan = "\n\t".join(
[
"config interface 'lan'",
"option type 'bridge'",
"option ifname 'eth0'",
"option proto 'dhcp'",
]
)
data = await self.ssh.get_network_config()
split_data = data.split("\n\n")
for idx, val in enumerate(split_data):
if "config interface 'lan'" in val:
split_data[idx] = cfg_data_lan
config = "\n\n".join(split_data)
await self.ssh.send_command("echo '" + config + "' > /etc/config/network")
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def _get_mac(self, web_net_conf: Union[dict, list] = None) -> Optional[str]:
if web_net_conf is None:
try:
web_net_conf = await self.web.luci.get_net_conf()
except APIError:
pass
if isinstance(web_net_conf, dict):
if "admin/network/iface_status/lan" in web_net_conf.keys():
web_net_conf = web_net_conf["admin/network/iface_status/lan"]
if web_net_conf is not None:
try:
return web_net_conf[0]["macaddr"]
except LookupError:
pass
# could use ssh, but its slow and buggy
# result = await self.send_ssh_command("cat /sys/class/net/eth0/address")
# if result:
# return result.upper().strip()
async def _get_api_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
# Now get the API version
if api_version is not None:
try:
api_ver = api_version["VERSION"][0]["API"]
except LookupError:
api_ver = None
self.api_ver = api_ver
self.api.api_ver = self.api_ver
return self.api_ver
async def _get_fw_ver(self, web_bos_info: dict = None) -> Optional[str]:
if web_bos_info is None:
try:
web_bos_info = await self.web.luci.get_bos_info()
except APIError:
return None
if isinstance(web_bos_info, dict):
if "bos/info" in web_bos_info.keys():
web_bos_info = web_bos_info["bos/info"]
try:
ver = web_bos_info["version"].split("-")[5]
if "." in ver:
self.fw_ver = ver
except (LookupError, AttributeError):
return None
return self.fw_ver
async def _get_hostname(self) -> Union[str, None]:
try:
hostname = (await self.ssh.get_hostname()).strip()
except AttributeError:
return None
except Exception as e:
logging.error(f"{self} - Getting hostname failed: {e}")
return None
return hostname
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary is not None:
try:
return round(float(api_summary["SUMMARY"][0]["MHS 1m"] / 1000000), 2)
except (KeyError, IndexError, ValueError, TypeError):
pass
async def _get_hashboards(
self,
api_temps: dict = None,
api_devdetails: dict = None,
api_devs: dict = None,
) -> List[HashBoard]:
hashboards = [
HashBoard(slot=i, expected_chips=self.expected_chips)
for i in range(self.expected_hashboards)
]
cmds = []
if api_temps is None:
cmds.append("temps")
if api_devdetails is None:
cmds.append("devdetails")
if api_devs is None:
cmds.append("devs")
if len(cmds) > 0:
try:
d = await self.api.multicommand(*cmds)
except APIError:
d = {}
try:
api_temps = d["temps"][0]
except LookupError:
api_temps = None
try:
api_devdetails = d["devdetails"][0]
except (KeyError, IndexError):
api_devdetails = None
try:
api_devs = d["devs"][0]
except LookupError:
api_devs = None
if api_temps is not None:
try:
offset = 6 if api_temps["TEMPS"][0]["ID"] in [6, 7, 8] else 1
for board in api_temps["TEMPS"]:
_id = board["ID"] - offset
chip_temp = round(board["Chip"])
board_temp = round(board["Board"])
hashboards[_id].chip_temp = chip_temp
hashboards[_id].temp = board_temp
except (IndexError, KeyError, ValueError, TypeError):
pass
if api_devdetails is not None:
try:
offset = 6 if api_devdetails["DEVDETAILS"][0]["ID"] in [6, 7, 8] else 1
for board in api_devdetails["DEVDETAILS"]:
_id = board["ID"] - offset
chips = board["Chips"]
hashboards[_id].chips = chips
hashboards[_id].missing = False
except (IndexError, KeyError):
pass
if api_devs is not None:
try:
offset = 6 if api_devs["DEVS"][0]["ID"] in [6, 7, 8] else 1
for board in api_devs["DEVS"]:
_id = board["ID"] - offset
hashrate = round(float(board["MHS 1m"] / 1000000), 2)
hashboards[_id].hashrate = hashrate
except (IndexError, KeyError):
pass
return hashboards
async def _get_wattage(self, api_tunerstatus: dict = None) -> Optional[int]:
if api_tunerstatus is None:
try:
api_tunerstatus = await self.api.tunerstatus()
except APIError:
pass
if api_tunerstatus is not None:
try:
return api_tunerstatus["TUNERSTATUS"][0][
"ApproximateMinerPowerConsumption"
]
except LookupError:
pass
async def _get_wattage_limit(self, api_tunerstatus: dict = None) -> Optional[int]:
if api_tunerstatus is None:
try:
api_tunerstatus = await self.api.tunerstatus()
except APIError:
pass
if api_tunerstatus is not None:
try:
return api_tunerstatus["TUNERSTATUS"][0]["PowerLimit"]
except LookupError:
pass
async def _get_fans(self, api_fans: dict = None) -> List[Fan]:
if api_fans is None:
try:
api_fans = await self.api.fans()
except APIError:
pass
if api_fans is not None:
fans = []
for n in range(self.expected_fans):
try:
fans.append(Fan(api_fans["FANS"][n]["RPM"]))
except (IndexError, KeyError):
pass
return fans
return [Fan() for _ in range(self.expected_fans)]
async def _get_errors(self, api_tunerstatus: dict = None) -> List[MinerErrorData]:
if api_tunerstatus is None:
try:
api_tunerstatus = await self.api.tunerstatus()
except APIError:
pass
if api_tunerstatus is not None:
errors = []
try:
chain_status = api_tunerstatus["TUNERSTATUS"][0]["TunerChainStatus"]
if chain_status and len(chain_status) > 0:
offset = (
6 if int(chain_status[0]["HashchainIndex"]) in [6, 7, 8] else 0
)
for board in chain_status:
_id = board["HashchainIndex"] - offset
if board["Status"] not in [
"Stable",
"Testing performance profile",
"Tuning individual chips",
]:
_error = board["Status"].split(" {")[0]
_error = _error[0].lower() + _error[1:]
errors.append(BraiinsOSError(f"Slot {_id} {_error}"))
return errors
except (KeyError, IndexError):
pass
async def _get_fault_light(self) -> bool:
if self.light:
return self.light
try:
data = (await self.ssh.get_led_status()).strip()
self.light = False
if data == "50":
self.light = True
return self.light
except (TypeError, AttributeError):
return self.light
async def _get_expected_hashrate(self, api_devs: dict = None) -> Optional[float]:
if api_devs is None:
try:
api_devs = await self.api.devs()
except APIError:
pass
if api_devs is not None:
try:
hr_list = []
for board in api_devs["DEVS"]:
expected_hashrate = round(float(board["Nominal MHS"] / 1000000), 2)
if expected_hashrate:
hr_list.append(expected_hashrate)
if len(hr_list) == 0:
return 0
else:
return round(
(sum(hr_list) / len(hr_list)) * self.expected_hashboards, 2
)
except (IndexError, KeyError):
pass
async def _is_mining(self, api_devdetails: dict = None) -> Optional[bool]:
if api_devdetails is None:
try:
api_devdetails = await self.api.send_command(
"devdetails", ignore_errors=True, allow_warning=False
)
except APIError:
pass
if api_devdetails is not None:
try:
return not api_devdetails["STATUS"][0]["Msg"] == "Unavailable"
except LookupError:
pass
async def _get_uptime(self, api_summary: dict = None) -> Optional[int]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary is not None:
try:
return int(api_summary["SUMMARY"][0]["Elapsed"])
except LookupError:
pass
BOSER_DATA_LOC = DataLocations(
**{
str(DataOptions.MAC): DataFunction(
"_get_mac",
[GRPCCommand("grpc_miner_details", "get_miner_details")],
),
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[GRPCCommand("api_version", "get_api_version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[GRPCCommand("grpc_miner_details", "get_miner_details")],
),
str(DataOptions.HOSTNAME): DataFunction(
"_get_hostname",
[GRPCCommand("grpc_miner_details", "get_miner_details")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[GRPCCommand("grpc_miner_details", "get_miner_details")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[GRPCCommand("grpc_hashboards", "get_hashboards")],
),
str(DataOptions.WATTAGE): DataFunction(
"_get_wattage",
[GRPCCommand("grpc_miner_stats", "get_miner_stats")],
),
str(DataOptions.WATTAGE_LIMIT): DataFunction(
"_get_wattage_limit",
[
GRPCCommand(
"grpc_active_performance_mode", "get_active_performance_mode"
)
],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[GRPCCommand("grpc_cooling_state", "get_cooling_state")],
),
str(DataOptions.ERRORS): DataFunction(
"_get_errors",
[RPCAPICommand("api_tunerstatus", "tunerstatus")],
),
str(DataOptions.FAULT_LIGHT): DataFunction(
"_get_fault_light",
[GRPCCommand("grpc_locate_device_status", "get_locate_device_status")],
),
str(DataOptions.IS_MINING): DataFunction(
"_is_mining",
[RPCAPICommand("api_devdetails", "devdetails")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_summary", "summary")],
),
}
)
class BOSer(BaseMiner):
"""Handler for new versions of BraiinsOS+ (post-gRPC)"""
_api_cls = BOSMinerRPCAPI
web: BOSMinerRPCAPI
_web_cls = BOSerWebAPI
web: BOSerWebAPI
data_locations = BOSER_DATA_LOC
supports_autotuning = True
supports_shutdown = True
async def fault_light_on(self) -> bool:
resp = await self.web.grpc.set_locate_device_status(True)
if resp.get("enabled", False):
return True
return False
async def fault_light_off(self) -> bool:
resp = await self.web.grpc.set_locate_device_status(False)
if resp == {}:
return True
return False
async def restart_backend(self) -> bool:
return await self.restart_boser()
async def restart_boser(self) -> bool:
await self.web.grpc.restart()
return True
async def stop_mining(self) -> bool:
try:
await self.web.grpc.pause_mining()
except APIError:
return False
return True
async def resume_mining(self) -> bool:
try:
await self.web.grpc.resume_mining()
except APIError:
return False
return True
async def reboot(self) -> bool:
ret = await self.web.grpc.reboot()
if ret == {}:
return True
return False
async def get_config(self) -> MinerConfig:
grpc_conf = await self.web.grpc.get_miner_configuration()
return MinerConfig.from_boser(grpc_conf)
async def set_power_limit(self, wattage: int) -> bool:
try:
result = await self.web.grpc.set_power_target(wattage)
except APIError:
return False
try:
if result["powerTarget"]["watt"] == wattage:
return True
except KeyError:
pass
return False
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def _get_mac(self, grpc_miner_details: dict = None) -> Optional[str]:
if grpc_miner_details is None:
try:
grpc_miner_details = await self.web.grpc.get_miner_details()
except APIError:
pass
if grpc_miner_details is not None:
try:
return grpc_miner_details["macAddress"].upper()
except (LookupError, TypeError):
pass
async def _get_api_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version is not None:
try:
api_ver = api_version["VERSION"][0]["API"]
except LookupError:
api_ver = None
self.api_ver = api_ver
self.api.api_ver = self.api_ver
return self.api_ver
async def _get_fw_ver(self, grpc_miner_details: dict = None) -> Optional[str]:
if grpc_miner_details is None:
try:
grpc_miner_details = await self.web.grpc.get_miner_details()
except APIError:
pass
fw_ver = None
if grpc_miner_details is not None:
try:
fw_ver = grpc_miner_details["bosVersion"]["current"]
except (KeyError, TypeError):
pass
# if we get the version data, parse it
if fw_ver is not None:
ver = fw_ver.split("-")[5]
if "." in ver:
self.fw_ver = ver
return self.fw_ver
async def _get_hostname(self, grpc_miner_details: dict = None) -> Optional[str]:
if grpc_miner_details is None:
try:
grpc_miner_details = await self.web.grpc.get_miner_details()
except APIError:
pass
if grpc_miner_details is not None:
try:
return grpc_miner_details["hostname"]
except LookupError:
pass
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary is not None:
try:
return round(float(api_summary["SUMMARY"][0]["MHS 1m"] / 1000000), 2)
except (KeyError, IndexError, ValueError, TypeError):
pass
async def _get_expected_hashrate(
self, grpc_miner_details: dict = None
) -> Optional[float]:
if grpc_miner_details is None:
try:
grpc_miner_details = await self.web.grpc.get_miner_details()
except APIError:
pass
if grpc_miner_details is not None:
try:
return grpc_miner_details["stickerHashrate"]["gigahashPerSecond"] / 1000
except LookupError:
pass
async def _get_hashboards(self, grpc_hashboards: dict = None) -> List[HashBoard]:
hashboards = [
HashBoard(slot=i, expected_chips=self.expected_chips)
for i in range(self.expected_hashboards)
]
if grpc_hashboards is None:
try:
grpc_hashboards = await self.web.grpc.get_hashboards()
except APIError:
pass
if grpc_hashboards is not None:
for board in grpc_hashboards["hashboards"]:
idx = int(board["id"]) - 1
if board.get("chipsCount") is not None:
hashboards[idx].chips = board["chipsCount"]
if board.get("boardTemp") is not None:
hashboards[idx].temp = board["boardTemp"]["degreeC"]
if board.get("highestChipTemp") is not None:
hashboards[idx].chip_temp = board["highestChipTemp"]["temperature"][
"degreeC"
]
if board.get("stats") is not None:
if not board["stats"]["realHashrate"]["last5S"] == {}:
hashboards[idx].hashrate = round(
board["stats"]["realHashrate"]["last5S"][
"gigahashPerSecond"
]
/ 1000,
2,
)
hashboards[idx].missing = False
return hashboards
async def _get_wattage(self, grpc_miner_stats: dict = None) -> Optional[int]:
if grpc_miner_stats is None:
try:
grpc_miner_stats = self.web.grpc.get_miner_stats()
except APIError:
pass
if grpc_miner_stats is not None:
try:
return grpc_miner_stats["powerStats"]["approximatedConsumption"]["watt"]
except KeyError:
pass
async def _get_wattage_limit(
self, grpc_active_performance_mode: dict = None
) -> Optional[int]:
if grpc_active_performance_mode is None:
try:
grpc_active_performance_mode = (
self.web.grpc.get_active_performance_mode()
)
except APIError:
pass
if grpc_active_performance_mode is not None:
try:
return grpc_active_performance_mode["tunerMode"]["powerTarget"][
"powerTarget"
]["watt"]
except KeyError:
pass
async def _get_fans(self, grpc_cooling_state: dict = None) -> List[Fan]:
if grpc_cooling_state is None:
try:
grpc_cooling_state = self.web.grpc.get_cooling_state()
except APIError:
pass
if grpc_cooling_state is not None:
fans = []
for n in range(self.expected_fans):
try:
fans.append(Fan(grpc_cooling_state["fans"][n]["rpm"]))
except (IndexError, KeyError):
pass
return fans
return [Fan() for _ in range(self.expected_fans)]
async def _get_errors(self, api_tunerstatus: dict = None) -> List[MinerErrorData]:
if api_tunerstatus is None:
try:
api_tunerstatus = await self.api.tunerstatus()
except APIError:
pass
if api_tunerstatus is not None:
errors = []
try:
chain_status = api_tunerstatus["TUNERSTATUS"][0]["TunerChainStatus"]
if chain_status and len(chain_status) > 0:
offset = (
6 if int(chain_status[0]["HashchainIndex"]) in [6, 7, 8] else 0
)
for board in chain_status:
_id = board["HashchainIndex"] - offset
if board["Status"] not in [
"Stable",
"Testing performance profile",
"Tuning individual chips",
]:
_error = board["Status"].split(" {")[0]
_error = _error[0].lower() + _error[1:]
errors.append(BraiinsOSError(f"Slot {_id} {_error}"))
return errors
except LookupError:
pass
async def _get_fault_light(self, grpc_locate_device_status: dict = None) -> bool:
if self.light is not None:
return self.light
if grpc_locate_device_status is None:
try:
grpc_locate_device_status = (
await self.web.grpc.get_locate_device_status()
)
except APIError:
pass
if grpc_locate_device_status is not None:
if grpc_locate_device_status == {}:
return False
try:
return grpc_locate_device_status["enabled"]
except LookupError:
pass
async def _is_mining(self, api_devdetails: dict = None) -> Optional[bool]:
if api_devdetails is None:
try:
api_devdetails = await self.api.send_command(
"devdetails", ignore_errors=True, allow_warning=False
)
except APIError:
pass
if api_devdetails is not None:
try:
return not api_devdetails["STATUS"][0]["Msg"] == "Unavailable"
except LookupError:
pass
async def _get_uptime(self, api_summary: dict = None) -> Optional[int]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary is not None:
try:
return int(api_summary["SUMMARY"][0]["Elapsed"])
except LookupError:
pass
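
`BOSMiner.send_config` above serializes the miner configuration to `/etc/bosminer.toml`, writing a `format` table (version, generator, model, timestamp) ahead of the config tables before restarting bosminer over SSH. A rough sketch of just that serialization step; the pool tables below are placeholders standing in for `MinerConfig.as_bosminer()`, not bosminer's exact schema:

import time

import toml

# Placeholder tables standing in for MinerConfig.as_bosminer(user_suffix=...)
config_tables = {
    "group": [
        {
            "name": "group_1",
            "pool": [
                {"url": "stratum+tcp://example-pool:3333", "user": "worker.1"}
            ],
        }
    ]
}

toml_conf = toml.dumps(
    {
        "format": {
            "version": "1.2+",
            "generator": "pyasic",
            "model": "Antminer S19J Pro",  # placeholder model string
            "timestamp": int(time.time()),
        },
        **config_tables,
    }
)
print(toml_conf)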


@@ -15,102 +15,115 @@
# ------------------------------------------------------------------------------
import logging
import warnings
- from collections import namedtuple
- from typing import List, Optional, Tuple
+ from typing import List, Optional
- from pyasic.API.btminer import BTMinerAPI
- from pyasic.config import MinerConfig
+ from pyasic.config import MinerConfig, MiningModeConfig
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import MinerErrorData, WhatsminerError
from pyasic.errors import APIError
- from pyasic.miners.base import BaseMiner
+ from pyasic.miners.base import (
+ BaseMiner,
+ DataFunction,
+ DataLocations,
+ DataOptions,
+ RPCAPICommand,
+ )
+ from pyasic.rpc.btminer import BTMinerRPCAPI
BTMINER_DATA_LOC = {
"mac": {
"cmd": "get_mac",
"kwargs": {
"api_summary": {"api": "summary"},
"api_get_miner_info": {"api": "get_miner_info"},
},
},
"model": {"cmd": "get_model", "kwargs": {}},
"api_ver": {
"cmd": "get_api_ver",
"kwargs": {"api_get_version": {"api": "get_version"}},
},
"fw_ver": {
"cmd": "get_fw_ver",
"kwargs": {
"api_get_version": {"api": "get_version"},
"api_summary": {"api": "summary"},
},
},
"hostname": {
"cmd": "get_hostname",
"kwargs": {"api_get_miner_info": {"api": "get_miner_info"}},
},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_summary": {"api": "summary"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_summary": {"api": "summary"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_devs": {"api": "devs"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {"api_summary": {"api": "summary"}}},
"wattage": {"cmd": "get_wattage", "kwargs": {"api_summary": {"api": "summary"}}},
"wattage_limit": {
"cmd": "get_wattage_limit",
"kwargs": {"api_summary": {"api": "summary"}},
},
"fans": {
"cmd": "get_fans",
"kwargs": {
"api_summary": {"api": "summary"},
"api_get_psu": {"api": "get_psu"},
},
},
"fan_psu": {
"cmd": "get_fan_psu",
"kwargs": {
"api_summary": {"api": "summary"},
"api_get_psu": {"api": "get_psu"},
},
},
"errors": {
"cmd": "get_errors",
"kwargs": {
"api_summary": {"api": "summary"},
"api_get_error_code": {"api": "get_error_code"},
},
},
"fault_light": {
"cmd": "get_fault_light",
"kwargs": {"api_get_miner_info": {"api": "get_miner_info"}},
},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {"cmd": "is_mining", "kwargs": {"api_status": {"api": "status"}}},
"uptime": {
"cmd": "get_uptime",
"kwargs": {"api_summary": {"api": "summary"}},
},
}
BTMINER_DATA_LOC = DataLocations(
**{
str(DataOptions.MAC): DataFunction(
"_get_mac",
[
RPCAPICommand("api_summary", "summary"),
RPCAPICommand("api_get_miner_info", "get_miner_info"),
],
),
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_get_version", "get_version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[
RPCAPICommand("api_get_version", "get_version"),
RPCAPICommand("api_summary", "summary"),
],
),
str(DataOptions.HOSTNAME): DataFunction(
"_get_hostname",
[RPCAPICommand("api_get_miner_info", "get_miner_info")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_devs", "devs")],
),
str(DataOptions.ENVIRONMENT_TEMP): DataFunction(
"_get_env_temp",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.WATTAGE): DataFunction(
"_get_wattage",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.WATTAGE_LIMIT): DataFunction(
"_get_wattage_limit",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[
RPCAPICommand("api_summary", "summary"),
RPCAPICommand("api_get_psu", "get_psu"),
],
),
str(DataOptions.FAN_PSU): DataFunction(
"_get_fan_psu",
[
RPCAPICommand("api_summary", "summary"),
RPCAPICommand("api_get_psu", "get_psu"),
],
),
str(DataOptions.ERRORS): DataFunction(
"_get_errors",
[
RPCAPICommand("api_get_error_code", "get_error_code"),
RPCAPICommand("api_summary", "summary"),
],
),
str(DataOptions.FAULT_LIGHT): DataFunction(
"_get_fault_light",
[RPCAPICommand("api_get_miner_info", "get_miner_info")],
),
str(DataOptions.IS_MINING): DataFunction(
"_is_mining",
[RPCAPICommand("api_status", "status")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_summary", "summary")],
),
}
)
class BTMiner(BaseMiner):
- def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
- super().__init__(ip)
- # interfaces
- self.api = BTMinerAPI(ip, api_ver)
+ """Base handler for BTMiner based miners."""
- # static data
- self.api_type = "BTMiner"
- # data gathering locations
- self.data_locations = BTMINER_DATA_LOC
- # autotuning/shutdown support
- self.supports_shutdown = True
+ _api_cls = BTMinerRPCAPI
+ api: BTMinerRPCAPI
- # data storage
- self.api_ver = api_ver
+ data_locations = BTMINER_DATA_LOC
+ supports_shutdown = True
async def _reset_api_pwd_to_admin(self, pwd: str):
try:
@@ -198,44 +211,68 @@ class BTMiner(BaseMiner):
try:
await self.api.update_pools(**pools_conf)
if conf["mode"] == "normal":
await self.api.set_normal_power()
elif conf["mode"] == "high":
await self.api.set_high_power()
elif conf["mode"] == "low":
await self.api.set_low_power()
elif conf["mode"] == "power_tuning":
await self.api.adjust_power_limit(conf["power_tuning"]["wattage"])
except APIError:
pass
try:
await self.api.adjust_power_limit(conf["wattage"])
except APIError:
# cannot set wattage
# cannot update, no API access usually
pass
async def get_config(self) -> MinerConfig:
pools = None
summary = None
cfg = MinerConfig()
status = None
try:
data = await self.api.multicommand("pools", "summary")
data = await self.api.multicommand("pools", "summary", "status")
pools = data["pools"][0]
summary = data["summary"][0]
status = data["status"][0]
except APIError as e:
logging.warning(e)
except LookupError:
pass
if pools:
if "POOLS" in pools:
cfg = cfg.from_api(pools["POOLS"])
if pools is not None:
cfg = MinerConfig.from_api(pools)
else:
# somethings wrong with the miner
warnings.warn(
f"Failed to gather pool config for miner: {self}, miner did not return pool information."
)
if summary:
if "SUMMARY" in summary:
if wattage := summary["SUMMARY"][0].get("Power Limit"):
cfg.autotuning_wattage = wattage
cfg = MinerConfig()
self.config = cfg
is_mining = await self._is_mining(status)
if not is_mining:
cfg.mining_mode = MiningModeConfig.sleep()
return cfg
return self.config
if summary is not None:
mining_mode = None
try:
mining_mode = summary["SUMMARY"][0]["Power Mode"]
except LookupError:
pass
if mining_mode == "High":
cfg.mining_mode = MiningModeConfig.high()
return cfg
elif mining_mode == "Low":
cfg.mining_mode = MiningModeConfig.low()
return cfg
try:
power_lim = summary["SUMMARY"][0]["Power Limit"]
except LookupError:
power_lim = None
if power_lim is None:
cfg.mining_mode = MiningModeConfig.normal()
return cfg
cfg.mining_mode = MiningModeConfig.power_tuning(power_lim)
self.config = cfg
return self.config
async def set_power_limit(self, wattage: int) -> bool:
try:
@@ -250,57 +287,43 @@ class BTMiner(BaseMiner):
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def get_mac(
async def _get_mac(
self, api_summary: dict = None, api_get_miner_info: dict = None
) -> Optional[str]:
if not api_get_miner_info:
if api_get_miner_info is None:
try:
api_get_miner_info = await self.api.get_miner_info()
except APIError:
pass
if api_get_miner_info:
if api_get_miner_info is not None:
try:
mac = api_get_miner_info["Msg"]["mac"]
return str(mac).upper()
except KeyError:
pass
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
mac = api_summary["SUMMARY"][0]["MAC"]
return str(mac).upper()
except (KeyError, IndexError):
except LookupError:
pass
async def get_version(
self, api_get_version: dict = None, api_summary: dict = None
) -> Tuple[Optional[str], Optional[str]]:
miner_version = namedtuple("MinerVersion", "api_ver fw_ver")
api_ver = await self.get_api_ver(api_get_version=api_get_version)
fw_ver = await self.get_fw_ver(
api_get_version=api_get_version, api_summary=api_summary
)
return miner_version(api_ver, fw_ver)
async def get_api_ver(self, api_get_version: dict = None) -> Optional[str]:
# Check to see if the version info is already cached
if self.api_ver:
return self.api_ver
if not api_get_version:
async def _get_api_ver(self, api_get_version: dict = None) -> Optional[str]:
if api_get_version is None:
try:
api_get_version = await self.api.get_version()
except APIError:
pass
if api_get_version:
if api_get_version is not None:
if "Code" in api_get_version.keys():
if api_get_version["Code"] == 131:
try:
@@ -316,20 +339,16 @@ class BTMiner(BaseMiner):
return self.api_ver
async def get_fw_ver(
async def _get_fw_ver(
self, api_get_version: dict = None, api_summary: dict = None
) -> Optional[str]:
# Check to see if the version info is already cached
if self.fw_ver:
return self.fw_ver
if not api_get_version:
if api_get_version is None:
try:
api_get_version = await self.api.get_version()
except APIError:
pass
if api_get_version:
if api_get_version is not None:
if "Code" in api_get_version.keys():
if api_get_version["Code"] == 131:
try:
@@ -339,7 +358,7 @@ class BTMiner(BaseMiner):
else:
return self.fw_ver
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
@@ -350,20 +369,20 @@ class BTMiner(BaseMiner):
self.fw_ver = api_summary["SUMMARY"][0]["Firmware Version"].replace(
"'", ""
)
except (KeyError, IndexError):
except LookupError:
pass
return self.fw_ver
async def get_hostname(self, api_get_miner_info: dict = None) -> Optional[str]:
async def _get_hostname(self, api_get_miner_info: dict = None) -> Optional[str]:
hostname = None
if not api_get_miner_info:
if api_get_miner_info is None:
try:
api_get_miner_info = await self.api.get_miner_info()
except APIError:
return None # only one way to get this
if api_get_miner_info:
if api_get_miner_info is not None:
try:
hostname = api_get_miner_info["Msg"]["hostname"]
except KeyError:
@@ -371,228 +390,204 @@ class BTMiner(BaseMiner):
return hostname
async def get_hashrate(self, api_summary: dict = None) -> Optional[float]:
# get hr from API
if not api_summary:
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return round(float(api_summary["SUMMARY"][0]["MHS 1m"] / 1000000), 2)
except (KeyError, IndexError):
except LookupError:
pass
async def get_hashboards(self, api_devs: dict = None) -> List[HashBoard]:
async def _get_hashboards(self, api_devs: dict = None) -> List[HashBoard]:
hashboards = [
HashBoard(slot=i, expected_chips=self.nominal_chips)
for i in range(self.ideal_hashboards)
HashBoard(slot=i, expected_chips=self.expected_chips)
for i in range(self.expected_hashboards)
]
if not api_devs:
if api_devs is None:
try:
api_devs = await self.api.devs()
except APIError:
pass
if api_devs:
if api_devs is not None:
try:
for board in api_devs["DEVS"]:
if len(hashboards) < board["ASC"] + 1:
hashboards.append(
HashBoard(
slot=board["ASC"], expected_chips=self.nominal_chips
slot=board["ASC"], expected_chips=self.expected_chips
)
)
self.ideal_hashboards += 1
self.expected_hashboards += 1
hashboards[board["ASC"]].chip_temp = round(board["Chip Temp Avg"])
hashboards[board["ASC"]].temp = round(board["Temperature"])
hashboards[board["ASC"]].hashrate = round(
float(board["MHS 1m"] / 1000000), 2
)
hashboards[board["ASC"]].chips = board["Effective Chips"]
hashboards[board["ASC"]].serial_number = board["PCB SN"]
hashboards[board["ASC"]].missing = False
except (KeyError, IndexError):
except LookupError:
pass
return hashboards
async def get_env_temp(self, api_summary: dict = None) -> Optional[float]:
if not api_summary:
async def _get_env_temp(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return api_summary["SUMMARY"][0]["Env Temp"]
except (KeyError, IndexError):
except LookupError:
pass
async def get_wattage(self, api_summary: dict = None) -> Optional[int]:
if not api_summary:
async def _get_wattage(self, api_summary: dict = None) -> Optional[int]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return api_summary["SUMMARY"][0]["Power"]
except (KeyError, IndexError):
wattage = api_summary["SUMMARY"][0]["Power"]
return wattage if not wattage == -1 else None
except LookupError:
pass
async def get_wattage_limit(self, api_summary: dict = None) -> Optional[int]:
if not api_summary:
async def _get_wattage_limit(self, api_summary: dict = None) -> Optional[int]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return api_summary["SUMMARY"][0]["Power Limit"]
except (KeyError, IndexError):
except LookupError:
pass
async def get_fans(
async def _get_fans(
self, api_summary: dict = None, api_get_psu: dict = None
) -> List[Fan]:
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
fans = [Fan() for _ in range(self.fan_count)]
if api_summary:
fans = [Fan() for _ in range(self.expected_fans)]
if api_summary is not None:
try:
if self.fan_count > 0:
if self.expected_fans > 0:
fans = [
Fan(api_summary["SUMMARY"][0].get("Fan Speed In", 0)),
Fan(api_summary["SUMMARY"][0].get("Fan Speed Out", 0)),
]
except (KeyError, IndexError):
except LookupError:
pass
return fans
async def get_fan_psu(
async def _get_fan_psu(
self, api_summary: dict = None, api_get_psu: dict = None
) -> Optional[int]:
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return int(api_summary["SUMMARY"][0]["Power Fanspeed"])
except (KeyError, IndexError):
except LookupError:
pass
if not api_get_psu:
if api_get_psu is None:
try:
api_get_psu = await self.api.get_psu()
except APIError:
pass
if api_get_psu:
if api_get_psu is not None:
try:
return int(api_get_psu["Msg"]["fan_speed"])
except (KeyError, TypeError):
pass
async def get_pools(self, api_pools: dict = None) -> List[dict]:
groups = []
if not api_pools:
try:
api_pools = await self.api.pools()
except APIError:
pass
if api_pools:
try:
pools = {}
for i, pool in enumerate(api_pools["POOLS"]):
pools[f"pool_{i + 1}_url"] = (
pool["URL"]
.replace("stratum+tcp://", "")
.replace("stratum2+tcp://", "")
)
pools[f"pool_{i + 1}_user"] = pool["User"]
pools["quota"] = pool["Quota"] if pool.get("Quota") else "0"
groups.append(pools)
except KeyError:
pass
return groups
async def get_errors(
async def _get_errors(
self, api_summary: dict = None, api_get_error_code: dict = None
) -> List[MinerErrorData]:
errors = []
if not api_summary and not api_get_error_code:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
try:
for i in range(api_summary["SUMMARY"][0]["Error Code Count"]):
err = api_summary["SUMMARY"][0].get(f"Error Code {i}")
if err:
errors.append(WhatsminerError(error_code=err))
except (KeyError, IndexError, ValueError, TypeError):
pass
if not api_get_error_code:
if api_get_error_code is None and api_summary is None:
try:
api_get_error_code = await self.api.get_error_code()
except APIError:
pass
if api_get_error_code:
for err in api_get_error_code["Msg"]["error_code"]:
if isinstance(err, dict):
for code in err:
errors.append(WhatsminerError(error_code=int(code)))
else:
errors.append(WhatsminerError(error_code=int(err)))
if api_get_error_code is not None:
try:
for err in api_get_error_code["Msg"]["error_code"]:
if isinstance(err, dict):
for code in err:
errors.append(WhatsminerError(error_code=int(code)))
else:
errors.append(WhatsminerError(error_code=int(err)))
except KeyError:
pass
return errors
async def get_nominal_hashrate(self, api_summary: dict = None):
if not api_summary:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
nominal_hashrate = api_summary["SUMMARY"][0]["Factory GHS"]
if nominal_hashrate:
return round(nominal_hashrate / 1000, 2)
except (KeyError, IndexError):
for i in range(api_summary["SUMMARY"][0]["Error Code Count"]):
err = api_summary["SUMMARY"][0].get(f"Error Code {i}")
if err:
errors.append(WhatsminerError(error_code=err))
except (LookupError, ValueError, TypeError):
pass
return errors
async def _get_expected_hashrate(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
async def get_fault_light(self, api_get_miner_info: dict = None) -> bool:
if not api_get_miner_info:
if api_summary is not None:
try:
expected_hashrate = api_summary["SUMMARY"][0]["Factory GHS"]
if expected_hashrate:
return round(expected_hashrate / 1000, 2)
except LookupError:
pass
async def _get_fault_light(self, api_get_miner_info: dict = None) -> Optional[bool]:
if api_get_miner_info is None:
try:
api_get_miner_info = await self.api.get_miner_info()
except APIError:
if not self.light:
self.light = False
if api_get_miner_info:
if api_get_miner_info is not None:
try:
self.light = not (api_get_miner_info["Msg"]["ledstat"] == "auto")
except KeyError:
@@ -622,14 +617,14 @@ class BTMiner(BaseMiner):
async def set_hostname(self, hostname: str):
await self.api.set_hostname(hostname)
async def is_mining(self, api_status: dict = None) -> Optional[bool]:
if not api_status:
async def _is_mining(self, api_status: dict = None) -> Optional[bool]:
if api_status is None:
try:
api_status = await self.api.status()
except APIError:
pass
if api_status:
if api_status is not None:
try:
if api_status["Msg"].get("btmineroff"):
try:
@@ -641,14 +636,14 @@ class BTMiner(BaseMiner):
except LookupError:
pass
async def get_uptime(self, api_summary: dict = None) -> Optional[int]:
if not api_summary:
async def _get_uptime(self, api_summary: dict = None) -> Optional[int]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return int(api_summary["SUMMARY"][0]["Elapsed"])
except LookupError:
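
The reworked `BTMiner.get_config` above derives the mining mode from the `status` and `summary` responses: sleep when the miner is not mining, high or low for the named `Power Mode` values, power tuning when a `Power Limit` is present, and normal otherwise. A condensed sketch of that branch order as a pure function (hypothetical helper returning mode names as strings rather than `MiningModeConfig` objects):

from typing import Optional


def pick_mining_mode(
    is_mining: bool, power_mode: Optional[str], power_limit: Optional[int]
) -> str:
    # Mirrors the branch order in the new BTMiner.get_config: sleep beats
    # everything, then the named High/Low modes, then power tuning, then normal.
    if not is_mining:
        return "sleep"
    if power_mode == "High":
        return "high"
    if power_mode == "Low":
        return "low"
    if power_limit is None:
        return "normal"
    return f"power_tuning({power_limit} W)"


print(pick_mining_mode(True, None, 3400))  # -> power_tuning(3400 W)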


@@ -14,384 +14,128 @@
# limitations under the License. -
# ------------------------------------------------------------------------------
import logging
- from collections import namedtuple
- from typing import List, Optional, Tuple
+ from typing import Optional
- from pyasic.API.cgminer import CGMinerAPI
from pyasic.config import MinerConfig
from pyasic.data import Fan, HashBoard
from pyasic.data.error_codes import MinerErrorData
from pyasic.errors import APIError
- from pyasic.miners.base import BaseMiner
+ from pyasic.miners.base import (
+ BaseMiner,
+ DataFunction,
+ DataLocations,
+ DataOptions,
+ RPCAPICommand,
+ )
+ from pyasic.rpc.cgminer import CGMinerRPCAPI
CGMINER_DATA_LOC = {
"mac": {"cmd": "get_mac", "kwargs": {}},
"model": {"cmd": "get_model", "kwargs": {}},
"api_ver": {"cmd": "get_api_ver", "kwargs": {"api_version": {"api": "version"}}},
"fw_ver": {"cmd": "get_fw_ver", "kwargs": {"api_version": {"api": "version"}}},
"hostname": {"cmd": "get_hostname", "kwargs": {}},
"hashrate": {"cmd": "get_hashrate", "kwargs": {"api_summary": {"api": "summary"}}},
"nominal_hashrate": {
"cmd": "get_nominal_hashrate",
"kwargs": {"api_stats": {"api": "stats"}},
},
"hashboards": {"cmd": "get_hashboards", "kwargs": {"api_stats": {"api": "stats"}}},
"env_temp": {"cmd": "get_env_temp", "kwargs": {}},
"wattage": {"cmd": "get_wattage", "kwargs": {}},
"wattage_limit": {"cmd": "get_wattage_limit", "kwargs": {}},
"fans": {"cmd": "get_fans", "kwargs": {"api_stats": {"api": "stats"}}},
"fan_psu": {"cmd": "get_fan_psu", "kwargs": {}},
"errors": {"cmd": "get_errors", "kwargs": {}},
"fault_light": {"cmd": "get_fault_light", "kwargs": {}},
"pools": {"cmd": "get_pools", "kwargs": {"api_pools": {"api": "pools"}}},
"is_mining": {"cmd": "is_mining", "kwargs": {}},
"uptime": {
"cmd": "get_uptime",
"kwargs": {"api_stats": {"api": "stats"}},
},
}
CGMINER_DATA_LOC = DataLocations(
**{
str(DataOptions.API_VERSION): DataFunction(
"_get_api_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.FW_VERSION): DataFunction(
"_get_fw_ver",
[RPCAPICommand("api_version", "version")],
),
str(DataOptions.HASHRATE): DataFunction(
"_get_hashrate",
[RPCAPICommand("api_summary", "summary")],
),
str(DataOptions.EXPECTED_HASHRATE): DataFunction(
"_get_expected_hashrate",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.HASHBOARDS): DataFunction(
"_get_hashboards",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.FANS): DataFunction(
"_get_fans",
[RPCAPICommand("api_stats", "stats")],
),
str(DataOptions.UPTIME): DataFunction(
"_get_uptime",
[RPCAPICommand("api_stats", "stats")],
),
}
)
class CGMiner(BaseMiner):
- def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
- super().__init__(ip)
- # interfaces
- self.api = CGMinerAPI(ip, api_ver)
+ """Base handler for CGMiner based miners"""
- # static data
- self.api_type = "CGMiner"
- # data gathering locations
- self.data_locations = CGMINER_DATA_LOC
+ _api_cls = CGMinerRPCAPI
+ api: CGMinerRPCAPI
- # data storage
- self.api_ver = api_ver
async def send_ssh_command(self, cmd: str) -> Optional[str]:
result = None
try:
conn = await self._get_ssh_connection()
except ConnectionError:
return None
# open an ssh connection
async with conn:
# 3 retries
for i in range(3):
try:
# run the command and get the result
result = await conn.run(cmd)
result = result.stdout
except Exception as e:
# if the command fails, log it
logging.warning(f"{self} command {cmd} error: {e}")
# on the 3rd retry, return None
if i == 3:
return
continue
# return the result, either command output or None
return result
async def restart_backend(self) -> bool:
"""Restart cgminer hashing process. Wraps [`restart_cgminer`][pyasic.miners.backends.cgminer.CGMiner.restart_cgminer] to standardize."""
return await self.restart_cgminer()
async def restart_cgminer(self) -> bool:
"""Restart cgminer hashing process."""
commands = ["cgminer-api restart", "/usr/bin/cgminer-monitor >/dev/null 2>&1"]
commands = ";".join(commands)
ret = await self.send_ssh_command(commands)
if ret is None:
return False
return True
async def reboot(self) -> bool:
"""Reboots power to the physical miner."""
logging.debug(f"{self}: Sending reboot command.")
ret = await self.send_ssh_command("reboot")
if ret is None:
return False
return True
async def resume_mining(self) -> bool:
commands = [
"mkdir -p /etc/tmp/",
'echo "*/3 * * * * /usr/bin/cgminer-monitor" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"/usr/bin/cgminer-monitor >/dev/null 2>&1",
]
commands = ";".join(commands)
ret = await self.send_ssh_command(commands)
if ret is None:
return False
return True
async def stop_mining(self) -> bool:
commands = [
"mkdir -p /etc/tmp/",
'echo "" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"killall cgminer",
]
commands = ";".join(commands)
ret = await self.send_ssh_command(commands)
if ret is None:
return False
return True
data_locations = CGMINER_DATA_LOC
async def get_config(self) -> MinerConfig:
api_pools = await self.api.pools()
# get pool data
try:
pools = await self.api.pools()
except APIError:
return self.config
if api_pools:
self.config = MinerConfig().from_api(api_pools["POOLS"])
self.config = MinerConfig.from_api(pools)
return self.config
async def fault_light_off(self) -> bool:
return False
async def fault_light_on(self) -> bool:
return False
async def send_config(self, config: MinerConfig, user_suffix: str = None) -> None:
return None
async def set_power_limit(self, wattage: int) -> bool:
return False
##################################################
### DATA GATHERING FUNCTIONS (get_{some_data}) ###
##################################################
async def get_mac(self) -> Optional[str]:
return None
async def get_version(
self, api_version: dict = None
) -> Tuple[Optional[str], Optional[str]]:
miner_version = namedtuple("MinerVersion", "api_ver fw_ver")
return miner_version(
api_ver=await self.get_api_ver(api_version=api_version),
fw_ver=await self.get_fw_ver(api_version=api_version),
)
async def get_api_ver(self, api_version: dict = None) -> Optional[str]:
if self.api_ver:
return self.api_ver
if not api_version:
async def _get_api_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
self.api_ver = api_version["VERSION"][0]["API"]
except (KeyError, IndexError):
except LookupError:
pass
return self.api_ver
async def get_fw_ver(self, api_version: dict = None) -> Optional[str]:
if self.fw_ver:
return self.fw_ver
if not api_version:
async def _get_fw_ver(self, api_version: dict = None) -> Optional[str]:
if api_version is None:
try:
api_version = await self.api.version()
except APIError:
pass
if api_version:
if api_version is not None:
try:
self.fw_ver = api_version["VERSION"][0]["CGMiner"]
except (KeyError, IndexError):
except LookupError:
pass
return self.fw_ver
async def get_hostname(self) -> Optional[str]:
hn = await self.send_ssh_command("cat /proc/sys/kernel/hostname")
return hn
async def get_hashrate(self, api_summary: dict = None) -> Optional[float]:
# get hr from API
if not api_summary:
async def _get_hashrate(self, api_summary: dict = None) -> Optional[float]:
if api_summary is None:
try:
api_summary = await self.api.summary()
except APIError:
pass
if api_summary:
if api_summary is not None:
try:
return round(
float(float(api_summary["SUMMARY"][0]["GHS 5s"]) / 1000), 2
)
except (IndexError, KeyError, ValueError, TypeError):
except (LookupError, ValueError, TypeError):
pass
async def get_hashboards(self, api_stats: dict = None) -> List[HashBoard]:
hashboards = []
if not api_stats:
async def _get_uptime(self, api_stats: dict = None) -> Optional[int]:
if api_stats is None:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
try:
board_offset = -1
boards = api_stats["STATS"]
if len(boards) > 1:
for board_num in range(1, 16, 5):
for _b_num in range(5):
b = boards[1].get(f"chain_acn{board_num + _b_num}")
if b and not b == 0 and board_offset == -1:
board_offset = board_num
if board_offset == -1:
board_offset = 1
for i in range(board_offset, board_offset + self.ideal_hashboards):
hashboard = HashBoard(
slot=i - board_offset, expected_chips=self.nominal_chips
)
chip_temp = boards[1].get(f"temp{i}")
if chip_temp:
hashboard.chip_temp = round(chip_temp)
temp = boards[1].get(f"temp2_{i}")
if temp:
hashboard.temp = round(temp)
hashrate = boards[1].get(f"chain_rate{i}")
if hashrate:
hashboard.hashrate = round(float(hashrate) / 1000, 2)
chips = boards[1].get(f"chain_acn{i}")
if chips:
hashboard.chips = chips
hashboard.missing = False
if (not chips) or (not chips > 0):
hashboard.missing = True
hashboards.append(hashboard)
except (IndexError, KeyError, ValueError, TypeError):
pass
return hashboards
async def get_env_temp(self) -> Optional[float]:
return None
async def get_wattage(self) -> Optional[int]:
return None
async def get_wattage_limit(self) -> Optional[int]:
return None
async def get_fans(self, api_stats: dict = None) -> List[Fan]:
if not api_stats:
try:
api_stats = await self.api.stats()
except APIError:
pass
fans = [Fan() for _ in range(self.fan_count)]
if api_stats:
try:
fan_offset = -1
for fan_num in range(1, 8, 4):
for _f_num in range(4):
f = api_stats["STATS"][1].get(f"fan{fan_num + _f_num}")
if f and not f == 0 and fan_offset == -1:
fan_offset = fan_num
if fan_offset == -1:
fan_offset = 1
for fan in range(self.fan_count):
fans[fan].speed = api_stats["STATS"][1].get(
f"fan{fan_offset+fan}", 0
)
except (KeyError, IndexError):
pass
return fans
async def get_fan_psu(self) -> Optional[int]:
return None
async def get_pools(self, api_pools: dict = None) -> List[dict]:
groups = []
if not api_pools:
try:
api_pools = await self.api.pools()
except APIError:
pass
if api_pools:
try:
pools = {}
for i, pool in enumerate(api_pools["POOLS"]):
pools[f"pool_{i + 1}_url"] = (
pool["URL"]
.replace("stratum+tcp://", "")
.replace("stratum2+tcp://", "")
)
pools[f"pool_{i + 1}_user"] = pool["User"]
pools["quota"] = pool["Quota"] if pool.get("Quota") else "0"
groups.append(pools)
except KeyError:
pass
return groups
async def get_errors(self) -> List[MinerErrorData]:
return []
async def get_fault_light(self) -> bool:
return False
async def get_nominal_hashrate(self, api_stats: dict = None) -> Optional[float]:
# X19 method, not sure compatibility
if not api_stats:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
try:
ideal_rate = api_stats["STATS"][1]["total_rateideal"]
try:
rate_unit = api_stats["STATS"][1]["rate_unit"]
except KeyError:
rate_unit = "GH"
if rate_unit == "GH":
return round(ideal_rate / 1000, 2)
if rate_unit == "MH":
return round(ideal_rate / 1000000, 2)
else:
return round(ideal_rate, 2)
except (KeyError, IndexError):
pass
async def is_mining(self, *args, **kwargs) -> Optional[bool]:
return None
async def get_uptime(self, api_stats: dict = None) -> Optional[int]:
if not api_stats:
try:
api_stats = await self.api.stats()
except APIError:
pass
if api_stats:
if api_stats is not None:
try:
return int(api_stats["STATS"][1]["Elapsed"])
except LookupError:
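
Both backends above replace the old `{"cmd": ..., "kwargs": ...}` dictionaries with `DataLocations` entries that pair a private `_get_*` method name with the RPC commands feeding it. A toy, self-contained mock of that dispatch idea (stand-in classes only; the real `DataLocations`/`DataFunction` in `pyasic.miners.base` may differ):

from dataclasses import dataclass
from typing import List


@dataclass
class RPCAPICommand:
    name: str   # kwarg name passed to the handler method, e.g. "api_summary"
    cmd: str    # RPC command to run, e.g. "summary"


@dataclass
class DataFunction:
    cmd: str                        # handler method name, e.g. "_get_hashrate"
    kwargs: List[RPCAPICommand]     # commands whose results feed that method


class ToyMiner:
    data_locations = {
        "hashrate": DataFunction(
            "_get_hashrate", [RPCAPICommand("api_summary", "summary")]
        ),
    }

    def _rpc(self, cmd: str) -> dict:
        # Stand-in for the real RPC call.
        return {"SUMMARY": [{"MHS 1m": 95_000_000}]}

    def _get_hashrate(self, api_summary: dict = None) -> float:
        return round(api_summary["SUMMARY"][0]["MHS 1m"] / 1_000_000, 2)

    def gather(self, key: str) -> float:
        loc = self.data_locations[key]
        kwargs = {c.name: self._rpc(c.cmd) for c in loc.kwargs}
        return getattr(self, loc.cmd)(**kwargs)


print(ToyMiner().gather("hashrate"))  # -> 95.0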

Some files were not shown because too many files have changed in this diff