Compare commits

...

72 Commits

Author SHA1 Message Date
UpstreamData
d8b938cd5b version: bump version number. 2023-11-27 10:39:10 -07:00
UpstreamData
47d76e325a docs: update docs to include new information. 2023-11-27 10:37:38 -07:00
UpstreamData
7ee7868094 feature: change so_linger_time to socket_linger_time. 2023-11-27 10:19:30 -07:00
UpstreamData
3f1183a4f9 feature: add so_linger option to settings. 2023-11-27 09:24:28 -07:00
Upstream Data
2b443497ea bug: improve handling of whatsminers in get_miner. 2023-11-25 12:48:45 -07:00
Upstream Data
c3972f9524 feature: add default ssl ctx to all httpx clients to speed up initialization. 2023-11-25 01:08:04 -07:00
Upstream Data
92bbb21c11 bug: handle OSError as ConnectionError, and handle Vnish Msg bug because of missing id key. 2023-11-25 00:05:03 -07:00
Upstream Data
1980ff2563 bug: do additional checks on refused connection when scanning. 2023-11-24 23:32:22 -07:00
Upstream Data
93d09a1483 bug: handle unhandled error in pings. 2023-11-24 23:28:16 -07:00
Upstream Data
690d0d99df feature: added new setting for api command timeouts. 2023-11-24 23:19:14 -07:00
Upstream Data
78f689eb2c feature: update scan method to use port 80 when possible, and add .scan() method. 2023-11-24 23:13:56 -07:00
Upstream Data
e68f188e8f bug: fix timeout references in MinerFactory and fix MinerNetwork instantiation. 2023-11-24 23:00:06 -07:00
Upstream Data
7eda611fe9 bug: fix scanning size being too large. 2023-11-24 22:50:43 -07:00
Upstream Data
1d12817942 feature: improve network parsing and implementation. 2023-11-24 22:38:43 -07:00
UpstreamData
b24efd4c69 bug: fix miner network not working with lists. 2023-11-24 13:27:09 -07:00
UpstreamData
5533135b05 docs: update supported miners. 2023-11-23 11:54:01 -07:00
UpstreamData
475054fbe0 feature: finish support for most whatsminer subtypes. 2023-11-23 11:52:44 -07:00
Daniel Sokil
06bad1bbe0 Add More Whatsminer Models, and Additional Config For Existing Models (#70) 2023-11-23 11:35:18 -07:00
UpstreamData
f3746ff756 version: bump version number. 2023-11-20 11:19:45 -07:00
UpstreamData
9f16d37c8b feature: hide GRPC and GQL if BOSer is not found. 2023-11-20 11:19:13 -07:00
UpstreamData
8a13c7940a docs: update pyproject.toml description. 2023-11-20 10:33:25 -07:00
UpstreamData
8bea76ff67 feature: add chip count for M30S+VG50. 2023-11-20 10:32:51 -07:00
Upstream Data
1504bd744c version: bump version number. 2023-11-18 22:45:38 -07:00
Upstream Data
6449f10615 feature: implement GRPC set commands properly. 2023-11-18 22:45:09 -07:00
UpstreamData
d79509bda7 version: bump version number. Pin httpx to 0.25.0 min. 2023-11-12 18:36:45 -07:00
UpstreamData
630b847466 version: bump version number. Pin httpx to 0.25.0 min. 2023-11-12 18:35:52 -07:00
Colin Crossman
ed11611919 Bump version number
Note: some issues with HTTPX may be resolved by using 1.0.0b, but I did not bump the requirement at this time to the beta.
2023-11-11 13:59:14 -07:00
Colin Crossman
e2431c938d Address unknown password issue on Whatsminers
When a whatsminer had an unknown password (not the default one), it would result in a timeout error. By moving the password check to before the data pull step, the timeout issue can be caught and addressed efficiently.
2023-11-11 13:52:04 -07:00
Colin Crossman
60f4b4a5ed Address a situation which causes many asyncio errors 2023-11-11 13:49:51 -07:00
UpstreamData
d41097af20 version: bump version number. 2023-11-08 11:13:24 -07:00
UpstreamData
8a5d505731 bug: fix anyio stream error on some linux distros when getting miner. 2023-11-08 11:12:46 -07:00
UpstreamData
36e76c6f41 Add support for the grpc set_performance_mode command. 2023-11-07 08:54:06 -07:00
UpstreamData
717b9421dd Merge branch 'dev_grpc'
# Conflicts:
#	pyasic/web/bosminer.py
#	pyproject.toml
2023-10-30 16:36:06 -06:00
UpstreamData
d2f71e8c94 version: bump version number. 2023-10-30 16:34:05 -06:00
UpstreamData
697991f28f bug: fix some cases where a warning could still be passed when it was unexpected. 2023-10-30 16:33:01 -06:00
UpstreamData
b0e18ab766 feature: implement most of the GRPC functions for BOS, except for some configuration options which will require complex enums. 2023-10-27 16:35:09 -06:00
UpstreamData
e39a6921d0 refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-26 10:28:59 -06:00
UpstreamData
aac1be0565 feature: refactor BOS web class into multiple classes. 2023-10-26 10:28:57 -06:00
UpstreamData
683fcb2138 version: bump version number. 2023-10-26 10:28:56 -06:00
UpstreamData
9fbbef9b18 bug: fix an issue with bosminer not responding correctly on older models with fans. 2023-10-26 10:28:56 -06:00
UpstreamData
6e0b9a0a7b version: bump version number. 2023-10-26 10:28:56 -06:00
UpstreamData
7f472f6f4f bug: fix possible missing value for bitmain work mode when checking is_mining. 2023-10-26 10:28:55 -06:00
UpstreamData
b7d7b33ab9 bug: round hashrate data in MinerData, and remove some unused imports. 2023-10-26 10:28:55 -06:00
UpstreamData
da11c0bb1f version: bump version number. 2023-10-26 10:28:55 -06:00
UpstreamData
eae433d2bd bug: update get_miner to work with latest whatsminer version. 2023-10-26 10:28:54 -06:00
UpstreamData
c16bc37aff refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-26 10:28:53 -06:00
UpstreamData
255b06ac9e version: bump version number. 2023-10-23 13:01:44 -06:00
UpstreamData
29ec619126 bug: fix an issue with bosminer not responding correctly on older models with fans. 2023-10-23 12:59:52 -06:00
UpstreamData
247def04ff version: bump version number. 2023-10-12 13:19:38 -06:00
UpstreamData
4600e7d953 bug: fix possible missing value for bitmain work mode when checking is_mining. 2023-10-12 13:19:11 -06:00
UpstreamData
850c266555 bug: round hashrate data in MinerData, and remove some unused imports. 2023-10-10 13:59:28 -06:00
UpstreamData
ad374fe2fb version: bump version number. 2023-10-05 10:18:10 -06:00
UpstreamData
5ca39b6fe7 bug: update get_miner to work with latest whatsminer version. 2023-10-05 10:17:45 -06:00
UpstreamData
b50dd26e6f feature: refactor BOS web class into multiple classes. 2023-10-03 15:07:39 -06:00
UpstreamData
53eaccaa9b docs: update documentation. 2023-10-03 15:07:39 -06:00
UpstreamData
91f207316a version: bump version number. 2023-10-03 15:07:39 -06:00
UpstreamData
1e37418909 bug: fix some issues with early version of whatsminers, and handle some possible errors with BOS. 2023-10-03 15:07:38 -06:00
UpstreamData
4c09ba5529 version: bump version number. 2023-10-03 15:07:38 -06:00
UpstreamData
7bab4747ad refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-03 15:07:37 -06:00
UpstreamData
fd8cc7378c version: bump version number. 2023-10-03 15:07:33 -06:00
UpstreamData
8aeef4d5e7 feature: add support for M20P, and add chips for M20SV30. 2023-10-03 15:07:33 -06:00
UpstreamData
4bafde9da7 docs: update documentation. 2023-10-03 14:59:25 -06:00
UpstreamData
5a3107aecf version: bump version number. 2023-10-03 11:12:11 -06:00
UpstreamData
7e758720f0 bug: fix some issues with early version of whatsminers, and handle some possible errors with BOS. 2023-10-03 11:11:32 -06:00
UpstreamData
39e3e249f8 version: bump version number. 2023-10-02 13:14:21 -06:00
UpstreamData
118c5b056e refactor: improve settings handling to not use a dataclass, and not use singleton. 2023-10-02 13:13:31 -06:00
UpstreamData
75d6bc6808 version: bump version number. 2023-09-28 15:49:23 -06:00
UpstreamData
98c547e416 bug: Add new commands added in whatsminer API 2.0.5. 2023-09-28 15:49:23 -06:00
UpstreamData
45250e36e4 bug: fix whatsminer identification to work with backwards incompatible changes in API 2.0.5. 2023-09-28 15:49:23 -06:00
UpstreamData
fa7544d052 Update README.md 2023-09-28 15:49:22 -06:00
UpstreamData
14f33a40c3 feature: add grpc BOS class and add grpc requests to requirements. 2023-09-22 09:44:25 -06:00
UpstreamData
5c904aced0 feature: refactor BOS web class into multiple classes. 2023-09-22 09:32:59 -06:00
88 changed files with 2434 additions and 953 deletions

View File

@@ -19,7 +19,7 @@ Getting started with pyasic is easy. First, find your miner (or miners) on the
## Scanning for miners
To scan for miners in pyasic, we use the class [`MinerNetwork`][pyasic.network.MinerNetwork], which abstracts the search, communication, identification, setup, and return of a miner into a single command.
The command [`MinerNetwork().scan_network_for_miners()`][pyasic.network.MinerNetwork.scan_network_for_miners] returns a list that contains any miners found.
The command [`MinerNetwork.scan()`][pyasic.network.MinerNetwork.scan] returns a list that contains any miners found.
```python
import asyncio # asyncio for handling the async part
from pyasic.network import MinerNetwork # miner network handles the scanning
@@ -28,7 +28,7 @@ from pyasic.network import MinerNetwork # miner network handles the scanning
async def scan_miners(): # define async scan function to allow awaiting
# create a miner network
# you can pass in any IP and it will use that in a subnet with a /24 mask (255 IPs).
network = MinerNetwork("192.168.1.50") # this uses the 192.168.1.0-255 network
network = MinerNetwork.from_subnet("192.168.1.50/24") # this uses the 192.168.1.0-255 network
# scan for miners asynchronously
# this will return the correct type of miners if they are supported with all functionality.
@@ -263,7 +263,7 @@ Pyasic implements a few dataclasses as helpers to make data return types consist
[`MinerData`][pyasic.data.MinerData] is a return from the [`get_data()`](#get-data) function, and is used to have a consistent dataset across all returns.
You can call [`MinerData.asdict()`][pyasic.data.MinerData.asdict] to get the dataclass as a dictionary, and there are many other helper functions contained in the class to convert to different data formats.
You can call [`MinerData.as_dict()`][pyasic.data.MinerData.as_dict] to get the dataclass as a dictionary, and there are many other helper functions contained in the class to convert to different data formats.
[`MinerData`][pyasic.data.MinerData] instances can also be added to each other to combine their data and can be divided by a number to divide all their data, allowing you to get average data from many miners by doing -
```python
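# A hedged sketch (not shown in this diff): average the data from several miners,
# assuming MinerData supports "+" and "/" as described above.
import asyncio
import pyasic

async def average_data(ips):
    miners = await pyasic.miner_factory.get_multiple_miners(ips)
    all_data = await asyncio.gather(*[miner.get_data() for miner in miners])
    combined = all_data[0]
    for data in all_data[1:]:
        combined = combined + data
    return combined / len(all_data)

print(asyncio.run(average_data(["192.168.1.75", "192.168.1.76"])))
```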
@@ -288,3 +288,27 @@ It is the return from [`get_config()`](#get-config).
Each miner has a unique way to convert the [`MinerConfig`][pyasic.config.MinerConfig] to its specific format; there are helper functions in the class for this.
In most cases these helper functions should not be used, as [`send_config()`](#send-config) takes a [`MinerConfig`][pyasic.config.MinerConfig] and will do the conversion to the right type for you.
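As a rough sketch of that flow (not part of this diff; the IPs are placeholders), pulling the config from one miner and sending it to another:
```python
import asyncio
import pyasic

async def copy_config(src_ip: str, dst_ip: str):
    src = await pyasic.get_miner(src_ip)
    dst = await pyasic.get_miner(dst_ip)
    config = await src.get_config()  # returns a MinerConfig
    # send_config() converts the MinerConfig to whatever format dst expects
    await dst.send_config(config)

asyncio.run(copy_config("192.168.1.20", "192.168.1.21"))
```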
## Settings
`pyasic` has settings designed to make using large groups of miners easier. You can set the default password for all types of miners using the [`pyasic.settings`][pyasic.settings] module, used as follows:
```python
from pyasic import settings
settings.update("default_antminer_password", "my_pwd")
```
Here are all of the settings and their default values:
```
"network_ping_retries": 1,
"network_ping_timeout": 3,
"network_scan_threads": 300,
"factory_get_retries": 1,
"get_data_retries": 1,
"default_whatsminer_password": "admin",
"default_innosilicon_password": "admin",
"default_antminer_password": "root",
"default_bosminer_password": "root",
"default_vnish_password": "admin",
"default_goldshell_password": "123456789",
```

View File

@@ -29,6 +29,20 @@
show_root_heading: false
heading_level: 4
## S19i
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19i
handler: python
options:
show_root_heading: false
heading_level: 4
## S19+
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19Plus
handler: python
options:
show_root_heading: false
heading_level: 4
## S19j No PIC
::: pyasic.miners.antminer.bmminer.X19.S19.BMMinerS19jNoPIC
handler: python
@@ -113,6 +127,13 @@
show_root_heading: false
heading_level: 4
## S19j Pro (BOS)
::: pyasic.miners.antminer.bosminer.X19.S19.BOSMinerS19jPro
handler: python
options:
show_root_heading: false
heading_level: 4
## T19 (BOS)
::: pyasic.miners.antminer.bosminer.X19.T19.BOSMinerT19
handler: python

View File

@@ -1,6 +1,14 @@
# pyasic
## Miner Factory
[`MinerFactory`][pyasic.miners.miner_factory.MinerFactory] is the way to create miner types in `pyasic`. The most important method is [`get_miner()`][pyasic.get_miner], which is mapped to [`pyasic.get_miner()`][pyasic.get_miner], and should be used from there.
The instance used for [`pyasic.get_miner()`][pyasic.get_miner] is `pyasic.miner_factory`.
[`MinerFactory`][pyasic.MinerFactory] also keeps a cache, which can be cleared if needed with `pyasic.miner_factory.clear_cached_miners()`.
Finally, there is functionality to get multiple miners without using `asyncio.gather()` explicitly. Use `pyasic.miner_factory.get_multiple_miners()` with a list of IPs as strings to get a list of miner instances. You can also get multiple miners with an `AsyncGenerator` by using `pyasic.miner_factory.get_miner_generator()`.
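A rough sketch of those entry points (not part of this diff; the IPs are placeholders):
```python
import asyncio
import pyasic

async def main():
    # a single miner through the module-level factory instance
    miner = await pyasic.get_miner("192.168.1.75")
    print(miner)

    # several miners at once, without calling asyncio.gather() yourself
    miners = await pyasic.miner_factory.get_multiple_miners(
        ["192.168.1.75", "192.168.1.76"]
    )
    print(miners)

    # or stream them as they resolve
    async for found in pyasic.miner_factory.get_miner_generator(
        ["192.168.1.75", "192.168.1.76"]
    ):
        print(found)

    # drop the identification cache if miners have been swapped or re-flashed
    pyasic.miner_factory.clear_cached_miners()

asyncio.run(main())
```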
::: pyasic.miners.miner_factory.MinerFactory
handler: python
options:

View File

@@ -10,9 +10,6 @@ details {
padding-top:0px;
padding-bottom:0px;
}
ul {
margin:0px;
}
</style>
<details>
@@ -73,6 +70,8 @@ ul {
<li><a href="../antminer/X19#s19l">S19L</a></li>
<li><a href="../antminer/X19#s19-pro">S19 Pro</a></li>
<li><a href="../antminer/X19#s19j">S19j</a></li>
<li><a href="../antminer/X19#s19i">S19i</a></li>
<li><a href="../antminer/X19#s19_1">S19+</a></li>
<li><a href="../antminer/X19#s19j-no-pic">S19j No PIC</a></li>
<li><a href="../antminer/X19#s19-pro_1">S19 Pro+</a></li>
<li><a href="../antminer/X19#s19j-pro">S19j Pro</a></li>
@@ -94,6 +93,8 @@ ul {
<li><a href="../whatsminer/M2X#m20s-v10">M20S V10</a></li>
<li><a href="../whatsminer/M2X#m20s-v20">M20S V20</a></li>
<li><a href="../whatsminer/M2X#m20s-v30">M20S V30</a></li>
<li><a href="../whatsminer/M2X#m20p-v10">M20P V10</a></li>
<li><a href="../whatsminer/M2X#m20p-v30">M20P V30</a></li>
<li><a href="../whatsminer/M2X#m20s_1-v30">M20S+ V30</a></li>
<li><a href="../whatsminer/M2X#m21-v10">M21 V10</a></li>
<li><a href="../whatsminer/M2X#m21s-v20">M21S V20</a></li>
@@ -108,6 +109,8 @@ ul {
<ul>
<li><a href="../whatsminer/M3X#m30-v10">M30 V10</a></li>
<li><a href="../whatsminer/M3X#m30-v20">M30 V20</a></li>
<li><a href="../whatsminer/M3X#m30k-v10">M30K V10</a></li>
<li><a href="../whatsminer/M3X#m30l-v10">M30L V10</a></li>
<li><a href="../whatsminer/M3X#m30s-v10">M30S V10</a></li>
<li><a href="../whatsminer/M3X#m30s-v20">M30S V20</a></li>
<li><a href="../whatsminer/M3X#m30s-v30">M30S V30</a></li>
@@ -157,6 +160,7 @@ ul {
<li><a href="../whatsminer/M3X#m30s_1-ve100">M30S+ VE100</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vf20">M30S+ VF20</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vf30">M30S+ VF30</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg20">M30S+ VG20</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg30">M30S+ VG30</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg40">M30S+ VG40</a></li>
<li><a href="../whatsminer/M3X#m30s_1-vg50">M30S+ VG50</a></li>
@@ -190,6 +194,9 @@ ul {
<li><a href="../whatsminer/M3X#m30s_1_1-vj30">M30S++ VJ30</a></li>
<li><a href="../whatsminer/M3X#m31-v10">M31 V10</a></li>
<li><a href="../whatsminer/M3X#m31-v20">M31 V20</a></li>
<li><a href="../whatsminer/M3X#m31h-v10">M31H V10</a></li>
<li><a href="../whatsminer/M3X#m31h-v40">M31H V40</a></li>
<li><a href="../whatsminer/M3X#m30l-v10">M30L V10</a></li>
<li><a href="../whatsminer/M3X#m31s-v10">M31S V10</a></li>
<li><a href="../whatsminer/M3X#m31s-v20">M31S V20</a></li>
<li><a href="../whatsminer/M3X#m31s-v30">M31S V30</a></li>
@@ -205,7 +212,6 @@ ul {
<li><a href="../whatsminer/M3X#m31se-v10">M31SE V10</a></li>
<li><a href="../whatsminer/M3X#m31se-v20">M31SE V20</a></li>
<li><a href="../whatsminer/M3X#m31se-v30">M31SE V30</a></li>
<li><a href="../whatsminer/M3X#m31h-v40">M31H V40</a></li>
<li><a href="../whatsminer/M3X#m31s_1-v10">M31S+ V10</a></li>
<li><a href="../whatsminer/M3X#m31s_1-v20">M31S+ V20</a></li>
<li><a href="../whatsminer/M3X#m31s_1-v30">M31S+ V30</a></li>
@@ -232,6 +238,7 @@ ul {
<li><a href="../whatsminer/M3X#m33-v20">M33 V20</a></li>
<li><a href="../whatsminer/M3X#m33-v30">M33 V30</a></li>
<li><a href="../whatsminer/M3X#m33s-vg30">M33S VG30</a></li>
<li><a href="../whatsminer/M3X#m33s_1-vg20">M33S+ VG20</a></li>
<li><a href="../whatsminer/M3X#m33s_1-vh20">M33S+ VH20</a></li>
<li><a href="../whatsminer/M3X#m33s_1-vh30">M33S+ VH30</a></li>
<li><a href="../whatsminer/M3X#m33s_1_1-vh20">M33S++ VH20</a></li>
@@ -241,12 +248,15 @@ ul {
<li><a href="../whatsminer/M3X#m36s-ve10">M36S VE10</a></li>
<li><a href="../whatsminer/M3X#m36s_1-vg30">M36S+ VG30</a></li>
<li><a href="../whatsminer/M3X#m36s_1_1-vh30">M36S++ VH30</a></li>
<li><a href="../whatsminer/M3X#m39-v10">M39 V10</a></li>
<li><a href="../whatsminer/M3X#m39-v20">M39 V20</a></li>
<li><a href="../whatsminer/M3X#m39-v30">M39 V30</a></li>
</ul>
</details>
<details>
<summary>M5X Series:</summary>
<ul>
<li><a href="../whatsminer/M5X#m50-ve30">M50 VE30</a></li>
<li><a href="../whatsminer/M5X#m50-vg30">M50 VG30</a></li>
<li><a href="../whatsminer/M5X#m50-vh10">M50 VH10</a></li>
<li><a href="../whatsminer/M5X#m50-vh20">M50 VH20</a></li>
@@ -397,6 +407,7 @@ ul {
<li><a href="../antminer/X19#s19j-bos">S19j (BOS)</a></li>
<li><a href="../antminer/X19#s19j-no-pic-bos">S19j No PIC (BOS)</a></li>
<li><a href="../antminer/X19#s19j-pro-bos">S19j Pro (BOS)</a></li>
<li><a href="../antminer/X19#s19j-pro-bos">S19j Pro (BOS)</a></li>
<li><a href="../antminer/X19#t19-bos">T19 (BOS)</a></li>
</ul>
</details>

View File

@@ -29,6 +29,20 @@
show_root_heading: false
heading_level: 4
## M20P V10
::: pyasic.miners.whatsminer.btminer.M2X.M20P.BTMinerM20PV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M20P V30
::: pyasic.miners.whatsminer.btminer.M2X.M20P.BTMinerM20PV30
handler: python
options:
show_root_heading: false
heading_level: 4
## M20S+ V30
::: pyasic.miners.whatsminer.btminer.M2X.M20S_Plus.BTMinerM20SPlusV30
handler: python

View File

@@ -15,6 +15,20 @@
show_root_heading: false
heading_level: 4
## M30K V10
::: pyasic.miners.whatsminer.btminer.M3X.M30K.BTMinerM30KV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M30L V10
::: pyasic.miners.whatsminer.btminer.M3X.M30L.BTMinerM30LV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S V10
::: pyasic.miners.whatsminer.btminer.M3X.M30S.BTMinerM30SV10
handler: python
@@ -358,6 +372,13 @@
show_root_heading: false
heading_level: 4
## M30S+ VG20
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVG20
handler: python
options:
show_root_heading: false
heading_level: 4
## M30S+ VG30
::: pyasic.miners.whatsminer.btminer.M3X.M30S_Plus.BTMinerM30SPlusVG30
handler: python
@@ -589,6 +610,27 @@
show_root_heading: false
heading_level: 4
## M31H V10
::: pyasic.miners.whatsminer.btminer.M3X.M31H.BTMinerM31HV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M31H V40
::: pyasic.miners.whatsminer.btminer.M3X.M31H.BTMinerM31HV40
handler: python
options:
show_root_heading: false
heading_level: 4
## M30L V10
::: pyasic.miners.whatsminer.btminer.M3X.M31L.BTMinerM31LV10
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S V10
::: pyasic.miners.whatsminer.btminer.M3X.M31S.BTMinerM31SV10
handler: python
@@ -694,13 +736,6 @@
show_root_heading: false
heading_level: 4
## M31H V40
::: pyasic.miners.whatsminer.btminer.M3X.M31H.BTMinerM31HV40
handler: python
options:
show_root_heading: false
heading_level: 4
## M31S+ V10
::: pyasic.miners.whatsminer.btminer.M3X.M31S_Plus.BTMinerM31SPlusV10
handler: python
@@ -883,6 +918,13 @@
show_root_heading: false
heading_level: 4
## M33S+ VG20
::: pyasic.miners.whatsminer.btminer.M3X.M33S_Plus.BTMinerM33SPlusVG20
handler: python
options:
show_root_heading: false
heading_level: 4
## M33S+ VH20
::: pyasic.miners.whatsminer.btminer.M3X.M33S_Plus.BTMinerM33SPlusVH20
handler: python
@@ -946,6 +988,13 @@
show_root_heading: false
heading_level: 4
## M39 V10
::: pyasic.miners.whatsminer.btminer.M3X.M39.BTMinerM39V10
handler: python
options:
show_root_heading: false
heading_level: 4
## M39 V20
::: pyasic.miners.whatsminer.btminer.M3X.M39.BTMinerM39V20
handler: python
@@ -953,3 +1002,10 @@
show_root_heading: false
heading_level: 4
## M39 V30
::: pyasic.miners.whatsminer.btminer.M3X.M39.BTMinerM39V30
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -1,6 +1,13 @@
# pyasic
## M5X Models
## M50 VE30
::: pyasic.miners.whatsminer.btminer.M5X.M50.BTMinerM50VE30
handler: python
options:
show_root_heading: false
heading_level: 4
## M50 VG30
::: pyasic.miners.whatsminer.btminer.M5X.M50.BTMinerM50VG30
handler: python

View File

@@ -1,12 +0,0 @@
# pyasic
## Miner Network Range
[`MinerNetworkRange`][pyasic.network.net_range.MinerNetworkRange] is a class used by [`MinerNetwork`][pyasic.network.MinerNetwork] to handle any constructor strings.
The goal is to emulate what is produced by `ipaddress.ip_network` by allowing [`MinerNetwork`][pyasic.network.MinerNetwork] to get a list of hosts.
This allows the class to serve as [`MinerNetwork.network`][pyasic.network.MinerNetwork] and hence be used for scanning.
::: pyasic.network.net_range.MinerNetworkRange
handler: python
options:
show_root_heading: false
heading_level: 4

docs/settings/settings.md (Normal file, 23 lines)
View File

@@ -0,0 +1,23 @@
# pyasic
## settings
::: pyasic.settings
handler: python
options:
show_root_heading: false
heading_level: 4
### get
::: pyasic.settings.get
handler: python
options:
show_root_heading: false
heading_level: 4
### update
::: pyasic.settings.update
handler: python
options:
show_root_heading: false
heading_level: 4

View File

@@ -53,7 +53,8 @@ nav:
- Goldshell X5: "miners/goldshell/X5.md"
- Goldshell XMax: "miners/goldshell/XMax.md"
- Base Miner: "miners/base_miner.md"
- Settings:
- Settings: "settings/settings.md"
plugins:
- mkdocstrings

View File

@@ -20,7 +20,7 @@ import json
import logging
import re
import warnings
from typing import Tuple, Union
from typing import Union
from pyasic.errors import APIError, APIWarning
@@ -83,15 +83,15 @@ class BaseMinerAPI:
data = self._load_api_data(data)
# check for if the user wants to allow errors to return
if not ignore_errors:
# validate the command succeeded
validation = self._validate_command_output(data)
if not validation[0]:
if allow_warning:
logging.warning(
f"{self.ip}: API Command Error: {command}: {validation[1]}"
)
validation = self._validate_command_output(data)
if not validation[0]:
if not ignore_errors:
# validate the command succeeded
raise APIError(validation[1])
if allow_warning:
logging.warning(
f"{self.ip}: API Command Error: {command}: {validation[1]}"
)
logging.debug(f"{self} - (Send Command) - Received data.")
return data
@@ -118,11 +118,12 @@ class BaseMinerAPI:
data = await self.send_command(command, allow_warning=allow_warning)
except APIError as e:
# try to identify the error
if ":" in e.message:
err_command = e.message.split(":")[0]
if err_command in commands:
commands.remove(err_command)
continue
if e.message is not None:
if ":" in e.message:
err_command = e.message.split(":")[0]
if err_command in commands:
commands.remove(err_command)
continue
return {command: [{}] for command in commands}
logging.debug(f"{self} - (Multicommand) - Received data")
data["multicommand"] = True
@@ -206,16 +207,18 @@ If you are sure you want to use this command please use API.send_command("{comma
logging.debug(f"{self} - ([Hidden] Send Bytes) - Draining")
await writer.drain()
try:
ret_data = await asyncio.wait_for(reader.read(4096), timeout=timeout)
except ConnectionAbortedError:
return b"{}"
try:
# TO address a situation where a whatsminer has an unknown PW -AND-
# Fix for stupid whatsminer bug, reboot/restart seem to not load properly in the loop
# have to receive, save the data, check if there is more data by reading with a short timeout
# append that data if there is more, and then onto the main loop.
ret_data += await asyncio.wait_for(reader.read(1), timeout=1)
except asyncio.TimeoutError:
return ret_data
# the password timeout might need to be longer than 1, but it seems to work for now.
ret_data = await asyncio.wait_for(reader.read(1), timeout=1)
except (asyncio.TimeoutError):
return b"{}"
try:
ret_data += await asyncio.wait_for(reader.read(4096), timeout=timeout)
except (ConnectionAbortedError):
return b"{}"
# loop to receive all the data
logging.debug(f"{self} - ([Hidden] Send Bytes) - Receiving")
@@ -254,6 +257,12 @@ If you are sure you want to use this command please use API.send_command("{comma
# this is an error
return False, f"{key}: " + data[key][0]["STATUS"][0]["Msg"]
elif "id" not in data.keys():
if isinstance(data["STATUS"], list):
if data["STATUS"][0].get("STATUS", None) in ["S", "I"]:
return True, None
else:
return False, data["STATUS"][0]["Msg"]
if data["STATUS"] not in ["S", "I"]:
return False, data["Msg"]
else:

View File

@@ -27,10 +27,10 @@ from typing import Literal, Union
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from passlib.handlers.md5_crypt import md5_crypt
from pyasic import settings
from pyasic.API import BaseMinerAPI
from pyasic.errors import APIError
from pyasic.misc import api_min_version
from pyasic.settings import PyasicSettings
### IMPORTANT ###
# you need to change the password of the miners using the Whatsminer
@@ -192,7 +192,7 @@ class BTMinerAPI(BaseMinerAPI):
ip: str,
api_ver: str = "0.0.0",
port: int = 4028,
pwd: str = PyasicSettings().global_whatsminer_password,
pwd: str = settings.get("default_whatsminer_password", "admin"),
):
super().__init__(ip, port)
self.pwd = pwd

View File

@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic import settings
from pyasic.API.bmminer import BMMinerAPI
from pyasic.API.bosminer import BOSMinerAPI
from pyasic.API.btminer import BTMinerAPI
@@ -29,10 +30,9 @@ from pyasic.data import (
from pyasic.errors import APIError, APIWarning
from pyasic.miners import get_miner
from pyasic.miners.base import AnyMiner
from pyasic.miners.miner_factory import MinerFactory
from pyasic.miners.miner_factory import MinerFactory, miner_factory
from pyasic.miners.miner_listener import MinerListener
from pyasic.network import MinerNetwork
from pyasic.settings import PyasicSettings
__all__ = [
"BMMinerAPI",
@@ -51,7 +51,8 @@ __all__ = [
"get_miner",
"AnyMiner",
"MinerFactory",
"miner_factory",
"MinerListener",
"MinerNetwork",
"PyasicSettings",
"settings",
]

View File

@@ -241,7 +241,7 @@ class MinerData:
if item.hashrate is not None:
hr_data.append(item.hashrate)
if len(hr_data) > 0:
return sum(hr_data)
return round(sum(hr_data), 2)
return self._hashrate
@hashrate.setter

View File

@@ -16,31 +16,29 @@
import logging
from pyasic.settings import PyasicSettings
def init_logger():
if PyasicSettings().logfile:
logging.basicConfig(
filename="logfile.txt",
filemode="a",
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
else:
logging.basicConfig(
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
# if PyasicSettings().logfile:
# logging.basicConfig(
# filename="logfile.txt",
# filemode="a",
# format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
# datefmt="%x %X",
# )
# else:
logging.basicConfig(
format="%(pathname)s:%(lineno)d in %(funcName)s\n[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
_logger = logging.getLogger()
if PyasicSettings().debug:
_logger.setLevel(logging.DEBUG)
logging.getLogger("asyncssh").setLevel(logging.DEBUG)
else:
_logger.setLevel(logging.WARNING)
logging.getLogger("asyncssh").setLevel(logging.WARNING)
# if PyasicSettings().debug:
# _logger.setLevel(logging.DEBUG)
# logging.getLogger("asyncssh").setLevel(logging.DEBUG)
# else:
_logger.setLevel(logging.WARNING)
logging.getLogger("asyncssh").setLevel(logging.WARNING)
return _logger

View File

@@ -274,7 +274,11 @@ class AntminerModern(BMMiner):
if web_get_conf:
try:
return False if int(web_get_conf["bitmain-work-mode"]) == 1 else True
if web_get_conf["bitmain-work-mode"].isdigit():
return (
False if int(web_get_conf["bitmain-work-mode"]) == 1 else True
)
return False
except LookupError:
pass

View File

@@ -184,11 +184,11 @@ BOSMINER_DATA_LOC = {
class BOSMiner(BaseMiner):
def __init__(self, ip: str, api_ver: str = "0.0.0") -> None:
def __init__(self, ip: str, api_ver: str = "0.0.0", boser: bool = None) -> None:
super().__init__(ip)
# interfaces
self.api = BOSMinerAPI(ip, api_ver)
self.web = BOSMinerWebAPI(ip)
self.web = BOSMinerWebAPI(ip, boser=boser)
# static data
self.api_type = "BOSMiner"
@@ -787,7 +787,7 @@ class BOSMiner(BaseMiner):
)
except APIError:
pass
if graphql_fans:
if graphql_fans.get("data"):
fans = []
for n in range(self.fan_count):
try:
@@ -1043,7 +1043,7 @@ class BOSMiner(BaseMiner):
if data == "50":
self.light = True
return self.light
except TypeError:
except (TypeError, AttributeError):
return self.light
async def get_nominal_hashrate(self, api_devs: dict = None) -> Optional[float]:

View File

@@ -442,7 +442,8 @@ class BTMiner(BaseMiner):
if api_summary:
try:
return api_summary["SUMMARY"][0]["Power"]
wattage = api_summary["SUMMARY"][0]["Power"]
return wattage if not wattage == -1 else None
except (KeyError, IndexError):
pass

View File

@@ -541,7 +541,7 @@ class BaseMiner(ABC):
)
gathered_data = await self._get_data(
allow_warning, include=include, exclude=exclude
allow_warning=allow_warning, include=include, exclude=exclude
)
for item in gathered_data:
if gathered_data[item] is not None:

View File

@@ -13,17 +13,17 @@
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import asyncio
import enum
import ipaddress
import json
import re
from typing import Callable, List, Optional, Tuple, Union
from typing import AsyncGenerator, Callable, List, Optional, Tuple, Union
import anyio
import httpx
from pyasic import settings
from pyasic.logger import logger
from pyasic.miners.antminer import *
from pyasic.miners.avalonminer import *
@@ -44,9 +44,6 @@ from pyasic.miners.innosilicon import *
from pyasic.miners.unknown import UnknownMiner
from pyasic.miners.whatsminer import *
TIMEOUT = 20
RETRIES = 3
class MinerTypes(enum.Enum):
ANTMINER = 0
@@ -112,6 +109,8 @@ MINER_CLASSES = {
"M29V10": BTMinerM29V10,
"M30V10": BTMinerM30V10,
"M30V20": BTMinerM30V20,
"M30KV10": BTMinerM30KV10,
"M30LV10": BTMinerM30LV10,
"M30SV10": BTMinerM30SV10,
"M30SV20": BTMinerM30SV20,
"M30SV30": BTMinerM30SV30,
@@ -161,6 +160,7 @@ MINER_CLASSES = {
"M30S+VE100": BTMinerM30SPlusVE100,
"M30S+VF20": BTMinerM30SPlusVF20,
"M30S+VF30": BTMinerM30SPlusVF30,
"M30S+VG20": BTMinerM30SPlusVG20,
"M30S+VG30": BTMinerM30SPlusVG30,
"M30S+VG40": BTMinerM30SPlusVG40,
"M30S+VG50": BTMinerM30SPlusVG50,
@@ -194,6 +194,9 @@ MINER_CLASSES = {
"M30S++VJ30": BTMinerM30SPlusPlusVJ30,
"M31V10": BTMinerM31V10,
"M31V20": BTMinerM31V20,
"M31HV10": BTMinerM31HV10,
"M31HV40": BTMinerM31HV40,
"M31LV10": BTMinerM31LV10,
"M31SV10": BTMinerM31SV10,
"M31SV20": BTMinerM31SV20,
"M31SV30": BTMinerM31SV30,
@@ -209,7 +212,6 @@ MINER_CLASSES = {
"M31SEV10": BTMinerM31SEV10,
"M31SEV20": BTMinerM31SEV20,
"M31SEV30": BTMinerM31SEV30,
"M31HV40": BTMinerM31HV40,
"M31S+V10": BTMinerM31SPlusV10,
"M31S+V20": BTMinerM31SPlusV20,
"M31S+V30": BTMinerM31SPlusV30,
@@ -236,6 +238,7 @@ MINER_CLASSES = {
"M33V20": BTMinerM33V20,
"M33V30": BTMinerM33V30,
"M33SVG30": BTMinerM33SVG30,
"M33S+VG20": BTMinerM33SPlusVG20,
"M33S+VH20": BTMinerM33SPlusVH20,
"M33S+VH30": BTMinerM33SPlusVH30,
"M33S++VH20": BTMinerM33SPlusPlusVH20,
@@ -245,7 +248,10 @@ MINER_CLASSES = {
"M36SVE10": BTMinerM36SVE10,
"M36S+VG30": BTMinerM36SPlusVG30,
"M36S++VH30": BTMinerM36SPlusPlusVH30,
"M39V10": BTMinerM39V10,
"M39V20": BTMinerM39V20,
"M39V30": BTMinerM39V30,
"M50VE30": BTMinerM50VE30,
"M50VG30": BTMinerM50VG30,
"M50VH10": BTMinerM50VH10,
"M50VH20": BTMinerM50VH20,
@@ -379,7 +385,9 @@ class MinerFactory:
def clear_cached_miners(self):
self.cache = {}
async def get_multiple_miners(self, ips: List[str], limit: int = 200):
async def get_multiple_miners(
self, ips: List[str], limit: int = 200
) -> List[AnyMiner]:
results = []
async for miner in self.get_miner_generator(ips, limit):
@@ -387,7 +395,7 @@ class MinerFactory:
return results
async def get_miner_generator(self, ips: list, limit: int = 200):
async def get_miner_generator(self, ips: list, limit: int = 200) -> AsyncGenerator:
tasks = []
semaphore = asyncio.Semaphore(limit)
@@ -395,13 +403,10 @@ class MinerFactory:
tasks.append(asyncio.create_task(self.get_miner(ip)))
for task in tasks:
await semaphore.acquire()
try:
async with semaphore:
result = await task
if result is not None:
yield result
finally:
semaphore.release()
async def get_miner(self, ip: str):
ip = str(ip)
@@ -410,10 +415,12 @@ class MinerFactory:
miner_type = None
for _ in range(RETRIES):
for _ in range(settings.get("factory_get_retries", 1)):
task = asyncio.create_task(self._get_miner_type(ip))
try:
miner_type = await asyncio.wait_for(task, timeout=TIMEOUT)
miner_type = await asyncio.wait_for(
task, timeout=settings.get("factory_get_timeout", 3)
)
except asyncio.TimeoutError:
task.cancel()
else:
@@ -438,12 +445,21 @@ class MinerFactory:
if fn is not None:
task = asyncio.create_task(fn(ip))
try:
miner_model = await asyncio.wait_for(task, timeout=30)
miner_model = await asyncio.wait_for(
task, timeout=settings.get("factory_get_timeout", 3)
)
except asyncio.TimeoutError:
task.cancel()
boser_enabled = None
if miner_type == MinerTypes.BRAIINS_OS:
boser_enabled = await self.get_boser_braiins_os(ip)
miner = self._select_miner_from_classes(
ip, miner_type=miner_type, miner_model=miner_model
ip,
miner_type=miner_type,
miner_model=miner_model,
boser_enabled=boser_enabled,
)
if miner is not None and not isinstance(miner, UnknownMiner):
@@ -453,18 +469,22 @@ class MinerFactory:
async def _get_miner_type(self, ip: str):
tasks = [
asyncio.create_task(self._get_miner_web(ip)),
asyncio.create_task(self._get_miner_socket(ip)),
# asyncio.create_task(self._get_miner_socket(ip)),
]
return await concurrent_get_first_result(tasks, lambda x: x is not None)
async def _get_miner_web(self, ip: str):
urls = [f"http://{ip}/", f"https://{ip}/"]
async with httpx.AsyncClient(verify=False) as session:
async with httpx.AsyncClient(
transport=settings.transport(verify=False)
) as session:
tasks = [asyncio.create_task(self._web_ping(session, url)) for url in urls]
text, resp = await concurrent_get_first_result(
tasks, lambda x: x[0] is not None
tasks,
lambda x: x[0] is not None
and self._parse_web_type(x[0], x[1]) is not None,
)
if text is not None:
return self._parse_web_type(text, resp)
@@ -474,9 +494,14 @@ class MinerFactory:
session: httpx.AsyncClient, url: str
) -> Tuple[Optional[str], Optional[httpx.Response]]:
try:
resp = await session.get(url, follow_redirects=False)
resp = await session.get(url, follow_redirects=True)
return resp.text, resp
except (httpx.HTTPError, asyncio.TimeoutError):
except (
httpx.HTTPError,
asyncio.TimeoutError,
anyio.EndOfStream,
anyio.ClosedResourceError,
):
pass
return None, None
@@ -486,10 +511,14 @@ class MinerFactory:
"www-authenticate", ""
):
return MinerTypes.ANTMINER
if web_resp.status_code == 307 and "https://" in web_resp.headers.get(
"location", ""
):
return MinerTypes.WHATSMINER
if len(web_resp.history) > 0:
history_resp = web_resp.history[0]
if (
"/cgi-bin/luci" in web_text
and history_resp.status_code == 307
and "https://" in history_resp.headers.get("location", "")
):
return MinerTypes.WHATSMINER
if "Braiins OS" in web_text:
return MinerTypes.BRAIINS_OS
if "cloud-box" in web_text:
@@ -517,7 +546,8 @@ class MinerFactory:
data = b""
try:
reader, writer = await asyncio.wait_for(
asyncio.open_connection(str(ip), 4028), timeout=30
asyncio.open_connection(str(ip), 4028),
timeout=settings.get("factory_get_timeout", 3),
)
except (ConnectionError, OSError, asyncio.TimeoutError):
return
@@ -583,12 +613,12 @@ class MinerFactory:
location: str,
auth: Optional[httpx.DigestAuth] = None,
) -> Optional[dict]:
async with httpx.AsyncClient(verify=False) as session:
async with httpx.AsyncClient(transport=settings.transport()) as session:
try:
data = await session.get(
f"http://{str(ip)}{location}",
auth=auth,
timeout=30,
timeout=settings.get("factory_get_timeout", 3),
)
except (httpx.HTTPError, asyncio.TimeoutError):
logger.info(f"{ip}: Web command timeout.")
@@ -687,9 +717,13 @@ class MinerFactory:
ip: ipaddress.ip_address,
miner_model: Union[str, None],
miner_type: Union[MinerTypes, None],
boser_enabled: bool = None,
) -> AnyMiner:
kwargs = {}
if boser_enabled is not None:
kwargs["boser"] = boser_enabled
try:
return MINER_CLASSES[miner_type][str(miner_model).upper()](ip)
return MINER_CLASSES[miner_type][str(miner_model).upper()](ip, **kwargs)
except LookupError:
if miner_type in MINER_CLASSES:
return MINER_CLASSES[miner_type][None](ip)
@@ -756,6 +790,7 @@ class MinerFactory:
sock_json_data = await self.send_api_command(ip, "devdetails")
try:
miner_model = sock_json_data["DEVDETAILS"][0]["Model"].replace("_", "")
miner_model = miner_model[:-1] + "0"
return miner_model
except (TypeError, LookupError):
@@ -774,7 +809,7 @@ class MinerFactory:
async def get_miner_model_innosilicon(self, ip: str) -> Optional[str]:
try:
async with httpx.AsyncClient(verify=False) as session:
async with httpx.AsyncClient(transport=settings.transport()) as session:
auth_req = await session.post(
f"http://{ip}/api/auth",
data={"username": "admin", "password": "admin"},
@@ -804,7 +839,7 @@ class MinerFactory:
pass
try:
async with httpx.AsyncClient(verify=False) as session:
async with httpx.AsyncClient(transport=settings.transport()) as session:
d = await session.post(
f"http://{ip}/graphql",
json={"query": "{bosminer {info{modelName}}}"},
@@ -816,6 +851,15 @@ class MinerFactory:
except (httpx.HTTPError, LookupError):
pass
async def get_boser_braiins_os(self, ip: str):
# TODO: refine this check
try:
sock_json_data = await self.send_api_command(ip, "version")
return sock_json_data["STATUS"][0]["Msg"].split(" ")[0].upper() == "BOSER"
except LookupError:
# let the bosminer class decide
return None
async def get_miner_model_vnish(self, ip: str) -> Optional[str]:
sock_json_data = await self.send_api_command(ip, "stats")
try:

View File

@@ -24,8 +24,5 @@ class M21V10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M21 V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M21V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 33
self.fan_count = 2

View File

@@ -42,8 +42,5 @@ class M21SV70(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M21S V70"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M21SV70, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 111
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M30V10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30 V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 105
self.fan_count = 2
@@ -36,8 +33,5 @@ class M30V20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30 V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 111
self.fan_count = 2

View File

@@ -0,0 +1,29 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import warnings
from pyasic.miners.makes import WhatsMiner
class M30KV10(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30K V10"
self.ideal_hashboards = 4
self.nominal_chips = 240
self.fan_count = 2

View File

@@ -0,0 +1,29 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import warnings
from pyasic.miners.makes import WhatsMiner
class M30LV10(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30L V10"
self.board_num = 4
self.nominal_chips = 144
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M30SV10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SV10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 148
self.fan_count = 2
@@ -36,10 +33,7 @@ class M30SV20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SV20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 156
self.fan_count = 2
@@ -48,10 +42,7 @@ class M30SV30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S V30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SV30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 164
self.fan_count = 2
@@ -60,10 +51,7 @@ class M30SV40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S V40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SV40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 172
self.fan_count = 2
@@ -81,10 +69,7 @@ class M30SV60(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S V60"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SV60, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 164
self.fan_count = 2
@@ -105,10 +90,7 @@ class M30SV80(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S V80"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SV80, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 129
self.fan_count = 2
@@ -135,10 +117,7 @@ class M30SVE30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VE30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVE30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 117
self.fan_count = 2
@@ -147,10 +126,7 @@ class M30SVE40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VE40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVE40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 123
self.fan_count = 2
@@ -159,10 +135,7 @@ class M30SVE50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VE50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVE50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 129
self.fan_count = 2
@@ -195,10 +168,7 @@ class M30SVF10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VF10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVF10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 70
self.fan_count = 2
@@ -207,10 +177,7 @@ class M30SVF20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VF20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVF20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 74
self.fan_count = 2
@@ -219,10 +186,7 @@ class M30SVF30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VF30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVF30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2
@@ -249,10 +213,7 @@ class M30SVG30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 74
self.fan_count = 2
@@ -261,10 +222,7 @@ class M30SVG40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VG40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVG40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2
@@ -273,10 +231,7 @@ class M30SVH10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VH10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVH10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 64
self.fan_count = 2
@@ -285,10 +240,7 @@ class M30SVH20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VH20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVH20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 66
self.fan_count = 2
@@ -309,10 +261,7 @@ class M30SVH40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VH40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVH40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 64
self.fan_count = 2
@@ -321,10 +270,7 @@ class M30SVH50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VH50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVH50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 66
self.fan_count = 2
@@ -345,8 +291,5 @@ class M30SVI20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S VI20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30SVI20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 70
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M30SPlusV10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 215
self.fan_count = 2
@@ -36,10 +33,7 @@ class M30SPlusV20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 255
self.fan_count = 2
@@ -60,10 +54,7 @@ class M30SPlusV40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 235
self.fan_count = 2
@@ -72,10 +63,7 @@ class M30SPlusV50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 225
self.fan_count = 2
@@ -84,10 +72,7 @@ class M30SPlusV60(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V60"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V60, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 245
self.fan_count = 2
@@ -96,10 +81,7 @@ class M30SPlusV70(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V70"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V70, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 235
self.fan_count = 2
@@ -108,10 +90,7 @@ class M30SPlusV80(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V80"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V80, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 245
self.fan_count = 2
@@ -120,10 +99,7 @@ class M30SPlusV90(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V90"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V90, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 225
self.fan_count = 2
@@ -132,10 +108,7 @@ class M30SPlusV100(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ V100"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V100, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 215
self.fan_count = 2
@@ -144,10 +117,7 @@ class M30SPlusVE30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VE30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VE30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 148
self.fan_count = 2
@@ -174,10 +144,7 @@ class M30SPlusVE60(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VE60"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VE60, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 172
self.fan_count = 2
@@ -243,34 +210,23 @@ class M30SPlusVF30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VF30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VF30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 117
self.fan_count = 2
class M36SPlusVG30(WhatsMiner): # noqa - ignore ABC method implementation
class M30SPlusVG20(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M36S+ VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M36SPlusVG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.model = "M30S+ VG20"
self.nominal_chips = 82
self.fan_count = 2
class M30SPlusVG30(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2
@@ -288,10 +244,7 @@ class M30SPlusVG50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VG50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VG50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 111
self.fan_count = 2
@@ -309,10 +262,7 @@ class M30SPlusVH10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VH10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VH10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 64
self.fan_count = 2
@@ -321,10 +271,7 @@ class M30SPlusVH20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VH20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VH20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 66
self.fan_count = 2
@@ -342,10 +289,7 @@ class M30SPlusVH40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VH40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VH40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 74
self.fan_count = 2
@@ -354,10 +298,7 @@ class M30SPlusVH50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S+ VH50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VH50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 64
self.fan_count = 2

View File

@@ -24,10 +24,8 @@ class M30SPlusPlusV10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 255
self.fan_count = 2
@@ -36,10 +34,8 @@ class M30SPlusPlusV20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 255
self.fan_count = 2
@@ -48,10 +44,7 @@ class M30SPlusPlusVE30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VE30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VE30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 215
self.fan_count = 2
@@ -60,10 +53,7 @@ class M30SPlusPlusVE40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VE40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VE40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 225
self.fan_count = 2
@@ -72,10 +62,7 @@ class M30SPlusPlusVE50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VE50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VE50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 235
self.fan_count = 2
@@ -84,10 +71,7 @@ class M30SPlusPlusVF40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VF40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VF40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 156
self.fan_count = 2
@@ -126,10 +110,7 @@ class M30SPlusPlusVH10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VH10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VH10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 82
self.fan_count = 2
@@ -138,10 +119,7 @@ class M30SPlusPlusVH20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VH20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VH20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 86
self.fan_count = 2
@@ -150,10 +128,7 @@ class M30SPlusPlusVH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VH30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 111
self.fan_count = 2
@@ -162,10 +137,7 @@ class M30SPlusPlusVH40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VH40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VH40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 70
self.fan_count = 2
@@ -201,10 +173,7 @@ class M30SPlusPlusVH80(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VH80"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VH80, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 74
self.fan_count = 2
@@ -213,10 +182,7 @@ class M30SPlusPlusVH90(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M30S++ VH90"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VH90, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M31V10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31 V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 70
self.fan_count = 2
@@ -36,8 +33,5 @@ class M31V20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31 V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 74
self.fan_count = 2

View File

@@ -18,14 +18,19 @@ import warnings
from pyasic.miners.makes import WhatsMiner
class M31HV10(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31H V10"
self.nominal_chips = 114
self.fan_count = 0
class M31HV40(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31H V40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31HV40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 136
self.fan_count = 0

View File

@@ -0,0 +1,27 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import warnings
from pyasic.miners.makes import WhatsMiner
class M31LV10(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31L V10"
self.nominal_chips = 114
self.fan_count = 2

View File

@@ -42,10 +42,7 @@ class M31SV30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S V30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SV30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 117
self.fan_count = 2
@@ -54,10 +51,7 @@ class M31SV40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S V40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SV40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 123
self.fan_count = 2
@@ -66,10 +60,7 @@ class M31SV50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S V50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SV50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2
@@ -108,10 +99,7 @@ class M31SV90(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S V90"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SV90, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 117
self.fan_count = 2
@@ -129,10 +117,7 @@ class M31SVE20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S VE20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SVE20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 74
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M31SEV10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31SE V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SEV10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 82
self.fan_count = 2
@@ -36,10 +33,7 @@ class M31SEV20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31SE V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SEV20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2
@@ -48,8 +42,5 @@ class M31SEV30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31SE V30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M31SEV30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 78
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M31SPlusV10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 105
self.fan_count = 2
@@ -36,10 +33,7 @@ class M31SPlusV20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 111
self.fan_count = 2
@@ -66,10 +60,7 @@ class M31SPlusV50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ V50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ V50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 148
self.fan_count = 2
@@ -114,10 +105,7 @@ class M31SPlusVE10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VE10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VE10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 82
self.fan_count = 2
@@ -135,10 +123,7 @@ class M31SPlusVE30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VE30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VE30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 105
self.fan_count = 2
@@ -147,10 +132,7 @@ class M31SPlusVE40(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VE40"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VE40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 111
self.fan_count = 2
@@ -159,10 +141,7 @@ class M31SPlusVE50(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VE50"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VE50, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 117
self.fan_count = 2
@@ -195,10 +174,7 @@ class M31SPlusVF20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VF20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VF20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 66
self.fan_count = 2
@@ -219,10 +195,7 @@ class M31SPlusVG20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VG20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VG20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 66
self.fan_count = 2
@@ -231,10 +204,7 @@ class M31SPlusVG30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M31S+ VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 70
self.fan_count = 2

View File

@@ -24,7 +24,7 @@ class M32V10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M32 V10"
self.nominal_chips = 74
self.nominal_chips = 78
self.fan_count = 2

View File

@@ -24,10 +24,7 @@ class M33V10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33 V10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M33V10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 33
self.fan_count = 0
@@ -36,10 +33,7 @@ class M33V20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33 V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M33V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 62
self.fan_count = 0
@@ -48,8 +42,5 @@ class M33V30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33 V30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M33V30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 66
self.fan_count = 0

View File

@@ -24,8 +24,6 @@ class M33SVG30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33S VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M33SVG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 116
self.fan_count = 0

View File

@@ -18,16 +18,23 @@ import warnings
from pyasic.miners.makes import WhatsMiner
class M33SPlusVG20(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33S+ VG20"
self.ideal_hashboards = 4
self.nominal_chips = 112
self.fan_count = 0
class M33SPlusVH20(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33S+ VH20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S+ VH20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 100
self.fan_count = 0
@@ -36,7 +43,8 @@ class M33SPlusVH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M33S+ VH30"
self.nominal_chips = 0
self.ideal_hashboards = 4
self.nominal_chips = 0 # slot1 116, slot2 106, slot3 116, slot4 106
warnings.warn(
"Unknown chip count for miner type M30S+ VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)

View File

@@ -48,8 +48,5 @@ class M33SPlusPlusVG40(WhatsMiner): # noqa - ignore ABC method implementation
self.ip = ip
self.model = "M33S++ VG40"
self.ideal_hashboards = 4
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M30S++ VG40, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 174
self.fan_count = 0

View File

@@ -24,8 +24,6 @@ class M36SVE10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M36S VE10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M36SVE10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 114
self.fan_count = 0

View File

@@ -24,8 +24,6 @@ class M36SPlusVG30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M36S+ VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M36S+ VG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 108
self.fan_count = 0

View File

@@ -24,8 +24,6 @@ class M36SPlusPlusVH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M36S++ VH30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M36S++ VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 80
self.fan_count = 0

View File

@@ -18,14 +18,28 @@ import warnings
from pyasic.miners.makes import WhatsMiner
class M39V10(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M39 V10"
self.nominal_chips = 50
self.fan_count = 0
class M39V20(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M39 V20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M39 V20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 54
self.fan_count = 0
class M39V30(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M39 V30"
self.nominal_chips = 68
self.fan_count = 0

View File

@@ -15,6 +15,8 @@
# ------------------------------------------------------------------------------
from .M30 import M30V10, M30V20
from .M30K import M30KV10
from .M30L import M30LV10
from .M30S import (
M30SV10,
M30SV20,
@@ -67,6 +69,7 @@ from .M30S_Plus import (
M30SPlusVE100,
M30SPlusVF20,
M30SPlusVF30,
M30SPlusVG20,
M30SPlusVG30,
M30SPlusVG40,
M30SPlusVG50,
@@ -77,7 +80,6 @@ from .M30S_Plus import (
M30SPlusVH40,
M30SPlusVH50,
M30SPlusVH60,
M36SPlusVG30,
)
from .M30S_Plus_Plus import (
M30SPlusPlusV10,
@@ -103,7 +105,8 @@ from .M30S_Plus_Plus import (
M30SPlusPlusVJ30,
)
from .M31 import M31V10, M31V20
from .M31H import M31HV40
from .M31H import M31HV10, M31HV40
from .M31L import M31LV10
from .M31S import (
M31SV10,
M31SV20,
@@ -145,10 +148,10 @@ from .M32 import M32V10, M32V20
from .M32S import M32S
from .M33 import M33V10, M33V20, M33V30
from .M33S import M33SVG30
from .M33S_Plus import M33SPlusVH20, M33SPlusVH30
from .M33S_Plus import M33SPlusVG20, M33SPlusVH20, M33SPlusVH30
from .M33S_Plus_Plus import M33SPlusPlusVG40, M33SPlusPlusVH20, M33SPlusPlusVH30
from .M34S_Plus import M34SPlusVE10
from .M36S import M36SVE10
from .M36S_Plus import M36SPlusVG30
from .M36S_Plus_Plus import M36SPlusPlusVH30
from .M39 import M39V20
from .M39 import M39V10, M39V20, M39V30

View File

@@ -18,16 +18,21 @@ import warnings
from pyasic.miners.makes import WhatsMiner
class M50VE30(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M50 VE30"
self.ideal_hashboards = 4
self.nominal_chips = 255
self.fan_count = 2
class M50VG30(WhatsMiner): # noqa - ignore ABC method implementation
def __init__(self, ip: str, api_ver: str = "0.0.0"):
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M50 VG30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M50 VG30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 156
self.fan_count = 2
@@ -36,10 +41,7 @@ class M50VH10(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M50 VH10"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M50 VH10, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 86
self.fan_count = 2
@@ -57,10 +59,7 @@ class M50VH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M50 VH30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M50 VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 117
self.fan_count = 2

View File

@@ -72,10 +72,7 @@ class M50SVH20(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M50S VH20"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M50S VH20, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 135
self.fan_count = 2
@@ -84,10 +81,7 @@ class M50SVH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M50S VH30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M50S VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.nominal_chips = 156
self.fan_count = 2

View File

@@ -24,8 +24,6 @@ class M53VH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M53 VH30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M53 VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 128
self.fan_count = 0

View File

@@ -24,8 +24,6 @@ class M56VH30(WhatsMiner): # noqa - ignore ABC method implementation
super().__init__(ip, api_ver)
self.ip = ip
self.model = "M56 VH30"
self.nominal_chips = 0
warnings.warn(
"Unknown chip count for miner type M56 VH30, please open an issue on GitHub (https://github.com/UpstreamData/pyasic)."
)
self.ideal_hashboards = 4
self.nominal_chips = 108
self.fan_count = 0

View File

@@ -15,6 +15,7 @@
# ------------------------------------------------------------------------------
from .M50 import (
M50VE30,
M50VG30,
M50VH10,
M50VH20,

View File

@@ -0,0 +1,22 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import M3X
from pyasic.miners.types import M30KV10
class BTMinerM30KV10(M3X, M30KV10):
pass

View File

@@ -0,0 +1,22 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import M3X
from pyasic.miners.types import M30LV10
class BTMinerM30LV10(M3X, M30LV10):
pass

View File

@@ -36,6 +36,7 @@ from pyasic.miners.types import (
M30SPlusVE100,
M30SPlusVF20,
M30SPlusVF30,
M30SPlusVG20,
M30SPlusVG30,
M30SPlusVG40,
M30SPlusVG50,
@@ -46,7 +47,6 @@ from pyasic.miners.types import (
M30SPlusVH40,
M30SPlusVH50,
M30SPlusVH60,
M36SPlusVG30,
)
@@ -130,7 +130,7 @@ class BTMinerM30SPlusVF30(M3X, M30SPlusVF30):
pass
class BTMinerM36SPlusVG30(M3X, M36SPlusVG30):
class BTMinerM30SPlusVG20(M3X, M30SPlusVG20):
pass

View File

@@ -15,7 +15,11 @@
# ------------------------------------------------------------------------------
from pyasic.miners.backends import M3X
from pyasic.miners.types import M31HV40
from pyasic.miners.types import M31HV10, M31HV40
class BTMinerM31HV10(M3X, M31HV10):
pass
class BTMinerM31HV40(M3X, M31HV40):

View File

@@ -0,0 +1,22 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from pyasic.miners.backends import M3X
from pyasic.miners.types import M31LV10
class BTMinerM31LV10(M3X, M31LV10):
pass

View File

@@ -15,7 +15,11 @@
# ------------------------------------------------------------------------------
from pyasic.miners.backends import M3X
from pyasic.miners.types import M33SPlusVH20, M33SPlusVH30
from pyasic.miners.types import M33SPlusVG20, M33SPlusVH20, M33SPlusVH30
class BTMinerM33SPlusVG20(M3X, M33SPlusVG20):
pass
class BTMinerM33SPlusVH20(M3X, M33SPlusVH20):

View File

@@ -15,8 +15,16 @@
# ------------------------------------------------------------------------------
from pyasic.miners.backends import M3X
from pyasic.miners.types import M39V20
from pyasic.miners.types import M39V10, M39V20, M39V30
class BTMinerM39V10(M3X, M39V10):
pass
class BTMinerM39V20(M3X, M39V20):
pass
class BTMinerM39V30(M3X, M39V30):
pass

View File

@@ -15,6 +15,8 @@
# ------------------------------------------------------------------------------
from .M30 import BTMinerM30V10, BTMinerM30V20
from .M30K import BTMinerM30KV10
from .M30L import BTMinerM30LV10
from .M30S import (
BTMinerM30SV10,
BTMinerM30SV20,
@@ -67,6 +69,7 @@ from .M30S_Plus import (
BTMinerM30SPlusVE100,
BTMinerM30SPlusVF20,
BTMinerM30SPlusVF30,
BTMinerM30SPlusVG20,
BTMinerM30SPlusVG30,
BTMinerM30SPlusVG40,
BTMinerM30SPlusVG50,
@@ -77,7 +80,6 @@ from .M30S_Plus import (
BTMinerM30SPlusVH40,
BTMinerM30SPlusVH50,
BTMinerM30SPlusVH60,
BTMinerM36SPlusVG30,
)
from .M30S_Plus_Plus import (
BTMinerM30SPlusPlusV10,
@@ -103,7 +105,8 @@ from .M30S_Plus_Plus import (
BTMinerM30SPlusPlusVJ30,
)
from .M31 import BTMinerM31V10, BTMinerM31V20
from .M31H import BTMinerM31HV40
from .M31H import BTMinerM31HV10, BTMinerM31HV40
from .M31L import BTMinerM31LV10
from .M31S import (
BTMinerM31SV10,
BTMinerM31SV20,
@@ -145,7 +148,7 @@ from .M32 import BTMinerM32V10, BTMinerM32V20
from .M32S import BTMinerM32S
from .M33 import BTMinerM33V10, BTMinerM33V20, BTMinerM33V30
from .M33S import BTMinerM33SVG30
from .M33S_Plus import BTMinerM33SPlusVH20, BTMinerM33SPlusVH30
from .M33S_Plus import BTMinerM33SPlusVG20, BTMinerM33SPlusVH20, BTMinerM33SPlusVH30
from .M33S_Plus_Plus import (
BTMinerM33SPlusPlusVG40,
BTMinerM33SPlusPlusVH20,
@@ -155,4 +158,4 @@ from .M34S_Plus import BTMinerM34SPlusVE10
from .M36S import BTMinerM36SVE10
from .M36S_Plus import BTMinerM36SPlusVG30
from .M36S_Plus_Plus import BTMinerM36SPlusPlusVH30
from .M39 import BTMinerM39V20
from .M39 import BTMinerM39V10, BTMinerM39V20, BTMinerM39V30

View File

@@ -16,6 +16,7 @@
from pyasic.miners.backends import M5X
from pyasic.miners.types import (
M50VE30,
M50VG30,
M50VH10,
M50VH20,
@@ -31,6 +32,10 @@ from pyasic.miners.types import (
)
class BTMinerM50VE30(M5X, M50VE30):
pass
class BTMinerM50VG30(M5X, M50VG30):
pass

View File

@@ -15,6 +15,7 @@
# ------------------------------------------------------------------------------
from .M50 import (
BTMinerM50VE30,
BTMinerM50VG30,
BTMinerM50VH10,
BTMinerM50VH20,

View File

@@ -19,98 +19,113 @@ import ipaddress
import logging
from typing import AsyncIterator, List, Union
from pyasic import settings
from pyasic.miners.miner_factory import AnyMiner, miner_factory
from pyasic.network.net_range import MinerNetworkRange
from pyasic.settings import PyasicSettings
class MinerNetwork:
"""A class to handle a network containing miners. Handles scanning and gets miners via [`MinerFactory`][pyasic.miners.miner_factory.MinerFactory].
Parameters:
ip_addr: ### An IP address, range of IP addresses, or a list of IPs
* Takes a single IP address as an `ipaddress.ip_address()` or a string
* Takes a string formatted as:
```f"{ip_range_1_start}-{ip_range_1_end}, {ip_address_1}, {ip_range_2_start}-{ip_range_2_end}, {ip_address_2}..."```
* Also takes a list of strings or `ipaddress.ipaddress` formatted as:
```[{ip_address_1}, {ip_address_2}, {ip_address_3}, ...]```
mask: A subnet mask to use when constructing the network. Only used if `ip_addr` is a single IP.
Defaults to /24 (255.255.255.0 or 0.0.0.255)
hosts: A list of `ipaddress.IPv4Address` to be used when scanning.
"""
def __init__(
self,
ip_addr: Union[str, List[str], None] = None,
mask: Union[str, int, None] = None,
) -> None:
self.network = None
self.ip_addr = ip_addr
self.connected_miners = {}
if isinstance(mask, str):
if mask.startswith("/"):
mask = mask.replace("/", "")
self.mask = mask
self.network = self.get_network()
def __init__(self, hosts: List[ipaddress.IPv4Address]):
self.hosts = hosts
def __len__(self):
return len([item for item in self.get_network().hosts()])
return len(self.hosts)
def __repr__(self):
return str(self.network)
@classmethod
def from_list(cls, addresses: list) -> "MinerNetwork":
"""Parse a list of address constructors into a MinerNetwork.
def hosts(self):
for x in self.network.hosts():
yield x
def get_network(self) -> ipaddress.ip_network:
"""Get the network using the information passed to the MinerNetwork or from cache.
Returns:
The proper network to be able to scan.
Parameters:
addresses: A list of address constructors, such as `["10.1-2.1.1-50", "10.4.1-2.1-50"]`.
"""
# if we have a network cached already, use that
if self.network:
return self.network
hosts = []
for address in addresses:
hosts = [*hosts, *cls.from_address(address).hosts]
return cls(sorted(list(set(hosts))))
# if there is no IP address passed, default to 192.168.1.0
if not self.ip_addr:
self.ip_addr = "192.168.1.0"
if "-" in self.ip_addr:
self.network = MinerNetworkRange(self.ip_addr)
elif isinstance(self.ip_addr, list):
self.network = MinerNetworkRange(self.ip_addr)
else:
# if there is no subnet mask passed, default to /24
if not self.mask:
subnet_mask = "24"
# if we do have a mask passed, use that
else:
subnet_mask = str(self.mask)
@classmethod
def from_address(cls, address: str) -> "MinerNetwork":
"""Parse an address constructor into a MinerNetwork.
# save the network and return it
self.network = ipaddress.ip_network(
f"{self.ip_addr}/{subnet_mask}", strict=False
)
Parameters:
address: An address constructor, such as `"10.1-2.1.1-50"`.
"""
octets = address.split(".")
if len(octets) > 4:
raise ValueError("Too many octets in IP constructor.")
if len(octets) < 4:
raise ValueError("Too few octets in IP constructor.")
return cls.from_octets(*octets)
logging.debug(f"{self} - (Get Network) - Found network")
return self.network
@classmethod
def from_octets(
cls, oct_1: str, oct_2: str, oct_3: str, oct_4: str
) -> "MinerNetwork":
"""Parse 4 octet constructors into a MinerNetwork.
async def scan_network_for_miners(self) -> List[AnyMiner]:
"""Scan the network for miners, and return found miners as a list.
Parameters:
oct_1: An octet constructor, such as `"10"`.
oct_2: An octet constructor, such as `"1-2"`.
oct_3: An octet constructor, such as `"1"`.
oct_4: An octet constructor, such as `"1-50"`.
"""
hosts = []
oct_1_val_start, oct_1_start, oct_1_end = compute_oct_range(oct_1)
for oct_1_idx in range((abs(oct_1_end - oct_1_start)) + 1):
oct_1_val = str(oct_1_idx + oct_1_start)
oct_2_val_start, oct_2_start, oct_2_end = compute_oct_range(oct_2)
for oct_2_idx in range((abs(oct_2_end - oct_2_start)) + 1):
oct_2_val = str(oct_2_idx + oct_2_start)
oct_3_val_start, oct_3_start, oct_3_end = compute_oct_range(oct_3)
for oct_3_idx in range((abs(oct_3_end - oct_3_start)) + 1):
oct_3_val = str(oct_3_idx + oct_3_start)
oct_4_val_start, oct_4_start, oct_4_end = compute_oct_range(oct_4)
for oct_4_idx in range((abs(oct_4_end - oct_4_start)) + 1):
oct_4_val = str(oct_4_idx + oct_4_start)
hosts.append(
ipaddress.ip_address(
".".join([oct_1_val, oct_2_val, oct_3_val, oct_4_val])
)
)
return cls(sorted(hosts))
@classmethod
def from_subnet(cls, subnet: str) -> "MinerNetwork":
"""Parse a subnet into a MinerNetwork.
Parameters:
subnet: A subnet string, such as `"10.0.0.1/24"`.
"""
return cls(list(ipaddress.ip_network(subnet, strict=False).hosts()))
async def scan(self) -> List[AnyMiner]:
"""Scan the network for miners.
Returns:
A list of found miners.
"""
# get the network
local_network = self.get_network()
return await self.scan_network_for_miners()
async def scan_network_for_miners(self) -> List[AnyMiner]:
logging.debug(f"{self} - (Scan Network For Miners) - Scanning")
# clear cached miners
miner_factory.clear_cached_miners()
limit = asyncio.Semaphore(PyasicSettings().network_scan_threads)
limit = asyncio.Semaphore(settings.get("network_scan_threads", 300))
miners = await asyncio.gather(
*[self.ping_and_get_miner(host, limit) for host in local_network.hosts()]
*[self.ping_and_get_miner(host, limit) for host in self.hosts]
)
# remove all None from the miner list
@@ -132,15 +147,12 @@ class MinerNetwork:
# get the current event loop
loop = asyncio.get_event_loop()
# get the network
local_network = self.get_network()
# create a list of scan tasks
limit = asyncio.Semaphore(PyasicSettings().network_scan_threads)
limit = asyncio.Semaphore(settings.get("network_scan_threads", 300))
miners = asyncio.as_completed(
[
loop.create_task(self.ping_and_get_miner(host, limit))
for host in local_network.hosts()
for host in self.hosts
]
)
for miner in miners:
@@ -149,83 +161,33 @@ class MinerNetwork:
except TimeoutError:
yield None
@staticmethod
async def ping_miner(
ip: ipaddress.ip_address, semaphore: asyncio.Semaphore
) -> Union[None, ipaddress.ip_address]:
async with semaphore:
try:
miner = await ping_miner(ip)
if miner:
return miner
except ConnectionRefusedError:
tasks = [ping_miner(ip, port=port) for port in [4029, 8889]]
for miner in asyncio.as_completed(tasks):
try:
miner = await miner
if miner:
return miner
except ConnectionRefusedError:
pass
@staticmethod
async def ping_and_get_miner(
ip: ipaddress.ip_address, semaphore: asyncio.Semaphore
) -> Union[None, AnyMiner]:
async with semaphore:
try:
miner = await ping_and_get_miner(ip)
if miner:
return miner
return await ping_and_get_miner(ip)
except ConnectionRefusedError:
tasks = [ping_and_get_miner(ip, port=port) for port in [4029, 8889]]
tasks = [
ping_and_get_miner(ip, port=port) for port in [4028, 4029, 8889]
]
for miner in asyncio.as_completed(tasks):
try:
miner = await miner
if miner:
return miner
return await miner
except ConnectionRefusedError:
pass
async def ping_miner(
ip: ipaddress.ip_address, port=4028
) -> Union[None, ipaddress.ip_address]:
for i in range(PyasicSettings().network_ping_retries):
try:
connection_fut = asyncio.open_connection(str(ip), port)
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(
connection_fut, timeout=PyasicSettings().network_ping_timeout
)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
await writer.wait_closed()
# ping was successful
return ip
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
except (ConnectionRefusedError, OSError):
# handle for other connection errors
logging.debug(f"{str(ip)}: Connection Refused.")
raise ConnectionRefusedError
except Exception as e:
logging.warning(f"{str(ip)}: Ping And Get Miner Exception: {e}")
raise ConnectionRefusedError
return
async def ping_and_get_miner(
ip: ipaddress.ip_address, port=4028
ip: ipaddress.ip_address, port=80
) -> Union[None, AnyMiner]:
for i in range(PyasicSettings().network_ping_retries):
for i in range(settings.get("network_ping_retries", 1)):
try:
connection_fut = asyncio.open_connection(str(ip), port)
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(
connection_fut, timeout=PyasicSettings().network_ping_timeout
connection_fut, timeout=settings.get("network_ping_timeout", 3)
)
# immediately close connection, we know connection happened
writer.close()
@@ -236,11 +198,25 @@ async def ping_and_get_miner(
except asyncio.exceptions.TimeoutError:
# ping failed if we time out
continue
except (ConnectionRefusedError, OSError):
# handle for other connection errors
logging.debug(f"{str(ip)}: Connection Refused.")
raise ConnectionRefusedError
except OSError as e:
raise ConnectionRefusedError from e
except Exception as e:
logging.warning(f"{str(ip)}: Ping And Get Miner Exception: {e}")
raise ConnectionRefusedError
logging.warning(f"{str(ip)}: Unhandled ping exception: {e}")
return
return
def compute_oct_range(octet: str) -> tuple:
octet_split = octet.split("-")
octet_start = int(octet_split[0])
octet_end = None
try:
octet_end = int(octet_split[1])
except IndexError:
pass
if octet_end is None:
octet_end = int(octet_start)
octet_val_start = min([octet_start, octet_end])
return octet_val_start, octet_start, octet_end
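
For reference, a minimal usage sketch of the refactored MinerNetwork API above, limited to the constructors and methods visible in this diff (from_subnet, from_address, from_list, scan); the import path and the example addresses are assumptions for illustration.

import asyncio

from pyasic.network import MinerNetwork  # assumed import path


async def main():
    # A plain subnet expands to its hosts via ipaddress.ip_network().hosts().
    subnet_net = MinerNetwork.from_subnet("10.0.0.1/24")
    print(f"{len(subnet_net)} hosts in the /24")

    # Octet-range constructors ("10.1-2.1.1-50") are expanded per octet by
    # compute_oct_range(); from_list() merges several of them into one sorted,
    # de-duplicated host list.
    network = MinerNetwork.from_list(["10.1-2.1.1-50", "10.4.1-2.1-50"])

    # scan() pings each host (port 80 by default, retrying 4028/4029/8889 on a
    # refused connection) and returns the miners that respond.
    miners = await network.scan()
    print(f"Found {len(miners)} miners across {len(network)} hosts")


if __name__ == "__main__":
    asyncio.run(main())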

View File

@@ -1,56 +0,0 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import ipaddress
from typing import Union
class MinerNetworkRange:
"""A MinerNetwork that takes a range of IP addresses.
Parameters:
ip_range: ## A range of IP addresses to put in the network, or a list of IPs
* Takes a string formatted as:
```f"{ip_range_1_start}-{ip_range_1_end}, {ip_address_1}, {ip_range_2_start}-{ip_range_2_end}, {ip_address_2}..."```
* Also takes a list of strings or `ipaddress.ipaddress` formatted as:
```[{ip_address_1}, {ip_address_2}, {ip_address_3}, ...]```
"""
def __init__(self, ip_range: Union[str, list]):
self.host_ips = []
if isinstance(ip_range, str):
ip_ranges = ip_range.replace(" ", "").split(",")
for item in ip_ranges:
if "-" in item:
start, end = item.split("-")
start_ip = ipaddress.ip_address(start)
end_ip = ipaddress.ip_address(end)
networks = ipaddress.summarize_address_range(start_ip, end_ip)
for network in networks:
self.host_ips.append(network.network_address)
for host in network.hosts():
if host not in self.host_ips:
self.host_ips.append(host)
if network.broadcast_address not in self.host_ips:
self.host_ips.append(network.broadcast_address)
else:
self.host_ips.append(ipaddress.ip_address(item))
elif isinstance(ip_range, list):
self.host_ips = [ipaddress.ip_address(ip_str) for ip_str in ip_range]
def hosts(self):
for x in self.host_ips:
yield x

View File

@@ -13,28 +13,47 @@
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import socket
import struct
from ssl import SSLContext
from typing import Any, Union
from dataclasses import dataclass
import httpx
from httpx import AsyncHTTPTransport
from pyasic.misc import Singleton
_settings = { # defaults
"network_ping_retries": 1,
"network_ping_timeout": 3,
"network_scan_threads": 300,
"factory_get_retries": 1,
"factory_get_timeout": 3,
"get_data_retries": 1,
"api_function_timeout": 5,
"default_whatsminer_password": "admin",
"default_innosilicon_password": "admin",
"default_antminer_password": "root",
"default_bosminer_password": "root",
"default_vnish_password": "admin",
"default_goldshell_password": "123456789",
"socket_linger_time": 1000,
}
@dataclass
class PyasicSettings(metaclass=Singleton):
network_ping_retries: int = 1
network_ping_timeout: int = 3
network_scan_threads: int = 300
ssl_cxt = httpx.create_ssl_context()
miner_factory_get_version_retries: int = 1
miner_get_data_retries: int = 1
def transport(verify: Union[str, bool, SSLContext] = ssl_cxt):
l_onoff = 1
l_linger = get("socket_linger_time", 1000)
global_whatsminer_password = "admin"
global_innosilicon_password = "admin"
global_antminer_password = "root"
global_bosminer_password = "root"
global_vnish_password = "admin"
global_goldshell_password = "123456789"
opts = [(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack("ii", l_onoff, l_linger))]
debug: bool = False
logfile: bool = False
return AsyncHTTPTransport(socket_options=opts, verify=verify)
def get(key: str, other: Any = None) -> Any:
return _settings.get(key, other)
def update(key: str, val: Any) -> Any:
_settings[key] = val
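
A short sketch of how the new module-level settings are meant to be consumed, using only the get/update/transport helpers defined above; the particular values are illustrative.

from pyasic import settings

# Override scan defaults before constructing a MinerNetwork; any key not
# overridden falls back to the _settings defaults above.
settings.update("network_ping_timeout", 1)
settings.update("network_scan_threads", 100)

# Site-wide default passwords used by the web/API layers.
settings.update("default_whatsminer_password", "my_admin_pwd")

# transport() builds an httpx.AsyncHTTPTransport with SO_LINGER applied and the
# shared SSL context, so clients created with it skip per-client SSL setup and
# close sockets promptly.
transport = settings.transport()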

View File

@@ -19,14 +19,14 @@ from typing import Union
import httpx
from pyasic.settings import PyasicSettings
from pyasic import settings
from pyasic.web import BaseWebAPI
class AntminerModernWebAPI(BaseWebAPI):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.pwd = PyasicSettings().global_antminer_password
self.pwd = settings.get("default_antminer_password", "root")
async def send_command(
self,
@@ -38,10 +38,13 @@ class AntminerModernWebAPI(BaseWebAPI):
url = f"http://{self.ip}/cgi-bin/{command}.cgi"
auth = httpx.DigestAuth(self.username, self.pwd)
try:
async with httpx.AsyncClient() as client:
async with httpx.AsyncClient(transport=settings.transport()) as client:
if parameters:
data = await client.post(
url, data=json.dumps(parameters), auth=auth, timeout=15 # noqa
url,
data=json.dumps(parameters),
auth=auth,
timeout=settings.get("api_function_timeout", 3), # noqa
)
else:
data = await client.get(url, auth=auth)
@@ -57,7 +60,7 @@ class AntminerModernWebAPI(BaseWebAPI):
async def multicommand(
self, *commands: str, ignore_errors: bool = False, allow_warning: bool = True
) -> dict:
async with httpx.AsyncClient() as client:
async with httpx.AsyncClient(transport=settings.transport()) as client:
tasks = [
asyncio.create_task(self._handle_multicommand(client, command))
for command in commands
@@ -137,7 +140,7 @@ class AntminerModernWebAPI(BaseWebAPI):
class AntminerOldWebAPI(BaseWebAPI):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.pwd = PyasicSettings().global_antminer_password
self.pwd = settings.get("default_antminer_password", "root")
async def send_command(
self,
@@ -149,10 +152,13 @@ class AntminerOldWebAPI(BaseWebAPI):
url = f"http://{self.ip}/cgi-bin/{command}.cgi"
auth = httpx.DigestAuth(self.username, self.pwd)
try:
async with httpx.AsyncClient() as client:
async with httpx.AsyncClient(transport=settings.transport()) as client:
if parameters:
data = await client.post(
url, data=parameters, auth=auth, timeout=15
url,
data=parameters,
auth=auth,
timeout=settings.get("api_function_timeout", 3),
)
else:
data = await client.get(url, auth=auth)
@@ -170,7 +176,7 @@ class AntminerOldWebAPI(BaseWebAPI):
) -> dict:
data = {k: None for k in commands}
auth = httpx.DigestAuth(self.username, self.pwd)
async with httpx.AsyncClient() as client:
async with httpx.AsyncClient(transport=settings.transport()) as client:
for command in commands:
try:
url = f"http://{self.ip}/cgi-bin/{command}.cgi"

View File

@@ -1,195 +0,0 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import json
from typing import Union
import httpx
from pyasic import APIError
from pyasic.settings import PyasicSettings
from pyasic.web import BaseWebAPI
class BOSMinerWebAPI(BaseWebAPI):
def __init__(self, ip: str) -> None:
super().__init__(ip)
self.pwd = PyasicSettings().global_bosminer_password
async def send_command(
self,
command: Union[str, dict],
ignore_errors: bool = False,
allow_warning: bool = True,
**parameters: Union[str, int, bool],
) -> dict:
if isinstance(command, str):
return await self.send_luci_command(command)
else:
return await self.send_gql_command(command)
def parse_command(self, graphql_command: Union[dict, set]) -> str:
if isinstance(graphql_command, dict):
data = []
for key in graphql_command:
if graphql_command[key] is not None:
parsed = self.parse_command(graphql_command[key])
data.append(key + parsed)
else:
data.append(key)
else:
data = graphql_command
return "{" + ",".join(data) + "}"
async def send_gql_command(
self,
command: dict,
) -> dict:
url = f"http://{self.ip}/graphql"
query = command
if command.get("query") is None:
query = {"query": self.parse_command(command)}
try:
async with httpx.AsyncClient() as client:
await self.auth(client)
data = await client.post(url, json=query)
except httpx.HTTPError:
pass
else:
if data.status_code == 200:
try:
return data.json()
except json.decoder.JSONDecodeError:
pass
async def multicommand(
self, *commands: Union[dict, str], allow_warning: bool = True
) -> dict:
luci_commands = []
gql_commands = []
for cmd in commands:
if isinstance(cmd, dict):
gql_commands.append(cmd)
if isinstance(cmd, str):
luci_commands.append(cmd)
luci_data = await self.luci_multicommand(*luci_commands)
gql_data = await self.gql_multicommand(*gql_commands)
if gql_data is None:
gql_data = {}
if luci_data is None:
luci_data = {}
data = dict(**luci_data, **gql_data)
return data
async def luci_multicommand(self, *commands: str) -> dict:
data = {}
for command in commands:
data[command] = await self.send_luci_command(command, ignore_errors=True)
return data
async def gql_multicommand(self, *commands: dict) -> dict:
def merge(*d: dict):
ret = {}
for i in d:
if i:
for k in i:
if not k in ret:
ret[k] = i[k]
else:
ret[k] = merge(ret[k], i[k])
return None if ret == {} else ret
command = merge(*commands)
data = await self.send_command(command)
if data is not None:
if data.get("data") is None:
try:
commands = list(commands)
# noinspection PyTypeChecker
commands.remove({"bos": {"faultLight": None}})
command = merge(*commands)
data = await self.send_gql_command(command)
except (LookupError, ValueError):
pass
if not data:
data = {}
data["multicommand"] = False
return data
async def auth(self, client: httpx.AsyncClient) -> None:
url = f"http://{self.ip}/graphql"
await client.post(
url,
json={
"query": 'mutation{auth{login(username:"'
+ "root"
+ '", password:"'
+ self.pwd
+ '"){__typename}}}'
},
)
async def send_luci_command(self, path: str, ignore_errors: bool = False) -> dict:
try:
async with httpx.AsyncClient() as client:
await self.luci_auth(client)
data = await client.get(
f"http://{self.ip}{path}", headers={"User-Agent": "BTC Tools v0.1"}
)
if data.status_code == 200:
return data.json()
if ignore_errors:
return {}
raise APIError(
f"Web command failed: path={path}, code={data.status_code}"
)
except (httpx.HTTPError, json.JSONDecodeError):
if ignore_errors:
return {}
raise APIError(f"Web command failed: path={path}")
async def luci_auth(self, session: httpx.AsyncClient):
login = {"luci_username": self.username, "luci_password": self.pwd}
url = f"http://{self.ip}/cgi-bin/luci"
headers = {
"User-Agent": "BTC Tools v0.1", # only seems to respond if this user-agent is set
"Content-Type": "application/x-www-form-urlencoded",
}
await session.post(url, headers=headers, data=login)
async def get_net_conf(self):
return await self.send_luci_command(
"/cgi-bin/luci/admin/network/iface_status/lan"
)
async def get_cfg_metadata(self):
return await self.send_luci_command("/cgi-bin/luci/admin/miner/cfg_metadata")
async def get_cfg_data(self):
return await self.send_luci_command("/cgi-bin/luci/admin/miner/cfg_data")
async def get_bos_info(self):
return await self.send_luci_command("/cgi-bin/luci/bos/info")
async def get_overview(self):
return await self.send_luci_command(
"/cgi-bin/luci/admin/status/overview?status=1"
) # needs status=1 or it fails
async def get_api_status(self):
return await self.send_luci_command("/cgi-bin/luci/admin/miner/api_status")

View File

@@ -0,0 +1,626 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
import json
from datetime import datetime, timedelta
from typing import List, Union
import grpc_requests
import httpx
from google.protobuf.message import Message
from grpc import RpcError
from pyasic import APIError, settings
from pyasic.web import BaseWebAPI
from pyasic.web.bosminer.proto import (
get_auth_service_descriptors,
get_service_descriptors,
)
from pyasic.web.bosminer.proto.bos.v1.actions_pb2 import ( # noqa: this will be defined
SetLocateDeviceStatusRequest,
)
from pyasic.web.bosminer.proto.bos.v1.authentication_pb2 import ( # noqa: this will be defined
SetPasswordRequest,
)
from pyasic.web.bosminer.proto.bos.v1.common_pb2 import ( # noqa: this will be defined
SaveAction,
)
from pyasic.web.bosminer.proto.bos.v1.cooling_pb2 import ( # noqa: this will be defined
SetImmersionModeRequest,
)
from pyasic.web.bosminer.proto.bos.v1.miner_pb2 import ( # noqa: this will be defined
DisableHashboardsRequest,
EnableHashboardsRequest,
)
from pyasic.web.bosminer.proto.bos.v1.performance_pb2 import ( # noqa: this will be defined
DecrementHashrateTargetRequest,
DecrementPowerTargetRequest,
IncrementHashrateTargetRequest,
IncrementPowerTargetRequest,
SetDefaultHashrateTargetRequest,
SetDefaultPowerTargetRequest,
SetHashrateTargetRequest,
SetPowerTargetRequest,
)
class BOSMinerWebAPI(BaseWebAPI):
def __init__(self, ip: str, boser: bool = None) -> None:
if boser is None:
boser = True
if boser:
self.gql = BOSMinerGQLAPI(
ip, settings.get("default_bosminer_password", "root")
)
self.grpc = BOSMinerGRPCAPI(
ip, settings.get("default_bosminer_password", "root")
)
else:
self.gql = None
self.grpc = None
self.luci = BOSMinerLuCIAPI(
ip, settings.get("default_bosminer_password", "root")
)
self._pwd = settings.get("default_bosminer_password", "root")
super().__init__(ip)
@property
def pwd(self):
return self._pwd
@pwd.setter
def pwd(self, other: str):
self._pwd = other
self.luci.pwd = other
if self.gql is not None:
self.gql.pwd = other
if self.grpc is not None:
self.grpc.pwd = other
async def send_command(
self,
command: Union[str, dict],
ignore_errors: bool = False,
allow_warning: bool = True,
**parameters: Union[str, int, bool],
) -> dict:
if isinstance(command, dict):
if self.gql is not None:
return await self.gql.send_command(command)
elif command.startswith("/cgi-bin/luci"):
return await self.luci.send_command(command)
else:
if self.grpc is not None:
return await self.grpc.send_command(command)
async def multicommand(
self, *commands: Union[dict, str], allow_warning: bool = True
) -> dict:
luci_commands = []
gql_commands = []
grpc_commands = []
for cmd in commands:
if isinstance(cmd, dict):
gql_commands.append(cmd)
elif cmd.startswith("/cgi-bin/luci"):
luci_commands.append(cmd)
else:
grpc_commands.append(cmd)
luci_data = await self.luci.multicommand(*luci_commands)
if self.gql is not None:
gql_data = await self.gql.multicommand(*gql_commands)
else:
gql_data = None
if self.grpc is not None:
grpc_data = await self.grpc.multicommand(*grpc_commands)
else:
grpc_data = None
if gql_data is None:
gql_data = {}
if luci_data is None:
luci_data = {}
if grpc_data is None:
grpc_data = {}
data = dict(**luci_data, **gql_data, **grpc_data)
return data
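# Illustrative only (not part of this module): a sketch of how the routing in
# send_command/multicommand above is meant to be exercised. dict payloads go to
# GraphQL, "/cgi-bin/luci/..." paths go to the LuCI HTTP API, and
# "Service/Method" strings go to gRPC on port 50051. Payloads below are taken
# from elsewhere in this diff; the IP is assumed.
#
#   web = BOSMinerWebAPI("192.168.1.10", boser=True)
#   gql_data = await web.send_command({"bos": {"faultLight": None}})
#   luci_data = await web.send_command("/cgi-bin/luci/bos/info")
#   grpc_data = await web.send_command(
#       "braiins.bos.ApiVersionService/GetApiVersion"
#   )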
class BOSMinerGQLAPI:
def __init__(self, ip: str, pwd: str):
self.ip = ip
self.username = "root"
self.pwd = pwd
async def multicommand(self, *commands: dict) -> dict:
def merge(*d: dict):
ret = {}
for i in d:
if i:
for k in i:
if not k in ret:
ret[k] = i[k]
else:
ret[k] = merge(ret[k], i[k])
return None if ret == {} else ret
command = merge(*commands)
data = await self.send_command(command)
if data is not None:
if data.get("data") is None:
try:
commands = list(commands)
# noinspection PyTypeChecker
commands.remove({"bos": {"faultLight": None}})
command = merge(*commands)
data = await self.send_command(command)
except (LookupError, ValueError):
pass
if not data:
data = {}
data["multicommand"] = False
return data
async def send_command(
self,
command: dict,
) -> dict:
url = f"http://{self.ip}/graphql"
query = command
if command.get("query") is None:
query = {"query": self.parse_command(command)}
try:
async with httpx.AsyncClient(transport=settings.transport()) as client:
await self.auth(client)
data = await client.post(url, json=query)
except httpx.HTTPError:
pass
else:
if data.status_code == 200:
try:
return data.json()
except json.decoder.JSONDecodeError:
pass
def parse_command(self, graphql_command: Union[dict, set]) -> str:
if isinstance(graphql_command, dict):
data = []
for key in graphql_command:
if graphql_command[key] is not None:
parsed = self.parse_command(graphql_command[key])
data.append(key + parsed)
else:
data.append(key)
else:
data = graphql_command
return "{" + ",".join(data) + "}"
async def auth(self, client: httpx.AsyncClient) -> None:
url = f"http://{self.ip}/graphql"
await client.post(
url,
json={
"query": 'mutation{auth{login(username:"'
+ "root"
+ '", password:"'
+ self.pwd
+ '"){__typename}}}'
},
)
class BOSMinerLuCIAPI:
def __init__(self, ip: str, pwd: str):
self.ip = ip
self.username = "root"
self.pwd = pwd
async def multicommand(self, *commands: str) -> dict:
data = {}
for command in commands:
data[command] = await self.send_command(command, ignore_errors=True)
return data
async def send_command(self, path: str, ignore_errors: bool = False) -> dict:
try:
async with httpx.AsyncClient(transport=settings.transport()) as client:
await self.auth(client)
data = await client.get(
f"http://{self.ip}{path}", headers={"User-Agent": "BTC Tools v0.1"}
)
if data.status_code == 200:
return data.json()
if ignore_errors:
return {}
raise APIError(
f"Web command failed: path={path}, code={data.status_code}"
)
except (httpx.HTTPError, json.JSONDecodeError):
if ignore_errors:
return {}
raise APIError(f"Web command failed: path={path}")
async def auth(self, session: httpx.AsyncClient):
login = {"luci_username": self.username, "luci_password": self.pwd}
url = f"http://{self.ip}/cgi-bin/luci"
headers = {
"User-Agent": "BTC Tools v0.1", # only seems to respond if this user-agent is set
"Content-Type": "application/x-www-form-urlencoded",
}
await session.post(url, headers=headers, data=login)
async def get_net_conf(self):
return await self.send_command("/cgi-bin/luci/admin/network/iface_status/lan")
async def get_cfg_metadata(self):
return await self.send_command("/cgi-bin/luci/admin/miner/cfg_metadata")
async def get_cfg_data(self):
return await self.send_command("/cgi-bin/luci/admin/miner/cfg_data")
async def get_bos_info(self):
return await self.send_command("/cgi-bin/luci/bos/info")
async def get_overview(self):
return await self.send_command(
"/cgi-bin/luci/admin/status/overview?status=1"
) # needs status=1 or it fails
async def get_api_status(self):
return await self.send_command("/cgi-bin/luci/admin/miner/api_status")
class BOSMinerGRPCAPI:
def __init__(self, ip: str, pwd: str):
self.ip = ip
self.username = "root"
self.pwd = pwd
self._auth = None
self._auth_time = datetime.now()
@property
def commands(self) -> list:
return self.get_commands()
def get_commands(self) -> list:
return [
func
for func in
# each function in self
dir(self)
if func
not in ["send_command", "multicommand", "auth", "commands", "get_commands"]
if callable(getattr(self, func)) and
# no __ or _ methods
not func.startswith("__") and not func.startswith("_")
]
async def multicommand(self, *commands: str) -> dict:
pass
async def send_command(
self,
command: str,
message: Message = None,
ignore_errors: bool = False,
auth: bool = True,
) -> dict:
service, method = command.split("/")
metadata = []
if auth:
metadata.append(("authorization", await self.auth()))
async with grpc_requests.StubAsyncClient(
f"{self.ip}:50051", service_descriptors=get_service_descriptors()
) as client:
await client.register_all_service()
try:
return await client.request(
service,
method,
request=message,
metadata=metadata,
)
except RpcError as e:
if ignore_errors:
return {}
raise APIError(e._details)
async def auth(self):
        # reuse the cached token while it is less than ~59 minutes old
        if self._auth is not None and datetime.now() - self._auth_time < timedelta(
            seconds=3540
        ):
return self._auth
await self._get_auth()
return self._auth
async def _get_auth(self):
async with grpc_requests.StubAsyncClient(
f"{self.ip}:50051", service_descriptors=get_auth_service_descriptors()
) as client:
await client.register_all_service()
method_meta = client.get_method_meta(
"braiins.bos.v1.AuthenticationService", "Login"
)
_request = method_meta.method_type.request_parser(
{"username": self.username, "password": self.pwd},
method_meta.input_type,
)
metadata = await method_meta.handler(_request).initial_metadata()
for key, value in metadata:
if key == "authorization":
self._auth = value
self._auth_time = datetime.now()
return self._auth
async def get_api_version(self):
return await self.send_command(
"braiins.bos.ApiVersionService/GetApiVersion", auth=False
)
async def start(self):
return await self.send_command("braiins.bos.v1.ActionsService/Start")
async def stop(self):
return await self.send_command("braiins.bos.v1.ActionsService/Stop")
async def pause_mining(self):
return await self.send_command("braiins.bos.v1.ActionsService/PauseMining")
async def resume_mining(self):
return await self.send_command("braiins.bos.v1.ActionsService/ResumeMining")
async def restart(self):
return await self.send_command("braiins.bos.v1.ActionsService/Restart")
async def reboot(self):
return await self.send_command("braiins.bos.v1.ActionsService/Reboot")
async def set_locate_device_status(self, enable: bool):
message = SetLocateDeviceStatusRequest()
message.enable = enable
return await self.send_command(
"braiins.bos.v1.ActionsService/SetLocateDeviceStatus", message=message
)
async def get_locate_device_status(self):
return await self.send_command(
"braiins.bos.v1.ActionsService/GetLocateDeviceStatus"
)
async def set_password(self, password: str = None):
message = SetPasswordRequest()
if password:
message.password = password
return await self.send_command(
"braiins.bos.v1.AuthenticationService/SetPassword", message=message
)
async def get_cooling_state(self):
return await self.send_command("braiins.bos.v1.CoolingService/GetCoolingState")
async def set_immersion_mode(
self,
enable: bool,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = SetImmersionModeRequest()
message.enable = enable
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.CoolingService/SetImmersionMode", message=message
)
async def get_tuner_state(self):
return await self.send_command(
"braiins.bos.v1.PerformanceService/GetTunerState"
)
async def list_target_profiles(self):
return await self.send_command(
"braiins.bos.v1.PerformanceService/ListTargetProfiles"
)
async def set_default_power_target(
self, save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY
):
message = SetDefaultPowerTargetRequest()
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/SetDefaultPowerTarget", message=message
)
async def set_power_target(
self,
power_target: int,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = SetPowerTargetRequest()
message.power_target.watt = power_target
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/SetPowerTarget", message=message
)
async def increment_power_target(
self,
power_target_increment: int,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = IncrementPowerTargetRequest()
message.power_target_increment.watt = power_target_increment
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/IncrementPowerTarget", message=message
)
async def decrement_power_target(
self,
power_target_decrement: int,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = DecrementPowerTargetRequest()
message.power_target_decrement.watt = power_target_decrement
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/DecrementPowerTarget",
message=message,
)
async def set_default_hashrate_target(
self, save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY
):
message = SetDefaultHashrateTargetRequest()
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/SetDefaultHashrateTarget",
message=message,
)
async def set_hashrate_target(
self,
hashrate_target: int,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = SetHashrateTargetRequest()
message.hashrate_target.terahash_per_second = hashrate_target
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/SetHashrateTarget", message=message
)
async def increment_hashrate_target(
self,
hashrate_target_increment: int,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = IncrementHashrateTargetRequest()
message.hashrate_target_increment.terahash_per_second = (
hashrate_target_increment
)
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/IncrementHashrateTarget",
message=message,
)
async def decrement_hashrate_target(
self,
hashrate_target_decrement: int,
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = DecrementHashrateTargetRequest()
message.hashrate_target_decrement.terahash_per_second = (
hashrate_target_decrement
)
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.PerformanceService/DecrementHashrateTarget",
message=message,
)
async def set_dps(self):
raise NotImplementedError
return await self.send_command("braiins.bos.v1.PerformanceService/SetDPS")
async def set_performance_mode(self):
raise NotImplementedError
return await self.send_command(
"braiins.bos.v1.PerformanceService/SetPerformanceMode"
)
async def get_active_performance_mode(self):
return await self.send_command(
"braiins.bos.v1.PerformanceService/GetActivePerformanceMode"
)
async def get_pool_groups(self):
return await self.send_command("braiins.bos.v1.PoolService/GetPoolGroups")
async def create_pool_group(self):
raise NotImplementedError
return await self.send_command("braiins.bos.v1.PoolService/CreatePoolGroup")
async def update_pool_group(self):
raise NotImplementedError
return await self.send_command("braiins.bos.v1.PoolService/UpdatePoolGroup")
async def remove_pool_group(self):
raise NotImplementedError
return await self.send_command("braiins.bos.v1.PoolService/RemovePoolGroup")
async def get_miner_configuration(self):
return await self.send_command(
"braiins.bos.v1.ConfigurationService/GetMinerConfiguration"
)
async def get_constraints(self):
return await self.send_command(
"braiins.bos.v1.ConfigurationService/GetConstraints"
)
async def get_license_state(self):
return await self.send_command("braiins.bos.v1.LicenseService/GetLicenseState")
async def get_miner_status(self):
return await self.send_command("braiins.bos.v1.MinerService/GetMinerStatus")
async def get_miner_details(self):
return await self.send_command("braiins.bos.v1.MinerService/GetMinerDetails")
async def get_miner_stats(self):
return await self.send_command("braiins.bos.v1.MinerService/GetMinerStats")
async def get_hashboards(self):
return await self.send_command("braiins.bos.v1.MinerService/GetHashboards")
async def get_support_archive(self):
return await self.send_command("braiins.bos.v1.MinerService/GetSupportArchive")
async def enable_hashboards(
self,
hashboard_ids: List[str],
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = EnableHashboardsRequest()
message.hashboard_ids[:] = hashboard_ids
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.MinerService/EnableHashboards", message=message
)
async def disable_hashboards(
self,
hashboard_ids: List[str],
save_action: SaveAction = SaveAction.SAVE_ACTION_SAVE_AND_APPLY,
):
message = DisableHashboardsRequest()
message.hashboard_ids[:] = hashboard_ids
message.save_action = save_action
return await self.send_command(
"braiins.bos.v1.MinerService/DisableHashboards", message=message
)
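Tying the gRPC wrapper together, a hedged sketch (module path assumed) that reuses the auth flow above and adjusts the power target; the request messages and SaveAction default are supplied by companion modules elsewhere in this changeset.
import asyncio

from pyasic.web.bosminer import BOSMinerGRPCAPI  # module path assumed

async def main():
    bos = BOSMinerGRPCAPI("192.168.1.10", "root")
    details = await bos.get_miner_details()
    # builds a SetPowerTargetRequest and sends it over port 50051
    result = await bos.set_power_target(3200)  # watts
    print(details, result)

asyncio.run(main())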

View File

@@ -0,0 +1,54 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from __future__ import annotations
from .bos import version_pb2
from .bos.v1 import (
actions_pb2,
authentication_pb2,
common_pb2,
configuration_pb2,
constraints_pb2,
cooling_pb2,
license_pb2,
miner_pb2,
performance_pb2,
pool_pb2,
units_pb2,
work_pb2,
)
def get_service_descriptors():
return [
*version_pb2.DESCRIPTOR.services_by_name.values(),
*authentication_pb2.DESCRIPTOR.services_by_name.values(),
*actions_pb2.DESCRIPTOR.services_by_name.values(),
*common_pb2.DESCRIPTOR.services_by_name.values(),
*configuration_pb2.DESCRIPTOR.services_by_name.values(),
*constraints_pb2.DESCRIPTOR.services_by_name.values(),
*cooling_pb2.DESCRIPTOR.services_by_name.values(),
*license_pb2.DESCRIPTOR.services_by_name.values(),
*miner_pb2.DESCRIPTOR.services_by_name.values(),
*performance_pb2.DESCRIPTOR.services_by_name.values(),
*pool_pb2.DESCRIPTOR.services_by_name.values(),
*units_pb2.DESCRIPTOR.services_by_name.values(),
*work_pb2.DESCRIPTOR.services_by_name.values(),
]
def get_auth_service_descriptors():
return authentication_pb2.DESCRIPTOR.services_by_name.values()
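These helpers feed grpc_requests.StubAsyncClient in send_command above. As a quick sanity check, run from this module's namespace, the descriptors can simply be enumerated without opening a connection:
for descriptor in get_service_descriptors():
    print(descriptor.full_name)  # e.g. braiins.bos.v1.ActionsService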

View File

@@ -0,0 +1,15 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------

View File

@@ -0,0 +1,15 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------

View File

@@ -0,0 +1,23 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from enum import Enum
class SaveAction(Enum):
UNSPECIFIED = "SaveAction.SAVE_ACTION_UNSPECIFIED"
SAVE = "SaveAction.SAVE_ACTION_SAVE"
SAVE_AND_APPLY = "SaveAction.SAVE_ACTION_SAVE_AND_APPLY"
SAVE_AND_FORCE_APPLY = "SaveAction.SAVE_ACTION_SAVE_AND_FORCE_APPLY"

View File

@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/actions.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x14\x62os/v1/actions.proto\x12\x0e\x62raiins.bos.v1"\x0e\n\x0cStartRequest"(\n\rStartResponse\x12\x17\n\x0f\x61lready_running\x18\x01 \x01(\x08"\x10\n\x0eRestartRequest"*\n\x0fRestartResponse\x12\x17\n\x0f\x61lready_running\x18\x01 \x01(\x08"\x0f\n\rRebootRequest"\x10\n\x0eRebootResponse"\r\n\x0bStopRequest"\'\n\x0cStopResponse\x12\x17\n\x0f\x61lready_stopped\x18\x01 \x01(\x08"\x14\n\x12PauseMiningRequest"-\n\x13PauseMiningResponse\x12\x16\n\x0e\x61lready_paused\x18\x01 \x01(\x08"\x15\n\x13ResumeMiningRequest".\n\x14ResumeMiningResponse\x12\x16\n\x0e\x61lready_mining\x18\x01 \x01(\x08".\n\x1cSetLocateDeviceStatusRequest\x12\x0e\n\x06\x65nable\x18\x01 \x01(\x08"-\n\x1aLocateDeviceStatusResponse\x12\x0f\n\x07\x65nabled\x18\x01 \x01(\x08"\x1e\n\x1cGetLocateDeviceStatusRequest2\xc7\x05\n\x0e\x41\x63tionsService\x12\x44\n\x05Start\x12\x1c.braiins.bos.v1.StartRequest\x1a\x1d.braiins.bos.v1.StartResponse\x12\x41\n\x04Stop\x12\x1b.braiins.bos.v1.StopRequest\x1a\x1c.braiins.bos.v1.StopResponse\x12V\n\x0bPauseMining\x12".braiins.bos.v1.PauseMiningRequest\x1a#.braiins.bos.v1.PauseMiningResponse\x12Y\n\x0cResumeMining\x12#.braiins.bos.v1.ResumeMiningRequest\x1a$.braiins.bos.v1.ResumeMiningResponse\x12J\n\x07Restart\x12\x1e.braiins.bos.v1.RestartRequest\x1a\x1f.braiins.bos.v1.RestartResponse\x12G\n\x06Reboot\x12\x1d.braiins.bos.v1.RebootRequest\x1a\x1e.braiins.bos.v1.RebootResponse\x12q\n\x15SetLocateDeviceStatus\x12,.braiins.bos.v1.SetLocateDeviceStatusRequest\x1a*.braiins.bos.v1.LocateDeviceStatusResponse\x12q\n\x15GetLocateDeviceStatus\x12,.braiins.bos.v1.GetLocateDeviceStatusRequest\x1a*.braiins.bos.v1.LocateDeviceStatusResponseb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.actions_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_STARTREQUEST"]._serialized_start = 40
_globals["_STARTREQUEST"]._serialized_end = 54
_globals["_STARTRESPONSE"]._serialized_start = 56
_globals["_STARTRESPONSE"]._serialized_end = 96
_globals["_RESTARTREQUEST"]._serialized_start = 98
_globals["_RESTARTREQUEST"]._serialized_end = 114
_globals["_RESTARTRESPONSE"]._serialized_start = 116
_globals["_RESTARTRESPONSE"]._serialized_end = 158
_globals["_REBOOTREQUEST"]._serialized_start = 160
_globals["_REBOOTREQUEST"]._serialized_end = 175
_globals["_REBOOTRESPONSE"]._serialized_start = 177
_globals["_REBOOTRESPONSE"]._serialized_end = 193
_globals["_STOPREQUEST"]._serialized_start = 195
_globals["_STOPREQUEST"]._serialized_end = 208
_globals["_STOPRESPONSE"]._serialized_start = 210
_globals["_STOPRESPONSE"]._serialized_end = 249
_globals["_PAUSEMININGREQUEST"]._serialized_start = 251
_globals["_PAUSEMININGREQUEST"]._serialized_end = 271
_globals["_PAUSEMININGRESPONSE"]._serialized_start = 273
_globals["_PAUSEMININGRESPONSE"]._serialized_end = 318
_globals["_RESUMEMININGREQUEST"]._serialized_start = 320
_globals["_RESUMEMININGREQUEST"]._serialized_end = 341
_globals["_RESUMEMININGRESPONSE"]._serialized_start = 343
_globals["_RESUMEMININGRESPONSE"]._serialized_end = 389
_globals["_SETLOCATEDEVICESTATUSREQUEST"]._serialized_start = 391
_globals["_SETLOCATEDEVICESTATUSREQUEST"]._serialized_end = 437
_globals["_LOCATEDEVICESTATUSRESPONSE"]._serialized_start = 439
_globals["_LOCATEDEVICESTATUSRESPONSE"]._serialized_end = 484
_globals["_GETLOCATEDEVICESTATUSREQUEST"]._serialized_start = 486
_globals["_GETLOCATEDEVICESTATUSREQUEST"]._serialized_end = 516
_globals["_ACTIONSSERVICE"]._serialized_start = 519
_globals["_ACTIONSSERVICE"]._serialized_end = 1230
# @@protoc_insertion_point(module_scope)
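The builder calls above materialize message classes such as SetLocateDeviceStatusRequest into this module's namespace; a hedged round-trip sketch using the standard protobuf API (assuming the module is imported as actions_pb2):
msg = actions_pb2.SetLocateDeviceStatusRequest(enable=True)
raw = msg.SerializeToString()
copy = actions_pb2.SetLocateDeviceStatusRequest.FromString(raw)
print(copy.enable)  # True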

View File

@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/authentication.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1b\x62os/v1/authentication.proto\x12\x0e\x62raiins.bos.v1"2\n\x0cLoginRequest\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t"\x0f\n\rLoginResponse"8\n\x12SetPasswordRequest\x12\x15\n\x08password\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x0b\n\t_password"\x15\n\x13SetPasswordResponse2\xb5\x01\n\x15\x41uthenticationService\x12\x44\n\x05Login\x12\x1c.braiins.bos.v1.LoginRequest\x1a\x1d.braiins.bos.v1.LoginResponse\x12V\n\x0bSetPassword\x12".braiins.bos.v1.SetPasswordRequest\x1a#.braiins.bos.v1.SetPasswordResponseb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(
DESCRIPTOR, "bos.v1.authentication_pb2", _globals
)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_LOGINREQUEST"]._serialized_start = 47
_globals["_LOGINREQUEST"]._serialized_end = 97
_globals["_LOGINRESPONSE"]._serialized_start = 99
_globals["_LOGINRESPONSE"]._serialized_end = 114
_globals["_SETPASSWORDREQUEST"]._serialized_start = 116
_globals["_SETPASSWORDREQUEST"]._serialized_end = 172
_globals["_SETPASSWORDRESPONSE"]._serialized_start = 174
_globals["_SETPASSWORDRESPONSE"]._serialized_end = 195
_globals["_AUTHENTICATIONSERVICE"]._serialized_start = 198
_globals["_AUTHENTICATIONSERVICE"]._serialized_end = 379
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/common.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b"\n\x13\x62os/v1/common.proto\x12\x0e\x62raiins.bos.v1*\x85\x01\n\nSaveAction\x12\x1b\n\x17SAVE_ACTION_UNSPECIFIED\x10\x00\x12\x14\n\x10SAVE_ACTION_SAVE\x10\x01\x12\x1e\n\x1aSAVE_ACTION_SAVE_AND_APPLY\x10\x02\x12$\n SAVE_ACTION_SAVE_AND_FORCE_APPLY\x10\x03\x62\x06proto3"
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.common_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_SAVEACTION"]._serialized_start = 40
_globals["_SAVEACTION"]._serialized_end = 173
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/configuration.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ...bos.v1 import cooling_pb2 as bos_dot_v1_dot_cooling__pb2
from ...bos.v1 import performance_pb2 as bos_dot_v1_dot_performance__pb2
from ...bos.v1 import pool_pb2 as bos_dot_v1_dot_pool__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1a\x62os/v1/configuration.proto\x12\x0e\x62raiins.bos.v1\x1a\x14\x62os/v1/cooling.proto\x1a\x18\x62os/v1/performance.proto\x1a\x11\x62os/v1/pool.proto"\x1e\n\x1cGetMinerConfigurationRequest"\xc6\x02\n\x1dGetMinerConfigurationResponse\x12;\n\x0bpool_groups\x18\x01 \x03(\x0b\x32&.braiins.bos.v1.PoolGroupConfiguration\x12\x39\n\x0btemperature\x18\x02 \x01(\x0b\x32$.braiins.bos.v1.CoolingConfiguration\x12\x31\n\x05tuner\x18\x03 \x01(\x0b\x32".braiins.bos.v1.TunerConfiguration\x12-\n\x03\x64ps\x18\x04 \x01(\x0b\x32 .braiins.bos.v1.DPSConfiguration\x12K\n\x10hashboard_config\x18\x05 \x01(\x0b\x32\x31.braiins.bos.v1.HashboardPerformanceConfiguration"\x17\n\x15GetConstraintsRequest"\x95\x02\n\x16GetConstraintsResponse\x12;\n\x11tuner_constraints\x18\x01 \x01(\x0b\x32 .braiins.bos.v1.TunerConstraints\x12?\n\x13\x63ooling_constraints\x18\x02 \x01(\x0b\x32".braiins.bos.v1.CoolingConstraints\x12\x37\n\x0f\x64ps_constraints\x18\x03 \x01(\x0b\x32\x1e.braiins.bos.v1.DPSConstraints\x12\x44\n\x16hashboards_constraints\x18\x04 \x01(\x0b\x32$.braiins.bos.v1.HashboardConstraints2\xed\x01\n\x14\x43onfigurationService\x12t\n\x15GetMinerConfiguration\x12,.braiins.bos.v1.GetMinerConfigurationRequest\x1a-.braiins.bos.v1.GetMinerConfigurationResponse\x12_\n\x0eGetConstraints\x12%.braiins.bos.v1.GetConstraintsRequest\x1a&.braiins.bos.v1.GetConstraintsResponseb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(
DESCRIPTOR, "bos.v1.configuration_pb2", _globals
)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_GETMINERCONFIGURATIONREQUEST"]._serialized_start = 113
_globals["_GETMINERCONFIGURATIONREQUEST"]._serialized_end = 143
_globals["_GETMINERCONFIGURATIONRESPONSE"]._serialized_start = 146
_globals["_GETMINERCONFIGURATIONRESPONSE"]._serialized_end = 472
_globals["_GETCONSTRAINTSREQUEST"]._serialized_start = 474
_globals["_GETCONSTRAINTSREQUEST"]._serialized_end = 497
_globals["_GETCONSTRAINTSRESPONSE"]._serialized_start = 500
_globals["_GETCONSTRAINTSRESPONSE"]._serialized_end = 777
_globals["_CONFIGURATIONSERVICE"]._serialized_start = 780
_globals["_CONFIGURATIONSERVICE"]._serialized_end = 1017
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/constraints.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ...bos.v1 import units_pb2 as bos_dot_v1_dot_units__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x18\x62os/v1/constraints.proto\x12\x0e\x62raiins.bos.v1\x1a\x12\x62os/v1/units.proto">\n\x11UInt32Constraints\x12\x0f\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\r\x12\x0b\n\x03min\x18\x02 \x01(\r\x12\x0b\n\x03max\x18\x03 \x01(\r">\n\x11\x44oubleConstraints\x12\x0f\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x01\x12\x0b\n\x03min\x18\x02 \x01(\x01\x12\x0b\n\x03max\x18\x03 \x01(\x01"\x82\x01\n\x10PowerConstraints\x12&\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x0b\x32\x15.braiins.bos.v1.Power\x12"\n\x03min\x18\x02 \x01(\x0b\x32\x15.braiins.bos.v1.Power\x12"\n\x03max\x18\x03 \x01(\x0b\x32\x15.braiins.bos.v1.Power"\x9a\x01\n\x13HashrateConstraints\x12-\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x0b\x32\x1c.braiins.bos.v1.TeraHashrate\x12)\n\x03min\x18\x02 \x01(\x0b\x32\x1c.braiins.bos.v1.TeraHashrate\x12)\n\x03max\x18\x03 \x01(\x0b\x32\x1c.braiins.bos.v1.TeraHashrate"\x9a\x01\n\x16TemperatureConstraints\x12,\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature\x12(\n\x03min\x18\x02 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature\x12(\n\x03max\x18\x03 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature"$\n\x11\x42ooleanConstraint\x12\x0f\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x08"\x85\x01\n\x13\x44urationConstraints\x12&\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x0b\x32\x15.braiins.bos.v1.Hours\x12"\n\x03min\x18\x02 \x01(\x0b\x32\x15.braiins.bos.v1.Hours\x12"\n\x03max\x18\x03 \x01(\x0b\x32\x15.braiins.bos.v1.Hours"\x92\x01\n\x14\x46requencyConstraints\x12*\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x0b\x32\x19.braiins.bos.v1.Frequency\x12&\n\x03min\x18\x02 \x01(\x0b\x32\x19.braiins.bos.v1.Frequency\x12&\n\x03max\x18\x03 \x01(\x0b\x32\x19.braiins.bos.v1.Frequency"\x8a\x01\n\x12VoltageConstraints\x12(\n\x07\x64\x65\x66\x61ult\x18\x01 \x01(\x0b\x32\x17.braiins.bos.v1.Voltage\x12$\n\x03min\x18\x02 \x01(\x0b\x32\x17.braiins.bos.v1.Voltage\x12$\n\x03max\x18\x03 \x01(\x0b\x32\x17.braiins.bos.v1.Voltageb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.constraints_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_UINT32CONSTRAINTS"]._serialized_start = 64
_globals["_UINT32CONSTRAINTS"]._serialized_end = 126
_globals["_DOUBLECONSTRAINTS"]._serialized_start = 128
_globals["_DOUBLECONSTRAINTS"]._serialized_end = 190
_globals["_POWERCONSTRAINTS"]._serialized_start = 193
_globals["_POWERCONSTRAINTS"]._serialized_end = 323
_globals["_HASHRATECONSTRAINTS"]._serialized_start = 326
_globals["_HASHRATECONSTRAINTS"]._serialized_end = 480
_globals["_TEMPERATURECONSTRAINTS"]._serialized_start = 483
_globals["_TEMPERATURECONSTRAINTS"]._serialized_end = 637
_globals["_BOOLEANCONSTRAINT"]._serialized_start = 639
_globals["_BOOLEANCONSTRAINT"]._serialized_end = 675
_globals["_DURATIONCONSTRAINTS"]._serialized_start = 678
_globals["_DURATIONCONSTRAINTS"]._serialized_end = 811
_globals["_FREQUENCYCONSTRAINTS"]._serialized_start = 814
_globals["_FREQUENCYCONSTRAINTS"]._serialized_end = 960
_globals["_VOLTAGECONSTRAINTS"]._serialized_start = 963
_globals["_VOLTAGECONSTRAINTS"]._serialized_end = 1101
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/cooling.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ...bos.v1 import common_pb2 as bos_dot_v1_dot_common__pb2
from ...bos.v1 import constraints_pb2 as bos_dot_v1_dot_constraints__pb2
from ...bos.v1 import units_pb2 as bos_dot_v1_dot_units__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x14\x62os/v1/cooling.proto\x12\x0e\x62raiins.bos.v1\x1a\x13\x62os/v1/common.proto\x1a\x18\x62os/v1/constraints.proto\x1a\x12\x62os/v1/units.proto"\xbc\x01\n\x0f\x43oolingAutoMode\x12\x37\n\x12target_temperature\x18\x01 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature\x12\x34\n\x0fhot_temperature\x18\x02 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature\x12:\n\x15\x64\x61ngerous_temperature\x18\x03 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature"\xb7\x01\n\x11\x43oolingManualMode\x12\x1c\n\x0f\x66\x61n_speed_ratio\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x34\n\x0fhot_temperature\x18\x02 \x01(\x0b\x32\x1b.braiins.bos.v1.Temperature\x12:\n\x15\x64\x61ngerous_temperature\x18\x03 \x01(\x0b\x32\x1b.braiins.bos.v1.TemperatureB\x12\n\x10_fan_speed_ratio"G\n\x13\x43oolingDisabledMode\x12\x1c\n\x0f\x66\x61n_speed_ratio\x18\x01 \x01(\x01H\x00\x88\x01\x01\x42\x12\n\x10_fan_speed_ratio"\xfb\x01\n\x14\x43oolingConfiguration\x12"\n\x15minimum_required_fans\x18\x01 \x01(\rH\x01\x88\x01\x01\x12/\n\x04\x61uto\x18\x02 \x01(\x0b\x32\x1f.braiins.bos.v1.CoolingAutoModeH\x00\x12\x33\n\x06manual\x18\x03 \x01(\x0b\x32!.braiins.bos.v1.CoolingManualModeH\x00\x12\x37\n\x08\x64isabled\x18\x04 \x01(\x0b\x32#.braiins.bos.v1.CoolingDisabledModeH\x00\x42\x06\n\x04modeB\x18\n\x16_minimum_required_fans"\x99\x03\n\x12\x43oolingConstraints\x12\x39\n\x14\x64\x65\x66\x61ult_cooling_mode\x18\x01 \x01(\x0e\x32\x1b.braiins.bos.v1.CoolingMode\x12\x42\n\x12target_temperature\x18\x02 \x01(\x0b\x32&.braiins.bos.v1.TemperatureConstraints\x12?\n\x0fhot_temperature\x18\x03 \x01(\x0b\x32&.braiins.bos.v1.TemperatureConstraints\x12\x45\n\x15\x64\x61ngerous_temperature\x18\x04 \x01(\x0b\x32&.braiins.bos.v1.TemperatureConstraints\x12:\n\x0f\x66\x61n_speed_ratio\x18\x05 \x01(\x0b\x32!.braiins.bos.v1.DoubleConstraints\x12@\n\x15minimum_required_fans\x18\x06 \x01(\x0b\x32!.braiins.bos.v1.UInt32Constraints"s\n\x08\x46\x61nState\x12\x15\n\x08position\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x0b\n\x03rpm\x18\x02 \x01(\r\x12\x1f\n\x12target_speed_ratio\x18\x03 \x01(\x01H\x01\x88\x01\x01\x42\x0b\n\t_positionB\x15\n\x13_target_speed_ratio"\x8f\x01\n\x11TemperatureSensor\x12\x0f\n\x02id\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x30\n\x08location\x18\x02 \x01(\x0e\x32\x1e.braiins.bos.v1.SensorLocation\x12\x30\n\x0btemperature\x18\x03 \x01(\x0b\x32\x1b.braiins.bos.v1.TemperatureB\x05\n\x03_id"\x18\n\x16GetCoolingStateRequest"\x81\x01\n\x17GetCoolingStateResponse\x12&\n\x04\x66\x61ns\x18\x01 \x03(\x0b\x32\x18.braiins.bos.v1.FanState\x12>\n\x13highest_temperature\x18\x02 \x01(\x0b\x32!.braiins.bos.v1.TemperatureSensor"i\n\x17SetImmersionModeRequest\x12/\n\x0bsave_action\x18\x01 \x01(\x0e\x32\x1a.braiins.bos.v1.SaveAction\x12\x1d\n\x15\x65nable_immersion_mode\x18\x02 \x01(\x08"2\n\x18SetImmersionModeResponse\x12\x16\n\x0eimmersion_mode\x18\x01 \x01(\x08*v\n\x0b\x43oolingMode\x12\x1c\n\x18\x43OOLING_MODE_UNSPECIFIED\x10\x00\x12\x15\n\x11\x43OOLING_MODE_AUTO\x10\x01\x12\x17\n\x13\x43OOLING_MODE_MANUAL\x10\x02\x12\x19\n\x15\x43OOLING_MODE_DISABLED\x10\x03*d\n\x0eSensorLocation\x12\x1f\n\x1bSENSOR_LOCATION_UNSPECIFIED\x10\x00\x12\x18\n\x14SENSOR_LOCATION_CHIP\x10\x01\x12\x17\n\x13SENSOR_LOCATION_PCB\x10\x02\x32\xdb\x01\n\x0e\x43oolingService\x12\x62\n\x0fGetCoolingState\x12&.braiins.bos.v1.GetCoolingStateRequest\x1a\'.braiins.bos.v1.GetCoolingStateResponse\x12\x65\n\x10SetImmersionMode\x12\'.braiins.bos.v1.SetImmersionModeRequest\x1a(.braiins.bos.v1.SetImmersionModeResponseb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.cooling_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_COOLINGMODE"]._serialized_start = 1803
_globals["_COOLINGMODE"]._serialized_end = 1921
_globals["_SENSORLOCATION"]._serialized_start = 1923
_globals["_SENSORLOCATION"]._serialized_end = 2023
_globals["_COOLINGAUTOMODE"]._serialized_start = 108
_globals["_COOLINGAUTOMODE"]._serialized_end = 296
_globals["_COOLINGMANUALMODE"]._serialized_start = 299
_globals["_COOLINGMANUALMODE"]._serialized_end = 482
_globals["_COOLINGDISABLEDMODE"]._serialized_start = 484
_globals["_COOLINGDISABLEDMODE"]._serialized_end = 555
_globals["_COOLINGCONFIGURATION"]._serialized_start = 558
_globals["_COOLINGCONFIGURATION"]._serialized_end = 809
_globals["_COOLINGCONSTRAINTS"]._serialized_start = 812
_globals["_COOLINGCONSTRAINTS"]._serialized_end = 1221
_globals["_FANSTATE"]._serialized_start = 1223
_globals["_FANSTATE"]._serialized_end = 1338
_globals["_TEMPERATURESENSOR"]._serialized_start = 1341
_globals["_TEMPERATURESENSOR"]._serialized_end = 1484
_globals["_GETCOOLINGSTATEREQUEST"]._serialized_start = 1486
_globals["_GETCOOLINGSTATEREQUEST"]._serialized_end = 1510
_globals["_GETCOOLINGSTATERESPONSE"]._serialized_start = 1513
_globals["_GETCOOLINGSTATERESPONSE"]._serialized_end = 1642
_globals["_SETIMMERSIONMODEREQUEST"]._serialized_start = 1644
_globals["_SETIMMERSIONMODEREQUEST"]._serialized_end = 1749
_globals["_SETIMMERSIONMODERESPONSE"]._serialized_start = 1751
_globals["_SETIMMERSIONMODERESPONSE"]._serialized_end = 1801
_globals["_COOLINGSERVICE"]._serialized_start = 2026
_globals["_COOLINGSERVICE"]._serialized_end = 2245
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/license.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ...bos.v1 import units_pb2 as bos_dot_v1_dot_units__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x14\x62os/v1/license.proto\x12\x0e\x62raiins.bos.v1\x1a\x12\x62os/v1/units.proto")\n\x0bNoneLicense\x12\x1a\n\x12time_to_restricted\x18\x01 \x01(\r"\x10\n\x0eLimitedLicense"\x9a\x01\n\x0cValidLicense\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.braiins.bos.v1.LicenseType\x12\x15\n\rcontract_name\x18\x02 \x01(\t\x12\x1a\n\x12time_to_restricted\x18\x03 \x01(\r\x12,\n\x07\x64\x65v_fee\x18\x04 \x01(\x0b\x32\x1b.braiins.bos.v1.BasesPoints"\x80\x01\n\x0e\x45xpiredLicense\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.braiins.bos.v1.LicenseType\x12\x15\n\rcontract_name\x18\x02 \x01(\t\x12,\n\x07\x64\x65v_fee\x18\x03 \x01(\x0b\x32\x1b.braiins.bos.v1.BasesPoints"\x18\n\x16GetLicenseStateRequest"\xe4\x01\n\x17GetLicenseStateResponse\x12+\n\x04none\x18\x01 \x01(\x0b\x32\x1b.braiins.bos.v1.NoneLicenseH\x00\x12\x31\n\x07limited\x18\x02 \x01(\x0b\x32\x1e.braiins.bos.v1.LimitedLicenseH\x00\x12-\n\x05valid\x18\x03 \x01(\x0b\x32\x1c.braiins.bos.v1.ValidLicenseH\x00\x12\x31\n\x07\x65xpired\x18\x04 \x01(\x0b\x32\x1e.braiins.bos.v1.ExpiredLicenseH\x00\x42\x07\n\x05state*_\n\x0bLicenseType\x12\x1c\n\x18LICENSE_TYPE_UNSPECIFIED\x10\x00\x12\x19\n\x15LICENSE_TYPE_STANDARD\x10\x01\x12\x17\n\x13LICENSE_TYPE_CUSTOM\x10\x02\x32t\n\x0eLicenseService\x12\x62\n\x0fGetLicenseState\x12&.braiins.bos.v1.GetLicenseStateRequest\x1a\'.braiins.bos.v1.GetLicenseStateResponseb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.license_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_LICENSETYPE"]._serialized_start = 666
_globals["_LICENSETYPE"]._serialized_end = 761
_globals["_NONELICENSE"]._serialized_start = 60
_globals["_NONELICENSE"]._serialized_end = 101
_globals["_LIMITEDLICENSE"]._serialized_start = 103
_globals["_LIMITEDLICENSE"]._serialized_end = 119
_globals["_VALIDLICENSE"]._serialized_start = 122
_globals["_VALIDLICENSE"]._serialized_end = 276
_globals["_EXPIREDLICENSE"]._serialized_start = 279
_globals["_EXPIREDLICENSE"]._serialized_end = 407
_globals["_GETLICENSESTATEREQUEST"]._serialized_start = 409
_globals["_GETLICENSESTATEREQUEST"]._serialized_end = 433
_globals["_GETLICENSESTATERESPONSE"]._serialized_start = 436
_globals["_GETLICENSESTATERESPONSE"]._serialized_end = 664
_globals["_LICENSESERVICE"]._serialized_start = 763
_globals["_LICENSESERVICE"]._serialized_end = 879
# @@protoc_insertion_point(module_scope)

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,110 @@
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
from dataclasses import asdict, dataclass
from typing import Union
@dataclass
class Frequency:
hertz: float
@dataclass
class Voltage:
volt: float
@dataclass
class Power:
watt: int
@dataclass
class TeraHashrate:
terahash_per_second: float
@dataclass
class HashboardPerformanceSettings:
id: str
frequency: Frequency
voltage: Voltage
@dataclass
class ManualPerformanceMode:
global_frequency: Frequency
global_voltage: Voltage
hashboards: list[HashboardPerformanceSettings]
@dataclass
class PowerTargetMode:
power_target: Power
@dataclass
class HashrateTargetMode:
hashrate_target: TeraHashrate
@dataclass
class TunerPerformanceMode:
target: Union[PowerTargetMode, HashrateTargetMode]
@dataclass
class PerformanceMode:
mode: Union[ManualPerformanceMode, TunerPerformanceMode]
@classmethod
def create(
cls,
power_target: int = None,
hashrate_target: float = None,
manual_configuration: ManualPerformanceMode = None,
):
provided_args = [power_target, hashrate_target, manual_configuration]
if sum(arg is not None for arg in provided_args) > 1:
raise ValueError(
"More than one keyword argument provided. Please use only power target, hashrate target, or manual config."
)
elif sum(arg is not None for arg in provided_args) < 1:
raise ValueError(
"Please pass one of power target, hashrate target, or manual config."
)
if power_target is not None:
return cls(
mode=TunerPerformanceMode(
target=PowerTargetMode(power_target=Power(watt=power_target))
)
)
elif hashrate_target is not None:
return cls(
mode=TunerPerformanceMode(
target=HashrateTargetMode(
hashrate_target=TeraHashrate(
terahash_per_second=hashrate_target
)
)
)
)
elif manual_configuration is not None:
return cls(mode=manual_configuration)
def as_dict(self):
return asdict(self)
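A short usage sketch for the dataclasses above, as if appended to this module: create() accepts exactly one of the three targets, and as_dict() flattens the result for transport.
mode = PerformanceMode.create(power_target=3200)
print(mode.as_dict())
# -> {'mode': {'target': {'power_target': {'watt': 3200}}}}

try:
    PerformanceMode.create(power_target=3200, hashrate_target=110)
except ValueError as err:
    print(err)  # exactly one target may be provided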

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/pool.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ...bos.v1 import common_pb2 as bos_dot_v1_dot_common__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x11\x62os/v1/pool.proto\x12\x0e\x62raiins.bos.v1\x1a\x13\x62os/v1/common.proto"\x16\n\x05Quota\x12\r\n\x05value\x18\x01 \x01(\r" \n\x0f\x46ixedShareRatio\x12\r\n\x05value\x18\x01 \x01(\x01"\xe4\x01\n\x16PoolGroupConfiguration\x12\x0b\n\x03uid\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12&\n\x05quota\x18\x03 \x01(\x0b\x32\x15.braiins.bos.v1.QuotaH\x00\x12<\n\x11\x66ixed_share_ratio\x18\x04 \x01(\x0b\x32\x1f.braiins.bos.v1.FixedShareRatioH\x00\x12\x30\n\x05pools\x18\x05 \x03(\x0b\x32!.braiins.bos.v1.PoolConfigurationB\x17\n\x15load_balance_strategy"\x81\x01\n\x11PoolConfiguration\x12\x0b\n\x03uid\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04user\x18\x03 \x01(\t\x12\x15\n\x08password\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07\x65nabled\x18\x05 \x01(\x08H\x01\x88\x01\x01\x42\x0b\n\t_passwordB\n\n\x08_enabled"\xb0\x01\n\tPoolGroup\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x05quota\x18\x02 \x01(\x0b\x32\x15.braiins.bos.v1.QuotaH\x00\x12<\n\x11\x66ixed_share_ratio\x18\x03 \x01(\x0b\x32\x1f.braiins.bos.v1.FixedShareRatioH\x00\x12#\n\x05pools\x18\x04 \x03(\x0b\x32\x14.braiins.bos.v1.PoolB\n\n\x08strategy"\x88\x01\n\x04Pool\x12\x0b\n\x03uid\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04user\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\r\n\x05\x61live\x18\x05 \x01(\x08\x12\x0e\n\x06\x61\x63tive\x18\x06 \x01(\x08\x12(\n\x05stats\x18\x07 \x01(\x0b\x32\x19.braiins.bos.v1.PoolStats"\x98\x01\n\tPoolStats\x12\x17\n\x0f\x61\x63\x63\x65pted_shares\x18\x01 \x01(\x04\x12\x17\n\x0frejected_shares\x18\x02 \x01(\x04\x12\x14\n\x0cstale_shares\x18\x03 \x01(\x04\x12\x17\n\x0flast_difficulty\x18\x04 \x01(\x04\x12\x12\n\nbest_share\x18\x05 \x01(\x04\x12\x16\n\x0egenerated_work\x18\x06 \x01(\x04"\x16\n\x14GetPoolGroupsRequest"G\n\x15GetPoolGroupsResponse\x12.\n\x0bpool_groups\x18\x01 \x03(\x0b\x32\x19.braiins.bos.v1.PoolGroup"\x80\x01\n\x16\x43reatePoolGroupRequest\x12/\n\x0bsave_action\x18\x01 \x01(\x0e\x32\x1a.braiins.bos.v1.SaveAction\x12\x35\n\x05group\x18\x02 \x01(\x0b\x32&.braiins.bos.v1.PoolGroupConfiguration"P\n\x17\x43reatePoolGroupResponse\x12\x35\n\x05group\x18\x01 \x01(\x0b\x32&.braiins.bos.v1.PoolGroupConfiguration"\x80\x01\n\x16UpdatePoolGroupRequest\x12/\n\x0bsave_action\x18\x01 \x01(\x0e\x32\x1a.braiins.bos.v1.SaveAction\x12\x35\n\x05group\x18\x02 \x01(\x0b\x32&.braiins.bos.v1.PoolGroupConfiguration"P\n\x17UpdatePoolGroupResponse\x12\x35\n\x05group\x18\x01 \x01(\x0b\x32&.braiins.bos.v1.PoolGroupConfiguration"V\n\x16RemovePoolGroupRequest\x12/\n\x0bsave_action\x18\x01 \x01(\x0e\x32\x1a.braiins.bos.v1.SaveAction\x12\x0b\n\x03uid\x18\x02 \x01(\t"\x19\n\x17RemovePoolGroupResponse2\x97\x03\n\x0bPoolService\x12\\\n\rGetPoolGroups\x12$.braiins.bos.v1.GetPoolGroupsRequest\x1a%.braiins.bos.v1.GetPoolGroupsResponse\x12\x62\n\x0f\x43reatePoolGroup\x12&.braiins.bos.v1.CreatePoolGroupRequest\x1a\'.braiins.bos.v1.CreatePoolGroupResponse\x12\x62\n\x0fUpdatePoolGroup\x12&.braiins.bos.v1.UpdatePoolGroupRequest\x1a\'.braiins.bos.v1.UpdatePoolGroupResponse\x12\x62\n\x0fRemovePoolGroup\x12&.braiins.bos.v1.RemovePoolGroupRequest\x1a\'.braiins.bos.v1.RemovePoolGroupResponseb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.pool_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_QUOTA"]._serialized_start = 58
_globals["_QUOTA"]._serialized_end = 80
_globals["_FIXEDSHARERATIO"]._serialized_start = 82
_globals["_FIXEDSHARERATIO"]._serialized_end = 114
_globals["_POOLGROUPCONFIGURATION"]._serialized_start = 117
_globals["_POOLGROUPCONFIGURATION"]._serialized_end = 345
_globals["_POOLCONFIGURATION"]._serialized_start = 348
_globals["_POOLCONFIGURATION"]._serialized_end = 477
_globals["_POOLGROUP"]._serialized_start = 480
_globals["_POOLGROUP"]._serialized_end = 656
_globals["_POOL"]._serialized_start = 659
_globals["_POOL"]._serialized_end = 795
_globals["_POOLSTATS"]._serialized_start = 798
_globals["_POOLSTATS"]._serialized_end = 950
_globals["_GETPOOLGROUPSREQUEST"]._serialized_start = 952
_globals["_GETPOOLGROUPSREQUEST"]._serialized_end = 974
_globals["_GETPOOLGROUPSRESPONSE"]._serialized_start = 976
_globals["_GETPOOLGROUPSRESPONSE"]._serialized_end = 1047
_globals["_CREATEPOOLGROUPREQUEST"]._serialized_start = 1050
_globals["_CREATEPOOLGROUPREQUEST"]._serialized_end = 1178
_globals["_CREATEPOOLGROUPRESPONSE"]._serialized_start = 1180
_globals["_CREATEPOOLGROUPRESPONSE"]._serialized_end = 1260
_globals["_UPDATEPOOLGROUPREQUEST"]._serialized_start = 1263
_globals["_UPDATEPOOLGROUPREQUEST"]._serialized_end = 1391
_globals["_UPDATEPOOLGROUPRESPONSE"]._serialized_start = 1393
_globals["_UPDATEPOOLGROUPRESPONSE"]._serialized_end = 1473
_globals["_REMOVEPOOLGROUPREQUEST"]._serialized_start = 1475
_globals["_REMOVEPOOLGROUPREQUEST"]._serialized_end = 1561
_globals["_REMOVEPOOLGROUPRESPONSE"]._serialized_start = 1563
_globals["_REMOVEPOOLGROUPRESPONSE"]._serialized_end = 1588
_globals["_POOLSERVICE"]._serialized_start = 1591
_globals["_POOLSERVICE"]._serialized_end = 1998
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/units.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x12\x62os/v1/units.proto\x12\x0e\x62raiins.bos.v1"+\n\x0cMegaHashrate\x12\x1b\n\x13megahash_per_second\x18\x01 \x01(\x01"+\n\x0cGigaHashrate\x12\x1b\n\x13gigahash_per_second\x18\x01 \x01(\x01"+\n\x0cTeraHashrate\x12\x1b\n\x13terahash_per_second\x18\x01 \x01(\x01"\x1a\n\tFrequency\x12\r\n\x05hertz\x18\x01 \x01(\x01"\x17\n\x07Voltage\x12\x0c\n\x04volt\x18\x01 \x01(\x01"\x15\n\x05Power\x12\x0c\n\x04watt\x18\x01 \x01(\x04"-\n\x0fPowerEfficiency\x12\x1a\n\x12joule_per_terahash\x18\x01 \x01(\x01"\x1f\n\x0bTemperature\x12\x10\n\x08\x64\x65gree_c\x18\x01 \x01(\x01"\x1a\n\x0b\x42\x61sesPoints\x12\x0b\n\x03\x62sp\x18\x01 \x01(\r"\x16\n\x05Hours\x12\r\n\x05hours\x18\x01 \x01(\rb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.units_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_MEGAHASHRATE"]._serialized_start = 38
_globals["_MEGAHASHRATE"]._serialized_end = 81
_globals["_GIGAHASHRATE"]._serialized_start = 83
_globals["_GIGAHASHRATE"]._serialized_end = 126
_globals["_TERAHASHRATE"]._serialized_start = 128
_globals["_TERAHASHRATE"]._serialized_end = 171
_globals["_FREQUENCY"]._serialized_start = 173
_globals["_FREQUENCY"]._serialized_end = 199
_globals["_VOLTAGE"]._serialized_start = 201
_globals["_VOLTAGE"]._serialized_end = 224
_globals["_POWER"]._serialized_start = 226
_globals["_POWER"]._serialized_end = 247
_globals["_POWEREFFICIENCY"]._serialized_start = 249
_globals["_POWEREFFICIENCY"]._serialized_end = 294
_globals["_TEMPERATURE"]._serialized_start = 296
_globals["_TEMPERATURE"]._serialized_end = 327
_globals["_BASESPOINTS"]._serialized_start = 329
_globals["_BASESPOINTS"]._serialized_end = 355
_globals["_HOURS"]._serialized_start = 357
_globals["_HOURS"]._serialized_end = 379
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/v1/work.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ...bos.v1 import units_pb2 as bos_dot_v1_dot_units__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x11\x62os/v1/work.proto\x12\x0e\x62raiins.bos.v1\x1a\x12\x62os/v1/units.proto"\xef\x03\n\x0cRealHashrate\x12-\n\x07last_5s\x18\x01 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12.\n\x08last_15s\x18\x02 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12.\n\x08last_30s\x18\x03 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12-\n\x07last_1m\x18\x04 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12-\n\x07last_5m\x18\x05 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12.\n\x08last_15m\x18\x06 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12.\n\x08last_30m\x18\x07 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12-\n\x07last_1h\x18\x08 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12.\n\x08last_24h\x18\t \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12\x33\n\rsince_restart\x18\n \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate"\xde\x01\n\x0fWorkSolverStats\x12\x33\n\rreal_hashrate\x18\x01 \x01(\x0b\x32\x1c.braiins.bos.v1.RealHashrate\x12\x36\n\x10nominal_hashrate\x18\x02 \x01(\x0b\x32\x1c.braiins.bos.v1.GigaHashrate\x12\x34\n\x0e\x65rror_hashrate\x18\x03 \x01(\x0b\x32\x1c.braiins.bos.v1.MegaHashrate\x12\x14\n\x0c\x66ound_blocks\x18\x04 \x01(\r\x12\x12\n\nbest_share\x18\x05 \x01(\x04\x62\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.v1.work_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_REALHASHRATE"]._serialized_start = 58
_globals["_REALHASHRATE"]._serialized_end = 553
_globals["_WORKSOLVERSTATS"]._serialized_start = 556
_globals["_WORKSOLVERSTATS"]._serialized_end = 778
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Copyright 2022 Upstream Data Inc -
# -
# Licensed under the Apache License, Version 2.0 (the "License"); -
# you may not use this file except in compliance with the License. -
# You may obtain a copy of the License at -
# -
# http://www.apache.org/licenses/LICENSE-2.0 -
# -
# Unless required by applicable law or agreed to in writing, software -
# distributed under the License is distributed on an "AS IS" BASIS, -
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -
# See the License for the specific language governing permissions and -
# limitations under the License. -
# ------------------------------------------------------------------------------
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bos/version.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x11\x62os/version.proto\x12\x0b\x62raiins.bos"U\n\nApiVersion\x12\r\n\x05major\x18\x01 \x01(\x04\x12\r\n\x05minor\x18\x02 \x01(\x04\x12\r\n\x05patch\x18\x03 \x01(\x04\x12\x0b\n\x03pre\x18\x04 \x01(\t\x12\r\n\x05\x62uild\x18\x05 \x01(\t"\x13\n\x11\x41piVersionRequest2]\n\x11\x41piVersionService\x12H\n\rGetApiVersion\x12\x1e.braiins.bos.ApiVersionRequest\x1a\x17.braiins.bos.ApiVersionb\x06proto3'
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "bos.version_pb2", _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals["_APIVERSION"]._serialized_start = 34
_globals["_APIVERSION"]._serialized_end = 119
_globals["_APIVERSIONREQUEST"]._serialized_start = 121
_globals["_APIVERSIONREQUEST"]._serialized_end = 140
_globals["_APIVERSIONSERVICE"]._serialized_start = 142
_globals["_APIVERSIONSERVICE"]._serialized_end = 235
# @@protoc_insertion_point(module_scope)

View File

@@ -19,7 +19,7 @@ from typing import Union
 import httpx
-from pyasic.settings import PyasicSettings
+from pyasic import settings
 from pyasic.web import BaseWebAPI
@@ -27,11 +27,11 @@ class GoldshellWebAPI(BaseWebAPI):
     def __init__(self, ip: str) -> None:
         super().__init__(ip)
         self.username = "admin"
-        self.pwd = PyasicSettings().global_goldshell_password
+        self.pwd = settings.get("default_goldshell_password", "123456789")
         self.jwt = None
     async def auth(self):
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
             try:
                 await client.get(f"http://{self.ip}/user/logout")
                 auth = (
@@ -71,21 +71,21 @@ class GoldshellWebAPI(BaseWebAPI):
             parameters.pop("pool_pwd")
         if not self.jwt:
             await self.auth()
-        async with httpx.AsyncClient() as client:
-            for i in range(PyasicSettings().miner_get_data_retries):
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
+            for i in range(settings.get("get_data_retries", 1)):
                 try:
                     if parameters:
                         response = await client.put(
                             f"http://{self.ip}/mcb/{command}",
                             headers={"Authorization": "Bearer " + self.jwt},
-                            timeout=5,
+                            timeout=settings.get("api_function_timeout", 5),
                             json=parameters,
                         )
                     else:
                         response = await client.get(
                             f"http://{self.ip}/mcb/{command}",
                             headers={"Authorization": "Bearer " + self.jwt},
-                            timeout=5,
+                            timeout=settings.get("api_function_timeout", 5),
                         )
                     json_data = response.json()
                     return json_data
@@ -102,13 +102,13 @@ class GoldshellWebAPI(BaseWebAPI):
         data = {k: None for k in commands}
         data["multicommand"] = True
         await self.auth()
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
             for command in commands:
                 try:
                     response = await client.get(
                         f"http://{self.ip}/mcb/{command}",
                         headers={"Authorization": "Bearer " + self.jwt},
-                        timeout=5,
+                        timeout=settings.get("api_function_timeout", 5),
                     )
                     json_data = response.json()
                     data[command] = json_data
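The hunks above swap the old PyasicSettings dataclass for module-level lookups with explicit defaults, and route every httpx client through a shared transport. A minimal sketch of how those keys might be tuned before polling a Goldshell miner; settings.get() with a default is exactly what the diff uses, while settings.update() is an assumption about the module's write-side API:

from pyasic import settings

# Hypothetical overrides; key names are taken from the calls added above.
settings.update("default_goldshell_password", "my-password")
settings.update("api_function_timeout", 10)  # seconds per web request
settings.update("get_data_retries", 3)       # attempts made in the retry loops above

# Reading a key back mirrors the pattern used throughout this changeset.
timeout = settings.get("api_function_timeout", 5)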

View File

@@ -19,8 +19,8 @@ from typing import Union
 import httpx
+from pyasic import settings
 from pyasic.errors import APIError
-from pyasic.settings import PyasicSettings
 from pyasic.web import BaseWebAPI
@@ -28,11 +28,11 @@ class InnosiliconWebAPI(BaseWebAPI):
     def __init__(self, ip: str) -> None:
         super().__init__(ip)
         self.username = "admin"
-        self.pwd = PyasicSettings().global_innosilicon_password
+        self.pwd = settings.get("default_innosilicon_password", "admin")
        self.jwt = None
     async def auth(self):
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
             try:
                 auth = await client.post(
                     f"http://{self.ip}/api/auth",
@@ -54,13 +54,13 @@ class InnosiliconWebAPI(BaseWebAPI):
     ) -> dict:
         if not self.jwt:
             await self.auth()
-        async with httpx.AsyncClient() as client:
-            for i in range(PyasicSettings().miner_get_data_retries):
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
+            for i in range(settings.get("get_data_retries", 1)):
                 try:
                     response = await client.post(
                         f"http://{self.ip}/api/{command}",
                         headers={"Authorization": "Bearer " + self.jwt},
-                        timeout=5,
+                        timeout=settings.get("api_function_timeout", 5),
                         json=parameters,
                     )
                     json_data = response.json()
@@ -90,13 +90,13 @@ class InnosiliconWebAPI(BaseWebAPI):
         data = {k: None for k in commands}
         data["multicommand"] = True
         await self.auth()
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
             for command in commands:
                 try:
                     response = await client.post(
                         f"http://{self.ip}/api/{command}",
                         headers={"Authorization": "Bearer " + self.jwt},
-                        timeout=5,
+                        timeout=settings.get("api_function_timeout", 5),
                     )
                     json_data = response.json()
                     data[command] = json_data

View File

@@ -19,7 +19,7 @@ from typing import Union
 import httpx
-from pyasic.settings import PyasicSettings
+from pyasic import settings
 from pyasic.web import BaseWebAPI
@@ -27,11 +27,11 @@ class VNishWebAPI(BaseWebAPI):
     def __init__(self, ip: str) -> None:
         super().__init__(ip)
         self.username = "admin"
-        self.pwd = PyasicSettings().global_vnish_password
+        self.pwd = settings.get("default_vnish_password", "admin")
         self.token = None
     async def auth(self):
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
             try:
                 auth = await client.post(
                     f"http://{self.ip}/api/v1/unlock",
@@ -58,8 +58,8 @@ class VNishWebAPI(BaseWebAPI):
     ) -> dict:
         if not self.token:
             await self.auth()
-        async with httpx.AsyncClient() as client:
-            for i in range(PyasicSettings().miner_get_data_retries):
+        async with httpx.AsyncClient(transport=settings.transport()) as client:
+            for i in range(settings.get("get_data_retries", 1)):
                 try:
                     auth = self.token
                     if command.startswith("system"):
@@ -70,21 +70,21 @@ class VNishWebAPI(BaseWebAPI):
                         response = await client.post(
                             f"http://{self.ip}/api/v1/{command}",
                             headers={"Authorization": auth},
-                            timeout=5,
+                            timeout=settings.get("api_function_timeout", 5),
                             json=parameters,
                         )
                     elif not parameters == {}:
                         response = await client.post(
                             f"http://{self.ip}/api/v1/{command}",
                             headers={"Authorization": auth},
-                            timeout=5,
+                            timeout=settings.get("api_function_timeout", 5),
                             json=parameters,
                         )
                     else:
                         response = await client.get(
                             f"http://{self.ip}/api/v1/{command}",
                             headers={"Authorization": auth},
-                            timeout=5,
+                            timeout=settings.get("api_function_timeout", 5),
                         )
                     if not response.status_code == 200:
                         # refresh the token, retry
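The last two lines of this hunk only hint at what happens when the stored token has gone stale. A rough sketch of that refresh-and-retry branch, purely illustrative since the real body falls outside the hunk:

# Hypothetical continuation of the status-code check shown above.
if not response.status_code == 200:
    # refresh the token, retry
    await self.auth()
    response = await client.get(
        f"http://{self.ip}/api/v1/{command}",
        headers={"Authorization": self.token},
        timeout=settings.get("api_function_timeout", 5),
    )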

View File

@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "pyasic"
-version = "0.38.10"
-description = "A set of modules for interfacing with many common types of ASIC bitcoin miners, using both their API and SSH."
+version = "0.41.0"
+description = "A simplified and standardized interface for Bitcoin ASICs."
 authors = ["UpstreamData <brett@upstreamdata.ca>"]
 repository = "https://github.com/UpstreamData/pyasic"
 documentation = "https://pyasic.readthedocs.io/en/latest/"
@@ -9,10 +9,11 @@ readme = "README.md"
 [tool.poetry.dependencies]
 python = "^3.8"
-asyncssh = "^2.13.1"
-httpx = "^0.24.0"
+httpx = "^0.25.2"
+asyncssh = "^2.14.1"
+grpc-requests = "^0.1.12"
 passlib = "^1.7.4"
-pyaml = "^23.5.9"
+pyaml = "^23.9.7"
 toml = "^0.10.2"
 [tool.poetry.group.dev]

View File

@@ -22,7 +22,7 @@ from pyasic.network import MinerNetwork
 class NetworkTest(unittest.TestCase):
     def test_net_range(self):
-        net_range_str = "192.168.1.29, 192.168.1.40-192.168.1.43, 192.168.1.60"
+        net_range_str = ["192.168.1.29", "192.168.1.40-43", "192.168.1.60"]
         net_range_list = [
             "192.168.1.29",
             "192.168.1.40",
@@ -32,8 +32,8 @@ class NetworkTest(unittest.TestCase):
             "192.168.1.60",
         ]
-        net_1 = list(MinerNetwork(net_range_str).get_network().hosts())
-        net_2 = list(MinerNetwork(net_range_list).get_network().hosts())
+        net_1 = list(MinerNetwork.from_list(net_range_list).hosts)
+        net_2 = list(MinerNetwork.from_list(net_range_str).hosts)
         correct_net = [
             ipaddress.IPv4Address("192.168.1.29"),
@@ -51,11 +51,9 @@ class NetworkTest(unittest.TestCase):
         net_1_str = "192.168.1.0"
         net_1_mask = "/29"
-        net_1 = list(MinerNetwork(net_1_str, mask=net_1_mask).get_network().hosts())
+        net_1 = list(MinerNetwork.from_subnet(net_1_str + net_1_mask).hosts)
-        net_2 = list(
-            MinerNetwork("192.168.1.1-192.168.1.5, 192.168.1.6").get_network().hosts()
-        )
+        net_2 = list(MinerNetwork.from_list(["192.168.1.1-5", "192.168.1.6"]).hosts)
         correct_net = [
             ipaddress.IPv4Address("192.168.1.1"),
@@ -70,11 +68,10 @@ class NetworkTest(unittest.TestCase):
         self.assertTrue(net_2 == correct_net)
     def test_net_defaults(self):
-        net = MinerNetwork()
-        net_obj = net.get_network()
-        self.assertEqual(net_obj, MinerNetwork("192.168.1.0", mask=24).get_network())
-        self.assertEqual(net_obj, net.get_network())
+        net = MinerNetwork.from_subnet("192.168.1.1/24")
+        self.assertEqual(
+            net.hosts, list(ipaddress.ip_network("192.168.1.0/24").hosts())
+        )
 if __name__ == "__main__":
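The rewritten tests above double as documentation for the reworked MinerNetwork constructors. A short usage sketch based only on the calls they exercise (the async scanning entry points are not part of this hunk, so only construction and host expansion are shown):

from pyasic.network import MinerNetwork

# Explicit list form; trailing-octet ranges like "40-43" expand to .40 through .43.
net_a = MinerNetwork.from_list(["192.168.1.29", "192.168.1.40-43", "192.168.1.60"])

# CIDR subnet form.
net_b = MinerNetwork.from_subnet("192.168.1.0/29")

print(list(net_a.hosts))      # ipaddress.IPv4Address objects, per the assertions above
print(len(list(net_b.hosts))) # 6 usable hosts in a /29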