Reformatted all files to use the Black formatting style

UpstreamData
2022-03-31 11:27:57 -06:00
parent e1383f2002
commit c57a523553
54 changed files with 1375 additions and 964 deletions
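For reference, a reformat of this kind is normally produced and verified with Black's own command line; the lines below are a generic sketch of that workflow, not a record of the exact commands used for this commit.

    # pip install black
    # black .            -> rewrites every Python file under the repository root in place
    # black --check .    -> CI-style verification; exits non-zero if anything would be reformatted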

View File

@@ -42,17 +42,22 @@ class BaseMinerAPI:
def get_commands(self) -> list:
"""Get a list of command accessible to a specific type of API on the miner."""
return [func for func in
# each function in self
dir(self) if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func not in
[func for func in
dir(BaseMinerAPI) if callable(getattr(BaseMinerAPI, func))
]
]
return [
func
for func in
# each function in self
dir(self)
if callable(getattr(self, func)) and
# no __ methods
not func.startswith("__") and
# remove all functions that are in this base class
func
not in [
func
for func in dir(BaseMinerAPI)
if callable(getattr(BaseMinerAPI, func))
]
]
async def multicommand(self, *commands: str) -> dict:
"""Creates and sends multiple commands as one command to the miner."""
@@ -63,9 +68,11 @@ class BaseMinerAPI:
# make sure we can actually run the command, otherwise it will fail
commands = [command for command in user_commands if command in allowed_commands]
for item in list(set(user_commands) - set(commands)):
warnings.warn(f"""Removing incorrect command: {item}
warnings.warn(
f"""Removing incorrect command: {item}
If you are sure you want to use this command please use API.send_command("{item}", ignore_errors=True) instead.""",
APIWarning)
APIWarning,
)
# standard multicommand format is "command1+command2"
# doesnt work for S19 which is dealt with in the send command function
command = "+".join(commands)
@@ -87,7 +94,12 @@ If you are sure you want to use this command please use API.send_command("{item}
logging.debug(f"{self.ip}: Received multicommand data.")
return data
async def send_command(self, command: str, parameters: str or int or bool = None, ignore_errors: bool = False) -> dict:
async def send_command(
self,
command: str,
parameters: str or int or bool = None,
ignore_errors: bool = False,
) -> dict:
"""Send an API command to the miner and return the result."""
try:
# get reader and writer streams
@@ -104,7 +116,7 @@ If you are sure you want to use this command please use API.send_command("{item}
cmd["parameter"] = parameters
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
@@ -169,10 +181,10 @@ If you are sure you want to use this command please use API.send_command("{item}
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
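The response clean-up shown in this hunk (trailing null byte, stray comma) can be summarized as a small helper; this is an illustrative sketch only, and the newline fix mentioned in the final comment is cut off by the hunk, so it is omitted here:

    import json

    def clean_api_response(data: bytes) -> dict:
        # some firmware appends a null byte (\x00) to the reply
        str_data = data.decode("utf-8").rstrip("\x00")
        # some btminer replies carry an extra comma that breaks json.loads()
        str_data = str_data.replace(",}", "}")
        return json.loads(str_data)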

View File

@@ -18,6 +18,7 @@ class BMMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip: str, port: int = 4028) -> None:
super().__init__(ip, port)
@@ -115,11 +116,7 @@ class BMMinerAPI(BaseMinerAPI):
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
@@ -128,11 +125,9 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
return await self.send_command(
"addpool", parameters=f"{url}, " f"{username}, " f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
@@ -142,8 +137,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
@@ -153,10 +147,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
return await self.send_command("poolquota", parameters=f"{n}, " f"{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
@@ -292,9 +283,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
@@ -337,10 +326,7 @@ class BMMinerAPI(BaseMinerAPI):
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
return await self.send_command("setconfig", parameters=f"{name}, " f"{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
@@ -368,15 +354,11 @@ class BMMinerAPI(BaseMinerAPI):
:return: Confirmation of setting PGA n with opt[,val].
"""
if val:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}, "
f"{val}"
)
return await self.send_command(
"pgaset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("pgaset",
parameters=f"{n}, "
f"{opt}")
return await self.send_command("pgaset", parameters=f"{n}, " f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.

View File

@@ -18,6 +18,7 @@ class BOSMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)

View File

@@ -6,14 +6,12 @@ import binascii
import base64
from passlib.handlers.md5_crypt import md5_crypt
from cryptography.hazmat.primitives.ciphers import \
Cipher, algorithms, modes
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from API import BaseMinerAPI, APIError
from settings import WHATSMINER_PWD
### IMPORTANT ###
# you need to change the password of the miners using the Whatsminer
# tool, then you can set them back to admin with this tool, but they
@@ -35,7 +33,7 @@ def _crypt(word: str, salt: str) -> str:
:return: An MD5 hash of the word with the salt.
"""
# compile a standard format for the salt
standard_salt = re.compile('\s*\$(\d+)\$([\w\./]*)\$')
standard_salt = re.compile("\s*\$(\d+)\$([\w\./]*)\$")
# check if the salt matches
match = standard_salt.match(salt)
# if the matching fails, the salt is incorrect
@@ -58,7 +56,7 @@ def _add_to_16(string: str) -> bytes:
length.
"""
while len(string) % 16 != 0:
string += '\0'
string += "\0"
return str.encode(string) # return bytes
@@ -74,20 +72,20 @@ def parse_btminer_priviledge_data(token_data: dict, data: dict):
:return: A decoded dict version of the privileged command output.
"""
# get the encoded data from the dict
enc_data = data['enc']
enc_data = data["enc"]
# get the aes key from the token data
aeskey = hashlib.sha256(
token_data['host_passwd_md5'].encode()
).hexdigest()
aeskey = hashlib.sha256(token_data["host_passwd_md5"].encode()).hexdigest()
# unhexlify the aes key
aeskey = binascii.unhexlify(aeskey.encode())
# create the required decryptor
aes = Cipher(algorithms.AES(aeskey), modes.ECB())
decryptor = aes.decryptor()
# decode the message with the decryptor
ret_msg = json.loads(decryptor.update(
base64.decodebytes(bytes(enc_data, encoding='utf8'))
).rstrip(b'\0').decode("utf8"))
ret_msg = json.loads(
decryptor.update(base64.decodebytes(bytes(enc_data, encoding="utf8")))
.rstrip(b"\0")
.decode("utf8")
)
return ret_msg
@@ -104,11 +102,9 @@ def create_privileged_cmd(token_data: dict, command: dict) -> bytes:
:return: The encrypted privileged command to be sent to the miner.
"""
# add token to command
command['token'] = token_data['host_sign']
command["token"] = token_data["host_sign"]
# encode host_passwd data and get hexdigest
aeskey = hashlib.sha256(
token_data['host_passwd_md5'].encode()
).hexdigest()
aeskey = hashlib.sha256(token_data["host_passwd_md5"].encode()).hexdigest()
# unhexlify the encoded host_passwd
aeskey = binascii.unhexlify(aeskey.encode())
# create a new AES key
@@ -117,18 +113,16 @@ def create_privileged_cmd(token_data: dict, command: dict) -> bytes:
# dump the command to json
api_json_str = json.dumps(command)
# encode the json command with the aes key
api_json_str_enc = base64.encodebytes(
encryptor.update(
_add_to_16(
api_json_str
)
)
).decode("utf-8").replace("\n", "")
api_json_str_enc = (
base64.encodebytes(encryptor.update(_add_to_16(api_json_str)))
.decode("utf-8")
.replace("\n", "")
)
# label the data as being encoded
data_enc = {'enc': 1, 'data': api_json_str_enc}
data_enc = {"enc": 1, "data": api_json_str_enc}
# dump the labeled data to json
api_packet_str = json.dumps(data_enc)
return api_packet_str.encode('utf-8')
return api_packet_str.encode("utf-8")
class BTMinerAPI(BaseMinerAPI):
@@ -157,16 +151,18 @@ class BTMinerAPI(BaseMinerAPI):
:param port: The port to reference the API on. Default is 4028.
:param pwd: The admin password of the miner. Default is admin.
"""
def __init__(self, ip, port=4028, pwd: str = WHATSMINER_PWD):
super().__init__(ip, port)
self.admin_pwd = pwd
self.current_token = None
async def send_command(self,
command: str or bytes,
parameters: str or int or bool = None,
ignore_errors: bool = False
) -> dict:
async def send_command(
self,
command: str or bytes,
parameters: str or int or bool = None,
ignore_errors: bool = False,
) -> dict:
"""Send a command to the miner API.
Send a command using an asynchronous connection, load the data,
@@ -187,10 +183,7 @@ class BTMinerAPI(BaseMinerAPI):
command = json.dumps({"command": command}).encode("utf-8")
try:
# get reader and writer streams
reader, writer = await asyncio.open_connection(
str(self.ip),
self.port
)
reader, writer = await asyncio.open_connection(str(self.ip), self.port)
# handle OSError 121
except OSError as e:
if e.winerror == "121":
@@ -221,13 +214,10 @@ class BTMinerAPI(BaseMinerAPI):
await writer.wait_closed()
# check if the returned data is encoded
if 'enc' in data.keys():
if "enc" in data.keys():
# try to parse the encoded data
try:
data = parse_btminer_priviledge_data(
self.current_token,
data
)
data = parse_btminer_priviledge_data(self.current_token, data)
except Exception as e:
print(e)
@@ -250,25 +240,24 @@ class BTMinerAPI(BaseMinerAPI):
data = await self.send_command("get_token")
# encrypt the admin password with the salt
pwd = _crypt(self.admin_pwd, "$1$" + data["Msg"]["salt"] + '$')
pwd = pwd.split('$')
pwd = _crypt(self.admin_pwd, "$1$" + data["Msg"]["salt"] + "$")
pwd = pwd.split("$")
# take the 4th item from the pwd split
host_passwd_md5 = pwd[3]
# encrypt the pwd with the time and new salt
tmp = _crypt(pwd[3] + data["Msg"]["time"],
"$1$" + data["Msg"]["newsalt"] + '$'
)
tmp = tmp.split('$')
tmp = _crypt(pwd[3] + data["Msg"]["time"], "$1$" + data["Msg"]["newsalt"] + "$")
tmp = tmp.split("$")
# take the 4th item from the encrypted pwd split
host_sign = tmp[3]
# set the current token
self.current_token = {'host_sign': host_sign,
'host_passwd_md5': host_passwd_md5
}
self.current_token = {
"host_sign": host_sign,
"host_passwd_md5": host_passwd_md5,
}
return self.current_token
#### PRIVILEGED COMMANDS ####
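The indexing in get_token above relies on the md5-crypt string format: the hash returned by _crypt has the shape "$1$<salt>$<checksum>", so splitting on "$" leaves the checksum at index 3. A small illustration with an invented (non-real) hash value:

    hashed = "$1$somesalt$abcdefghijklmnopqrstuv"   # example shape only, not a real digest
    parts = hashed.split("$")                       # ['', '1', 'somesalt', 'abcdefghijklmnopqrstuv']
    checksum = parts[3]                             # the value used for host_passwd_md5 / host_sign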
@@ -276,19 +265,18 @@ class BTMinerAPI(BaseMinerAPI):
# how to configure the Whatsminer API to
# use these commands.
async def update_pools(self,
pool_1: str,
worker_1: str,
passwd_1: str,
pool_2: str = None,
worker_2: str = None,
passwd_2: str = None,
pool_3: str = None,
worker_3: str = None,
passwd_3: str = None
):
async def update_pools(
self,
pool_1: str,
worker_1: str,
passwd_1: str,
pool_2: str = None,
worker_2: str = None,
passwd_2: str = None,
pool_3: str = None,
worker_3: str = None,
passwd_3: str = None,
):
"""Update the pools of the miner using the API.
Update the pools of the miner using the API, only works after
@@ -314,15 +302,12 @@ class BTMinerAPI(BaseMinerAPI):
elif pool_2 and pool_3:
command = {
"cmd": "update_pools",
"pool1": pool_1,
"worker1": worker_1,
"passwd1": passwd_1,
"pool2": pool_2,
"worker2": worker_2,
"passwd2": passwd_2,
"pool3": pool_3,
"worker3": worker_3,
"passwd3": passwd_3,
@@ -333,10 +318,9 @@ class BTMinerAPI(BaseMinerAPI):
"pool1": pool_1,
"worker1": worker_1,
"passwd1": passwd_1,
"pool2": pool_2,
"worker2": worker_2,
"passwd2": passwd_2
"passwd2": passwd_2,
}
else:
command = {
@@ -406,12 +390,13 @@ class BTMinerAPI(BaseMinerAPI):
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
async def set_led(self,
color: str = "red",
period: int = 2000,
duration: int = 1000,
start: int = 0
):
async def set_led(
self,
color: str = "red",
period: int = 2000,
duration: int = 1000,
start: int = 0,
):
"""Set the LED on the miner using the API.
Set the LED on the miner using the API, only works after
@@ -423,12 +408,13 @@ class BTMinerAPI(BaseMinerAPI):
:param start: LED on time offset in the cycle in ms.
:return: A reply informing of the status of setting the LED.
"""
command = {"cmd": "set_led",
"color": color,
"period": period,
"duration": duration,
"start": start
}
command = {
"cmd": "set_led",
"color": color,
"period": period,
"duration": duration,
"start": start,
}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
@@ -486,10 +472,11 @@ class BTMinerAPI(BaseMinerAPI):
password.
"""
# check if password length is greater than 8 bytes
if len(new_pwd.encode('utf-8')) > 8:
if len(new_pwd.encode("utf-8")) > 8:
return APIError(
f"New password too long, the max length is 8. "
f"Password size: {len(new_pwd.encode('utf-8'))}")
f"Password size: {len(new_pwd.encode('utf-8'))}"
)
command = {"cmd": "update_pwd", "old": old_pwd, "new": new_pwd}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -507,9 +494,11 @@ class BTMinerAPI(BaseMinerAPI):
frequency.
"""
if not -10 < percent < 100:
return APIError(f"Frequency % is outside of the allowed "
f"range. Please set a % between -10 and "
f"100")
return APIError(
f"Frequency % is outside of the allowed "
f"range. Please set a % between -10 and "
f"100"
)
command = {"cmd": "set_target_freq", "percent": str(percent)}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -596,9 +585,11 @@ class BTMinerAPI(BaseMinerAPI):
"""
if not 0 < percent < 100:
return APIError(f"Power PCT % is outside of the allowed "
f"range. Please set a % between 0 and "
f"100")
return APIError(
f"Power PCT % is outside of the allowed "
f"range. Please set a % between 0 and "
f"100"
)
command = {"cmd": "set_power_pct", "percent": str(percent)}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
@@ -618,12 +609,9 @@ class BTMinerAPI(BaseMinerAPI):
:return: A reply informing of the status of pre power on.
"""
if not msg == \
"wait for adjust temp" or \
"adjust complete" or \
"adjust continue":
if not msg == "wait for adjust temp" or "adjust complete" or "adjust continue":
return APIError(
'Message is incorrect, please choose one of '
"Message is incorrect, please choose one of "
'["wait for adjust temp", '
'"adjust complete", '
'"adjust continue"]'
@@ -632,10 +620,7 @@ class BTMinerAPI(BaseMinerAPI):
complete = "true"
else:
complete = "false"
command = {"cmd": "pre_power_on",
"complete": complete,
"msg": msg
}
command = {"cmd": "pre_power_on", "complete": complete, "msg": msg}
token_data = await self.get_token()
enc_command = create_privileged_cmd(token_data, command)
return await self.send_command(enc_command)
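One behavioural note on the pre_power_on hunk above: in both the old and the new layout, `not msg == "wait for adjust temp" or "adjust complete" or "adjust continue"` treats the bare string literals as truthy operands of `or`, so the condition evaluates truthy regardless of msg; Black changes only layout, never semantics, so the reformat preserves this. A membership test would express the apparent intent (illustrative sketch only, not part of this commit):

    valid_msgs = ("wait for adjust temp", "adjust complete", "adjust continue")
    if msg not in valid_msgs:
        return APIError(...)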

View File

@@ -18,6 +18,7 @@ class CGMinerAPI(BaseMinerAPI):
:param ip: The IP of the miner to reference the API on.
:param port: The port to reference the API on. Default is 4028.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)
@@ -111,11 +112,7 @@ class CGMinerAPI(BaseMinerAPI):
"""
return await self.send_command("enablepool", parameters=n)
async def addpool(self,
url: str,
username: str,
password: str
) -> dict:
async def addpool(self, url: str, username: str, password: str) -> dict:
"""Add a pool to the miner.
:param url: The URL of the new pool to add.
@@ -124,11 +121,9 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of adding the pool.
"""
return await self.send_command("addpool",
parameters=f"{url}, "
f"{username}, "
f"{password}"
)
return await self.send_command(
"addpool", parameters=f"{url}, " f"{username}, " f"{password}"
)
async def poolpriority(self, *n: int) -> dict:
"""Set pool priority.
@@ -138,8 +133,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool priority.
"""
pools = f"{','.join([str(item) for item in n])}"
return await self.send_command("poolpriority",
parameters=pools)
return await self.send_command("poolpriority", parameters=pools)
async def poolquota(self, n: int, q: int) -> dict:
"""Set pool quota.
@@ -149,10 +143,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: A confirmation of setting pool quota.
"""
return await self.send_command("poolquota",
parameters=f"{n}, "
f"{q}"
)
return await self.send_command("poolquota", parameters=f"{n}, " f"{q}")
async def disablepool(self, n: int) -> dict:
"""Disable a pool.
@@ -288,9 +279,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting failover-only.
"""
return await self.send_command("failover-only",
parameters=failover
)
return await self.send_command("failover-only", parameters=failover)
async def coin(self) -> dict:
"""Get information on the current coin.
@@ -333,10 +322,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: The results of setting config of name to n.
"""
return await self.send_command("setconfig",
parameters=f"{name}, "
f"{n}"
)
return await self.send_command("setconfig", parameters=f"{name}, " f"{n}")
async def usbstats(self) -> dict:
"""Get stats of all USB devices except ztex.
@@ -364,12 +350,11 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting PGA n with opt[,val].
"""
if val:
return await self.send_command("pgaset", parameters=f"{n}, "
f"{opt}, "
f"{val}")
return await self.send_command(
"pgaset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("pgaset", parameters=f"{n}, "
f"{opt}")
return await self.send_command("pgaset", parameters=f"{n}, " f"{opt}")
async def zero(self, which: str, summary: bool) -> dict:
"""Zero a device.
@@ -384,8 +369,7 @@ class CGMinerAPI(BaseMinerAPI):
:return: the STATUS section with info on the zero and optional
summary.
"""
return await self.send_command("zero", parameters=f"{which}, "
f"{summary}")
return await self.send_command("zero", parameters=f"{which}, " f"{summary}")
async def hotplug(self, n: int) -> dict:
"""Enable hotplug.
@@ -486,12 +470,11 @@ class CGMinerAPI(BaseMinerAPI):
:return: Confirmation of setting option opt to value val.
"""
if val:
return await self.send_command("ascset", parameters=f"{n}, "
f"{opt}, "
f"{val}")
return await self.send_command(
"ascset", parameters=f"{n}, " f"{opt}, " f"{val}"
)
else:
return await self.send_command("ascset", parameters=f"{n}, "
f"{opt}")
return await self.send_command("ascset", parameters=f"{n}, " f"{opt}")
async def lcd(self) -> dict:
"""Get a general all-in-one status summary of the miner.

View File

@@ -8,6 +8,7 @@ class UnknownAPI(BaseMinerAPI):
and API commands as possible (API ⋂ API), to ensure that it can be used
with as many APIs as possible.
"""
def __init__(self, ip, port=4028):
super().__init__(ip, port)

View File

@@ -1,4 +1,4 @@
from tools.bad_board_util import main
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()

View File

@@ -75,4 +75,4 @@ SAMPLE CONFIG
"shutdown_duration": 3.0, # -> (default = 3.0, float, (bos: power_scaling.shutdown_duration))
}
}
"""
"""

View File

@@ -8,7 +8,7 @@ async def bos_config_convert(config: dict):
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["generator"] = "upstream_config_util"
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temp_control":
out_config["temperature"] = {}
@@ -47,20 +47,28 @@ async def bos_config_convert(config: dict):
out_config["pool_groups"][idx]["pools"] = []
out_config["pool_groups"][idx] = {}
if "name" in config[opt][idx].keys():
out_config["pool_groups"][idx]["group_name"] = config[opt][idx]["name"]
out_config["pool_groups"][idx]["group_name"] = config[opt][idx][
"name"
]
else:
out_config["pool_groups"][idx]["group_name"] = f"group_{idx}"
if "quota" in config[opt][idx].keys():
out_config["pool_groups"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["pool_groups"][idx]["quota"] = 1
out_config["pool_groups"][idx]["pools"] = [{} for _item in range(len(config[opt][idx]["pool"]))]
out_config["pool_groups"][idx]["pools"] = [
{} for _item in range(len(config[opt][idx]["pool"]))
]
for pool_idx in range(len(config[opt][idx]["pool"])):
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[opt][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx]["username"] = config[opt][idx]["pool"][pool_idx][
"user"]
out_config["pool_groups"][idx]["pools"][pool_idx]["password"] = config[opt][idx]["pool"][pool_idx][
"password"]
out_config["pool_groups"][idx]["pools"][pool_idx]["url"] = config[
opt
][idx]["pool"][pool_idx]["url"]
out_config["pool_groups"][idx]["pools"][pool_idx][
"username"
] = config[opt][idx]["pool"][pool_idx]["user"]
out_config["pool_groups"][idx]["pools"][pool_idx][
"password"
] = config[opt][idx]["pool"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
@@ -82,15 +90,21 @@ async def bos_config_convert(config: dict):
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
out_config["power_scaling"]["min_psu_power_limit"] = config[opt][
"min_psu_power_limit"
]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
out_config["power_scaling"]["shutdown_enabled"] = config[opt][
"shutdown_enabled"
]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
out_config["power_scaling"]["shutdown_duration"] = config[opt][
"shutdown_duration"
]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return yaml.dump(out_config, sort_keys=False)
@@ -102,7 +116,7 @@ async def general_config_convert_bos(yaml_config):
for opt in config:
if opt == "format":
out_config["format"] = config[opt]
out_config["format"]["generator"] = 'upstream_config_util'
out_config["format"]["generator"] = "upstream_config_util"
out_config["format"]["timestamp"] = int(time.time())
elif opt == "temperature":
out_config["temp_control"] = {}
@@ -148,11 +162,19 @@ async def general_config_convert_bos(yaml_config):
out_config["group"][idx]["quota"] = config[opt][idx]["quota"]
else:
out_config["group"][idx]["quota"] = 1
out_config["group"][idx]["pool"] = [{} for _item in range(len(config[opt][idx]["pools"]))]
out_config["group"][idx]["pool"] = [
{} for _item in range(len(config[opt][idx]["pools"]))
]
for pool_idx in range(len(config[opt][idx]["pools"])):
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][idx]["pools"][pool_idx]["url"]
out_config["group"][idx]["pool"][pool_idx]["user"] = config[opt][idx]["pools"][pool_idx]["username"]
out_config["group"][idx]["pool"][pool_idx]["password"] = config[opt][idx]["pools"][pool_idx]["password"]
out_config["group"][idx]["pool"][pool_idx]["url"] = config[opt][
idx
]["pools"][pool_idx]["url"]
out_config["group"][idx]["pool"][pool_idx]["user"] = config[opt][
idx
]["pools"][pool_idx]["username"]
out_config["group"][idx]["pool"][pool_idx]["password"] = config[
opt
][idx]["pools"][pool_idx]["password"]
elif opt == "autotuning":
out_config["autotuning"] = {}
if "enabled" in config[opt].keys():
@@ -174,15 +196,21 @@ async def general_config_convert_bos(yaml_config):
else:
out_config["power_scaling"]["power_step"] = 100
if "min_psu_power_limit" in config[opt].keys():
out_config["power_scaling"]["min_psu_power_limit"] = config[opt]["min_psu_power_limit"]
out_config["power_scaling"]["min_psu_power_limit"] = config[opt][
"min_psu_power_limit"
]
else:
out_config["power_scaling"]["min_psu_power_limit"] = 800
if "shutdown_enabled" in config[opt].keys():
out_config["power_scaling"]["shutdown_enabled"] = config[opt]["shutdown_enabled"]
out_config["power_scaling"]["shutdown_enabled"] = config[opt][
"shutdown_enabled"
]
else:
out_config["power_scaling"]["shutdown_enabled"] = False
if "shutdown_duration" in config[opt].keys():
out_config["power_scaling"]["shutdown_duration"] = config[opt]["shutdown_duration"]
out_config["power_scaling"]["shutdown_duration"] = config[opt][
"shutdown_duration"
]
else:
out_config["power_scaling"]["shutdown_duration"] = 3.0
return out_config
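To make the pool-group mapping above concrete, here is a hypothetical fragment of the two shapes involved (all values invented for illustration): bos_config_convert turns a BOSMiner "group"/"pool"/"user" layout into the general "pool_groups"/"pools"/"username" layout, and general_config_convert_bos reverses it.

    bos_fragment = {
        "group": [
            {
                "name": "group_0",
                "quota": 1,
                "pool": [
                    {"url": "stratum+tcp://pool.example.com:3333", "user": "worker1", "password": "x"}
                ],
            }
        ]
    }

    general_fragment = {
        "pool_groups": [
            {
                "group_name": "group_0",
                "quota": 1,
                "pools": [
                    {"url": "stratum+tcp://pool.example.com:3333", "username": "worker1", "password": "x"}
                ],
            }
        ]
    }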

View File

@@ -1,4 +1,4 @@
from tools.cfg_util import main
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -5,8 +5,8 @@ from settings import DEBUG
logging.basicConfig(
# filename="logfile.txt",
# filemode="a",
format='[%(levelname)s][%(asctime)s](%(name)s) - %(message)s',
datefmt='%x %X'
format="[%(levelname)s][%(asctime)s](%(name)s) - %(message)s",
datefmt="%x %X",
)
logger = logging.getLogger()

View File

@@ -19,20 +19,26 @@ version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\",
"include_msvcr": True,
"add_to_path": True
},
},
executables=[Executable(
"board_util.py",
base=base,
icon="icon.ico",
target_name="UpstreamBoardUtil.exe"
)]
)
setup(
name="UpstreamBoardUtil.exe",
version=version,
description="Upstream Data Board Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\board_util\\UpstreamBoardUtil-{version}-{sys.platform}\\",
"include_files": [
os.path.join(os.getcwd(), "settings/settings.toml"),
],
"include_msvcr": True,
"add_to_path": True,
},
},
executables=[
Executable(
"board_util.py",
base=base,
icon="icon.ico",
target_name="UpstreamBoardUtil.exe",
)
],
)

View File

@@ -19,13 +19,25 @@ version = version.strftime("%y.%m.%d")
print(version)
setup(name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={"build_exe": {"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md")],
},
},
executables=[Executable("config_tool.py", base=base, icon="icon.ico", target_name="UpstreamCFGUtil.exe")]
)
setup(
name="UpstreamCFGUtil.exe",
version=version,
description="Upstream Data Config Utility Build",
options={
"build_exe": {
"build_exe": f"{os.getcwd()}\\build\\UpstreamCFGUtil-{version}-{sys.platform}\\",
"include_files": [
os.path.join(os.getcwd(), "settings/settings.toml"),
os.path.join(os.getcwd(), "static/CFG-Util-README.md"),
],
},
},
executables=[
Executable(
"config_tool.py",
base=base,
icon="icon.ico",
target_name="UpstreamCFGUtil.exe",
)
],
)

View File

@@ -9,7 +9,11 @@ import logging
class BaseMiner:
def __init__(self, ip: str, api: BMMinerAPI or BOSMinerAPI or CGMinerAPI or BTMinerAPI or UnknownAPI) -> None:
def __init__(
self,
ip: str,
api: BMMinerAPI or BOSMinerAPI or CGMinerAPI or BTMinerAPI or UnknownAPI,
) -> None:
self.ip = ipaddress.ip_address(ip)
self.uname = None
self.pwd = None
@@ -20,19 +24,23 @@ class BaseMiner:
async def _get_ssh_connection(self) -> asyncssh.connect:
"""Create a new asyncssh connection"""
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=['ssh-rsa'])
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username=self.uname,
password=self.pwd,
server_host_key_algs=["ssh-rsa"],
)
return conn
except asyncssh.misc.PermissionDenied:
try:
conn = await asyncssh.connect(str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=['ssh-rsa'])
conn = await asyncssh.connect(
str(self.ip),
known_hosts=None,
username="admin",
password="admin",
server_host_key_algs=["ssh-rsa"],
)
return conn
except Exception as e:
logging.warning(f"{self} raised an exception: {e}")

View File

@@ -24,7 +24,7 @@ class BOSMinerS9(BOSMiner):
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
logging.debug(f"{self}: Opening config file.")
async with sftp.open('/etc/bosminer.toml', 'w+') as file:
async with sftp.open("/etc/bosminer.toml", "w+") as file:
await file.write(toml_conf)
logging.debug(f"{self}: Restarting BOSMiner")
await conn.run("/etc/init.d/bosminer restart")

View File

@@ -13,7 +13,7 @@ class HiveonT9(BMMiner):
async def get_board_info(self) -> dict:
"""Gets data on each board and chain in the miner."""
board_stats = await self.api.stats()
stats = board_stats['STATS'][1]
stats = board_stats["STATS"][1]
boards = {}
board_chains = {0: [2, 9, 10], 1: [3, 11, 12], 2: [4, 13, 14]}
for idx, board in enumerate(board_chains):
@@ -25,12 +25,14 @@ class HiveonT9(BMMiner):
nominal = False
else:
nominal = True
boards[board].append({
"chain": chain,
"chip_count": count,
"chip_status": chips,
"nominal": nominal
})
boards[board].append(
{
"chain": chain,
"chip_count": count,
"chip_status": chips,
"nominal": nominal,
}
)
return boards
async def get_bad_boards(self) -> dict:
@@ -43,4 +45,4 @@ class HiveonT9(BMMiner):
if board not in bad_boards.keys():
bad_boards[board] = []
bad_boards[board].append(chain)
return bad_boards
return bad_boards

View File

@@ -7,155 +7,157 @@ class CGMinerAvalon8(CGMiner):
super().__init__(ip)
self.model = "Avalon 8"
self.api_type = "CGMiner"
self.pattern = re.compile(r'Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s'
'DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s'
'Elapsed\[(?P<Elapsed>[-0-9]+)\]\s'
'MW\[(?P<MW>[-\s0-9]+)\]\s'
'LW\[(?P<LW>[-0-9]+)\]\s'
'MH\[(?P<MH>[-\s0-9]+)\]\s'
'HW\[(?P<HW>[-0-9]+)\]\s'
'Temp\[(?P<Temp>[0-9]+)\]\s'
'TMax\[(?P<TMax>[0-9]+)\]\s'
'Fan\[(?P<Fan>[0-9]+)\]\s'
'FanR\[(?P<FanR>[0-9]+)%\]\s'
'Vi\[(?P<Vi>[-\s0-9]+)\]\s'
'Vo\[(?P<Vo>[-\s0-9]+)\]\s'
'('
'PLL0\[(?P<PLL0>[-\s0-9]+)\]\s'
'PLL1\[(?P<PLL1>[-\s0-9]+)\]\s'
'PLL2\[(?P<PLL2>[-\s0-9]+)\]\s'
'PLL3\[(?P<PLL3>[-\s0-9]+)\]\s'
')?'
'GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s'
'WU\[(?P<WU>[-.0-9]+)\]\s'
'Freq\[(?P<Freq>[.0-9]+)\]\s'
'PG\[(?P<PG>[0-9]+)\]\s'
'Led\[(?P<LED>0|1)\]\s'
'MW0\[(?P<MW0>[0-9\s]+)\]\s'
'MW1\[(?P<MW1>[0-9\s]+)\]\s'
'MW2\[(?P<MW2>[0-9\s]+)\]\s'
'MW3\[(?P<MW3>[0-9\s]+)\]\s'
'TA\[(?P<TA>[0-9]+)\]\s'
'ECHU\[(?P<ECHU>[0-9\s]+)\]\s'
'ECMM\[(?P<ECMM>[0-9]+)\]\s.*'
'FAC0\[(?P<FAC0>[-0-9]+)\]\s'
'OC\[(?P<OC>[0-9]+)\]\s'
'SF0\[(?P<SF0>[-\s0-9]+)\]\s'
'SF1\[(?P<SF1>[-\s0-9]+)\]\s'
'SF2\[(?P<SF2>[-\s0-9]+)\]\s'
'SF3\[(?P<SF3>[-\s0-9]+)\]\s'
'PMUV\[(?P<PMUV>[-\s\S*]+)\]\s'
'PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s'
'PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s'
'PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s'
'PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s'
'PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s'
'PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s'
'PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s'
'PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s'
'PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s'
'PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s'
'PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s'
'PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s'
'PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s'
'PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s'
'PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s'
'PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s'
'PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s'
'PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s'
'PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s'
'PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s'
'PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s'
'PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s'
'PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s'
'PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s'
'PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s'
'PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s'
'PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s'
'PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s'
'PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s'
'PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s'
'PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s'
'PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s'
'PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s'
'PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s'
'PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s'
'PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s'
'PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s'
'PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s'
'PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s'
'PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s'
'PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s'
'PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s'
'PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s'
'PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s'
'PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s'
'PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s'
'PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s'
'PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s'
'PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s'
'PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s'
'PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s'
'PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s'
'PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s'
'PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s'
'PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s'
'PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s'
'PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s'
'PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s'
'PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s'
'PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s'
'PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s'
'PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s'
'PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s'
'PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s'
'PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s'
'PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s'
'PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s'
'PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s'
'PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s'
'PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s'
'PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s'
'PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s'
'PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s'
'PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s'
'PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s'
'PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s'
'PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s'
'PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s'
'PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s'
'PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s'
'PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s'
'PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s'
'PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s'
'PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s'
'PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s'
'PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s'
'PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s'
'PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s'
'PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s'
'PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s'
'PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s'
'PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s'
'PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s'
'PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s'
'PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s'
'PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s'
'PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s'
'PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s'
'PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s'
'PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s'
'PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s'
'PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s'
'PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s'
'PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s'
'PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s'
'PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s'
'PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s'
'PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s'
'FM\[(?P<FM>[0-9]+)\]\s'
'CRC\[(?P<CRC>[0-9\s]+)\]', re.X
)
self.pattern = re.compile(
r"Ver\[(?P<Ver>[-0-9A-Fa-f+]+)\]\s"
"DNA\[(?P<DNA>[0-9A-Fa-f]+)\]\s"
"Elapsed\[(?P<Elapsed>[-0-9]+)\]\s"
"MW\[(?P<MW>[-\s0-9]+)\]\s"
"LW\[(?P<LW>[-0-9]+)\]\s"
"MH\[(?P<MH>[-\s0-9]+)\]\s"
"HW\[(?P<HW>[-0-9]+)\]\s"
"Temp\[(?P<Temp>[0-9]+)\]\s"
"TMax\[(?P<TMax>[0-9]+)\]\s"
"Fan\[(?P<Fan>[0-9]+)\]\s"
"FanR\[(?P<FanR>[0-9]+)%\]\s"
"Vi\[(?P<Vi>[-\s0-9]+)\]\s"
"Vo\[(?P<Vo>[-\s0-9]+)\]\s"
"("
"PLL0\[(?P<PLL0>[-\s0-9]+)\]\s"
"PLL1\[(?P<PLL1>[-\s0-9]+)\]\s"
"PLL2\[(?P<PLL2>[-\s0-9]+)\]\s"
"PLL3\[(?P<PLL3>[-\s0-9]+)\]\s"
")?"
"GHSmm\[(?P<GHSmm>[-.0-9]+)\]\s"
"WU\[(?P<WU>[-.0-9]+)\]\s"
"Freq\[(?P<Freq>[.0-9]+)\]\s"
"PG\[(?P<PG>[0-9]+)\]\s"
"Led\[(?P<LED>0|1)\]\s"
"MW0\[(?P<MW0>[0-9\s]+)\]\s"
"MW1\[(?P<MW1>[0-9\s]+)\]\s"
"MW2\[(?P<MW2>[0-9\s]+)\]\s"
"MW3\[(?P<MW3>[0-9\s]+)\]\s"
"TA\[(?P<TA>[0-9]+)\]\s"
"ECHU\[(?P<ECHU>[0-9\s]+)\]\s"
"ECMM\[(?P<ECMM>[0-9]+)\]\s.*"
"FAC0\[(?P<FAC0>[-0-9]+)\]\s"
"OC\[(?P<OC>[0-9]+)\]\s"
"SF0\[(?P<SF0>[-\s0-9]+)\]\s"
"SF1\[(?P<SF1>[-\s0-9]+)\]\s"
"SF2\[(?P<SF2>[-\s0-9]+)\]\s"
"SF3\[(?P<SF3>[-\s0-9]+)\]\s"
"PMUV\[(?P<PMUV>[-\s\S*]+)\]\s"
"PVT_T0\[(?P<PVT_T0>[-0-9\s]+)\]\s"
"PVT_T1\[(?P<PVT_T1>[-0-9\s]+)\]\s"
"PVT_T2\[(?P<PVT_T2>[-0-9\s]+)\]\s"
"PVT_T3\[(?P<PVT_T3>[-0-9\s]+)\]\s"
"PVT_V0_0\[(?P<PVT_V0_0>[-0-9\s]+)\]\s"
"PVT_V0_1\[(?P<PVT_V0_1>[-0-9\s]+)\]\s"
"PVT_V0_2\[(?P<PVT_V0_2>[-0-9\s]+)\]\s"
"PVT_V0_3\[(?P<PVT_V0_3>[-0-9\s]+)\]\s"
"PVT_V0_4\[(?P<PVT_V0_4>[-0-9\s]+)\]\s"
"PVT_V0_5\[(?P<PVT_V0_5>[-0-9\s]+)\]\s"
"PVT_V0_6\[(?P<PVT_V0_6>[-0-9\s]+)\]\s"
"PVT_V0_7\[(?P<PVT_V0_7>[-0-9\s]+)\]\s"
"PVT_V0_8\[(?P<PVT_V0_8>[-0-9\s]+)\]\s"
"PVT_V0_9\[(?P<PVT_V0_9>[-0-9\s]+)\]\s"
"PVT_V0_10\[(?P<PVT_V0_10>[-0-9\s]+)\]\s"
"PVT_V0_11\[(?P<PVT_V0_11>[-0-9\s]+)\]\s"
"PVT_V0_12\[(?P<PVT_V0_12>[-0-9\s]+)\]\s"
"PVT_V0_13\[(?P<PVT_V0_13>[-0-9\s]+)\]\s"
"PVT_V0_14\[(?P<PVT_V0_14>[-0-9\s]+)\]\s"
"PVT_V0_15\[(?P<PVT_V0_15>[-0-9\s]+)\]\s"
"PVT_V0_16\[(?P<PVT_V0_16>[-0-9\s]+)\]\s"
"PVT_V0_17\[(?P<PVT_V0_17>[-0-9\s]+)\]\s"
"PVT_V0_18\[(?P<PVT_V0_18>[-0-9\s]+)\]\s"
"PVT_V0_19\[(?P<PVT_V0_19>[-0-9\s]+)\]\s"
"PVT_V0_20\[(?P<PVT_V0_20>[-0-9\s]+)\]\s"
"PVT_V0_21\[(?P<PVT_V0_21>[-0-9\s]+)\]\s"
"PVT_V0_22\[(?P<PVT_V0_22>[-0-9\s]+)\]\s"
"PVT_V0_23\[(?P<PVT_V0_23>[-0-9\s]+)\]\s"
"PVT_V0_24\[(?P<PVT_V0_24>[-0-9\s]+)\]\s"
"PVT_V0_25\[(?P<PVT_V0_25>[-0-9\s]+)\]\s"
"PVT_V1_0\[(?P<PVT_V1_0>[-0-9\s]+)\]\s"
"PVT_V1_1\[(?P<PVT_V1_1>[-0-9\s]+)\]\s"
"PVT_V1_2\[(?P<PVT_V1_2>[-0-9\s]+)\]\s"
"PVT_V1_3\[(?P<PVT_V1_3>[-0-9\s]+)\]\s"
"PVT_V1_4\[(?P<PVT_V1_4>[-0-9\s]+)\]\s"
"PVT_V1_5\[(?P<PVT_V1_5>[-0-9\s]+)\]\s"
"PVT_V1_6\[(?P<PVT_V1_6>[-0-9\s]+)\]\s"
"PVT_V1_7\[(?P<PVT_V1_7>[-0-9\s]+)\]\s"
"PVT_V1_8\[(?P<PVT_V1_8>[-0-9\s]+)\]\s"
"PVT_V1_9\[(?P<PVT_V1_9>[-0-9\s]+)\]\s"
"PVT_V1_10\[(?P<PVT_V1_10>[-0-9\s]+)\]\s"
"PVT_V1_11\[(?P<PVT_V1_11>[-0-9\s]+)\]\s"
"PVT_V1_12\[(?P<PVT_V1_12>[-0-9\s]+)\]\s"
"PVT_V1_13\[(?P<PVT_V1_13>[-0-9\s]+)\]\s"
"PVT_V1_14\[(?P<PVT_V1_14>[-0-9\s]+)\]\s"
"PVT_V1_15\[(?P<PVT_V1_15>[-0-9\s]+)\]\s"
"PVT_V1_16\[(?P<PVT_V1_16>[-0-9\s]+)\]\s"
"PVT_V1_17\[(?P<PVT_V1_17>[-0-9\s]+)\]\s"
"PVT_V1_18\[(?P<PVT_V1_18>[-0-9\s]+)\]\s"
"PVT_V1_19\[(?P<PVT_V1_19>[-0-9\s]+)\]\s"
"PVT_V1_20\[(?P<PVT_V1_20>[-0-9\s]+)\]\s"
"PVT_V1_21\[(?P<PVT_V1_21>[-0-9\s]+)\]\s"
"PVT_V1_22\[(?P<PVT_V1_22>[-0-9\s]+)\]\s"
"PVT_V1_23\[(?P<PVT_V1_23>[-0-9\s]+)\]\s"
"PVT_V1_24\[(?P<PVT_V1_24>[-0-9\s]+)\]\s"
"PVT_V1_25\[(?P<PVT_V1_25>[-0-9\s]+)\]\s"
"PVT_V2_0\[(?P<PVT_V2_0>[-0-9\s]+)\]\s"
"PVT_V2_1\[(?P<PVT_V2_1>[-0-9\s]+)\]\s"
"PVT_V2_2\[(?P<PVT_V2_2>[-0-9\s]+)\]\s"
"PVT_V2_3\[(?P<PVT_V2_3>[-0-9\s]+)\]\s"
"PVT_V2_4\[(?P<PVT_V2_4>[-0-9\s]+)\]\s"
"PVT_V2_5\[(?P<PVT_V2_5>[-0-9\s]+)\]\s"
"PVT_V2_6\[(?P<PVT_V2_6>[-0-9\s]+)\]\s"
"PVT_V2_7\[(?P<PVT_V2_7>[-0-9\s]+)\]\s"
"PVT_V2_8\[(?P<PVT_V2_8>[-0-9\s]+)\]\s"
"PVT_V2_9\[(?P<PVT_V2_9>[-0-9\s]+)\]\s"
"PVT_V2_10\[(?P<PVT_V2_10>[-0-9\s]+)\]\s"
"PVT_V2_11\[(?P<PVT_V2_11>[-0-9\s]+)\]\s"
"PVT_V2_12\[(?P<PVT_V2_12>[-0-9\s]+)\]\s"
"PVT_V2_13\[(?P<PVT_V2_13>[-0-9\s]+)\]\s"
"PVT_V2_14\[(?P<PVT_V2_14>[-0-9\s]+)\]\s"
"PVT_V2_15\[(?P<PVT_V2_15>[-0-9\s]+)\]\s"
"PVT_V2_16\[(?P<PVT_V2_16>[-0-9\s]+)\]\s"
"PVT_V2_17\[(?P<PVT_V2_17>[-0-9\s]+)\]\s"
"PVT_V2_18\[(?P<PVT_V2_18>[-0-9\s]+)\]\s"
"PVT_V2_19\[(?P<PVT_V2_19>[-0-9\s]+)\]\s"
"PVT_V2_20\[(?P<PVT_V2_20>[-0-9\s]+)\]\s"
"PVT_V2_21\[(?P<PVT_V2_21>[-0-9\s]+)\]\s"
"PVT_V2_22\[(?P<PVT_V2_22>[-0-9\s]+)\]\s"
"PVT_V2_23\[(?P<PVT_V2_23>[-0-9\s]+)\]\s"
"PVT_V2_24\[(?P<PVT_V2_24>[-0-9\s]+)\]\s"
"PVT_V2_25\[(?P<PVT_V2_25>[-0-9\s]+)\]\s"
"PVT_V3_0\[(?P<PVT_V3_0>[-0-9\s]+)\]\s"
"PVT_V3_1\[(?P<PVT_V3_1>[-0-9\s]+)\]\s"
"PVT_V3_2\[(?P<PVT_V3_2>[-0-9\s]+)\]\s"
"PVT_V3_3\[(?P<PVT_V3_3>[-0-9\s]+)\]\s"
"PVT_V3_4\[(?P<PVT_V3_4>[-0-9\s]+)\]\s"
"PVT_V3_5\[(?P<PVT_V3_5>[-0-9\s]+)\]\s"
"PVT_V3_6\[(?P<PVT_V3_6>[-0-9\s]+)\]\s"
"PVT_V3_7\[(?P<PVT_V3_7>[-0-9\s]+)\]\s"
"PVT_V3_8\[(?P<PVT_V3_8>[-0-9\s]+)\]\s"
"PVT_V3_9\[(?P<PVT_V3_9>[-0-9\s]+)\]\s"
"PVT_V3_10\[(?P<PVT_V3_10>[-0-9\s]+)\]\s"
"PVT_V3_11\[(?P<PVT_V3_11>[-0-9\s]+)\]\s"
"PVT_V3_12\[(?P<PVT_V3_12>[-0-9\s]+)\]\s"
"PVT_V3_13\[(?P<PVT_V3_13>[-0-9\s]+)\]\s"
"PVT_V3_14\[(?P<PVT_V3_14>[-0-9\s]+)\]\s"
"PVT_V3_15\[(?P<PVT_V3_15>[-0-9\s]+)\]\s"
"PVT_V3_16\[(?P<PVT_V3_16>[-0-9\s]+)\]\s"
"PVT_V3_17\[(?P<PVT_V3_17>[-0-9\s]+)\]\s"
"PVT_V3_18\[(?P<PVT_V3_18>[-0-9\s]+)\]\s"
"PVT_V3_19\[(?P<PVT_V3_19>[-0-9\s]+)\]\s"
"PVT_V3_20\[(?P<PVT_V3_20>[-0-9\s]+)\]\s"
"PVT_V3_21\[(?P<PVT_V3_21>[-0-9\s]+)\]\s"
"PVT_V3_22\[(?P<PVT_V3_22>[-0-9\s]+)\]\s"
"PVT_V3_23\[(?P<PVT_V3_23>[-0-9\s]+)\]\s"
"PVT_V3_24\[(?P<PVT_V3_24>[-0-9\s]+)\]\s"
"PVT_V3_25\[(?P<PVT_V3_25>[-0-9\s]+)\]\s"
"FM\[(?P<FM>[0-9]+)\]\s"
"CRC\[(?P<CRC>[0-9\s]+)\]",
re.X,
)
def __repr__(self) -> str:
return f"CGMinerAvalon8: {str(self.ip)}"
@@ -163,7 +165,7 @@ class CGMinerAvalon8(CGMiner):
def parse_estats(self, estats):
for estat in estats:
for key in estat:
if key[:5] == 'MM ID':
if key[:5] == "MM ID":
self._parse_estat(estat, key)
def _parse_estat(self, estat, key):

View File

@@ -9,8 +9,8 @@ class BMMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
def __repr__(self) -> str:
return f"BMMiner: {str(self.ip)}"
@@ -31,7 +31,7 @@ class BMMiner(BaseMiner):
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
logging.debug(f"Found hostname for {self.ip}: {host}")
return host

View File

@@ -4,14 +4,15 @@ import toml
from config.bos import bos_config_convert, general_config_convert_bos
import logging
class BOSMiner(BaseMiner):
def __init__(self, ip: str) -> None:
api = BOSMinerAPI(ip)
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
self.nominal_chips = 63
def __repr__(self) -> str:
@@ -39,13 +40,13 @@ class BOSMiner(BaseMiner):
async def fault_light_on(self) -> None:
"""Sends command to turn on fault light on the miner."""
logging.debug(f"{self}: Sending fault_light on command.")
await self.send_ssh_command('miner fault_light on')
await self.send_ssh_command("miner fault_light on")
logging.debug(f"{self}: fault_light on command completed.")
async def fault_light_off(self) -> None:
"""Sends command to turn off fault light on the miner."""
logging.debug(f"{self}: Sending fault_light off command.")
await self.send_ssh_command('miner fault_light off')
await self.send_ssh_command("miner fault_light off")
logging.debug(f"{self}: fault_light off command completed.")
async def restart_backend(self):
@@ -54,7 +55,7 @@ class BOSMiner(BaseMiner):
async def restart_bosminer(self) -> None:
"""Restart bosminer hashing process."""
logging.debug(f"{self}: Sending bosminer restart command.")
await self.send_ssh_command('/etc/init.d/bosminer restart')
await self.send_ssh_command("/etc/init.d/bosminer restart")
logging.debug(f"{self}: bosminer restart command completed.")
async def reboot(self) -> None:
@@ -69,7 +70,7 @@ class BOSMiner(BaseMiner):
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
logging.debug(f"{self}: Reading config file.")
async with sftp.open('/etc/bosminer.toml') as file:
async with sftp.open("/etc/bosminer.toml") as file:
toml_data = toml.loads(await file.read())
logging.debug(f"{self}: Converting config file.")
cfg = await bos_config_convert(toml_data)
@@ -80,7 +81,7 @@ class BOSMiner(BaseMiner):
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
data = await conn.run("cat /proc/sys/kernel/hostname")
host = data.stdout.strip()
logging.debug(f"Found hostname for {self.ip}: {host}")
return host
@@ -98,7 +99,9 @@ class BOSMiner(BaseMiner):
version_data = await self.api.devdetails()
if version_data:
if not version_data["DEVDETAILS"] == []:
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
self.model = version_data["DEVDETAILS"][0]["Model"].replace(
"Antminer ", ""
)
logging.debug(f"Found model for {self.ip}: {self.model} (BOS)")
return self.model + " (BOS)"
logging.warning(f"Failed to get model for miner: {self}")
@@ -112,7 +115,7 @@ class BOSMiner(BaseMiner):
logging.debug(f"{self}: Opening SFTP connection.")
async with conn.start_sftp_client() as sftp:
logging.debug(f"{self}: Opening config file.")
async with sftp.open('/etc/bosminer.toml', 'w+') as file:
async with sftp.open("/etc/bosminer.toml", "w+") as file:
await file.write(toml_conf)
logging.debug(f"{self}: Restarting BOSMiner")
await conn.run("/etc/init.d/bosminer restart")
@@ -124,21 +127,23 @@ class BOSMiner(BaseMiner):
if not devdetails.get("DEVDETAILS"):
print("devdetails error", devdetails)
return {0: [], 1: [], 2: []}
devs = devdetails['DEVDETAILS']
devs = devdetails["DEVDETAILS"]
boards = {}
offset = devs[0]["ID"]
for board in devs:
boards[board["ID"] - offset] = []
if not board['Chips'] == self.nominal_chips:
if not board["Chips"] == self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Chips'],
"chip_status": "o" * board['Chips'],
"nominal": nominal
})
boards[board["ID"] - offset].append(
{
"chain": board["ID"] - offset,
"chip_count": board["Chips"],
"chip_status": "o" * board["Chips"],
"nominal": nominal,
}
)
logging.debug(f"Found board data for {self}: {boards}")
return boards
@@ -158,9 +163,9 @@ class BOSMiner(BaseMiner):
"""Checks for and provides list for working boards."""
devs = await self.api.devdetails()
bad = 0
chains = devs['DEVDETAILS']
chains = devs["DEVDETAILS"]
for chain in chains:
if chain['Chips'] == 0:
if chain["Chips"] == 0:
bad += 1
if not bad > 0:
return str(self.ip)

View File

@@ -53,16 +53,18 @@ class BTMiner(BaseMiner):
for board in devs:
boards[board["ID"] - offset] = []
if "Effective Chips" in board.keys():
if not board['Effective Chips'] in self.nominal_chips:
if not board["Effective Chips"] in self.nominal_chips:
nominal = False
else:
nominal = True
boards[board["ID"] - offset].append({
"chain": board["ID"] - offset,
"chip_count": board['Effective Chips'],
"chip_status": "o" * board['Effective Chips'],
"nominal": nominal
})
boards[board["ID"] - offset].append(
{
"chain": board["ID"] - offset,
"chip_count": board["Effective Chips"],
"chip_status": "o" * board["Effective Chips"],
"nominal": nominal,
}
)
else:
logging.warning(f"Incorrect board data from {self}: {board}")
print(board)

View File

@@ -9,8 +9,8 @@ class CGMiner(BaseMiner):
super().__init__(ip, api)
self.model = None
self.config = None
self.uname = 'root'
self.pwd = 'admin'
self.uname = "root"
self.pwd = "admin"
def __repr__(self) -> str:
return f"CGMiner: {str(self.ip)}"
@@ -23,8 +23,7 @@ class CGMiner(BaseMiner):
except APIError:
return None
if version_data:
self.model = version_data["DEVDETAILS"][0]["Model"].replace(
"Antminer ", "")
self.model = version_data["DEVDETAILS"][0]["Model"].replace("Antminer ", "")
return self.model
return None
@@ -32,7 +31,7 @@ class CGMiner(BaseMiner):
try:
async with (await self._get_ssh_connection()) as conn:
if conn is not None:
data = await conn.run('cat /proc/sys/kernel/hostname')
data = await conn.run("cat /proc/sys/kernel/hostname")
return data.stdout.strip()
else:
return "?"
@@ -56,33 +55,36 @@ class CGMiner(BaseMiner):
await self.restart_cgminer()
async def restart_cgminer(self) -> None:
commands = ['cgminer-api restart',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
commands = ["cgminer-api restart", "/usr/bin/cgminer-monitor >/dev/null 2>&1"]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def reboot(self) -> None:
await self.send_ssh_command("reboot")
async def start_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"*/3 * * * * /usr/bin/cgminer-monitor\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'/usr/bin/cgminer-monitor >/dev/null 2>&1']
commands = ';'.join(commands)
commands = [
"mkdir -p /etc/tmp/",
'echo "*/3 * * * * /usr/bin/cgminer-monitor" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"/usr/bin/cgminer-monitor >/dev/null 2>&1",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def stop_cgminer(self) -> None:
commands = ['mkdir -p /etc/tmp/',
'echo \"\" > /etc/tmp/root',
'crontab -u root /etc/tmp/root',
'killall cgminer']
commands = ';'.join(commands)
commands = [
"mkdir -p /etc/tmp/",
'echo "" > /etc/tmp/root',
"crontab -u root /etc/tmp/root",
"killall cgminer",
]
commands = ";".join(commands)
await self.send_ssh_command(commands)
async def get_config(self) -> None:
async with (await self._get_ssh_connection()) as conn:
command = 'cat /etc/config/cgminer'
command = "cat /etc/config/cgminer"
result = await conn.run(command, check=True)
self.config = result.stdout
print(str(self.config))

View File

@@ -46,10 +46,7 @@ class MinerFactory:
def __new__(cls):
if not cls._instance:
cls._instance = super(
MinerFactory,
cls
).__new__(cls)
cls._instance = super(MinerFactory, cls).__new__(cls)
return cls._instance
async def get_miner_generator(self, ips: list):
@@ -221,7 +218,10 @@ class MinerFactory:
model = data["VERSION"][0]["Type"]
else:
# make sure devdetails actually contains data, if its empty, there are no devices
if "DEVDETAILS" in data.keys() and not data["DEVDETAILS"] == []:
if (
"DEVDETAILS" in data.keys()
and not data["DEVDETAILS"] == []
):
# check for model, for most miners
if not data["DEVDETAILS"][0]["Model"] == "":
@@ -261,7 +261,7 @@ class MinerFactory:
cmd = {"command": command}
# send the command
writer.write(json.dumps(cmd).encode('utf-8'))
writer.write(json.dumps(cmd).encode("utf-8"))
await writer.drain()
# instantiate data
@@ -281,10 +281,10 @@ class MinerFactory:
# some json from the API returns with a null byte (\x00) on the end
if data.endswith(b"\x00"):
# handle the null byte
str_data = data.decode('utf-8')[:-1]
str_data = data.decode("utf-8")[:-1]
else:
# no null byte
str_data = data.decode('utf-8')
str_data = data.decode("utf-8")
# fix an error with a btminer return having an extra comma that breaks json.loads()
str_data = str_data.replace(",}", "}")
# fix an error with a btminer return having a newline that breaks json.loads()
@@ -321,19 +321,27 @@ class MinerFactory:
if data["STATUS"][0].get("STATUS") in ["I", "S"]:
# check if there are any BMMiner strings in any of the dict keys
if any("BMMiner" in string for string in data["VERSION"][0].keys()):
if any(
"BMMiner" in string for string in data["VERSION"][0].keys()
):
api = "BMMiner"
# check if there are any CGMiner strings in any of the dict keys
elif any("CGMiner" in string for string in data["VERSION"][0].keys()):
elif any(
"CGMiner" in string for string in data["VERSION"][0].keys()
):
api = "CGMiner"
# check if there are any BOSMiner strings in any of the dict keys
elif any("BOSminer" in string for string in data["VERSION"][0].keys()):
elif any(
"BOSminer" in string for string in data["VERSION"][0].keys()
):
api = "BOSMiner"
# if all that fails, check the Description to see if it is a whatsminer
elif data.get("Description") and "whatsminer" in data.get("Description"):
elif data.get("Description") and "whatsminer" in data.get(
"Description"
):
api = "BTMiner"
# return the API if we found it
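The detection chain reformatted above reduces to a scan of the VERSION keys plus a whatsminer description check; an equivalent standalone sketch (the function name is illustrative, and the reply shape is assumed to match the hunks above):

    def guess_api(data: dict) -> str or None:
        # defensively assume a list-of-dicts VERSION section as shown in the hunks
        version_keys = data.get("VERSION", [{}])[0].keys()
        if any("BMMiner" in key for key in version_keys):
            return "BMMiner"
        if any("CGMiner" in key for key in version_keys):
            return "CGMiner"
        if any("BOSminer" in key for key in version_keys):
            return "BOSMiner"
        if data.get("Description") and "whatsminer" in data.get("Description"):
            return "BTMiner"
        return None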

View File

@@ -3,12 +3,17 @@ import asyncio
from network.net_range import MinerNetworkRange
from miners.miner_factory import MinerFactory
from settings import NETWORK_PING_RETRIES as PING_RETRIES, NETWORK_PING_TIMEOUT as PING_TIMEOUT, \
NETWORK_SCAN_THREADS as SCAN_THREADS
from settings import (
NETWORK_PING_RETRIES as PING_RETRIES,
NETWORK_PING_TIMEOUT as PING_TIMEOUT,
NETWORK_SCAN_THREADS as SCAN_THREADS,
)
class MinerNetwork:
def __init__(self, ip_addr: str or None = None, mask: str or int or None = None) -> None:
def __init__(
self, ip_addr: str or None = None, mask: str or int or None = None
) -> None:
self.network = None
self.ip_addr = ip_addr
self.connected_miners = {}
@@ -45,7 +50,9 @@ class MinerNetwork:
subnet_mask = str(self.mask)
# save the network and return it
self.network = ipaddress.ip_network(f"{default_gateway}/{subnet_mask}", strict=False)
self.network = ipaddress.ip_network(
f"{default_gateway}/{subnet_mask}", strict=False
)
return self.network
async def scan_network_for_miners(self) -> None or list:
@@ -139,7 +146,9 @@ class MinerNetwork:
connection_fut = asyncio.open_connection(str(ip), 4028)
try:
# get the read and write streams from the connection
reader, writer = await asyncio.wait_for(connection_fut, timeout=PING_TIMEOUT)
reader, writer = await asyncio.wait_for(
connection_fut, timeout=PING_TIMEOUT
)
# immediately close connection, we know connection happened
writer.close()
# make sure the writer is closed
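The availability check reformatted above is just a bounded TCP connect to the miner API port; a compact sketch under the same assumptions (the timeout parameter stands in for NETWORK_PING_TIMEOUT from settings):

    import asyncio

    async def ping_miner_port(ip, timeout: float = 1.0) -> bool:
        try:
            # miners expose their API on TCP port 4028
            _reader, writer = await asyncio.wait_for(
                asyncio.open_connection(str(ip), 4028), timeout=timeout
            )
        except (asyncio.TimeoutError, ConnectionError, OSError):
            return False
        writer.close()
        await writer.wait_closed()
        return True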

View File

@@ -9,6 +9,7 @@ class MinerNetworkRange:
{ip_range_1_start}-{ip_range_1_end}, {ip_range_2_start}-{ip_range_2_end}
"""
def __init__(self, ip_range: str):
ip_ranges = ip_range.replace(" ", "").split(",")
self.host_ips = []

View File

@@ -15,7 +15,9 @@ WHATSMINER_PWD = "admin"
DEBUG = False
try:
with open(os.path.join(os.path.dirname(__file__), "settings.toml"), "r") as settings_file:
with open(
os.path.join(os.path.dirname(__file__), "settings.toml"), "r"
) as settings_file:
settings = toml.loads(settings_file.read())
except:
pass

View File

@@ -7,11 +7,16 @@ import sys
import logging
from logger import logger
logger.info("Initializing logger for CFG Util.")
# Fix bug with some whatsminers and asyncio because of a socket not being shut down:
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
@@ -22,5 +27,5 @@ def main():
logging.info("Closing Board Util.")
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -2,13 +2,14 @@ from tools.bad_board_util.layout import window
def disable_buttons(func):
button_list = ["scan",
"import_iplist",
"export_iplist",
"select_all_ips",
"refresh_data",
"open_in_web"
]
button_list = [
"scan",
"import_iplist",
"export_iplist",
"select_all_ips",
"refresh_data",
"open_in_web",
]
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):

View File

@@ -14,10 +14,15 @@ async def import_iplist(file_location):
return
else:
ip_list = []
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
async for line in file:
ips = [x.group() for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
ips = [
x.group()
for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
line,
)
]
for ip in ips:
if ip not in ip_list:
ip_list.append(ipaddress.ip_address(ip))
@@ -33,11 +38,11 @@ async def export_iplist(file_location, ip_list_selected):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(item) + "\n")
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(str(item[0]) + "\n")
await update_ui_with_data("status", "")

View File

@@ -2,7 +2,11 @@ import asyncio
import ipaddress
import warnings
from tools.bad_board_util.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
from tools.bad_board_util.func.ui import (
update_ui_with_data,
update_prog_bar,
set_progress_bar_len,
)
from tools.bad_board_util.layout import window
from miners.miner_factory import MinerFactory
from tools.bad_board_util.func.decorators import disable_buttons
@@ -43,7 +47,10 @@ async def refresh_data(ip_list: list):
await update_ui_with_data("status", "Getting Data")
ips = [ipaddress.ip_address(ip) for ip in ip_list]
if len(ips) == 0:
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
ips = [
ipaddress.ip_address(ip)
for ip in [item[0] for item in window["ip_table"].Values]
]
await set_progress_bar_len(len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -68,18 +75,29 @@ async def refresh_data(ip_list: list):
board_right = ""
if data_point["data"]:
if 0 in data_point["data"].keys():
board_left = " ".join([chain["chip_status"] for chain in data_point["data"][0]]).replace("o", "")
board_left = " ".join(
[chain["chip_status"] for chain in data_point["data"][0]]
).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
if 1 in data_point["data"].keys():
board_center = " ".join([chain["chip_status"] for chain in data_point["data"][1]]).replace("o", "")
board_center = " ".join(
[chain["chip_status"] for chain in data_point["data"][1]]
).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
if 2 in data_point["data"].keys():
board_right = " ".join([chain["chip_status"] for chain in data_point["data"][2]]).replace("o", "")
board_right = " ".join(
[chain["chip_status"] for chain in data_point["data"][2]]
).replace("o", "")
else:
row_colors.append((ip_table_index, "white", "red"))
if False in [chain["nominal"] for chain in [data_point["data"][key] for key in data_point["data"].keys()][0]]:
if False in [
chain["nominal"]
for chain in [
data_point["data"][key] for key in data_point["data"].keys()
][0]
]:
row_colors.append((ip_table_index, "white", "red"))
else:
row_colors.append((ip_table_index, "white", "red"))
@@ -92,7 +110,7 @@ async def refresh_data(ip_list: list):
len(board_center),
board_center,
len(board_right),
board_right
board_right,
]
ip_table_data[ip_table_index] = data
window["ip_table"].update(ip_table_data, row_colors=row_colors)
@@ -134,7 +152,7 @@ async def scan_and_get_data(network):
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in miners])
ip_table_data = window["ip_table"].Values
ordered_all_ips = [item[0] for item in ip_table_data]
progress_bar_len += (network_size - len(miners))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "Getting Data")
row_colors = []
@@ -147,18 +165,30 @@ async def scan_and_get_data(network):
board_right = ""
if data_point["data"]:
if 0 in data_point["data"].keys():
board_left = " ".join([chain["chip_status"] for chain in data_point["data"][0]]).replace("o", "")
board_left = " ".join(
[chain["chip_status"] for chain in data_point["data"][0]]
).replace("o", "")
else:
row_colors.append((ip_table_index, "bad"))
if 1 in data_point["data"].keys():
board_center = " ".join([chain["chip_status"] for chain in data_point["data"][1]]).replace("o", "")
board_center = " ".join(
[chain["chip_status"] for chain in data_point["data"][1]]
).replace("o", "")
else:
row_colors.append((ip_table_index, "bad"))
if 2 in data_point["data"].keys():
board_right = " ".join([chain["chip_status"] for chain in data_point["data"][2]]).replace("o", "")
board_right = " ".join(
[chain["chip_status"] for chain in data_point["data"][2]]
).replace("o", "")
else:
row_colors.append((ip_table_index, "bad"))
if False in [chain["nominal"] for board in [data_point["data"][key] for key in data_point["data"].keys()] for chain in board]:
if False in [
chain["nominal"]
for board in [
data_point["data"][key] for key in data_point["data"].keys()
]
for chain in board
]:
row_colors.append((ip_table_index, "bad"))
else:
row_colors.append((ip_table_index, "bad"))
@@ -175,7 +205,7 @@ async def scan_and_get_data(network):
len(board_center),
board_center_chips,
len(board_right),
board_right_chips
board_right_chips,
]
ip_table_data[ip_table_index] = data
window["ip_table"].update(ip_table_data)
@@ -190,13 +220,16 @@ async def scan_and_get_data(network):
def split_chips(string, number_of_splits):
k, m = divmod(len(string), number_of_splits)
return (string[i*k+min(i, m):(i+1)*k+min(i+1, m)] for i in range(number_of_splits))
return (
string[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)]
for i in range(number_of_splits)
)
async def get_formatted_data(ip: ipaddress.ip_address):
miner = await MinerFactory().get_miner(ip)
model = await miner.get_model()
warnings.filterwarnings('ignore')
warnings.filterwarnings("ignore")
board_data = await miner.get_board_info()
data = {"IP": str(ip), "model": str(model), "data": board_data}
return data
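
The divmod trick in split_chips above divides a chip-status string into number_of_splits chunks whose lengths differ by at most one, with the leftover characters going to the earliest chunks. A quick usage sketch with the helper copied verbatim:

def split_chips(string, number_of_splits):
    k, m = divmod(len(string), number_of_splits)
    return (
        string[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)]
        for i in range(number_of_splits)
    )

print(list(split_chips("x" * 7, 3)))  # -> ['xxx', 'xx', 'xx']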

View File

@@ -8,9 +8,7 @@ import pyperclip
def table_select_all():
window["ip_table"].update(
select_rows=(
[row for row in range(len(window["ip_table"].Values))]
)
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
@@ -45,7 +43,7 @@ async def update_ui_with_data(key, message, append=False):
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window['progress'].maxlen)
percent_done = 100 * (amount / window["progress"].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
@@ -61,17 +59,25 @@ async def sort_data(index: int or str):
if window["scan"].Disabled:
return
await update_ui_with_data("status", "Sorting Data")
data_list = window['ip_table'].Values
data_list = window["ip_table"].Values
table = window["ip_table"].Widget
all_data = []
for idx, item in enumerate(data_list):
all_data.append({"data": item, "tags": table.item(int(idx) + 1)["tags"]})
# ip addresses
if re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index])):
new_list = sorted(all_data, key=lambda x: ipaddress.ip_address(x["data"][index]))
if re.match(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index]),
):
new_list = sorted(
all_data, key=lambda x: ipaddress.ip_address(x["data"][index])
)
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: ipaddress.ip_address(x["data"][index]))
new_list = sorted(
all_data,
reverse=True,
key=lambda x: ipaddress.ip_address(x["data"][index]),
)
# everything else, model, chips
else:

File diff suppressed because one or more lines are too long

View File

@@ -19,33 +19,44 @@ async def ui():
table.bind("<Control-Key-a>", lambda x: table_select_all())
while True:
event, value = window.read(timeout=0)
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == 'ip_table':
if event[0] == "ip_table":
if event[2][0] == -1:
await sort_data(event[2][1])
if event == 'open_in_web':
if event == "open_in_web":
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == 'scan':
if len(value['miner_network'].split("/")) > 1:
network = value['miner_network'].split("/")
if event == "scan":
if len(value["miner_network"].split("/")) > 1:
network = value["miner_network"].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value['miner_network'])
miner_network = MinerNetwork(value["miner_network"])
asyncio.create_task(scan_and_get_data(miner_network))
if event == 'select_all_ips':
if event == "select_all_ips":
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(select_rows=([row for row in range(len(window["ip_table"].Values))]))
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(export_iplist(value["file_iplist"], [window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
export_iplist(
value["file_iplist"],
[window["ip_table"].Values[item][0] for item in value["ip_table"]],
)
)
if event == "refresh_data":
asyncio.create_task(refresh_data([window["ip_table"].Values[item][0] for item in value["ip_table"]]))
asyncio.create_task(
refresh_data(
[window["ip_table"].Values[item][0] for item in value["ip_table"]]
)
)
if event == "__TIMEOUT__":
await asyncio.sleep(0)
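
The loop above keeps PySimpleGUI and asyncio cooperating by polling with a zero timeout, pushing long work into tasks, and yielding on every timeout tick. A stripped-down sketch of that pattern with a one-button layout:

import asyncio
import PySimpleGUI as sg

async def ui():
    window = sg.Window("demo", [[sg.Button("scan", key="scan")]])
    while True:
        event, values = window.read(timeout=0)  # non-blocking poll
        if event in (None, "Close", sg.WIN_CLOSED):
            break
        if event == "scan":
            # long-running work goes into a task so the UI keeps polling
            asyncio.create_task(asyncio.sleep(1))
        if event == "__TIMEOUT__":
            await asyncio.sleep(0)  # hand control back to any pending tasks
    window.close()

asyncio.run(ui())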

View File

@@ -13,10 +13,15 @@ from tools.cfg_util.cfg_util_sg.ui import ui
# initialize logger and get settings
from logger import logger
logger.info("Initializing logger for CFG Util.")
# Fix a bug between some Whatsminers and asyncio caused by a socket not being shut down:
if sys.version_info[0] == 3 and sys.version_info[1] >= 8 and sys.platform.startswith('win'):
if (
sys.version_info[0] == 3
and sys.version_info[1] >= 8
and sys.platform.startswith("win")
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

View File

@@ -2,23 +2,24 @@ from tools.cfg_util.cfg_util_sg.layout import window
def disable_buttons(func):
button_list = ["scan",
"import_file_config",
"export_file_config",
"import_iplist",
"export_iplist",
"export_csv",
"select_all_ips",
"refresh_data",
"open_in_web",
"reboot_miners",
"restart_miner_backend",
"import_config",
"send_config",
"light",
"generate_config",
"send_miner_ssh_command_window",
]
button_list = [
"scan",
"import_file_config",
"export_file_config",
"import_iplist",
"export_iplist",
"export_csv",
"select_all_ips",
"refresh_data",
"open_in_web",
"reboot_miners",
"restart_miner_backend",
"import_config",
"send_config",
"light",
"generate_config",
"send_miner_ssh_command_window",
]
# handle the inner function that the decorator is wrapping
async def inner(*args, **kwargs):

View File

@@ -17,10 +17,15 @@ async def import_iplist(file_location):
return
else:
ip_list = []
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
async for line in file:
ips = [x.group() for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
ips = [
x.group()
for x in re.finditer(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
line,
)
]
for ip in ips:
if ip not in ip_list:
ip_list.append(ipaddress.ip_address(ip))
@@ -36,33 +41,34 @@ async def export_csv(file_location, ip_list_selected):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(
", ".join([str(part).rstrip().lstrip() for part in item])
) + "\n")
await file.write(
str(", ".join([str(part).rstrip().lstrip() for part in item]))
+ "\n"
)
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
await file.write(str(
", ".join([str(part).rstrip().lstrip() for part in item])
) + "\n")
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(
str(", ".join([str(part).rstrip().lstrip() for part in item]))
+ "\n"
)
await update_ui_with_data("status", "")
async def export_iplist(file_location, ip_list_selected):
await update_ui_with_data("status", "Exporting")
if not os.path.exists(file_location):
return
else:
if ip_list_selected is not None and not ip_list_selected == []:
async with aiofiles.open(file_location, mode='w') as file:
async with aiofiles.open(file_location, mode="w") as file:
for item in ip_list_selected:
await file.write(str(item) + "\n")
else:
async with aiofiles.open(file_location, mode='w') as file:
for item in window['ip_table'].Values:
async with aiofiles.open(file_location, mode="w") as file:
for item in window["ip_table"].Values:
await file.write(str(item[0]) + "\n")
await update_ui_with_data("status", "")
@@ -72,7 +78,7 @@ async def import_config_file(file_location):
if not os.path.exists(file_location):
return
else:
async with aiofiles.open(file_location, mode='r') as file:
async with aiofiles.open(file_location, mode="r") as file:
config = await file.read()
await update_ui_with_data("config", await bos_config_convert(toml.loads(config)))
await update_ui_with_data("status", "")
@@ -82,9 +88,9 @@ async def export_config_file(file_location, config):
await update_ui_with_data("status", "Exporting")
config = toml.dumps(await general_config_convert_bos(config))
config = toml.loads(config)
config['format']['generator'] = 'upstream_config_util'
config['format']['timestamp'] = int(time.time())
config["format"]["generator"] = "upstream_config_util"
config["format"]["timestamp"] = int(time.time())
config = toml.dumps(config)
async with aiofiles.open(file_location, mode='w+') as file:
async with aiofiles.open(file_location, mode="w+") as file:
await file.write(config)
await update_ui_with_data("status", "")
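
The export path above round-trips the config through toml so it can stamp generator and timestamp metadata before writing it out. A synchronous sketch of just that step (the real code also runs the BOS+ converters and writes with aiofiles):

import time
import toml

def stamp_config(config_text: str) -> str:
    config = toml.loads(config_text)
    config.setdefault("format", {})
    config["format"]["generator"] = "upstream_config_util"
    config["format"]["timestamp"] = int(time.time())
    return toml.dumps(config)

print(stamp_config('[format]\nversion = "1.2+"\n'))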

View File

@@ -6,17 +6,24 @@ import logging
from API import APIError
from tools.cfg_util.cfg_util_sg.func.parse_data import safe_parse_api_data
from tools.cfg_util.cfg_util_sg.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
from tools.cfg_util.cfg_util_sg.func.ui import (
update_ui_with_data,
update_prog_bar,
set_progress_bar_len,
)
from tools.cfg_util.cfg_util_sg.layout import window
from miners.miner_factory import MinerFactory
from config.bos import bos_config_convert
from tools.cfg_util.cfg_util_sg.func.decorators import disable_buttons
from settings import CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS, CFG_UTIL_REBOOT_THREADS as REBOOT_THREADS
from settings import (
CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS,
CFG_UTIL_REBOOT_THREADS as REBOOT_THREADS,
)
async def import_config(idx):
await update_ui_with_data("status", "Importing")
miner_ip = window['ip_table'].Values[idx[0]][0]
miner_ip = window["ip_table"].Values[idx[0]][0]
logging.debug(f"{miner_ip}: Importing config.")
miner = await MinerFactory().get_miner(ipaddress.ip_address(miner_ip))
await miner.get_config()
@@ -67,10 +74,10 @@ async def miner_light(ips: list):
async def flip_light(ip):
ip_list = window['ip_table'].Widget
ip_list = window["ip_table"].Widget
miner = await MinerFactory().get_miner(ip)
index = [item[0] for item in window["ip_table"].Values].index(ip)
index_tags = ip_list.item(index + 1)['tags']
index_tags = ip_list.item(index + 1)["tags"]
if "light" not in index_tags:
index_tags.append("light")
ip_list.item(index + 1, tags=index_tags)
@@ -122,7 +129,8 @@ async def send_miners_ssh_commands(ips: list, command: str, ssh_cmd_window):
if str(item["IP"]) in ips:
proc_table_index = ips.index(str(item["IP"]))
proc_table_data[proc_table_index] = [
str(item["IP"]), return_data.replace("\n", " "),
str(item["IP"]),
return_data.replace("\n", " "),
]
ssh_cmd_window["ssh_cmd_table"].update(proc_table_data)
@@ -238,7 +246,10 @@ async def refresh_data(ip_list: list):
await update_ui_with_data("hr_total", "")
ips = [ipaddress.ip_address(ip) for ip in ip_list]
if len(ips) == 0:
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
ips = [
ipaddress.ip_address(ip)
for ip in [item[0] for item in window["ip_table"].Values]
]
await set_progress_bar_len(len(ips))
progress_bar_len = 0
asyncio.create_task(update_prog_bar(progress_bar_len))
@@ -258,9 +269,13 @@ async def refresh_data(ip_list: list):
if data_point["IP"] in ordered_all_ips:
ip_table_index = ordered_all_ips.index(data_point["IP"])
ip_table_data[ip_table_index] = [
data_point["IP"], data_point["model"], data_point["host"], str(data_point['TH/s']) + " TH/s ",
data_point["IP"],
data_point["model"],
data_point["host"],
str(data_point["TH/s"]) + " TH/s ",
data_point["temp"],
data_point['user'], str(data_point['wattage']) + " W"
data_point["user"],
str(data_point["wattage"]) + " W",
]
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
@@ -270,8 +285,10 @@ async def refresh_data(ip_list: list):
hr_idx = 3
for item, _ in enumerate(window["ip_table"].Values):
if len(window["ip_table"].Values[item]) > hr_idx:
if not window["ip_table"].Values[item][hr_idx] == '':
hashrate_list.append(float(window["ip_table"].Values[item][hr_idx].replace(" TH/s ", "")))
if not window["ip_table"].Values[item][hr_idx] == "":
hashrate_list.append(
float(window["ip_table"].Values[item][hr_idx].replace(" TH/s ", ""))
)
else:
hashrate_list.append(0)
else:
@@ -325,7 +342,7 @@ async def scan_and_get_data(network):
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in miners])
ip_table_data = window["ip_table"].Values
ordered_all_ips = [item[0] for item in ip_table_data]
progress_bar_len += (network_size - len(miners))
progress_bar_len += network_size - len(miners)
asyncio.create_task(update_prog_bar(progress_bar_len))
await update_ui_with_data("status", "Getting Data")
logging.debug("Getting data on miners.")
@@ -334,14 +351,22 @@ async def scan_and_get_data(network):
if data_point["IP"] in ordered_all_ips:
ip_table_index = ordered_all_ips.index(data_point["IP"])
ip_table_data[ip_table_index] = [
data_point["IP"], data_point["model"], data_point["host"], str(data_point['TH/s']) + " TH/s ",
data_point["IP"],
data_point["model"],
data_point["host"],
str(data_point["TH/s"]) + " TH/s ",
data_point["temp"],
data_point['user'], str(data_point['wattage']) + " W"
data_point["user"],
str(data_point["wattage"]) + " W",
]
window["ip_table"].update(ip_table_data)
progress_bar_len += 1
asyncio.create_task(update_prog_bar(progress_bar_len))
hashrate_list = [float(item[3].replace(" TH/s ", "")) for item in window["ip_table"].Values if not item[3] == '']
hashrate_list = [
float(item[3].replace(" TH/s ", ""))
for item in window["ip_table"].Values
if not item[3] == ""
]
total_hr = round(sum(hashrate_list), 2)
await update_ui_with_data("hr_total", f"{total_hr} TH/s")
await update_ui_with_data("status", "")
@@ -350,7 +375,7 @@ async def scan_and_get_data(network):
async def get_formatted_data(ip: ipaddress.ip_address):
miner = await MinerFactory().get_miner(ip)
logging.debug(f"Getting data for miner: {miner.ip}")
warnings.filterwarnings('ignore')
warnings.filterwarnings("ignore")
miner_data = None
host = await miner.get_hostname()
try:
@@ -365,81 +390,144 @@ async def get_formatted_data(ip: ipaddress.ip_address):
user = "?"
try:
miner_data = await miner.api.multicommand("summary", "devs", "temps", "tunerstatus", "pools", "stats")
miner_data = await miner.api.multicommand(
"summary", "devs", "temps", "tunerstatus", "pools", "stats"
)
except APIError:
try:
# no devs command, it will fail in this case
miner_data = await miner.api.multicommand("summary", "temps", "tunerstatus", "pools", "stats")
miner_data = await miner.api.multicommand(
"summary", "temps", "tunerstatus", "pools", "stats"
)
except APIError as e:
logging.warning(f"{str(ip)}: {e}")
return {'TH/s': 0, 'IP': str(miner.ip), 'model': 'Unknown', 'temp': 0, 'host': 'Unknown', 'user': 'Unknown',
'wattage': 0}
return {
"TH/s": 0,
"IP": str(miner.ip),
"model": "Unknown",
"temp": 0,
"host": "Unknown",
"user": "Unknown",
"wattage": 0,
}
if miner_data:
logging.info(f"Received miner data for miner: {miner.ip}")
# get all data from summary
if "summary" in miner_data.keys():
if not miner_data["summary"][0].get("SUMMARY") == [] and "SUMMARY" in miner_data["summary"][0].keys():
if (
not miner_data["summary"][0].get("SUMMARY") == []
and "SUMMARY" in miner_data["summary"][0].keys()
):
# temperature data, this is the ideal spot to get this
if "Temperature" in miner_data['summary'][0]['SUMMARY'][0].keys():
if not round(miner_data['summary'][0]['SUMMARY'][0]["Temperature"]) == 0:
temps = miner_data['summary'][0]['SUMMARY'][0]["Temperature"]
if "Temperature" in miner_data["summary"][0]["SUMMARY"][0].keys():
if (
not round(miner_data["summary"][0]["SUMMARY"][0]["Temperature"])
== 0
):
temps = miner_data["summary"][0]["SUMMARY"][0]["Temperature"]
# hashrate data
if 'MHS av' in miner_data['summary'][0]['SUMMARY'][0].keys():
th5s = format(round(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'MHS av') / 1000000, 2), ".2f").rjust(6, " ")
elif 'GHS av' in miner_data['summary'][0]['SUMMARY'][0].keys():
if not miner_data['summary'][0]['SUMMARY'][0]['GHS av'] == "":
th5s = format(round(
float(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'GHS av')) / 1000,
2), ".2f").rjust(6, " ")
if "MHS av" in miner_data["summary"][0]["SUMMARY"][0].keys():
th5s = format(
round(
await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "MHS av"
)
/ 1000000,
2,
),
".2f",
).rjust(6, " ")
elif "GHS av" in miner_data["summary"][0]["SUMMARY"][0].keys():
if not miner_data["summary"][0]["SUMMARY"][0]["GHS av"] == "":
th5s = format(
round(
float(
await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "GHS av"
)
)
/ 1000,
2,
),
".2f",
).rjust(6, " ")
# alternate temperature data, for BraiinsOS
if "temps" in miner_data.keys():
if not miner_data["temps"][0].get('TEMPS') == []:
if "Chip" in miner_data["temps"][0]['TEMPS'][0].keys():
for board in miner_data["temps"][0]['TEMPS']:
if not miner_data["temps"][0].get("TEMPS") == []:
if "Chip" in miner_data["temps"][0]["TEMPS"][0].keys():
for board in miner_data["temps"][0]["TEMPS"]:
if board["Chip"] is not None and not board["Chip"] == 0.0:
temps = board["Chip"]
# alternate temperature data, for Whatsminers
if "devs" in miner_data.keys():
if not miner_data["devs"][0].get('DEVS') == []:
if "Chip Temp Avg" in miner_data["devs"][0]['DEVS'][0].keys():
for board in miner_data["devs"][0]['DEVS']:
if board['Chip Temp Avg'] is not None and not board['Chip Temp Avg'] == 0.0:
temps = board['Chip Temp Avg']
if not miner_data["devs"][0].get("DEVS") == []:
if "Chip Temp Avg" in miner_data["devs"][0]["DEVS"][0].keys():
for board in miner_data["devs"][0]["DEVS"]:
if (
board["Chip Temp Avg"] is not None
and not board["Chip Temp Avg"] == 0.0
):
temps = board["Chip Temp Avg"]
# alternate temperature data
if "stats" in miner_data.keys():
if not miner_data["stats"][0]['STATS'] == []:
if not miner_data["stats"][0]["STATS"] == []:
for temp in ["temp2", "temp1", "temp3"]:
if temp in miner_data["stats"][0]['STATS'][1].keys():
if miner_data["stats"][0]['STATS'][1][temp] is not None and not miner_data["stats"][0]['STATS'][1][temp] == 0.0:
temps = miner_data["stats"][0]['STATS'][1][temp]
if temp in miner_data["stats"][0]["STATS"][1].keys():
if (
miner_data["stats"][0]["STATS"][1][temp] is not None
and not miner_data["stats"][0]["STATS"][1][temp] == 0.0
):
temps = miner_data["stats"][0]["STATS"][1][temp]
# alternate temperature data, for Avalonminers
miner_data["stats"][0]['STATS'][0].keys()
if any("MM ID" in string for string in miner_data["stats"][0]['STATS'][0].keys()):
miner_data["stats"][0]["STATS"][0].keys()
if any(
"MM ID" in string
for string in miner_data["stats"][0]["STATS"][0].keys()
):
temp_all = []
for key in [string for string in miner_data["stats"][0]['STATS'][0].keys() if "MM ID" in string]:
for value in [string for string in miner_data["stats"][0]['STATS'][0][key].split(" ") if
"TMax" in string]:
for key in [
string
for string in miner_data["stats"][0]["STATS"][0].keys()
if "MM ID" in string
]:
for value in [
string
for string in miner_data["stats"][0]["STATS"][0][key].split(" ")
if "TMax" in string
]:
temp_all.append(int(value.split("[")[1].replace("]", "")))
temps = round(sum(temp_all) / len(temp_all))
# pool information
if "pools" in miner_data.keys():
if not miner_data['pools'][0].get('POOLS') == []:
user = await safe_parse_api_data(miner_data, 'pools', 0, 'POOLS', 0, 'User')
if not miner_data["pools"][0].get("POOLS") == []:
user = await safe_parse_api_data(
miner_data, "pools", 0, "POOLS", 0, "User"
)
else:
print(miner_data['pools'][0])
print(miner_data["pools"][0])
user = "Blank"
# braiins tuner status / wattage
if "tunerstatus" in miner_data.keys():
wattage = await safe_parse_api_data(miner_data, "tunerstatus", 0, 'TUNERSTATUS', 0, "PowerLimit")
wattage = await safe_parse_api_data(
miner_data, "tunerstatus", 0, "TUNERSTATUS", 0, "PowerLimit"
)
elif "Power" in miner_data["summary"][0]["SUMMARY"][0].keys():
wattage = await safe_parse_api_data(miner_data, "summary", 0, 'SUMMARY', 0, "Power")
wattage = await safe_parse_api_data(
miner_data, "summary", 0, "SUMMARY", 0, "Power"
)
ret_data = {'TH/s': th5s, 'IP': str(miner.ip), 'model': model,
'temp': round(temps), 'host': host, 'user': user,
'wattage': wattage}
ret_data = {
"TH/s": th5s,
"IP": str(miner.ip),
"model": model,
"temp": round(temps),
"host": host,
"user": user,
"wattage": wattage,
}
logging.debug(f"{ret_data}")
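
The nested format/round/rjust chain above just converts the API's MHS av reading into a fixed-width TH/s string. Isolated, the conversion is:

def format_ths(mhs_av: float) -> str:
    # e.g. 95_370_000 MHS -> " 95.37", right-justified to 6 characters
    return format(round(mhs_av / 1_000_000, 2), ".2f").rjust(6, " ")

print(format_ths(95_370_000))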
@@ -455,46 +543,37 @@ async def generate_config(username, workername, v2_allowed):
return
if v2_allowed:
url_1 = 'stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt'
url_2 = 'stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt'
url_3 = 'stratum+tcp://stratum.slushpool.com:3333'
url_1 = "stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
url_2 = "stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt"
url_3 = "stratum+tcp://stratum.slushpool.com:3333"
else:
url_1 = 'stratum+tcp://ca.stratum.slushpool.com:3333'
url_2 = 'stratum+tcp://us-east.stratum.slushpool.com:3333'
url_3 = 'stratum+tcp://stratum.slushpool.com:3333'
url_1 = "stratum+tcp://ca.stratum.slushpool.com:3333"
url_2 = "stratum+tcp://us-east.stratum.slushpool.com:3333"
url_3 = "stratum+tcp://stratum.slushpool.com:3333"
config = {
'group': [{
'name': 'group',
'quota': 1,
'pool': [{
'url': url_1,
'user': user,
'password': '123'
}, {
'url': url_2,
'user': user,
'password': '123'
}, {
'url': url_3,
'user': user,
'password': '123'
}]
}],
'format': {
'version': '1.2+',
'model': 'Antminer S9',
'generator': 'upstream_config_util',
'timestamp': int(time.time())
"group": [
{
"name": "group",
"quota": 1,
"pool": [
{"url": url_1, "user": user, "password": "123"},
{"url": url_2, "user": user, "password": "123"},
{"url": url_3, "user": user, "password": "123"},
],
}
],
"format": {
"version": "1.2+",
"model": "Antminer S9",
"generator": "upstream_config_util",
"timestamp": int(time.time()),
},
'temp_control': {
'target_temp': 80.0,
'hot_temp': 90.0,
'dangerous_temp': 120.0
"temp_control": {
"target_temp": 80.0,
"hot_temp": 90.0,
"dangerous_temp": 120.0,
},
'autotuning': {
'enabled': True,
'psu_power_limit': 900
}
"autotuning": {"enabled": True, "psu_power_limit": 900},
}
window['config'].update(await bos_config_convert(config))
window["config"].update(await bos_config_convert(config))

View File

@@ -4,7 +4,7 @@ from API import APIError
# noinspection PyPep8
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
path = [*path]
if len(path) == idx+1:
if len(path) == idx + 1:
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
@@ -17,34 +17,50 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
elif isinstance(path[idx], int):
if isinstance(data, list):
if len(data) > path[idx]:
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False

View File

@@ -8,9 +8,7 @@ import pyperclip
def table_select_all():
window["ip_table"].update(
select_rows=(
[row for row in range(len(window["ip_table"].Values))]
)
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
@@ -40,7 +38,6 @@ def copy_from_ssh_table(table):
pyperclip.copy(copy_string)
async def update_ui_with_data(key, message, append=False):
if append:
message = window[key].get_text() + message
@@ -49,7 +46,7 @@ async def update_ui_with_data(key, message, append=False):
async def update_prog_bar(amount):
window["progress"].Update(amount)
percent_done = 100 * (amount / window['progress'].maxlen)
percent_done = 100 * (amount / window["progress"].maxlen)
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
if percent_done == 100:
window["progress_percent"].Update("")
@@ -65,7 +62,7 @@ async def sort_data(index: int or str):
if window["scan"].Disabled:
return
await update_ui_with_data("status", "Sorting Data")
data_list = window['ip_table'].Values
data_list = window["ip_table"].Values
table = window["ip_table"].Widget
all_data = []
for idx, item in enumerate(data_list):
@@ -73,22 +70,42 @@ async def sort_data(index: int or str):
# wattage
if re.match("[0-9]* W", str(all_data[0]["data"][index])):
new_list = sorted(all_data, key=lambda x: int(x["data"][index].replace(" W", "")))
new_list = sorted(
all_data, key=lambda x: int(x["data"][index].replace(" W", ""))
)
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: int(x["data"][index].replace(" W", "")))
new_list = sorted(
all_data,
reverse=True,
key=lambda x: int(x["data"][index].replace(" W", "")),
)
# hashrate
elif re.match("[0-9]*\.?[0-9]* TH\/s", str(all_data[0]["data"][index])):
new_list = sorted(all_data, key=lambda x: float(x["data"][index].replace(" TH/s", "")))
new_list = sorted(
all_data, key=lambda x: float(x["data"][index].replace(" TH/s", ""))
)
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: float(x["data"][index].replace(" TH/s", "")))
new_list = sorted(
all_data,
reverse=True,
key=lambda x: float(x["data"][index].replace(" TH/s", "")),
)
# ip addresses
elif re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index])):
new_list = sorted(all_data, key=lambda x: ipaddress.ip_address(x["data"][index]))
elif re.match(
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
str(all_data[0]["data"][index]),
):
new_list = sorted(
all_data, key=lambda x: ipaddress.ip_address(x["data"][index])
)
if all_data == new_list:
new_list = sorted(all_data, reverse=True, key=lambda x: ipaddress.ip_address(x["data"][index]))
new_list = sorted(
all_data,
reverse=True,
key=lambda x: ipaddress.ip_address(x["data"][index]),
)
# everything else, hostname, temp, and user
else:

File diff suppressed because one or more lines are too long

View File

@@ -3,13 +3,36 @@ import sys
import PySimpleGUI as sg
import tkinter as tk
from tools.cfg_util.cfg_util_sg.layout import window, generate_config_layout, send_ssh_cmd_layout
from tools.cfg_util.cfg_util_sg.func.miners import send_config, miner_light, refresh_data, generate_config, import_config, \
scan_and_get_data, restart_miners_backend, reboot_miners, send_miners_ssh_commands
from tools.cfg_util.cfg_util_sg.func.files import import_iplist, \
import_config_file, export_iplist, export_config_file, export_csv
from tools.cfg_util.cfg_util_sg.layout import (
window,
generate_config_layout,
send_ssh_cmd_layout,
)
from tools.cfg_util.cfg_util_sg.func.miners import (
send_config,
miner_light,
refresh_data,
generate_config,
import_config,
scan_and_get_data,
restart_miners_backend,
reboot_miners,
send_miners_ssh_commands,
)
from tools.cfg_util.cfg_util_sg.func.files import (
import_iplist,
import_config_file,
export_iplist,
export_config_file,
export_csv,
)
from tools.cfg_util.cfg_util_sg.func.decorators import disable_buttons
from tools.cfg_util.cfg_util_sg.func.ui import sort_data, copy_from_table, table_select_all, copy_from_ssh_table
from tools.cfg_util.cfg_util_sg.func.ui import (
sort_data,
copy_from_table,
table_select_all,
copy_from_ssh_table,
)
from network import MinerNetwork
@@ -27,62 +50,112 @@ async def ui():
table.column(2, anchor=tk.W)
while True:
event, value = window.read(timeout=0)
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
sys.exit()
if isinstance(event, tuple):
if len(window["ip_table"].Values) > 0:
if event[0] == 'ip_table':
if event[0] == "ip_table":
if event[2][0] == -1:
await sort_data(event[2][1])
if event == 'open_in_web':
if event == "open_in_web":
for row in value["ip_table"]:
webbrowser.open("http://" + window["ip_table"].Values[row][0])
if event == 'scan':
if len(value['miner_network'].split("/")) > 1:
network = value['miner_network'].split("/")
if event == "scan":
if len(value["miner_network"].split("/")) > 1:
network = value["miner_network"].split("/")
miner_network = MinerNetwork(ip_addr=network[0], mask=network[1])
else:
miner_network = MinerNetwork(value['miner_network'])
miner_network = MinerNetwork(value["miner_network"])
asyncio.create_task(scan_and_get_data(miner_network))
if event == 'select_all_ips':
if event == "select_all_ips":
if len(value["ip_table"]) == len(window["ip_table"].Values):
window["ip_table"].update(select_rows=())
else:
window["ip_table"].update(select_rows=([row for row in range(len(window["ip_table"].Values))]))
if event == 'import_config':
if 2 > len(value['ip_table']) > 0:
asyncio.create_task(import_config(value['ip_table']))
window["ip_table"].update(
select_rows=([row for row in range(len(window["ip_table"].Values))])
)
if event == "import_config":
if 2 > len(value["ip_table"]) > 0:
asyncio.create_task(import_config(value["ip_table"]))
if event == "restart_miner_backend":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(restart_miners_backend([window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
restart_miners_backend(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "reboot_miners":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(reboot_miners([window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
reboot_miners(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "send_miner_ssh_command_window":
ips = [window['ip_table'].Values[item][0] for item in value['ip_table']]
ips = [window["ip_table"].Values[item][0] for item in value["ip_table"]]
if len(ips) == 0:
ips = [item[0] for item in window["ip_table"].Values]
if not len(ips) == 0:
await generate_ssh_cmd_ui(ips)
if event == 'light':
if event == "light":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(miner_light([window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
miner_light(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "import_iplist":
asyncio.create_task(import_iplist(value["file_iplist"]))
if event == "export_iplist":
asyncio.create_task(export_iplist(value["file_iplist"], [window['ip_table'].Values[item][0] for item in value['ip_table']]))
asyncio.create_task(
export_iplist(
value["file_iplist"],
[window["ip_table"].Values[item][0] for item in value["ip_table"]],
)
)
if event == "export_csv":
asyncio.create_task(export_csv(value["file_iplist"], [window['ip_table'].Values[item] for item in value['ip_table']]))
asyncio.create_task(
export_csv(
value["file_iplist"],
[window["ip_table"].Values[item] for item in value["ip_table"]],
)
)
if event == "send_config":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(send_config([window['ip_table'].Values[item][0] for item in value['ip_table']], value['config']))
asyncio.create_task(
send_config(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
],
value["config"],
)
)
if event == "import_file_config":
asyncio.create_task(import_config_file(value['file_config']))
asyncio.create_task(import_config_file(value["file_config"]))
if event == "export_file_config":
asyncio.create_task(export_config_file(value['file_config'], value["config"]))
asyncio.create_task(
export_config_file(value["file_config"], value["config"])
)
if event == "refresh_data":
if len(window["ip_table"].Values) > 0:
asyncio.create_task(refresh_data([window["ip_table"].Values[item][0] for item in value["ip_table"]]))
asyncio.create_task(
refresh_data(
[
window["ip_table"].Values[item][0]
for item in value["ip_table"]
]
)
)
if event == "generate_config":
await generate_config_ui()
if event == "__TIMEOUT__":
@@ -90,23 +163,29 @@ async def ui():
async def generate_config_ui():
generate_config_window = sg.Window("Generate Config", generate_config_layout(), modal=True)
generate_config_window = sg.Window(
"Generate Config", generate_config_layout(), modal=True
)
while True:
event, values = generate_config_window.read()
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "generate_config_window_generate":
if values['generate_config_window_username']:
await generate_config(values['generate_config_window_username'],
values['generate_config_window_workername'],
values['generate_config_window_allow_v2'])
if values["generate_config_window_username"]:
await generate_config(
values["generate_config_window_username"],
values["generate_config_window_workername"],
values["generate_config_window_allow_v2"],
)
generate_config_window.close()
break
@disable_buttons
async def generate_ssh_cmd_ui(selected_miners: list):
ssh_cmd_window = sg.Window("Send Command", send_ssh_cmd_layout(selected_miners), modal=True)
ssh_cmd_window = sg.Window(
"Send Command", send_ssh_cmd_layout(selected_miners), modal=True
)
ssh_cmd_window.read(timeout=0)
table = ssh_cmd_window["ssh_cmd_table"].Widget
table.bind("<Control-Key-c>", lambda x: copy_from_ssh_table(table))
@@ -114,9 +193,13 @@ async def generate_ssh_cmd_ui(selected_miners: list):
table.column(1, anchor=tk.W)
while True:
event, values = ssh_cmd_window.read(timeout=0)
if event in (None, 'Close', sg.WIN_CLOSED):
if event in (None, "Close", sg.WIN_CLOSED):
break
if event == "ssh_command_window_send_cmd":
asyncio.create_task(send_miners_ssh_commands(selected_miners, values["ssh_command_window_cmd"], ssh_cmd_window))
asyncio.create_task(
send_miners_ssh_commands(
selected_miners, values["ssh_command_window_cmd"], ssh_cmd_window
)
)
if event == "__TIMEOUT__":
await asyncio.sleep(0)

View File

@@ -4,7 +4,7 @@ from API import APIError
# noinspection PyPep8
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
path = [*path]
if len(path) == idx+1:
if len(path) == idx + 1:
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
@@ -17,34 +17,50 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
if isinstance(path[idx], str):
if isinstance(data, dict):
if path[idx] in data.keys():
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
elif isinstance(path[idx], int):
if isinstance(data, list):
if len(data) > path[idx]:
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
parsed_data = await safe_parse_api_data(
data[path[idx]], idx=idx + 1, *path
)
# has to be == None, or else it fails on 0.0 hashrates
# noinspection PyPep8
if parsed_data == None:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return parsed_data
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False
else:
if idx == 0:
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
raise APIError(
f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}"
)
return False

View File

@@ -11,19 +11,22 @@ router = APIRouter()
@router.route("/", methods=["GET", "POST"])
async def settings(request: Request):
return templates.TemplateResponse("settings.html", {
"request": request,
"cur_miners": get_current_miner_list(),
"settings": get_current_settings()
})
return templates.TemplateResponse(
"settings.html",
{
"request": request,
"cur_miners": get_current_miner_list(),
"settings": get_current_settings(),
},
)
@router.post("/update")
async def update_settings_page(request: Request):
data = await request.form()
graph_data_sleep_time = data.get('graph_data_sleep_time')
miner_data_timeout = data.get('miner_data_timeout')
miner_identify_timeout = data.get('miner_identify_timeout')
graph_data_sleep_time = data.get("graph_data_sleep_time")
miner_data_timeout = data.get("miner_data_timeout")
miner_identify_timeout = data.get("miner_identify_timeout")
new_settings = {
"graph_data_sleep_time": int(graph_data_sleep_time),
"miner_data_timeout": int(miner_data_timeout),

View File

@@ -4,7 +4,9 @@ import os
def get_current_settings():
try:
with open(os.path.join(os.getcwd(), "settings/web_settings.toml"), "r") as settings_file:
with open(
os.path.join(os.getcwd(), "settings/web_settings.toml"), "r"
) as settings_file:
settings = toml.loads(settings_file.read())
except:
settings = {
@@ -16,5 +18,7 @@ def get_current_settings():
def update_settings(settings):
with open(os.path.join(os.getcwd(), "settings/web_settings.toml"), "w") as settings_file:
with open(
os.path.join(os.getcwd(), "settings/web_settings.toml"), "w"
) as settings_file:
settings_file.write(toml.dumps(settings))

View File

@@ -12,8 +12,11 @@ from tools.web_monitor._settings import router as settings_router
app = FastAPI()
app.mount("/static", StaticFiles(
directory=os.path.join(os.path.dirname(__file__), "static")), name="static")
app.mount(
"/static",
StaticFiles(directory=os.path.join(os.path.dirname(__file__), "static")),
name="static",
)
app.include_router(dashboard_router, tags=["dashboard"])
app.include_router(miner_router, tags=["miner"], prefix="/miner")

View File

@@ -12,13 +12,12 @@ router.include_router(ws_router)
@router.get("/")
def index(request: Request):
return RedirectResponse(request.url_for('dashboard'))
return RedirectResponse(request.url_for("dashboard"))
@router.get("/dashboard")
def dashboard(request: Request):
print()
return templates.TemplateResponse("index.html", {
"request": request,
"cur_miners": get_current_miner_list()
})
return templates.TemplateResponse(
"index.html", {"request": request, "cur_miners": get_current_miner_list()}
)

View File

@@ -11,23 +11,21 @@ async def get_miner_data_dashboard(miner_ip):
miner_data_timeout = settings["miner_data_timeout"]
miner_ip = await asyncio.wait_for(
MinerFactory().get_miner(miner_ip),
miner_identify_timeout
MinerFactory().get_miner(miner_ip), miner_identify_timeout
)
miner_summary = await asyncio.wait_for(
miner_ip.api.summary(),
miner_data_timeout
miner_ip.api.summary(), miner_data_timeout
)
if miner_summary:
if 'MHS av' in miner_summary['SUMMARY'][0].keys():
if "MHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = format(
round(miner_summary['SUMMARY'][0]['MHS av'] / 1000000,
2), ".2f")
elif 'GHS av' in miner_summary['SUMMARY'][0].keys():
round(miner_summary["SUMMARY"][0]["MHS av"] / 1000000, 2), ".2f"
)
elif "GHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = format(
round(miner_summary['SUMMARY'][0]['GHS av'] / 1000, 2),
".2f")
round(miner_summary["SUMMARY"][0]["GHS av"] / 1000, 2), ".2f"
)
else:
hashrate = 0
else:
@@ -39,5 +37,7 @@ async def get_miner_data_dashboard(miner_ip):
return {"ip": miner_ip, "error": "The miner is not responding."}
except KeyError:
return {"ip": miner_ip,
"error": "The miner returned unusable/unsupported data."}
return {
"ip": miner_ip,
"error": "The miner returned unusable/unsupported data.",
}

View File

@@ -21,14 +21,18 @@ async def dashboard_websocket(websocket: WebSocket):
miners = get_current_miner_list()
all_miner_data = []
data_gen = asyncio.as_completed(
[get_miner_data_dashboard(miner_ip) for miner_ip in miners])
[get_miner_data_dashboard(miner_ip) for miner_ip in miners]
)
for all_data in data_gen:
data_point = await all_data
all_miner_data.append(data_point)
all_miner_data.sort(key=lambda x: x["ip"])
await websocket.send_json(
{"datetime": datetime.datetime.now().isoformat(),
"miners": all_miner_data})
{
"datetime": datetime.datetime.now().isoformat(),
"miners": all_miner_data,
}
)
await asyncio.sleep(graph_sleep_time)
except WebSocketDisconnect:
print("Websocket disconnected.")

View File

@@ -16,8 +16,7 @@ def miner(_request: Request, _miner_ip):
@router.get("/{miner_ip}")
def get_miner(request: Request, miner_ip):
return templates.TemplateResponse("miner.html", {
"request": request,
"cur_miners": get_current_miner_list(),
"miner": miner_ip
})
return templates.TemplateResponse(
"miner.html",
{"request": request, "cur_miners": get_current_miner_list(), "miner": miner_ip},
)

View File

@@ -13,4 +13,4 @@ def get_miner(request: Request, miner_ip):
for miner_ip in miners:
file.write(miner_ip + "\n")
return RedirectResponse(request.url_for('dashboard'))
return RedirectResponse(request.url_for("dashboard"))

View File

@@ -22,13 +22,14 @@ async def miner_websocket(websocket: WebSocket, miner_ip):
while True:
try:
cur_miner = await asyncio.wait_for(
MinerFactory().get_miner(str(miner_ip)),
miner_identify_timeout
MinerFactory().get_miner(str(miner_ip)), miner_identify_timeout
)
data = await asyncio.wait_for(
cur_miner.api.multicommand("summary", "fans", "stats", "devs", "temps"),
miner_data_timeout
cur_miner.api.multicommand(
"summary", "fans", "stats", "devs", "temps"
),
miner_data_timeout,
)
miner_model = await cur_miner.get_model()
@@ -42,7 +43,8 @@ async def miner_websocket(websocket: WebSocket, miner_ip):
for item in ["Fan Speed In", "Fan Speed Out"]:
if item in miner_summary["SUMMARY"][0].keys():
miner_fans["FANS"].append(
{"RPM": miner_summary["SUMMARY"][0][item]})
{"RPM": miner_summary["SUMMARY"][0][item]}
)
if "fans" in data.keys():
miner_fans = data["fans"][0]
@@ -50,30 +52,52 @@ async def miner_websocket(websocket: WebSocket, miner_ip):
miner_temp_list = []
if "temps" in data.keys():
miner_temps = data["temps"][0]
for board in miner_temps['TEMPS']:
for board in miner_temps["TEMPS"]:
if board["Chip"] is not None and not board["Chip"] == 0.0:
miner_temp_list.append(board["Chip"])
if "devs" in data.keys() and not len(miner_temp_list) > 0:
if not data["devs"][0].get('DEVS') == []:
if "Chip Temp Avg" in data["devs"][0]['DEVS'][0].keys():
for board in data["devs"][0]['DEVS']:
if board['Chip Temp Avg'] is not None and not board['Chip Temp Avg'] == 0.0:
miner_temp_list.append(board['Chip Temp Avg'])
if not data["devs"][0].get("DEVS") == []:
if "Chip Temp Avg" in data["devs"][0]["DEVS"][0].keys():
for board in data["devs"][0]["DEVS"]:
if (
board["Chip Temp Avg"] is not None
and not board["Chip Temp Avg"] == 0.0
):
miner_temp_list.append(board["Chip Temp Avg"])
if "stats" in data.keys() and not len(miner_temp_list) > 0:
if not data["stats"][0]['STATS'] == []:
if not data["stats"][0]["STATS"] == []:
for temp in ["temp2", "temp1", "temp3"]:
if temp in data["stats"][0]['STATS'][1].keys():
if data["stats"][0]['STATS'][1][temp] is not None and not data["stats"][0]['STATS'][1][temp] == 0.0:
miner_temp_list.append(data["stats"][0]['STATS'][1][temp])
data["stats"][0]['STATS'][0].keys()
if any("MM ID" in string for string in
data["stats"][0]['STATS'][0].keys()):
if temp in data["stats"][0]["STATS"][1].keys():
if (
data["stats"][0]["STATS"][1][temp] is not None
and not data["stats"][0]["STATS"][1][temp] == 0.0
):
miner_temp_list.append(
data["stats"][0]["STATS"][1][temp]
)
data["stats"][0]["STATS"][0].keys()
if any(
"MM ID" in string
for string in data["stats"][0]["STATS"][0].keys()
):
temp_all = []
for key in [string for string in data["stats"][0]['STATS'][0].keys() if "MM ID" in string]:
for value in [string for string in data["stats"][0]['STATS'][0][key].split(" ") if "TMax" in string]:
temp_all.append(int(value.split("[")[1].replace("]", "")))
for key in [
string
for string in data["stats"][0]["STATS"][0].keys()
if "MM ID" in string
]:
for value in [
string
for string in data["stats"][0]["STATS"][0][key].split(
" "
)
if "TMax" in string
]:
temp_all.append(
int(value.split("[")[1].replace("]", ""))
)
miner_temp_list.append(round(sum(temp_all) / len(temp_all)))
if "stats" in data.keys() and not miner_fans:
@@ -82,19 +106,26 @@ async def miner_websocket(websocket: WebSocket, miner_ip):
for item in ["fan1", "fan2", "fan3", "fan4"]:
if item in miner_stats["STATS"][1].keys():
miner_fans["FANS"].append(
{"RPM": miner_stats["STATS"][1][item]})
{"RPM": miner_stats["STATS"][1][item]}
)
if miner_summary:
if 'MHS av' in miner_summary['SUMMARY'][0].keys():
hashrate = float(format(
round(
miner_summary['SUMMARY'][0]['MHS av'] / 1000000,
2), ".2f"))
elif 'GHS av' in miner_summary['SUMMARY'][0].keys():
hashrate = float(format(
round(miner_summary['SUMMARY'][0]['GHS av'] / 1000,
2),
".2f"))
if "MHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = float(
format(
round(
miner_summary["SUMMARY"][0]["MHS av"] / 1000000, 2
),
".2f",
)
)
elif "GHS av" in miner_summary["SUMMARY"][0].keys():
hashrate = float(
format(
round(miner_summary["SUMMARY"][0]["GHS av"] / 1000, 2),
".2f",
)
)
else:
hashrate = 0
else:
@@ -111,24 +142,25 @@ async def miner_websocket(websocket: WebSocket, miner_ip):
if len(miner_temp_list) == 0:
miner_temp_list = [0]
data = {"hashrate": hashrate,
"fans": fan_speeds,
"temp": round(sum(miner_temp_list)/len(miner_temp_list), 2),
"datetime": datetime.datetime.now().isoformat(),
"model": miner_model}
data = {
"hashrate": hashrate,
"fans": fan_speeds,
"temp": round(sum(miner_temp_list) / len(miner_temp_list), 2),
"datetime": datetime.datetime.now().isoformat(),
"model": miner_model,
}
print(data)
await websocket.send_json(data)
await asyncio.sleep(settings["graph_data_sleep_time"])
except asyncio.exceptions.TimeoutError:
data = {"error": "The miner is not responding."}
await websocket.send_json(data)
await asyncio.sleep(.5)
await asyncio.sleep(0.5)
except KeyError as e:
print(e)
data = {
"error": "The miner returned unusable/unsupported data."}
data = {"error": "The miner returned unusable/unsupported data."}
await websocket.send_json(data)
await asyncio.sleep(.5)
await asyncio.sleep(0.5)
except WebSocketDisconnect:
print("Websocket disconnected.")
except websockets.exceptions.ConnectionClosedOK:
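
Both the cfg util and this websocket handler pull TMax readings out of Avalonminer "MM ID" stat strings by splitting on spaces and brackets. A self-contained sketch of that extraction with a made-up stats fragment:

stats = {
    "MM ID0": "Ver[851] TMax[78] Fan[3420]",
    "MM ID1": "Ver[851] TMax[81] Fan[3390]",
    "Elapsed": 1234,
}

temps = []
for key in (k for k in stats if "MM ID" in k):
    for token in stats[key].split(" "):
        if "TMax" in token:
            temps.append(int(token.split("[")[1].replace("]", "")))  # "TMax[78]" -> 78

print(round(sum(temps) / len(temps)))  # -> 80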

View File

@@ -11,10 +11,9 @@ router.include_router(ws_router)
@router.get("/")
def scan(request: Request):
return templates.TemplateResponse("scan.html", {
"request": request,
"cur_miners": get_current_miner_list()
})
return templates.TemplateResponse(
"scan.html", {"request": request, "cur_miners": get_current_miner_list()}
)
@router.post("/add_miners")

View File

@@ -25,12 +25,14 @@ async def do_websocket_scan(websocket: WebSocket, network_ip: str):
all_miners = []
async for found_miner in get_miner_generator:
all_miners.append(
{"ip": found_miner.ip, "model": await found_miner.get_model()})
{"ip": found_miner.ip, "model": await found_miner.get_model()}
)
all_miners.sort(key=lambda x: x["ip"])
send_miners = []
for miner_ip in all_miners:
send_miners.append(
{"ip": str(miner_ip["ip"]), "model": miner_ip["model"]})
{"ip": str(miner_ip["ip"]), "model": miner_ip["model"]}
)
await websocket.send_json(send_miners)
await websocket.send_text("Done")
except asyncio.CancelledError:

View File

@@ -26,8 +26,7 @@ async def websocket_scan(websocket: WebSocket):
cur_task = None
await websocket.send_text("Cancelled")
else:
cur_task = asyncio.create_task(
do_websocket_scan(websocket, ws_data))
cur_task = asyncio.create_task(do_websocket_scan(websocket, ws_data))
if cur_task and cur_task.done():
cur_task = None
except WebSocketDisconnect:

View File

@@ -3,4 +3,5 @@ from fastapi.templating import Jinja2Templates
templates = Jinja2Templates(
directory=os.path.join(os.path.dirname(__file__), "templates"))
directory=os.path.join(os.path.dirname(__file__), "templates")
)