reformatted and fixed a bunch of small formatting related issues
This commit is contained in:
@@ -46,7 +46,7 @@ class BaseMinerAPI:
|
|||||||
for item in commands:
|
for item in commands:
|
||||||
# make sure we can actually run the command, otherwise it will fail
|
# make sure we can actually run the command, otherwise it will fail
|
||||||
if item not in self.get_commands():
|
if item not in self.get_commands():
|
||||||
# if the command isnt allowed, remove it
|
# if the command isn't allowed, remove it
|
||||||
print(f"Removing incorrect command: {item}")
|
print(f"Removing incorrect command: {item}")
|
||||||
commands.remove(item)
|
commands.remove(item)
|
||||||
|
|
||||||
|
|||||||
@@ -104,9 +104,6 @@ class BOSMinerAPI(BaseMinerAPI):
|
|||||||
|
|
||||||
Returns a dict containing stats for all device/pool with more than 1 getwork,
|
Returns a dict containing stats for all device/pool with more than 1 getwork,
|
||||||
ignoring zombie devices.
|
ignoring zombie devices.
|
||||||
|
|
||||||
Parameters:
|
|
||||||
old (optional): include zombie devices that became zombies less than 'old' seconds ago.
|
|
||||||
"""
|
"""
|
||||||
return await self.send_command("estats")
|
return await self.send_command("estats")
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from API import BaseMinerAPI, APIError
|
from API import BaseMinerAPI, APIError
|
||||||
|
|
||||||
from passlib.handlers import md5_crypt
|
from passlib.handlers.md5_crypt import md5_crypt
|
||||||
import asyncio
|
import asyncio
|
||||||
import re
|
import re
|
||||||
import json
|
import json
|
||||||
|
|||||||
@@ -6,14 +6,14 @@
|
|||||||
## Input Fields
|
## Input Fields
|
||||||
### Network IP:
|
### Network IP:
|
||||||
* Defaults to 192.168.1.0/24 (192.168.1.0 - 192.168.1.255)
|
* Defaults to 192.168.1.0/24 (192.168.1.0 - 192.168.1.255)
|
||||||
* Enter any IP on your local network and it will automatically load your entire network with a /24 subnet (255 IP addresses)
|
* Enter any IP on your local network, and it will automatically load your entire network with a /24 subnet (255 IP addresses)
|
||||||
* You can also add a subnet mask by adding a / after the IP and entering the subnet mask
|
* You can also add a subnet mask by adding a / after the IP and entering the subnet mask
|
||||||
* Press Scan to scan the selected network for miners
|
* Press Scan to scan the selected network for miners
|
||||||
|
|
||||||
### IP List File:
|
### IP List File:
|
||||||
* Use the Browse button to select a file
|
* Use the Browse button to select a file
|
||||||
* Use the Import button to import all IP addresses from a file, regardless of where they are located in the file
|
* Use the Import button to import all IP addresses from a file, regardless of where they are located in the file
|
||||||
* Use the Export button to export all IP addresses (or all selected IP addresses if you select some) to a file, with each seperated by a new line
|
* Use the Export button to export all IP addresses (or all selected IP addresses if you select some) to a file, with each separated by a new line
|
||||||
|
|
||||||
### Config File:
|
### Config File:
|
||||||
* Use the Browse button to select a file
|
* Use the Browse button to select a file
|
||||||
@@ -25,7 +25,7 @@
|
|||||||
## Data Fields
|
## Data Fields
|
||||||
### IP List:
|
### IP List:
|
||||||
* This field contains all the IP addresses of miners that were either imported from a file or scanned
|
* This field contains all the IP addresses of miners that were either imported from a file or scanned
|
||||||
* Select one by clicking, mutiple by holding CTRL and clicking, and select all between 2 chosen miners by holding SHIFT as you select them
|
* Select one by clicking, multiple by holding CTRL and clicking, and select all between 2 chosen miners by holding SHIFT as you select them
|
||||||
* Use the ALL button to select all IP addresses in the field, or unselect all if they are selected
|
* Use the ALL button to select all IP addresses in the field, or unselect all if they are selected
|
||||||
|
|
||||||
### Data:
|
### Data:
|
||||||
|
|||||||
@@ -46,7 +46,7 @@ A basic script to find all miners on the network and get the hashrate from them
|
|||||||
```python
|
```python
|
||||||
import asyncio
|
import asyncio
|
||||||
from network import MinerNetwork
|
from network import MinerNetwork
|
||||||
from cfg_util.func import safe_parse_api_data
|
from cfg_util.func.parse_data import safe_parse_api_data
|
||||||
|
|
||||||
async def get_hashrate():
|
async def get_hashrate():
|
||||||
# Miner Network class allows for easy scanning of a network
|
# Miner Network class allows for easy scanning of a network
|
||||||
@@ -80,7 +80,7 @@ You can also create your own miner without scanning if you know the IP:
|
|||||||
import asyncio
|
import asyncio
|
||||||
import ipaddress
|
import ipaddress
|
||||||
from miners.miner_factory import MinerFactory
|
from miners.miner_factory import MinerFactory
|
||||||
from cfg_util.func import safe_parse_api_data
|
from cfg_util.func.parse_data import safe_parse_api_data
|
||||||
|
|
||||||
async def get_miner_hashrate(ip: str):
|
async def get_miner_hashrate(ip: str):
|
||||||
# Instantiate a Miner Factory to generate miners from their IP
|
# Instantiate a Miner Factory to generate miners from their IP
|
||||||
@@ -106,7 +106,7 @@ Or generate a miner directly without the factory:
|
|||||||
```python
|
```python
|
||||||
import asyncio
|
import asyncio
|
||||||
from miners.bosminer import BOSminer
|
from miners.bosminer import BOSminer
|
||||||
from cfg_util.func import safe_parse_api_data
|
from cfg_util.func.parse_data import safe_parse_api_data
|
||||||
|
|
||||||
async def get_miner_hashrate(ip: str):
|
async def get_miner_hashrate(ip: str):
|
||||||
# Create a BOSminer miner object
|
# Create a BOSminer miner object
|
||||||
@@ -128,7 +128,7 @@ Or finally, just get the API directly:
|
|||||||
```python
|
```python
|
||||||
import asyncio
|
import asyncio
|
||||||
from API.bosminer import BOSMinerAPI
|
from API.bosminer import BOSMinerAPI
|
||||||
from cfg_util.func import safe_parse_api_data
|
from cfg_util.func.parse_data import safe_parse_api_data
|
||||||
|
|
||||||
async def get_miner_hashrate(ip: str):
|
async def get_miner_hashrate(ip: str):
|
||||||
# Create a BOSminerAPI object
|
# Create a BOSminerAPI object
|
||||||
|
|||||||
@@ -1,329 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import ipaddress
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import time
|
|
||||||
|
|
||||||
import aiofiles
|
|
||||||
import toml
|
|
||||||
|
|
||||||
from API import APIError
|
|
||||||
from cfg_util.func.data import safe_parse_api_data
|
|
||||||
from cfg_util.layout import window
|
|
||||||
from cfg_util.miner_factory import miner_factory
|
|
||||||
from config.bos import bos_config_convert, general_config_convert_bos
|
|
||||||
from settings import CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS
|
|
||||||
from miners.btminer import BTMiner
|
|
||||||
|
|
||||||
|
|
||||||
async def update_ui_with_data(key, message, append=False):
|
|
||||||
if append:
|
|
||||||
message = window[key].get_text() + message
|
|
||||||
window[key].update(message)
|
|
||||||
|
|
||||||
|
|
||||||
async def update_prog_bar(amount):
|
|
||||||
window["progress"].Update(amount)
|
|
||||||
percent_done = 100 * (amount / window['progress'].maxlen)
|
|
||||||
window["progress_percent"].Update(f"{round(percent_done, 2)} %")
|
|
||||||
if percent_done == 100:
|
|
||||||
window["progress_percent"].Update("")
|
|
||||||
|
|
||||||
|
|
||||||
async def set_progress_bar_len(amount):
|
|
||||||
window["progress"].Update(0, max=amount)
|
|
||||||
window["progress"].maxlen = amount
|
|
||||||
window["progress_percent"].Update("0.0 %")
|
|
||||||
|
|
||||||
|
|
||||||
async def scan_network(network):
|
|
||||||
await update_ui_with_data("status", "Scanning")
|
|
||||||
await update_ui_with_data("hr_total", "")
|
|
||||||
window["ip_table"].update([])
|
|
||||||
network_size = len(network)
|
|
||||||
miner_generator = network.scan_network_generator()
|
|
||||||
await set_progress_bar_len(2 * network_size)
|
|
||||||
progress_bar_len = 0
|
|
||||||
miners = []
|
|
||||||
async for miner in miner_generator:
|
|
||||||
if miner:
|
|
||||||
miners.append(miner)
|
|
||||||
# can output "Identifying" for each found item, but it gets a bit cluttered
|
|
||||||
# and could possibly be confusing for the end user because of timing on
|
|
||||||
# adding the IPs
|
|
||||||
# window["ip_table"].update([["Identifying...", "", "", "", ""] for miner in miners])
|
|
||||||
progress_bar_len += 1
|
|
||||||
asyncio.create_task(update_prog_bar(progress_bar_len))
|
|
||||||
progress_bar_len += network_size - len(miners)
|
|
||||||
asyncio.create_task(update_prog_bar(progress_bar_len))
|
|
||||||
get_miner_genenerator = miner_factory.get_miner_generator(miners)
|
|
||||||
all_miners = []
|
|
||||||
async for found_miner in get_miner_genenerator:
|
|
||||||
all_miners.append(found_miner)
|
|
||||||
all_miners.sort(key=lambda x: x.ip)
|
|
||||||
window["ip_table"].update([[str(miner.ip), "", "", "", ""] for miner in all_miners])
|
|
||||||
progress_bar_len += 1
|
|
||||||
asyncio.create_task(update_prog_bar(progress_bar_len))
|
|
||||||
await update_ui_with_data("ip_count", str(len(all_miners)))
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def miner_light(ips: list):
|
|
||||||
await asyncio.gather(*[flip_light(ip) for ip in ips])
|
|
||||||
|
|
||||||
|
|
||||||
async def flip_light(ip):
|
|
||||||
ip_list = window['ip_table'].Widget
|
|
||||||
miner = await miner_factory.get_miner(ip)
|
|
||||||
index = [item[0] for item in window["ip_table"].Values].index(ip)
|
|
||||||
index_tags = ip_list.item(index)['tags']
|
|
||||||
if "light" not in index_tags:
|
|
||||||
ip_list.item(index, tags=([*index_tags, "light"]))
|
|
||||||
window['ip_table'].update(row_colors=[(index, "white", "red")])
|
|
||||||
await miner.fault_light_on()
|
|
||||||
else:
|
|
||||||
index_tags.remove("light")
|
|
||||||
ip_list.item(index, tags=index_tags)
|
|
||||||
window['ip_table'].update(row_colors=[(index, "black", "white")])
|
|
||||||
await miner.fault_light_off()
|
|
||||||
|
|
||||||
|
|
||||||
async def import_config(idx):
|
|
||||||
await update_ui_with_data("status", "Importing")
|
|
||||||
miner = await miner_factory.get_miner(ipaddress.ip_address(window["ip_table"].Values[idx[0]][0]))
|
|
||||||
await miner.get_config()
|
|
||||||
config = miner.config
|
|
||||||
await update_ui_with_data("config", str(config))
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def import_iplist(file_location):
|
|
||||||
await update_ui_with_data("status", "Importing")
|
|
||||||
if not os.path.exists(file_location):
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
ip_list = []
|
|
||||||
async with aiofiles.open(file_location, mode='r') as file:
|
|
||||||
async for line in file:
|
|
||||||
ips = [x.group() for x in re.finditer(
|
|
||||||
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
|
|
||||||
for ip in ips:
|
|
||||||
if ip not in ip_list:
|
|
||||||
ip_list.append(ipaddress.ip_address(ip))
|
|
||||||
ip_list.sort()
|
|
||||||
window["ip_table"].update([[str(ip), "", "", "", ""] for ip in ip_list])
|
|
||||||
await update_ui_with_data("ip_count", str(len(ip_list)))
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def export_iplist(file_location, ip_list_selected):
|
|
||||||
await update_ui_with_data("status", "Exporting")
|
|
||||||
if not os.path.exists(file_location):
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
if ip_list_selected is not None and not ip_list_selected == []:
|
|
||||||
async with aiofiles.open(file_location, mode='w') as file:
|
|
||||||
for item in ip_list_selected:
|
|
||||||
await file.write(str(item) + "\n")
|
|
||||||
else:
|
|
||||||
async with aiofiles.open(file_location, mode='w') as file:
|
|
||||||
for item in window['ip_table'].Values:
|
|
||||||
await file.write(str(item[0]) + "\n")
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def send_config_generator(miners: list, config):
|
|
||||||
loop = asyncio.get_event_loop()
|
|
||||||
config_tasks = []
|
|
||||||
for miner in miners:
|
|
||||||
if len(config_tasks) >= CONFIG_THREADS:
|
|
||||||
configured = asyncio.as_completed(config_tasks)
|
|
||||||
config_tasks = []
|
|
||||||
for sent_config in configured:
|
|
||||||
yield await sent_config
|
|
||||||
config_tasks.append(loop.create_task(miner.send_config(config)))
|
|
||||||
configured = asyncio.as_completed(config_tasks)
|
|
||||||
for sent_config in configured:
|
|
||||||
yield await sent_config
|
|
||||||
|
|
||||||
|
|
||||||
async def send_config(ips: list, config):
|
|
||||||
await update_ui_with_data("status", "Configuring")
|
|
||||||
await set_progress_bar_len(2 * len(ips))
|
|
||||||
progress_bar_len = 0
|
|
||||||
get_miner_genenerator = miner_factory.get_miner_generator(ips)
|
|
||||||
all_miners = []
|
|
||||||
async for miner in get_miner_genenerator:
|
|
||||||
all_miners.append(miner)
|
|
||||||
progress_bar_len += 1
|
|
||||||
asyncio.create_task(update_prog_bar(progress_bar_len))
|
|
||||||
|
|
||||||
config_sender_generator = send_config_generator(all_miners, config)
|
|
||||||
async for _config_sender in config_sender_generator:
|
|
||||||
progress_bar_len += 1
|
|
||||||
asyncio.create_task(update_prog_bar(progress_bar_len))
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def import_config_file(file_location):
|
|
||||||
await update_ui_with_data("status", "Importing")
|
|
||||||
if not os.path.exists(file_location):
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
async with aiofiles.open(file_location, mode='r') as file:
|
|
||||||
config = await file.read()
|
|
||||||
await update_ui_with_data("config", await bos_config_convert(toml.loads(config)))
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def export_config_file(file_location, config):
|
|
||||||
await update_ui_with_data("status", "Exporting")
|
|
||||||
config = toml.loads(config)
|
|
||||||
config['format']['generator'] = 'upstream_config_util'
|
|
||||||
config['format']['timestamp'] = int(time.time())
|
|
||||||
config = toml.dumps(config)
|
|
||||||
async with aiofiles.open(file_location, mode='w+') as file:
|
|
||||||
await file.write(await general_config_convert_bos(config))
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def get_data(ip_list: list):
|
|
||||||
await update_ui_with_data("status", "Getting Data")
|
|
||||||
ips = [ipaddress.ip_address(ip) for ip in ip_list]
|
|
||||||
if len(ips) == 0:
|
|
||||||
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
|
|
||||||
await set_progress_bar_len(len(ips))
|
|
||||||
progress_bar_len = 0
|
|
||||||
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in ips])
|
|
||||||
ip_table_data = window["ip_table"].Values
|
|
||||||
ordered_all_ips = [item[0] for item in ip_table_data]
|
|
||||||
for all_data in data_gen:
|
|
||||||
data_point = await all_data
|
|
||||||
if data_point["IP"] in ordered_all_ips:
|
|
||||||
ip_table_index = ordered_all_ips.index(data_point["IP"])
|
|
||||||
ip_table_data[ip_table_index] = [
|
|
||||||
data_point["IP"], data_point["host"], str(data_point['TH/s']) + " TH/s", data_point['user'], str(data_point['wattage']) + " W"
|
|
||||||
]
|
|
||||||
window["ip_table"].update(ip_table_data)
|
|
||||||
progress_bar_len += 1
|
|
||||||
asyncio.create_task(update_prog_bar(progress_bar_len))
|
|
||||||
|
|
||||||
hashrate_list = [float(item[2].replace(" TH/s", "")) for item in window["ip_table"].Values if not item[2] == '']
|
|
||||||
total_hr = round(sum(hashrate_list), 2)
|
|
||||||
window["hr_total"].update(f"{total_hr} TH/s")
|
|
||||||
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|
||||||
|
|
||||||
async def get_formatted_data(ip: ipaddress.ip_address):
|
|
||||||
miner = await miner_factory.get_miner(ip)
|
|
||||||
try:
|
|
||||||
miner_data = await miner.api.multicommand("summary", "pools", "tunerstatus")
|
|
||||||
except APIError:
|
|
||||||
return {'TH/s': "Unknown", 'IP': str(miner.ip), 'host': "Unknown", 'user': "Unknown", 'wattage': 0}
|
|
||||||
host = await miner.get_hostname()
|
|
||||||
if "tunerstatus" in miner_data.keys():
|
|
||||||
wattage = await safe_parse_api_data(miner_data, "tunerstatus", 0, 'TUNERSTATUS', 0, "PowerLimit")
|
|
||||||
# data['tunerstatus'][0]['TUNERSTATUS'][0]['PowerLimit']
|
|
||||||
elif "Power" in miner_data["summary"][0]["SUMMARY"][0].keys():
|
|
||||||
wattage = await safe_parse_api_data(miner_data, "summary", 0, 'SUMMARY', 0, "Power")
|
|
||||||
else:
|
|
||||||
print(miner_data)
|
|
||||||
wattage = 0
|
|
||||||
if "summary" in miner_data.keys():
|
|
||||||
if 'MHS 5s' in miner_data['summary'][0]['SUMMARY'][0].keys():
|
|
||||||
th5s = round(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'MHS 5s') / 1000000, 2)
|
|
||||||
elif 'GHS 5s' in miner_data['summary'][0]['SUMMARY'][0].keys():
|
|
||||||
if not miner_data['summary'][0]['SUMMARY'][0]['GHS 5s'] == "":
|
|
||||||
th5s = round(float(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'GHS 5s')) / 1000,
|
|
||||||
2)
|
|
||||||
else:
|
|
||||||
th5s = 0
|
|
||||||
else:
|
|
||||||
th5s = 0
|
|
||||||
else:
|
|
||||||
th5s = 0
|
|
||||||
if "pools" not in miner_data.keys():
|
|
||||||
user = "?"
|
|
||||||
elif not miner_data['pools'][0]['POOLS'] == []:
|
|
||||||
user = await safe_parse_api_data(miner_data, 'pools', 0, 'POOLS', 0, 'User')
|
|
||||||
else:
|
|
||||||
user = "Blank"
|
|
||||||
return {'TH/s': th5s, 'IP': str(miner.ip), 'host': host, 'user': user, 'wattage': wattage}
|
|
||||||
|
|
||||||
|
|
||||||
async def generate_config(username, workername, v2_allowed):
|
|
||||||
if username and workername:
|
|
||||||
user = f"{username}.{workername}"
|
|
||||||
elif username and not workername:
|
|
||||||
user = username
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
|
|
||||||
if v2_allowed:
|
|
||||||
url_1 = 'stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt'
|
|
||||||
url_2 = 'stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt'
|
|
||||||
url_3 = 'stratum+tcp://stratum.slushpool.com:3333'
|
|
||||||
else:
|
|
||||||
url_1 = 'stratum+tcp://ca.stratum.slushpool.com:3333'
|
|
||||||
url_2 = 'stratum+tcp://us-east.stratum.slushpool.com:3333'
|
|
||||||
url_3 = 'stratum+tcp://stratum.slushpool.com:3333'
|
|
||||||
|
|
||||||
config = {'group': [{
|
|
||||||
'name': 'group',
|
|
||||||
'quota': 1,
|
|
||||||
'pool': [{
|
|
||||||
'url': url_1,
|
|
||||||
'user': user,
|
|
||||||
'password': '123'
|
|
||||||
}, {
|
|
||||||
'url': url_2,
|
|
||||||
'user': user,
|
|
||||||
'password': '123'
|
|
||||||
}, {
|
|
||||||
'url': url_3,
|
|
||||||
'user': user,
|
|
||||||
'password': '123'
|
|
||||||
}]
|
|
||||||
}],
|
|
||||||
'format': {
|
|
||||||
'version': '1.2+',
|
|
||||||
'model': 'Antminer S9',
|
|
||||||
'generator': 'upstream_config_util',
|
|
||||||
'timestamp': int(time.time())
|
|
||||||
},
|
|
||||||
'temp_control': {
|
|
||||||
'target_temp': 80.0,
|
|
||||||
'hot_temp': 90.0,
|
|
||||||
'dangerous_temp': 120.0
|
|
||||||
},
|
|
||||||
'autotuning': {
|
|
||||||
'enabled': True,
|
|
||||||
'psu_power_limit': 900
|
|
||||||
}
|
|
||||||
}
|
|
||||||
window['config'].update(await bos_config_convert(config))
|
|
||||||
|
|
||||||
|
|
||||||
async def sort_data(index: int or str):
|
|
||||||
await update_ui_with_data("status", "Sorting Data")
|
|
||||||
data_list = window['ip_table'].Values
|
|
||||||
|
|
||||||
# wattage
|
|
||||||
if re.match("[0-9]* W", data_list[0][index]):
|
|
||||||
new_list = sorted(data_list, key=lambda x: int(x[index].replace(" W", "")))
|
|
||||||
|
|
||||||
# hashrate
|
|
||||||
elif re.match("[0-9]*\.?[0-9]* TH\/s", data_list[0][index]):
|
|
||||||
new_list = sorted(data_list, key=lambda x: float(x[index].replace(" TH/s", "")))
|
|
||||||
|
|
||||||
# ip addresses
|
|
||||||
elif re.match("^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
|
|
||||||
data_list[0][index]):
|
|
||||||
new_list = sorted(data_list, key=lambda x: ipaddress.ip_address(x[index]))
|
|
||||||
|
|
||||||
# everything else, hostname and user
|
|
||||||
else:
|
|
||||||
new_list = sorted(data_list, key=lambda x: x[index])
|
|
||||||
await update_ui_with_data("ip_table", new_list)
|
|
||||||
await update_ui_with_data("status", "")
|
|
||||||
|
|||||||
68
cfg_util/func/files.py
Normal file
68
cfg_util/func/files.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import ipaddress
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
|
||||||
|
import aiofiles
|
||||||
|
import toml
|
||||||
|
|
||||||
|
from cfg_util.func.ui import update_ui_with_data
|
||||||
|
from cfg_util.layout import window
|
||||||
|
from config.bos import bos_config_convert, general_config_convert_bos
|
||||||
|
|
||||||
|
|
||||||
|
async def import_iplist(file_location):
|
||||||
|
await update_ui_with_data("status", "Importing")
|
||||||
|
if not os.path.exists(file_location):
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
ip_list = []
|
||||||
|
async with aiofiles.open(file_location, mode='r') as file:
|
||||||
|
async for line in file:
|
||||||
|
ips = [x.group() for x in re.finditer(
|
||||||
|
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)", line)]
|
||||||
|
for ip in ips:
|
||||||
|
if ip not in ip_list:
|
||||||
|
ip_list.append(ipaddress.ip_address(ip))
|
||||||
|
ip_list.sort()
|
||||||
|
window["ip_table"].update([[str(ip), "", "", "", ""] for ip in ip_list])
|
||||||
|
await update_ui_with_data("ip_count", str(len(ip_list)))
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def export_iplist(file_location, ip_list_selected):
|
||||||
|
await update_ui_with_data("status", "Exporting")
|
||||||
|
if not os.path.exists(file_location):
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
if ip_list_selected is not None and not ip_list_selected == []:
|
||||||
|
async with aiofiles.open(file_location, mode='w') as file:
|
||||||
|
for item in ip_list_selected:
|
||||||
|
await file.write(str(item) + "\n")
|
||||||
|
else:
|
||||||
|
async with aiofiles.open(file_location, mode='w') as file:
|
||||||
|
for item in window['ip_table'].Values:
|
||||||
|
await file.write(str(item[0]) + "\n")
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def import_config_file(file_location):
|
||||||
|
await update_ui_with_data("status", "Importing")
|
||||||
|
if not os.path.exists(file_location):
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
async with aiofiles.open(file_location, mode='r') as file:
|
||||||
|
config = await file.read()
|
||||||
|
await update_ui_with_data("config", await bos_config_convert(toml.loads(config)))
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def export_config_file(file_location, config):
|
||||||
|
await update_ui_with_data("status", "Exporting")
|
||||||
|
config = toml.loads(config)
|
||||||
|
config['format']['generator'] = 'upstream_config_util'
|
||||||
|
config['format']['timestamp'] = int(time.time())
|
||||||
|
config = toml.dumps(config)
|
||||||
|
async with aiofiles.open(file_location, mode='w+') as file:
|
||||||
|
await file.write(await general_config_convert_bos(config))
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
223
cfg_util/func/miners.py
Normal file
223
cfg_util/func/miners.py
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
import asyncio
|
||||||
|
import ipaddress
|
||||||
|
import time
|
||||||
|
|
||||||
|
from API import APIError
|
||||||
|
from cfg_util.func.parse_data import safe_parse_api_data
|
||||||
|
from cfg_util.func.ui import update_ui_with_data, update_prog_bar, set_progress_bar_len
|
||||||
|
from cfg_util.layout import window
|
||||||
|
from cfg_util.miner_factory import miner_factory
|
||||||
|
from config.bos import bos_config_convert
|
||||||
|
from settings import CFG_UTIL_CONFIG_THREADS as CONFIG_THREADS
|
||||||
|
|
||||||
|
|
||||||
|
async def import_config(idx):
|
||||||
|
await update_ui_with_data("status", "Importing")
|
||||||
|
miner = await miner_factory.get_miner(ipaddress.ip_address(window["ip_table"].Values[idx[0]][0]))
|
||||||
|
await miner.get_config()
|
||||||
|
config = miner.config
|
||||||
|
await update_ui_with_data("config", str(config))
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def scan_network(network):
|
||||||
|
await update_ui_with_data("status", "Scanning")
|
||||||
|
await update_ui_with_data("hr_total", "")
|
||||||
|
window["ip_table"].update([])
|
||||||
|
network_size = len(network)
|
||||||
|
miner_generator = network.scan_network_generator()
|
||||||
|
await set_progress_bar_len(2 * network_size)
|
||||||
|
progress_bar_len = 0
|
||||||
|
miners = []
|
||||||
|
async for miner in miner_generator:
|
||||||
|
if miner:
|
||||||
|
miners.append(miner)
|
||||||
|
# can output "Identifying" for each found item, but it gets a bit cluttered
|
||||||
|
# and could possibly be confusing for the end user because of timing on
|
||||||
|
# adding the IPs
|
||||||
|
# window["ip_table"].update([["Identifying...", "", "", "", ""] for miner in miners])
|
||||||
|
progress_bar_len += 1
|
||||||
|
asyncio.create_task(update_prog_bar(progress_bar_len))
|
||||||
|
progress_bar_len += network_size - len(miners)
|
||||||
|
asyncio.create_task(update_prog_bar(progress_bar_len))
|
||||||
|
get_miner_genenerator = miner_factory.get_miner_generator(miners)
|
||||||
|
all_miners = []
|
||||||
|
async for found_miner in get_miner_genenerator:
|
||||||
|
all_miners.append(found_miner)
|
||||||
|
all_miners.sort(key=lambda x: x.ip)
|
||||||
|
window["ip_table"].update([[str(miner.ip), "", "", "", ""] for miner in all_miners])
|
||||||
|
progress_bar_len += 1
|
||||||
|
asyncio.create_task(update_prog_bar(progress_bar_len))
|
||||||
|
await update_ui_with_data("ip_count", str(len(all_miners)))
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def miner_light(ips: list):
|
||||||
|
await asyncio.gather(*[flip_light(ip) for ip in ips])
|
||||||
|
|
||||||
|
|
||||||
|
async def flip_light(ip):
|
||||||
|
ip_list = window['ip_table'].Widget
|
||||||
|
miner = await miner_factory.get_miner(ip)
|
||||||
|
index = [item[0] for item in window["ip_table"].Values].index(ip)
|
||||||
|
index_tags = ip_list.item(index)['tags']
|
||||||
|
if "light" not in index_tags:
|
||||||
|
ip_list.item(index, tags=([*index_tags, "light"]))
|
||||||
|
window['ip_table'].update(row_colors=[(index, "white", "red")])
|
||||||
|
await miner.fault_light_on()
|
||||||
|
else:
|
||||||
|
index_tags.remove("light")
|
||||||
|
ip_list.item(index, tags=index_tags)
|
||||||
|
window['ip_table'].update(row_colors=[(index, "black", "white")])
|
||||||
|
await miner.fault_light_off()
|
||||||
|
|
||||||
|
|
||||||
|
async def send_config_generator(miners: list, config):
|
||||||
|
loop = asyncio.get_event_loop()
|
||||||
|
config_tasks = []
|
||||||
|
for miner in miners:
|
||||||
|
if len(config_tasks) >= CONFIG_THREADS:
|
||||||
|
configured = asyncio.as_completed(config_tasks)
|
||||||
|
config_tasks = []
|
||||||
|
for sent_config in configured:
|
||||||
|
yield await sent_config
|
||||||
|
config_tasks.append(loop.create_task(miner.send_config(config)))
|
||||||
|
configured = asyncio.as_completed(config_tasks)
|
||||||
|
for sent_config in configured:
|
||||||
|
yield await sent_config
|
||||||
|
|
||||||
|
|
||||||
|
async def send_config(ips: list, config):
|
||||||
|
await update_ui_with_data("status", "Configuring")
|
||||||
|
await set_progress_bar_len(2 * len(ips))
|
||||||
|
progress_bar_len = 0
|
||||||
|
get_miner_genenerator = miner_factory.get_miner_generator(ips)
|
||||||
|
all_miners = []
|
||||||
|
async for miner in get_miner_genenerator:
|
||||||
|
all_miners.append(miner)
|
||||||
|
progress_bar_len += 1
|
||||||
|
asyncio.create_task(update_prog_bar(progress_bar_len))
|
||||||
|
|
||||||
|
config_sender_generator = send_config_generator(all_miners, config)
|
||||||
|
async for _config_sender in config_sender_generator:
|
||||||
|
progress_bar_len += 1
|
||||||
|
asyncio.create_task(update_prog_bar(progress_bar_len))
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def get_data(ip_list: list):
|
||||||
|
await update_ui_with_data("status", "Getting Data")
|
||||||
|
ips = [ipaddress.ip_address(ip) for ip in ip_list]
|
||||||
|
if len(ips) == 0:
|
||||||
|
ips = [ipaddress.ip_address(ip) for ip in [item[0] for item in window["ip_table"].Values]]
|
||||||
|
await set_progress_bar_len(len(ips))
|
||||||
|
progress_bar_len = 0
|
||||||
|
data_gen = asyncio.as_completed([get_formatted_data(miner) for miner in ips])
|
||||||
|
ip_table_data = window["ip_table"].Values
|
||||||
|
ordered_all_ips = [item[0] for item in ip_table_data]
|
||||||
|
for all_data in data_gen:
|
||||||
|
data_point = await all_data
|
||||||
|
if data_point["IP"] in ordered_all_ips:
|
||||||
|
ip_table_index = ordered_all_ips.index(data_point["IP"])
|
||||||
|
ip_table_data[ip_table_index] = [
|
||||||
|
data_point["IP"], data_point["host"], str(data_point['TH/s']) + " TH/s", data_point['user'], str(data_point['wattage']) + " W"
|
||||||
|
]
|
||||||
|
window["ip_table"].update(ip_table_data)
|
||||||
|
progress_bar_len += 1
|
||||||
|
asyncio.create_task(update_prog_bar(progress_bar_len))
|
||||||
|
|
||||||
|
hashrate_list = [float(item[2].replace(" TH/s", "")) for item in window["ip_table"].Values if not item[2] == '']
|
||||||
|
total_hr = round(sum(hashrate_list), 2)
|
||||||
|
window["hr_total"].update(f"{total_hr} TH/s")
|
||||||
|
|
||||||
|
await update_ui_with_data("status", "")
|
||||||
|
|
||||||
|
|
||||||
|
async def get_formatted_data(ip: ipaddress.ip_address):
|
||||||
|
miner = await miner_factory.get_miner(ip)
|
||||||
|
try:
|
||||||
|
miner_data = await miner.api.multicommand("summary", "pools", "tunerstatus")
|
||||||
|
except APIError:
|
||||||
|
return {'TH/s': "Unknown", 'IP': str(miner.ip), 'host': "Unknown", 'user': "Unknown", 'wattage': 0}
|
||||||
|
host = await miner.get_hostname()
|
||||||
|
if "tunerstatus" in miner_data.keys():
|
||||||
|
wattage = await safe_parse_api_data(miner_data, "tunerstatus", 0, 'TUNERSTATUS', 0, "PowerLimit")
|
||||||
|
# data['tunerstatus'][0]['TUNERSTATUS'][0]['PowerLimit']
|
||||||
|
elif "Power" in miner_data["summary"][0]["SUMMARY"][0].keys():
|
||||||
|
wattage = await safe_parse_api_data(miner_data, "summary", 0, 'SUMMARY', 0, "Power")
|
||||||
|
else:
|
||||||
|
print(miner_data)
|
||||||
|
wattage = 0
|
||||||
|
if "summary" in miner_data.keys():
|
||||||
|
if 'MHS 5s' in miner_data['summary'][0]['SUMMARY'][0].keys():
|
||||||
|
th5s = round(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'MHS 5s') / 1000000, 2)
|
||||||
|
elif 'GHS 5s' in miner_data['summary'][0]['SUMMARY'][0].keys():
|
||||||
|
if not miner_data['summary'][0]['SUMMARY'][0]['GHS 5s'] == "":
|
||||||
|
th5s = round(float(await safe_parse_api_data(miner_data, 'summary', 0, 'SUMMARY', 0, 'GHS 5s')) / 1000,
|
||||||
|
2)
|
||||||
|
else:
|
||||||
|
th5s = 0
|
||||||
|
else:
|
||||||
|
th5s = 0
|
||||||
|
else:
|
||||||
|
th5s = 0
|
||||||
|
if "pools" not in miner_data.keys():
|
||||||
|
user = "?"
|
||||||
|
elif not miner_data['pools'][0]['POOLS'] == []:
|
||||||
|
user = await safe_parse_api_data(miner_data, 'pools', 0, 'POOLS', 0, 'User')
|
||||||
|
else:
|
||||||
|
user = "Blank"
|
||||||
|
return {'TH/s': th5s, 'IP': str(miner.ip), 'host': host, 'user': user, 'wattage': wattage}
|
||||||
|
|
||||||
|
|
||||||
|
async def generate_config(username, workername, v2_allowed):
    """Build a default BOSMiner config for the given pool credentials.

    Constructs a three-pool Slush Pool configuration (stratum v2 endpoints
    when ``v2_allowed`` is truthy, v1 otherwise) and writes the converted
    text into the 'config' window element.  Does nothing when ``username``
    is empty.
    """
    if not username:
        return
    user = f"{username}.{workername}" if workername else username

    if v2_allowed:
        pool_urls = [
            'stratum2+tcp://v2.us-east.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt',
            'stratum2+tcp://v2.stratum.slushpool.com/u95GEReVMjK6k5YqiSFNqqTnKU4ypU2Wm8awa6tmbmDmk1bWt',
            'stratum+tcp://stratum.slushpool.com:3333',
        ]
    else:
        pool_urls = [
            'stratum+tcp://ca.stratum.slushpool.com:3333',
            'stratum+tcp://us-east.stratum.slushpool.com:3333',
            'stratum+tcp://stratum.slushpool.com:3333',
        ]

    # One pool entry per URL, all sharing the same worker credentials.
    pools = [{'url': url, 'user': user, 'password': '123'} for url in pool_urls]

    config = {
        'group': [{
            'name': 'group',
            'quota': 1,
            'pool': pools,
        }],
        'format': {
            'version': '1.2+',
            'model': 'Antminer S9',
            'generator': 'upstream_config_util',
            'timestamp': int(time.time()),
        },
        'temp_control': {
            'target_temp': 80.0,
            'hot_temp': 90.0,
            'dangerous_temp': 120.0,
        },
        'autotuning': {
            'enabled': True,
            'psu_power_limit': 900,
        },
    }
    window['config'].update(await bos_config_convert(config))
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
from API import APIError
|
from API import APIError
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection PyPep8
|
||||||
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
|
async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int = 0):
|
||||||
path = [*path]
|
path = [*path]
|
||||||
if len(path) == idx+1:
|
if len(path) == idx+1:
|
||||||
@@ -18,6 +19,7 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
|
|||||||
if path[idx] in data.keys():
|
if path[idx] in data.keys():
|
||||||
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
|
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
|
||||||
# has to be == None, or else it fails on 0.0 hashrates
|
# has to be == None, or else it fails on 0.0 hashrates
|
||||||
|
# noinspection PyPep8
|
||||||
if parsed_data == None:
|
if parsed_data == None:
|
||||||
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
|
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
|
||||||
return parsed_data
|
return parsed_data
|
||||||
@@ -34,6 +36,7 @@ async def safe_parse_api_data(data: dict or list, *path: str or int, idx: int =
|
|||||||
if len(data) > path[idx]:
|
if len(data) > path[idx]:
|
||||||
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
|
parsed_data = await safe_parse_api_data(data[path[idx]], idx=idx+1, *path)
|
||||||
# has to be == None, or else it fails on 0.0 hashrates
|
# has to be == None, or else it fails on 0.0 hashrates
|
||||||
|
# noinspection PyPep8
|
||||||
if parsed_data == None:
|
if parsed_data == None:
|
||||||
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
|
raise APIError(f"Data parsing failed on path index {idx} - \nKey: {path[idx]} \nData: {data}")
|
||||||
return parsed_data
|
return parsed_data
|
||||||
48
cfg_util/func/ui.py
Normal file
48
cfg_util/func/ui.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import ipaddress
|
||||||
|
import re
|
||||||
|
|
||||||
|
from cfg_util.layout import window
|
||||||
|
|
||||||
|
|
||||||
|
async def update_ui_with_data(key, message, append=False):
    """Write ``message`` into the window element identified by ``key``.

    When ``append`` is True, the message is added after the element's
    current text instead of replacing it.
    """
    element = window[key]
    if append:
        element.update(element.get_text() + message)
    else:
        element.update(message)
|
||||||
|
|
||||||
|
|
||||||
|
async def update_prog_bar(amount):
    """Move the progress bar to ``amount`` and refresh the percent label.

    The percentage is computed against the ``maxlen`` attribute stashed on
    the bar by ``set_progress_bar_len``; the label is cleared once the bar
    reaches exactly 100%.
    """
    bar = window["progress"]
    bar.Update(amount)
    percent = 100 * (amount / bar.maxlen)
    window["progress_percent"].Update(f"{round(percent, 2)} %")
    if percent == 100:
        window["progress_percent"].Update("")
|
||||||
|
|
||||||
|
|
||||||
|
async def set_progress_bar_len(amount):
    """Reset the progress bar to zero with a new maximum of ``amount``."""
    bar = window["progress"]
    bar.Update(0, max=amount)
    # Stash the maximum on the element so update_prog_bar can compute the
    # completion percentage later.
    bar.maxlen = amount
    window["progress_percent"].Update("0.0 %")
|
||||||
|
|
||||||
|
|
||||||
|
async def sort_data(index: int or str):
    """Sort the UI's IP table in place by the column at ``index``.

    The sort key is inferred from the first row's value in that column:
    wattage ("### W"), hashrate ("#.## TH/s"), IP address, or plain string
    (hostname / user).  No-op when the table is empty.
    """
    await update_ui_with_data("status", "Sorting Data")
    data_list = window['ip_table'].Values

    if not data_list:
        # Fix: nothing to sort; previously data_list[0] raised IndexError
        # when the table was empty.
        await update_ui_with_data("status", "")
        return

    sample = data_list[0][index]
    # Fix: patterns are now raw strings — the old non-raw strings contained
    # the invalid escape "\/" and the deprecated "\." escape, which trigger
    # SyntaxWarning/DeprecationWarning on modern CPython.

    # wattage
    if re.match(r"[0-9]* W", sample):
        new_list = sorted(data_list, key=lambda x: int(x[index].replace(" W", "")))

    # hashrate
    elif re.match(r"[0-9]*\.?[0-9]* TH/s", sample):
        new_list = sorted(data_list, key=lambda x: float(x[index].replace(" TH/s", "")))

    # ip addresses
    elif re.match(r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",
                  sample):
        new_list = sorted(data_list, key=lambda x: ipaddress.ip_address(x[index]))

    # everything else, hostname and user
    else:
        new_list = sorted(data_list, key=lambda x: x[index])

    await update_ui_with_data("ip_table", new_list)
    await update_ui_with_data("status", "")
|
||||||
@@ -3,8 +3,9 @@ import sys
|
|||||||
import PySimpleGUI as sg
|
import PySimpleGUI as sg
|
||||||
|
|
||||||
from cfg_util.layout import window, generate_config_layout
|
from cfg_util.layout import window, generate_config_layout
|
||||||
from cfg_util.func import scan_network, sort_data, send_config, miner_light, get_data, export_config_file, \
|
from cfg_util.func.miners import scan_network, send_config, miner_light, get_data, generate_config, import_config
|
||||||
generate_config, import_config, import_iplist, import_config_file, export_iplist
|
from cfg_util.func.files import import_iplist, import_config_file, export_iplist, export_config_file
|
||||||
|
from cfg_util.func.ui import sort_data
|
||||||
|
|
||||||
from network import MinerNetwork
|
from network import MinerNetwork
|
||||||
|
|
||||||
|
|||||||
@@ -67,10 +67,15 @@ class CGMiner(BaseMiner):
|
|||||||
@staticmethod
def _result_handler(result: asyncssh.process.SSHCompletedProcess) -> None:
    """Print stdout/stderr from a completed SSH command.

    Notes when both streams are empty; does nothing for a None result.
    """
    if result is not None:
        # noinspection PyUnresolvedReferences
        if len(result.stdout) > 0:
            # noinspection PyUnresolvedReferences
            print("ssh stdout: \n" + result.stdout)
        # noinspection PyUnresolvedReferences
        if len(result.stderr) > 0:
            # Fix: was ``result.stderrr`` (typo), which raised
            # AttributeError whenever the command produced stderr output.
            print("ssh stderr: \n" + result.stderr)
        # noinspection PyUnresolvedReferences
        if len(result.stdout) <= 0 and len(result.stderr) <= 0:
            print("ssh stdout stderr empty")
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import toml
|
import toml
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
# noinspection PyPep8
|
||||||
try:
|
try:
|
||||||
with open(os.path.join(os.getcwd(), "settings.toml"), "r") as settings_file:
|
with open(os.path.join(os.getcwd(), "settings.toml"), "r") as settings_file:
|
||||||
settings = toml.loads(settings_file.read())
|
settings = toml.loads(settings_file.read())
|
||||||
|
|||||||
Reference in New Issue
Block a user