diff --git a/.gitignore b/.gitignore
index 5eac8df9..0bdb20f8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -82,6 +82,9 @@ target/
profile_default/
ipython_config.py
+# wandb
+wandb/
+
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
diff --git a/compute/__init__.py b/compute/__init__.py
index 8fc551c8..98091724 100644
--- a/compute/__init__.py
+++ b/compute/__init__.py
@@ -18,9 +18,10 @@
import string
# Define the version of the template module.
-__version__ = "1.7.4"
-__minimal_miner_version__ = "1.6.0"
-__minimal_validator_version__ = "1.7.3"
+
+__version__ = "1.8.0"
+__minimal_miner_version__ = "1.8.0"
+__minimal_validator_version__ = "1.8.0"
version_split = __version__.split(".")
__version_as_int__ = (100 * int(version_split[0])) + (10 * int(version_split[1])) + (1 * int(version_split[2]))
diff --git a/compute/axon.py b/compute/axon.py
index 820102d1..117411e9 100644
--- a/compute/axon.py
+++ b/compute/axon.py
@@ -27,8 +27,10 @@
import bittensor.utils.networking as net
import time
import uvicorn
-from bittensor import axon, subtensor
-from bittensor.axon import FastAPIThreadedServer, AxonMiddleware
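+# Note: bittensor >= 8 moves Subtensor and the axon server internals under bittensor.core.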
+from bittensor import axon
+from bittensor.core.subtensor import Subtensor as subtensor
+from bittensor.core.axon import FastAPIThreadedServer, AxonMiddleware
from fastapi import FastAPI, APIRouter
from rich.prompt import Confirm
from starlette.requests import Request
@@ -39,7 +41,7 @@
def serve_extrinsic(
- subtensor: "bittensor.subtensor",
+    subtensor: "bittensor.core.subtensor.Subtensor",
wallet: "bittensor.wallet",
ip: str,
port: int,
@@ -250,16 +252,16 @@ def __init__(
if config is None:
config = axon.config()
config = copy.deepcopy(config)
- config.axon.ip = ip or config.axon.get("ip", bittensor.defaults.axon.ip)
- config.axon.port = port or config.axon.get("port", bittensor.defaults.axon.port)
+ config.axon.ip = ip or config.axon.get("ip", bittensor.core.settings.DEFAULTS.axon.ip)
+ config.axon.port = port or config.axon.get("port", bittensor.core.settings.DEFAULTS.axon.port)
config.axon.external_ip = external_ip or config.axon.get(
- "external_ip", bittensor.defaults.axon.external_ip
+ "external_ip", bittensor.core.settings.DEFAULTS.axon.external_ip
)
config.axon.external_port = external_port or config.axon.get(
- "external_port", bittensor.defaults.axon.external_port
+ "external_port", bittensor.core.settings.DEFAULTS.axon.external_port
)
config.axon.max_workers = max_workers or config.axon.get(
- "max_workers", bittensor.defaults.axon.max_workers
+ "max_workers", bittensor.core.settings.DEFAULTS.axon.max_workers
)
axon.check_config(config)
self.config = config
diff --git a/compute/prometheus.py b/compute/prometheus.py
index b586eb42..74464242 100644
--- a/compute/prometheus.py
+++ b/compute/prometheus.py
@@ -21,10 +21,11 @@
import bittensor.utils.networking as net
import compute
+import inspect
def prometheus_extrinsic(
- subtensor: "bittensor.subtensor",
+    subtensor: "bittensor.core.subtensor.Subtensor",
wallet: "bittensor.wallet",
port: int,
netuid: int,
@@ -99,9 +100,8 @@ def prometheus_extrinsic(
# Add netuid, not in prometheus_info
call_params["netuid"] = netuid
-
bittensor.logging.info("Serving prometheus on: {}:{} ...".format(subtensor.network, netuid))
- success, err = subtensor._do_serve_prometheus(
+ success, err = subtensor.do_serve_prometheus(
wallet=wallet,
call_params=call_params,
wait_for_finalization=wait_for_finalization,
diff --git a/compute/utils/subtensor.py b/compute/utils/subtensor.py
index 8efac3cb..8c1106d5 100644
--- a/compute/utils/subtensor.py
+++ b/compute/utils/subtensor.py
@@ -20,7 +20,7 @@
from compute.utils.cache import ttl_cache
-bt_blocktime = bt.__blocktime__
+bt_blocktime = bt.BLOCKTIME
@ttl_cache(maxsize=1, ttl=bt_blocktime)
diff --git a/neurons/register_api.py b/neurons/register_api.py
index 7f2f4197..f2029995 100644
--- a/neurons/register_api.py
+++ b/neurons/register_api.py
@@ -1733,12 +1733,8 @@ async def count_all_gpus() -> JSONResponse:
# Iterate through the miner specs details and print the table
for hotkey, details in specs_details.items():
if details :
- gpu_miner = details["gpu"]
- gpu_capacity = "{:.2f}".format(
- (gpu_miner["capacity"] / 1024)
- )
- gpu_name = str(gpu_miner["details"][0]["name"]).lower()
- gpu_count = gpu_miner["count"]
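+            # Use .get with an empty-dict fallback so miners that report no GPU specs do not raise here.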
+            gpu_miner = details.get("gpu") or {}
+            gpu_count = gpu_miner.get("count", 0)
GPU_COUNTS += gpu_count
bt.logging.info(f"API: List resources successfully")
return JSONResponse(
diff --git a/neurons/validator.py b/neurons/validator.py
index 854f796c..2260c88a 100644
--- a/neurons/validator.py
+++ b/neurons/validator.py
@@ -245,16 +245,23 @@ def init_prometheus(self, force_update: bool = False):
"""
Register the prometheus information on metagraph.
:return: bool
- """
+ """
+        # The serve_prometheus extrinsic was removed in bittensor 8.2.1, so it is no longer called here.
+
bt.logging.info("Extrinsic prometheus information on metagraph.")
- success = self.subtensor.serve_prometheus(
- wallet=self.wallet,
- port=bt.defaults.axon.port,
- netuid=self.config.netuid,
- force_update=force_update,
- )
+ success = True
+        # TODO: remove all prometheus-related code from the code base
+ # self._subtensor.serve_prometheus(
+ # wallet=self.wallet,
+ # port=bt.core.settings.DEFAULTS.axon.port,
+ # netuid=self.config.netuid,
+ # force_update=force_update,
+ # )
if success:
- bt.logging.success(prefix="Prometheus served", sufix=f"Current version: {get_local_version()}")
+ bt.logging.success(
+ prefix="Prometheus served",
+                suffix=f"Current version: {get_local_version()}"
+ )
else:
bt.logging.error("Prometheus initialization failed")
return success
@@ -1058,7 +1065,7 @@ def set_weights(self):
version_key=__version_as_int__,
wait_for_inclusion=False,
)
- if isinstance(result, bool) and result or isinstance(result, tuple) and result[0]:
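+        # set_weights returns a (success, message) tuple on bittensor 8.x; keep the bool check for older return shapes.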
+        if (isinstance(result, tuple) and result[0]) or (isinstance(result, bool) and result):
bt.logging.info(result)
bt.logging.success("✅ Successfully set weights.")
else:
diff --git a/requirements.txt b/requirements.txt
index f6608eb3..0f3604ba 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,15 +1,14 @@
at==0.0.3
-bittensor==6.9.4
+bittensor==8.5.0
black==23.7.0
-cryptography==42.0.0
+cryptography==43.0.1
docker==7.0.0
GPUtil==1.4.0
igpu==0.1.2
-numpy==1.26.3
+numpy==2.0.2
psutil==5.9.8
pyinstaller==6.4.0
-torch==2.1.2
-wandb==0.16.6
+wandb==0.19.1
pyfiglet==1.0.2
python-dotenv==1.0.1
requests==2.31.0