
Commit

Merge branch 'dev' into feature/CSN-399-consensus-scoring-mechanism-wandb
nauttiilus authored Jan 21, 2025
2 parents 0c47ba6 + 9204d98 commit e1cd033
Showing 8 changed files with 99 additions and 39 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -253,3 +253,6 @@ cert/

# wandb
wandb/

# neural internet register api
neurons/register-api/
11 changes: 11 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,11 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: trailing-whitespace
  - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
    rev: v9.20.0
    hooks:
      - id: commitlint
        stages: [commit-msg]
        additional_dependencies: ['@commitlint/config-angular']
23 changes: 15 additions & 8 deletions cert/gen_ca.sh
@@ -25,8 +25,8 @@ echo "2.2 Use the server private key to generate a certificate generation reques
openssl req -new -key server.key -out server.req -sha256 -subj "/C=US/ST=NY/CN=server.neuralinternet.ai/O=NI"

echo "2.3 Use the certificate generation request and the CA cert to generate the server cert."
openssl x509 -req -in server.req -CA ca.cer -CAkey ca.key -CAcreateserial -set_serial 100 -days "$ca_cert_expire_days" -outform PEM -passin pass:"$pem_password" -out server.cer -sha256 -extensions v3_req -extfile <(
cat << EOF
# Create a temporary extensions file
cat << EOF > extfile.cnf
[ v3_req ]
subjectAltName = @alt_names
@@ -35,13 +35,16 @@ IP.1 = 127.0.0.1
IP.2 = 0.0.0.0
IP.3 = "$local_ip"
EOF
)

openssl x509 -req -in server.req -CA ca.cer -CAkey ca.key -CAcreateserial -set_serial 100 -days "$ca_cert_expire_days" -outform PEM -passin pass:"$pem_password" -out server.cer -sha256 -extensions v3_req -extfile extfile.cnf

# Remove the temporary extensions file
rm extfile.cnf

echo "2.4 Convert the cer to PEM CRT format"
openssl x509 -inform PEM -in server.cer -out server.crt

echo "2.5 Clean up now that the cert has been created, we no longer need the request"
echo "2.5 Clean up - now that the cert has been created, we no longer need the request"
rm server.req

#for frontend server
@@ -52,8 +55,8 @@ echo "3.2 Use the client private key to generate a certificate generation reques
openssl req -new -key client.key -out client.req -subj "/C=US/ST=NY/CN=client.neuralinternet.ai/O=NI"

echo "3.3 Use the certificate generation request and the CA cert to generate the client cert."
openssl x509 -req -in client.req -CA ca.cer -CAkey ca.key -CAcreateserial -set_serial 101 -days "$ca_cert_expire_days" -outform PEM -out client.cer -passin pass:"$pem_password" -extensions v3_req -extfile <(
cat << EOF
# Create a temporary extensions file
cat << EOF > extfile.cnf
[ v3_req ]
subjectAltName = @alt_names
@@ -62,13 +65,17 @@ IP.1 = 127.0.0.1
IP.2 = 0.0.0.0
IP.3 = "$local_ip"
EOF
)

openssl x509 -req -in client.req -CA ca.cer -CAkey ca.key -CAcreateserial -set_serial 101 -days "$ca_cert_expire_days" -outform PEM -out client.cer -passin pass:"$pem_password" -extensions v3_req -extfile extfile.cnf

# Remove the temporary extensions file
rm extfile.cnf

echo "3.4 Convert the client certificate and private key to pkcs#12 format for use by browsers."
openssl pkcs12 -export -inkey client.key -in client.cer -out client.p12 -passout pass:"$pem_password"

echo "3.5. Convert the cer to PEM CRT format"
openssl x509 -inform PEM -in client.cer -out client.crt

echo "3.6. Clean up now that the cert has been created, we no longer need the request."
echo "3.6. Clean up - now that the cert has been created, we no longer need the request."
rm client.req
1 change: 1 addition & 0 deletions commitlint-config.js
@@ -0,0 +1 @@
module.exports = {extends: ['@commitlint/config-conventional']}
1 change: 1 addition & 0 deletions compute/protocol.py
@@ -87,6 +87,7 @@ class Allocate(bt.Synapse):
    docker_action: dict = {
        "action": "",
        "ssh_key": "",
        "key_type": "",
    }

    def deserialize(self) -> dict:
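For reference, the new key_type field travels inside the Allocate synapse's docker_action payload. Below is a minimal sketch of how a caller might populate it for a key-exchange request, mirroring the register_api.py hunk later in this commit; all values are placeholders and the snippet is illustrative, not part of the change.

# Illustrative placeholder values only; the payload shape follows the
# docker_action dict defined above and the register_api.py change below.
docker_action = {
    "action": "exchange_key",                    # operation the miner should perform
    "ssh_key": "ssh-ed25519 AAAA... user@host",  # placeholder public key
    "key_type": "terminal",                      # new field: "user" (default) or "terminal"
}

allocate_request = Allocate(
    timeline=1,
    device_requirement={},
    checking=False,
    public_key="<rsa public key>",               # placeholder, as passed by the API
    docker_change=True,
    docker_action=docker_action,
)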
19 changes: 17 additions & 2 deletions neurons/Miner/container.py
@@ -432,7 +432,7 @@ def unpause_container():
        bt.logging.info(f"Error unpausing container {e}")
        return {"status": False}

def exchange_key_container(new_ssh_key: str):
def exchange_key_container(new_ssh_key: str, key_type: str = "user"):
    try:
        client, containers = get_docker()
        running_container = None
@@ -443,7 +443,22 @@ def exchange_key_container(new_ssh_key: str):
        if running_container:
            # stop and remove the container by using the SIGTERM signal to PID 1 (init) process in the container
            if running_container.status == "running":
                running_container.exec_run(cmd=f"bash -c \"echo '{new_ssh_key}' > /root/.ssh/authorized_keys & sync & sleep 1\"")
                exist_key = running_container.exec_run(cmd="cat /root/.ssh/authorized_keys")
                exist_key = exist_key.output.decode("utf-8").split("\n")
                user_key = exist_key[0]
                terminal_key = ""
                if len(exist_key) > 1:
                    terminal_key = exist_key[1]
                if key_type == "terminal":
                    terminal_key = new_ssh_key
                elif key_type == "user":
                    user_key = new_ssh_key
                else:
                    bt.logging.debug("Invalid key type to swap the SSH key")
                    return {"status": False}
                key_list = user_key + "\n" + terminal_key
                # bt.logging.debug(f"New SSH key: {key_list}")
                running_container.exec_run(cmd=f"bash -c \"echo '{key_list}' > /root/.ssh/authorized_keys & sync & sleep 1\"")
                running_container.exec_run(cmd="kill -15 1")
                running_container.wait()
                running_container.restart()
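The hunk above changes exchange_key_container so that /root/.ssh/authorized_keys is treated as two slots: the user key on the first line and the terminal key on the second, with only the slot named by key_type replaced. The following standalone sketch restates just that merge step for clarity; it is not the miner's actual helper.

from typing import Optional

def merge_authorized_keys(existing: str, new_ssh_key: str, key_type: str = "user") -> Optional[str]:
    """Illustrative re-statement of the slot logic above; not part of the commit."""
    lines = existing.split("\n")
    user_key = lines[0] if lines else ""
    terminal_key = lines[1] if len(lines) > 1 else ""
    if key_type == "terminal":
        terminal_key = new_ssh_key
    elif key_type == "user":
        user_key = new_ssh_key
    else:
        return None  # mirrors the {"status": False} branch for an invalid key type
    return user_key + "\n" + terminal_key

# Swapping the terminal slot leaves the user key untouched:
print(merge_authorized_keys("ssh-ed25519 USERKEY\nssh-ed25519 OLDTERM", "ssh-ed25519 NEWTERM", "terminal"))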
3 changes: 2 additions & 1 deletion neurons/miner.py
@@ -438,7 +438,8 @@ def allocate(self, synapse: Allocate) -> Allocate:
            if docker_action["action"] == "exchange_key":
                public_key = synapse.public_key
                new_ssh_key = docker_action["ssh_key"]
                result = exchange_key_container(new_ssh_key)
                key_type = docker_action["key_type"]
                result = exchange_key_container(new_ssh_key, key_type)
                synapse.output = result
            elif docker_action["action"] == "restart":
                public_key = synapse.public_key
77 changes: 49 additions & 28 deletions neurons/register_api.py
@@ -61,7 +61,7 @@
# Import FastAPI Libraries
import uvicorn
from fastapi import (
    FastAPI,
    FastAPI, HTTPException,
    status,
    Request,
    WebSocket,
@@ -72,9 +72,16 @@
from fastapi.exceptions import RequestValidationError
from fastapi.concurrency import run_in_threadpool
from pydantic import BaseModel, Field
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.status import HTTP_403_FORBIDDEN
from dotenv import load_dotenv
from typing import Optional, Union, List

# Loads the .env file
load_dotenv()

# Constants
ENABLE_WHITELIST_IPS = False # False for disabling, True for enabling
DEFAULT_SSL_MODE = 2 # 1 for client CERT optional, 2 for client CERT_REQUIRED
DEFAULT_API_PORT = 8903 # default port for the API
DATA_SYNC_PERIOD = 600 # metagraph resync time
@@ -107,6 +114,22 @@
"5Fq5v71D4LX8Db1xsmRSy6udQThcZ8sFDqxQFwnUZ1BuqY5A"]
MINER_BLACKLIST = []

# IP Whitelist middleware
class IPWhitelistMiddleware(BaseHTTPMiddleware):
    def __init__(self, app: FastAPI):
        super().__init__(app)
        self.whitelisted_ips = set(os.getenv("WHITELISTED_IPS", "").split(","))

    async def dispatch(self, request: Request, call_next):
        # Extracts the client's IP address
        client_ip = request.client.host
        if client_ip not in self.whitelisted_ips:
            bt.logging.info(f"Access attempt from IP: {client_ip}")
            raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail="Access forbidden: IP not whitelisted")

        # Process the request and get the response
        response = await call_next(request)
        return response

class UserConfig(BaseModel):
    netuid: str = Field(default="15")
@@ -292,6 +315,8 @@ def __init__(

        load_dotenv()
        self._setup_routes()
        if ENABLE_WHITELIST_IPS:
            self.app.add_middleware(IPWhitelistMiddleware)
        self.process = None
        self.websocket_connection = None
        self.allocation_table = []
@@ -575,22 +600,13 @@ async def allocate_hotkey(hotkey: str, ssh_key: Optional[str] = None,
uuid_key = str(uuid.uuid1())

private_key, public_key = rsa.generate_key_pair()
if ssh_key:
if docker_requirement is None:
docker_requirement = DockerRequirement()
docker_requirement.ssh_key = ssh_key
else:
docker_requirement.ssh_key = ssh_key

if docker_requirement is None:
docker_requirement = DockerRequirement()
if ssh_key is None:
docker_requirement.ssh_key = ""
else:
bt.logging.error(f"API: Allocation {hotkey} Failed : No ssh key")
return JSONResponse(
status_code=status.HTTP_404_NOT_FOUND,
content={
"success": False,
"message": "Fail to allocate resource",
"err_detail": "No ssh key",
},
)
docker_requirement.ssh_key = ssh_key

run_start = time.time()

@@ -847,7 +863,7 @@ async def deallocate(hotkey: str, uuid_key: str, request: Request, notify_flag:
},
}
)
async def check_miner_status(hotkey_list: List[str]) -> JSONResponse:
async def check_miner_status(hotkey_list: List[str], query_version: bool = False) -> JSONResponse:
checking_list = []
for hotkey in hotkey_list:
checking_result = {
@@ -857,16 +873,20 @@ async def check_miner_status(hotkey_list: List[str]) -> JSONResponse:
for axon in self.metagraph.axons:
if axon.hotkey == hotkey:
try:
register_response = await run_in_threadpool(self.dendrite.query,
axon, Allocate(timeline=1, checking=True, ),
timeout=60)
if register_response:
if register_response["status"] is True:
checking_result = {"hotkey": hotkey, "status": "Docker OFFLINE"}
else:
checking_result = {"hotkey": hotkey, "status": "Docker ONLINE"}
if query_version:
checking_result = {"hotkey": hotkey, "version": axon.version}
else:
checking_result = {"hotkey": hotkey, "status": "Miner NO_RESPONSE"}
register_response = await run_in_threadpool(self.dendrite.query,
axon, Allocate(timeline=1, checking=True, ),
timeout=10)
await asyncio.sleep(0.1)
if register_response:
if register_response["status"] is True:
checking_result = {"hotkey": hotkey, "status": "Docker OFFLINE"}
else:
checking_result = {"hotkey": hotkey, "status": "Docker ONLINE"}
else:
checking_result = {"hotkey": hotkey, "status": "Miner NO_RESPONSE"}
except Exception as e:
bt.logging.error(
f"API: An error occur during the : {e}"
@@ -1206,7 +1226,7 @@ async def unpause_docker(hotkey: str, uuid_key: str) -> JSONResponse:
"description": "An error occurred while exchanging docker key.",
},
})
async def exchange_docker_key(hotkey: str, uuid_key: str, ssh_key: str) -> JSONResponse:
async def exchange_docker_key(hotkey: str, uuid_key: str, ssh_key: str, key_type: str = "user") -> JSONResponse:
# Instantiate the connection to the db
db = ComputeDb()
cursor = db.get_cursor()
@@ -1234,13 +1254,14 @@ async def exchange_docker_key(hotkey: str, uuid_key: str, ssh_key: str) -> JSONR
docker_action = {
"action": "exchange_key",
"ssh_key": ssh_key,
"key_type": key_type,
}

if uuid_key_db == uuid_key:
index = self.metagraph.hotkeys.index(hotkey)
axon = self.metagraph.axons[index]
run_start = time.time()
allocate_class = Allocate(timeline=0, device_requirement={}, checking=False, public_key=regkey,
allocate_class = Allocate(timeline=1, device_requirement={}, checking=False, public_key=regkey,
docker_change=True, docker_action=docker_action)
response = await run_in_threadpool(
self.dendrite.query, axon, allocate_class, timeout=60
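Taken together, the API now lets a client choose which SSH key slot to replace. The sketch below shows a hypothetical client-side call; the route path and query-parameter style are assumptions, since the FastAPI route decorators sit outside this diff, while the port and certificate files come from DEFAULT_API_PORT and cert/gen_ca.sh.

import requests

# Hypothetical endpoint path and parameter style; only the key_type semantics
# ("user" by default, "terminal" for the second authorized_keys slot) are
# taken from this commit. All other values are placeholders.
resp = requests.post(
    "https://localhost:8903/service/exchange_docker_key",   # assumed path
    params={
        "hotkey": "5F...miner_hotkey",                       # placeholder
        "uuid_key": "<allocation uuid>",                     # placeholder
        "ssh_key": "ssh-ed25519 AAAA... user@host",          # placeholder
        "key_type": "terminal",
    },
    cert=("cert/client.crt", "cert/client.key"),             # client cert from gen_ca.sh
    verify="cert/ca.cer",
)
print(resp.status_code, resp.json())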
