Skip to content

Commit

Permalink
not using github api
Browse files Browse the repository at this point in the history
  • Loading branch information
FloSch62 committed Jan 17, 2025
1 parent 226128d commit 5fa0fbb
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 193 deletions.
142 changes: 0 additions & 142 deletions src/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@
import sys
import tempfile

import urllib3

from jinja2 import Environment, FileSystemLoader

import src.topology as topology
Expand Down Expand Up @@ -99,146 +97,6 @@ def apply_manifest_via_kubectl(yaml_str: str, namespace: str = "eda-system"):
finally:
os.remove(tmp_path)


def get_github_token():
    """
    Get a GitHub token from the environment or the GitHub CLI, in priority order:

    1. GITHUB_TOKEN environment variable
    2. GH_TOKEN environment variable (GitHub CLI default)
    3. GitHub CLI authentication (``gh auth token``)

    Returns
    -------
    str or None
        GitHub authentication token if found, None otherwise.
    """
    # Environment variables take precedence over the CLI.
    token = os.environ.get("GITHUB_TOKEN") or os.environ.get("GH_TOKEN")
    if token:
        logger.debug("Found GitHub token in environment variables")
        return token

    # Fall back to the GitHub CLI. A missing `gh` binary raises
    # FileNotFoundError directly, so no separate `gh --version` probe
    # (an extra subprocess launch) is needed.
    try:
        result = subprocess.run(
            ["gh", "auth", "token"],
            capture_output=True,
            text=True,
            timeout=10,  # don't hang indefinitely if gh stalls or prompts
        )
        if result.returncode == 0 and result.stdout.strip():
            logger.debug("Found GitHub token from GitHub CLI")
            return result.stdout.strip()
    except FileNotFoundError:
        logger.debug("GitHub CLI (gh) not found")
    except Exception as e:
        logger.debug(f"Error getting GitHub CLI token: {e}")

    logger.debug("No GitHub token found")
    return None


def get_artifact_from_github(owner: str, repo: str, version: str, asset_filter=None):
    """
    Queries the GitHub releases API for a release asset using urllib3.

    Authentication is resolved internally via get_github_token() (environment
    variables, then the GitHub CLI); unauthenticated requests are subject to
    GitHub's 60-requests/hour rate limit.

    Parameters
    ----------
    owner : str
        GitHub repository owner.
    repo : str
        GitHub repository name.
    version : str
        Version tag to search for (without the 'v' prefix).
    asset_filter : callable, optional
        Function(asset_name) -> bool used to select among the release's
        assets. When None, the first asset in the release is returned.

    Returns
    -------
    tuple
        (filename, download_url) of the first matching asset, or
        (None, None) if the release or a matching asset is not found,
        or on any request/parse error.
    """
    # Imported here rather than at module level — presumably to avoid a
    # circular import with src.http_client; TODO confirm.
    from src.http_client import create_pool_manager

    tag = f"v{version}"  # Assume GitHub tags are prefixed with 'v'
    url = f"https://api.github.com/repos/{owner}/{repo}/releases/tags/{tag}"

    token = get_github_token()

    # Set up headers with authentication if token is available
    headers = {
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
        "User-Agent": "container-lab/node-download"  # GitHub requires a User-Agent header
    }

    if token:
        headers["Authorization"] = f"Bearer {token}"
        logger.debug("Using authenticated GitHub API request")
    else:
        logger.warning("No GitHub token found - using unauthenticated request (rate limit: 60 requests/hour)")

    http = create_pool_manager(url=url, verify=True)

    # Log proxy environment at debug level to help diagnose connectivity issues
    logger.debug(f"HTTP_PROXY: {os.environ.get('HTTP_PROXY', 'not set')}")
    logger.debug(f"HTTPS_PROXY: {os.environ.get('HTTPS_PROXY', 'not set')}")
    logger.debug(f"NO_PROXY: {os.environ.get('NO_PROXY', 'not set')}")
    logger.debug(f"Using pool manager type: {type(http).__name__}")

    try:
        response = http.request("GET", url, headers=headers)
        logger.debug(f"Response status: {response.status}")
        logger.debug(f"Response headers: {response.headers}")

        # 403 is reported separately so rate-limit exhaustion is surfaced
        # distinctly from other access problems.
        if response.status == 403:
            response_data = json.loads(response.data.decode("utf-8"))
            if "rate limit exceeded" in response_data.get("message", "").lower():
                logger.warning(
                    f"GitHub API rate limit exceeded. {response_data.get('message')}"
                )
                if response_data.get("documentation_url"):
                    logger.warning(f"See: {response_data['documentation_url']}")
                return None, None
            else:
                logger.error(
                    f"Access forbidden: {response_data.get('message', 'No message provided')}"
                )
                return None, None

        if response.status != 200:
            logger.error(f"Failed to fetch release {tag} (status={response.status})")
            logger.debug(f"Response data: {response.data.decode('utf-8')}")
            return None, None

        data = json.loads(response.data.decode("utf-8"))
        assets = data.get("assets", [])
        logger.debug(f"Found {len(assets)} assets in release")

        # Return the first asset that passes the filter (or the first asset
        # at all when no filter is given).
        for asset in assets:
            name = asset.get("name", "")
            logger.debug(f"Checking asset: {name}")
            if asset_filter is None or asset_filter(name):
                download_url = asset.get("browser_download_url")
                logger.info(f"Found matching asset: {name}")
                logger.debug(f"Download URL: {download_url}")
                return name, download_url
            else:
                logger.debug(f"Asset {name} did not match filter")

    except urllib3.exceptions.HTTPError as e:
        logger.error(f"HTTP error occurred while fetching {tag}: {e}")
        logger.debug(f"Error details: {str(e)}")
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON response for {tag}: {e}")
        logger.debug(f"Raw response: {response.data.decode('utf-8')}")
    except Exception as e:
        logger.error(f"Unexpected error while fetching {tag}: {type(e).__name__}")
        logger.debug(f"Error details: {str(e)}")

    # No matching asset found
    logger.warning("No matching asset found")
    return None, None


def normalize_name(name: str) -> str:
"""
Returns a Kubernetes-compliant name by:
Expand Down
44 changes: 31 additions & 13 deletions src/integrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,13 +107,12 @@ def prechecks(self):
)

def create_artifacts(self):
"""
Creates artifacts needed by nodes in the topology
"""
"""Creates artifacts needed by nodes that need them"""
logger.info("Creating artifacts for nodes that need them")

processed = set() # Track which artifacts we've already created
nodes_by_artifact = {} # Track which nodes use which artifacts

# First pass: collect all nodes and their artifacts
for node in self.topology.nodes:
if not node.needs_artifact():
continue
Expand All @@ -125,30 +124,49 @@ def create_artifacts(self):
logger.warning(f"Could not get artifact details for {node}. Skipping.")
continue

# Skip if we already processed this artifact
if artifact_name in processed:
continue
processed.add(artifact_name)
if artifact_name not in nodes_by_artifact:
nodes_by_artifact[artifact_name] = {
"nodes": [],
"filename": filename,
"download_url": download_url,
"version": node.version,
}
nodes_by_artifact[artifact_name]["nodes"].append(node.name)

# Second pass: create artifacts
for artifact_name, info in nodes_by_artifact.items():
first_node = info["nodes"][0]
logger.info(
f"Creating YANG artifact for node: {first_node} (version {info['version']})"
)

# Get the YAML and create the artifact
artifact_yaml = node.get_artifact_yaml(
artifact_name, filename, download_url
artifact_yaml = self.topology.nodes[0].get_artifact_yaml(
artifact_name, info["filename"], info["download_url"]
)

if not artifact_yaml:
logger.warning(
f"Could not generate artifact YAML for {node}. Skipping."
f"Could not generate artifact YAML for {first_node}. Skipping."
)
continue
logger.debug(f"Artifact yaml: {artifact_yaml}.")

try:
helpers.apply_manifest_via_kubectl(
artifact_yaml, namespace="eda-system"
)
logger.info(f"Artifact '{artifact_name}' has been created.")
# Log about other nodes using this artifact
other_nodes = info["nodes"][1:]
if other_nodes:
logger.info(
f"Using same artifact for nodes: {', '.join(other_nodes)}"
)
except RuntimeError as ex:
if "AlreadyExists" in str(ex):
logger.info(f"Artifact '{artifact_name}' already exists, skipping.")
logger.info(
f"Artifact '{artifact_name}' already exists for nodes: {', '.join(info['nodes'])}"
)
else:
logger.error(f"Error creating artifact '{artifact_name}': {ex}")

Expand Down
51 changes: 13 additions & 38 deletions src/node_srl.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@ class SRLNode(Node):
SRL_IMAGE = "eda-system/srlimages/srlinux-{version}-bin/srlinux.bin"
SRL_IMAGE_MD5 = "eda-system/srlimages/srlinux-{version}-bin/srlinux.bin.md5"

SUPPORTED_SCHEMA_PROFILES = {
"24.10.1": "https://github.com/nokia/srlinux-yang-models/releases/download/v24.10.1/srlinux-24.10.1-492.zip"
}

def __init__(self, name, kind, node_type, version, mgmt_ipv4):
super().__init__(name, kind, node_type, version, mgmt_ipv4)
# Add cache for artifact info
Expand Down Expand Up @@ -117,7 +121,8 @@ def get_node_profile(self, topology):
"""
logger.info(f"Rendering node profile for {self}")

artifact_name, filename = self.get_artifact_metadata()
artifact_name = self.get_artifact_name()
filename = f"srlinux-{self.version}.zip"

data = {
"namespace": f"clab-{topology.name}",
Expand Down Expand Up @@ -228,46 +233,16 @@ def get_artifact_name(self):
return f"clab-srlinux-{self.version}"

def get_artifact_info(self):
"""
Gets SR Linux YANG models artifact information from GitHub.
"""
# Return cached info if available
if self._artifact_info is not None:
return self._artifact_info

def srlinux_filter(name):
return (
name.endswith(".zip")
and name.startswith("srlinux-")
and "Source code" not in name
)
"""Gets artifact information for this SR Linux version"""
if self.version not in self.SUPPORTED_SCHEMA_PROFILES:
logger.warning(f"No schema profile URL defined for version {self.version}")
return None, None, None

artifact_name = self.get_artifact_name()
filename, download_url = helpers.get_artifact_from_github(
owner="nokia",
repo="srlinux-yang-models",
version=self.version,
asset_filter=srlinux_filter,
)
filename = f"srlinux-{self.version}.zip"
download_url = self.SUPPORTED_SCHEMA_PROFILES[self.version]

# Cache the result
self._artifact_info = (artifact_name, filename, download_url)
return self._artifact_info

def get_artifact_metadata(self):
    """
    Return (artifact_name, filename) for this node without any API calls.

    Serves the cached artifact info when available; otherwise derives both
    values locally from the node's version.
    """
    cached = self._artifact_info
    if cached is not None:
        # Cache holds (artifact_name, filename, download_url); the URL is
        # not needed here.
        name, fname, _unused = cached
        return name, fname

    # Not cached: build the values locally, assuming standard naming.
    return self.get_artifact_name(), f"srlinux-{self.version}.zip"
return artifact_name, filename, download_url

def get_artifact_yaml(self, artifact_name, filename, download_url):
"""
Expand Down

0 comments on commit 5fa0fbb

Please sign in to comment.