Commit

feature flag added
dylanpulver committed Nov 26, 2024
1 parent 6df26e9 commit 15547bb
Showing 2 changed files with 89 additions and 67 deletions.
49 changes: 29 additions & 20 deletions safety/scan/command.py
@@ -241,12 +241,20 @@ def scan(ctx: typer.Context,
typer.Option("--apply-fixes",
help=SCAN_APPLY_FIXES,
show_default=False)
] = False
] = False,
post_api: Annotated[
bool,
typer.Option(
"--post-api",
help="Flag to enable posting the payload to the API",
show_default=False,
),
] = False,
):
"""
Scans a project (defaulting to the current directory) for supply-chain security and configuration issues
"""

if not ctx.obj.metadata.authenticated:
raise SafetyError("Authentication required. Please run 'safety auth login' to authenticate before using this command.")

@@ -270,12 +278,13 @@ def scan(ctx: typer.Context,
include_files=to_include,
console=console)

# Download necessary assets for each handler
for handler in file_finder.handlers:
if handler.ecosystem:
wait_msg = "Fetching Safety's vulnerability database..."
with console.status(wait_msg, spinner=DEFAULT_SPINNER):
handler.download_required_assets(ctx.obj.auth.client)
if not post_api:
# Download necessary assets for each handler
for handler in file_finder.handlers:
if handler.ecosystem:
wait_msg = "Fetching Safety's vulnerability database..."
with console.status(wait_msg, spinner=DEFAULT_SPINNER):
handler.download_required_assets(ctx.obj.auth.client)

# Start scanning the project directory
wait_msg = "Scanning project directory"
@@ -313,7 +322,7 @@ def scan(ctx: typer.Context,
# Process each file for dependencies and vulnerabilities
with console.status(wait_msg, spinner=DEFAULT_SPINNER) as status:
for path, analyzed_file in process_files(paths=file_paths,
config=config):
config=config, post_api=post_api):
count += len(analyzed_file.dependency_results.dependencies)

# Update exit code if vulnerabilities are found
@@ -371,7 +380,7 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int:
detailed_output=detailed_output)

lines = []

if spec.remediation.recommended:
total_resolved_vulns += spec.remediation.vulnerabilities_found

Expand Down Expand Up @@ -441,18 +450,18 @@ def sort_vulns_by_score(vuln: Vulnerability) -> int:
telemetry=telemetry,
files=[],
projects=[ctx.obj.project])

total_issues_with_duplicates, total_ignored_issues = get_vulnerability_summary(report.as_v30())

print_summary(
console=console,
total_issues_with_duplicates=total_issues_with_duplicates,
console=console,
total_issues_with_duplicates=total_issues_with_duplicates,
total_ignored_issues=total_ignored_issues,
project=ctx.obj.project,
dependencies_count=count,
fixes_count=fixes_count,
resolved_vulns_per_fix=total_resolved_vulns,
is_detailed_output=detailed_output,
project=ctx.obj.project,
dependencies_count=count,
fixes_count=fixes_count,
resolved_vulns_per_fix=total_resolved_vulns,
is_detailed_output=detailed_output,
ignored_vulns_data=ignored_vulns_data
)

@@ -796,7 +805,7 @@ def get_vulnerability_summary(report: Dict[str, Any]) -> Tuple[int, int]:
Args:
report (ReportModel): The report containing vulnerability data.
Returns:
Tuple[int, int]: A tuple containing:
- Total number of issues (including duplicates)
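The command.py change above adds the flag through Typer's Annotated/typer.Option pattern and then threads it into the asset-download gate and the process_files call. For readers who want to try that flag pattern in isolation, here is a minimal, self-contained sketch; the app, command body, and echo messages are illustrative, not part of the Safety codebase:

from typing import Annotated

import typer

app = typer.Typer()


@app.command()
def scan(
    post_api: Annotated[
        bool,
        typer.Option(
            "--post-api",
            help="Flag to enable posting the payload to the API",
            show_default=False,
        ),
    ] = False,
) -> None:
    # The flag defaults to False, so existing callers keep the old behavior;
    # passing --post-api on the command line flips it to True.
    if post_api:
        typer.echo("POST path: payload will be sent to the API.")
    else:
        typer.echo("GET path: vulnerability database assets will be downloaded.")


if __name__ == "__main__":
    app()

Run as "python sketch.py" it prints the GET message; run as "python sketch.py --post-api" it prints the POST message.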
107 changes: 60 additions & 47 deletions safety/scan/main.py
@@ -199,7 +199,7 @@ def save_report_as(scan_type: ScanType, export_type: ScanExport, at: Path, repor
report_file.write(report)


def process_files(paths: Dict[str, Set[Path]], config: Optional[ConfigModel] = None) -> Generator[Tuple[Path, InspectableFile], None, None]:
def process_files(paths: Dict[str, Set[Path]], config: Optional[ConfigModel] = None, post_api: bool = False) -> Generator[Tuple[Path, InspectableFile], None, None]:
"""
Processes the files and yields each file path along with its inspectable file.
@@ -210,57 +210,70 @@ def process_files(paths: Dict[str, Set[Path]], config: Optional[ConfigModel] = N
Yields:
Tuple[Path, InspectableFile]: A tuple of file path and inspectable file.
"""
SCAN_API_ENDPOINT = "https://platform-host.com/scan" # Replace
SCAN_API_AUTH_TOKEN = "our_api_auth_token" # Replace
if not config:
config = ConfigModel()

files_metadata = []

for file_type_key, f_paths in paths.items():
file_type = FileType(file_type_key)
if not file_type or not file_type.ecosystem:
continue
for f_path in f_paths:
# Calculate the relative file path
relative_path = os.path.relpath(f_path, start=os.getcwd())

# Read the file content
try:
with open(f_path, "r") as file:
content = file.read()
except Exception as e:
LOG.error(f"Error reading file {f_path}: {e}")
# old GET implementation
if not post_api:
for file_type_key, f_paths in paths.items():
file_type = FileType(file_type_key)
if not file_type or not file_type.ecosystem:
continue
for f_path in f_paths:
with InspectableFileContext(f_path, file_type=file_type) as inspectable_file:
if inspectable_file and inspectable_file.file_type:
inspectable_file.inspect(config=config)
inspectable_file.remediate()
yield f_path, inspectable_file

# new POST implementation
else:
files_metadata = []
for file_type_key, f_paths in paths.items():
file_type = FileType(file_type_key)
if not file_type or not file_type.ecosystem:
continue
for f_path in f_paths:

# Append metadata to the payload
files_metadata.append({
"name": relative_path,
"content": content,
})
relative_path = os.path.relpath(f_path, start=os.getcwd())

# Read the file content
try:
with open(f_path, "r") as file:
content = file.read()
except Exception as e:
LOG.error(f"Error reading file {f_path}: {e}")
continue

with InspectableFileContext(f_path, file_type=file_type) as inspectable_file:
if inspectable_file and inspectable_file.file_type:
inspectable_file.inspect(config=config)
inspectable_file.remediate()
yield f_path, inspectable_file
# Append metadata to the payload
files_metadata.append({
"name": relative_path,
"content": content,
})

print("Prepared files_metadata payload for API POST request:")
print(files_metadata)
# Send the payload via API POST request
try:
headers = {
"Authorization": f"Bearer {SCAN_API_AUTH_TOKEN}",
"Content-Type": "application/json"
}
response = requests.post(SCAN_API_ENDPOINT, json={"files_metadata": files_metadata}, headers=headers)

if response.status_code == 200:
LOG.info("Sccan Payload successfully sent to the API.")
else:
LOG.error(f"Failed to send scan payload to the API. Status code: {response.status_code}")
LOG.error(f"Response: {response.text}")

except requests.RequestException as e:
LOG.error(f"Error occurred while sending scan payload to the API: {e}")
print("Prepared files_metadata payload for API POST request: ", files_metadata)
# Send the payload via API POST request

SCAN_API_ENDPOINT = "https://platform-host.com/scan" # Replace
SCAN_API_AUTH_TOKEN = "our_api_auth_token" # Replace
try:
headers = {
"Authorization": f"Bearer {SCAN_API_AUTH_TOKEN}",
"Content-Type": "application/json"
}
response = requests.post(SCAN_API_ENDPOINT, json={"files_metadata": files_metadata}, headers=headers)

if response.status_code == 200:
LOG.info("Sccan Payload successfully sent to the API.")
else:
LOG.error(f"Failed to send scan payload to the API. Status code: {response.status_code}")
LOG.error(f"Response: {response.text}")

except requests.RequestException as e:
LOG.error(f"Error occurred while sending scan payload to the API: {e}")

json_data = response.json()
results = json_data.get("results", [])

for result in results:
yield result["name"], result["inspectable_file"]
