Skip to content

Commit

Permalink
Add support for pulling all products
Browse files Browse the repository at this point in the history
  • Loading branch information
riteshnoronha committed Sep 12, 2024
1 parent e15131a commit 21542a2
Show file tree
Hide file tree
Showing 2 changed files with 77 additions and 13 deletions.
72 changes: 66 additions & 6 deletions lynkctx.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,34 @@

INTERLYNK_API_TIMEOUT = 100

QUERY_PRODUCTS_LIST = """
query GetProducts($name: String, $enabled: Boolean) {
# GraphQL query returning only the total number of project groups
# ("products") in the organization, optionally filtered by a name search
# and an enabled flag. The count is used by _fetch_context to size the
# `first:` page argument so a single request pulls all products.
QUERY_PRODUCTS_TOTAL_COUNT = """
query GetProductsCount($name: String, $enabled: Boolean) {
organization {
productNodes: projectGroups(
search: $name
enabled: $enabled
orderBy: { field: PROJECT_GROUPS_UPDATED_AT, direction: DESC }
) {
prodCount: totalCount
}
}
}
"""

# Ready-made POST payload for the products-count GraphQL query.
# No variables are supplied; the query's $name/$enabled stay unset.
QUERY_PROJECT_COUNT_PARAMS = dict(
    operationName='GetProductsCount',
    variables={},
    query=QUERY_PRODUCTS_TOTAL_COUNT,
)


QUERY_PRODUCTS_LIST = """
query GetProducts($first: Int) {
organization {
productNodes: projectGroups(
enabled: true
first: $first
orderBy: { field: PROJECT_GROUPS_UPDATED_AT, direction: DESC }
) {
prodCount: totalCount
products: nodes {
Expand Down Expand Up @@ -92,6 +113,13 @@ def validate(self):
print("Security token not found")
return False

self.products_count = self._fetch_product_count()
if not self.products_count or self.products_count.get('errors'):
print("Error getting products count")
print(
"Possible problems: invalid security token, stale pylynk or invalid INTERLYNK_API_URL")
return False

self.data = self._fetch_context()
if not self.data or self.data.get('errors'):
print("Error getting Interlynk data")
Expand All @@ -116,12 +144,43 @@ def validate(self):

return True

def _fetch_product_count(self):
    """Fetch the organization's total product (project group) count.

    Posts QUERY_PROJECT_COUNT_PARAMS to the Interlynk GraphQL endpoint.

    Returns:
        dict: the parsed GraphQL response on HTTP 200 (callers inspect
        the 'errors' key themselves), or None on any transport, HTTP,
        or JSON-parse failure.
    """
    headers = {"Authorization": "Bearer " + self.token}
    try:
        # Send the payload as a JSON body (json=), matching
        # _fetch_context. The previous form-encoded `data=` does not
        # serialize the nested 'variables' dict as JSON, so GraphQL
        # servers expecting an application/json body mis-read it.
        response = requests.post(self.api_url,
                                 headers=headers,
                                 json=QUERY_PROJECT_COUNT_PARAMS,
                                 timeout=INTERLYNK_API_TIMEOUT)
        if response.status_code == 200:
            response_data = response.json()
            logging.debug(
                "Products count response text: %s", response_data)
            return response_data
        logging.error("Error fetching products: %s", response.status_code)
    except requests.exceptions.RequestException as ex:
        logging.error("RequestException: %s", ex)
    except json.JSONDecodeError as ex:
        logging.error("JSONDecodeError: %s", ex)
    return None

def _fetch_context(self):
headers = {"Authorization": "Bearer " + self.token}
product_count = self.products_count.get(
'data', {}).get('organization', {}).get('productNodes', {}).get('prodCount', 0)

variables = {
"first": product_count
}

request_data = {
"query": QUERY_PRODUCTS_LIST,
"variables": variables,
}

try:
response = requests.post(self.api_url,
headers=headers,
data=QUERY_PROJECT_PARAMS,
json=request_data,
timeout=INTERLYNK_API_TIMEOUT)
if response.status_code == 200:
response_data = response.json()
Expand Down Expand Up @@ -175,7 +234,8 @@ def resolve_ver(self):
for ver in env['versions']:
if ver['primaryComponent']['version'] == self.ver:
self.ver_id = ver['id']
self.ver_status = self.vuln_status_to_status(ver['vulnRunStatus'])
self.ver_status = self.vuln_status_to_status(
ver['vulnRunStatus'])
empty_ver = False
if not self.ver:
for product in self.data.get('data', {}).get('organization', {}).get('productNodes', {}).get('products', []):
Expand All @@ -187,7 +247,8 @@ def resolve_ver(self):
self.ver = ver['primaryComponent']['version']
if not self.ver:
empty_ver = True
self.ver_status = self.vuln_status_to_status(ver['vulnRunStatus'])
self.ver_status = self.vuln_status_to_status(
ver['vulnRunStatus'])

return (empty_ver or self.ver) and self.ver_id

Expand Down Expand Up @@ -353,4 +414,3 @@ def vuln_status_to_status(self, status):
result_dict['labelingStatus'] = 'COMPLETED'
result_dict['automationStatus'] = 'COMPLETED'
return result_dict

18 changes: 11 additions & 7 deletions pylynk.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ def user_time(utc_time):
timestamp = datetime.datetime.fromisoformat(utc_time[:-1])

# Get the local timezone
local_timezone = datetime.timezone(datetime.timedelta(seconds=-time.timezone))
local_timezone = datetime.timezone(
datetime.timedelta(seconds=-time.timezone))

# Convert the UTC time to local time
local_time = timestamp.replace(tzinfo=pytz.UTC).astimezone(local_timezone)
Expand All @@ -58,7 +59,6 @@ def print_products(lynk_ctx, fmt_json):
print(json.dumps(products, indent=4))
return 0


# Calculate dynamic column widths
name_width = max(len("NAME"), max(len(prod['name'])
for prod in products))
Expand Down Expand Up @@ -192,6 +192,7 @@ def print_status(lynk_ctx, fmt_json):
)
print(row)


def download_sbom(lynk_ctx):
"""
Download SBOM from the lynk_ctx and save it to a file or print it to stdout.
Expand Down Expand Up @@ -231,13 +232,17 @@ def upload_sbom(lynk_ctx, sbom_file):
"""
return lynk_ctx.upload(sbom_file)


def add_output_format_group(parser):
    """
    Register the mutually exclusive --json / --table output flags on *parser*.

    Both flags are stored as booleans via store_true; JSON is the
    documented default format.
    """
    fmt_group = parser.add_mutually_exclusive_group()
    for flag, description in (("--json", "JSON Formatted (default)"),
                              ("--table", "Table Formatted")):
        fmt_group.add_argument(flag, action='store_true', help=description)


def setup_args():
"""
Expand Down Expand Up @@ -265,7 +270,6 @@ def setup_args():
help="Security token")
add_output_format_group(vers_parser)


status_parser = subparsers.add_parser("status", help="SBOM Status")
status_group = status_parser.add_mutually_exclusive_group(required=True)

Expand All @@ -278,8 +282,8 @@ def setup_args():

status_parser.add_argument("--env", help="Environment", required=False)
status_parser.add_argument("--token",
required=False,
help="Security token")
required=False,
help="Security token")
add_output_format_group(status_parser)

upload_parser = subparsers.add_parser("upload", help="Upload SBOM")
Expand Down

0 comments on commit 21542a2

Please sign in to comment.