Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

✨ importer: Pass auth token. #376

Merged
merged 6 commits into from
Sep 26, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 27 additions & 8 deletions kai/hub_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,12 @@ def main():
- critical: A serious error, indicating that the program itself may be unable to continue running.
Example: --loglevel debug (default: warning)""",
)
arg_parser.add_argument(
"--hub_token",
type=str,
default=os.getenv("JWT", default=""),
help="Hub auth token.",
)

arg_parser.add_argument(
"--config_filepath",
Expand Down Expand Up @@ -173,28 +179,32 @@ def main():

poll_api(
args.konveyor_hub_url,
args.hub_token,
app.incident_store,
interval=args.interval,
timeout=args.timeout,
verify=not args.skip_verify,
)


def paginate_api(
    url: str, token: str, timeout: int = 60, verify: bool = True
) -> Iterator:
    """Yield every item from a paginated hub endpoint.

    Repeatedly requests pages via an ``offset`` query parameter and stops
    as soon as a full request adds no new items (the offset stops
    advancing).

    Args:
        url: Paginated hub endpoint to iterate.
        token: Hub auth token, forwarded to ``get_data_from_api``.
        timeout: Per-request timeout in seconds.
        verify: Whether to verify TLS certificates.
    """
    last_seen = None
    offset = 0
    # A pass that yields nothing leaves offset == last_seen and ends the loop.
    while last_seen != offset:
        last_seen = offset
        page = get_data_from_api(
            url,
            token,
            params={"offset": f"{offset}"},
            timeout=timeout,
            verify=verify,
        )
        for item in page:
            offset += 1
            yield item


def poll_api(
konveyor_hub_url: str,
token: str,
incident_store: IncidentStore,
interval: int = 60,
timeout: int = 60,
Expand All @@ -205,7 +215,7 @@ def poll_api(

while True:
new_last_analysis = import_from_api(
incident_store, konveyor_hub_url, last_analysis, timeout, verify
incident_store, konveyor_hub_url, token, last_analysis, timeout, verify
)
if new_last_analysis == last_analysis:
KAI_LOG.info(f"No new analyses. Sleeping for {interval} seconds.")
Expand All @@ -220,22 +230,23 @@ def poll_api(
def import_from_api(
incident_store: IncidentStore,
konveyor_hub_url: str,
token: str,
last_analysis: int = 0,
timeout: int = 60,
verify: bool = True,
) -> int:
analyses_url = f"{konveyor_hub_url}/analyses"
request_params = {"filter": f"id>{last_analysis}"}
analyses = get_data_from_api(
analyses_url, params=request_params, timeout=timeout, verify=verify
analyses_url, token, params=request_params, timeout=timeout, verify=verify
)

validated_analyses = [Analysis(**item) for item in analyses]

# TODO(fabianvf) add mechanism to skip import if a report has already been imported
with tempfile.TemporaryDirectory() as tmpdir:
reports = process_analyses(
validated_analyses, konveyor_hub_url, tmpdir, timeout, verify
validated_analyses, konveyor_hub_url, token, tmpdir, timeout, verify
)

for app, creds, report in reports:
Expand All @@ -253,18 +264,24 @@ def import_from_api(
return last_analysis


def get_data_from_api(
    url: str, token: str, params=None, timeout: int = 60, verify: bool = True
):
    """GET ``url`` from the hub and return the decoded JSON body.

    Args:
        url: Endpoint to request.
        token: Hub bearer token. When empty (the CLI default is "" if the
            JWT env var is unset), no Authorization header is sent.
        params: Optional query parameters for the request.
        timeout: Per-request timeout in seconds.
        verify: Whether to verify TLS certificates.

    Raises:
        requests.HTTPError: If the response has an error status code.
    """
    if not params:
        params = {}
    KAI_LOG.debug(f"Making request to {url} with {params=}")
    # Only attach the header when a token is actually configured; sending
    # "Authorization: Bearer " with an empty credential is malformed per
    # RFC 6750 and can cause spurious 401s on hubs allowing anonymous access.
    headers = {"Authorization": f"Bearer {token}"} if token else {}
    response = requests.get(
        url, params=params, timeout=timeout, verify=verify, headers=headers
    )
    response.raise_for_status()
    return response.json()


def process_analyses(
analyses: List[Analysis],
konveyor_hub_url: str,
token: str,
application_dir: str,
request_timeout: int = 60,
request_verify: bool = True,
Expand All @@ -277,6 +294,7 @@ def process_analyses(
)
resp = get_data_from_api(
f"{konveyor_hub_url}/applications/{analysis.application.id}",
token,
timeout=request_timeout,
verify=request_verify,
)
Expand All @@ -288,6 +306,7 @@ def process_analyses(
creds = Identity(
**get_data_from_api(
f"{konveyor_hub_url}/identities/{identity.id}",
token,
timeout=request_timeout,
verify=request_verify,
)
Expand All @@ -306,7 +325,7 @@ def process_analyses(
report_data = {}
issues_url = f"{konveyor_hub_url}/analyses/{analysis.id}/issues"
for raw_issue in paginate_api(
issues_url, timeout=request_timeout, verify=request_verify
issues_url, token, timeout=request_timeout, verify=request_verify
):
issue = Issue(**raw_issue)
KAI_LOG.info(
Expand Down
Loading