Skip to content

Commit

Permalink
fixup
Browse files Browse the repository at this point in the history
  • Loading branch information
shawn-hurley committed Sep 25, 2024
1 parent 77ae54f commit 6c34261
Show file tree
Hide file tree
Showing 7 changed files with 155 additions and 343 deletions.
6 changes: 5 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -166,4 +166,8 @@ cython_debug/
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
#.idea/

# Adding any sort of org.eclipse things
.metadata
org.eclipse*
326 changes: 18 additions & 308 deletions notebooks/compilation_agent/01_use_playpen_agent_with_coolstore.ipynb

Large diffs are not rendered by default.

Empty file added playpen/client/__init__.py
Empty file.
101 changes: 101 additions & 0 deletions playpen/client/anlalyzer_rpc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
import json

Check failure on line 1 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

black

Incorrect formatting, autoformat by running 'trunk fmt'

Check failure on line 1 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

isort

Incorrect formatting, autoformat by running 'trunk fmt'
import logging
import os

Check failure on line 3 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `os` imported but unused
import sys

Check failure on line 4 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `sys` imported but unused
import time

Check failure on line 5 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `time` imported but unused
import traceback

Check failure on line 6 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `traceback` imported but unused
from typing import Any, Dict, List

Check failure on line 7 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `typing.Any` imported but unused

Check failure on line 7 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `typing.Dict` imported but unused

Check failure on line 7 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `typing.List` imported but unused
from warnings import filterwarnings

Check failure on line 8 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `warnings.filterwarnings` imported but unused

from playpen.client.cli import (
generate_fix,

Check failure on line 11 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `playpen.client.cli.generate_fix` imported but unused
get_config,

Check failure on line 12 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `playpen.client.cli.get_config` imported but unused
get_impacted_files_from_report,

Check failure on line 13 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `playpen.client.cli.get_impacted_files_from_report` imported but unused
get_model_provider,

Check failure on line 14 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `playpen.client.cli.get_model_provider` imported but unused
get_trace,

Check failure on line 15 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `playpen.client.cli.get_trace` imported but unused
render_prompt,

Check failure on line 16 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `playpen.client.cli.render_prompt` imported but unused
)
from pylspclient.json_rpc_endpoint import JsonRpcEndpoint, MyEncoder
from pylspclient.lsp_client import LspEndpoint as RpcServer
from pylspclient.lsp_errors import ErrorCodes, ResponseError

from kai.kai_logging import parent_log, setup_file_handler

Check failure on line 22 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `kai.kai_logging.parent_log` imported but unused

Check failure on line 22 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `kai.kai_logging.setup_file_handler` imported but unused
from kai.models.report_types import ExtendedIncident

Check failure on line 23 in playpen/client/anlalyzer_rpc.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `kai.models.report_types.ExtendedIncident` imported but unused

# Module-level logger for the analyzer RPC client/server plumbing.
log = logging.getLogger("analyzer-rpc")

class AnalyzerRpcServer(RpcServer):
    """RPC endpoint loop for talking to the analyzer over stdin/stdout.

    Overrides the pylspclient ``LspEndpoint`` run loop so it tolerates the
    analyzer's message shape, and sends requests using the analyzer's
    expected wire format (capitalized ``Method`` key, ``kwargs`` wrapped in
    a single-element ``params`` list).
    """

    def run(self):
        """Process incoming JSON-RPC messages until shutdown is requested.

        Responses are dispatched to ``handle_result``; requests invoke the
        registered method callback and its return value is sent back;
        notifications invoke the registered notify callback.
        """
        while not self.shutdown_flag:
            # Initialize up front so the except handlers below can safely
            # reference rpc_id even when recv_response() itself raises.
            rpc_id = None
            try:
                jsonrpc_message = self.json_rpc_endpoint.recv_response()
                if jsonrpc_message is None:
                    log.debug("server quit")
                    break
                method = jsonrpc_message.get("method")
                result = jsonrpc_message.get("result")
                error = jsonrpc_message.get("error")
                rpc_id = jsonrpc_message.get("id")
                params = jsonrpc_message.get("params")

                # Because this is only a client, every message we care about
                # carries a result; anything without one is skipped.
                # NOTE(review): compare against None (not truthiness) so
                # valid falsy results (0, False, "", {}) are not dropped.
                if result is None:
                    continue

                if method:
                    if rpc_id is not None:
                        # A request: invoke the registered callback and send
                        # its return value back to the peer.
                        if method not in self.method_callbacks:
                            raise ResponseError(
                                ErrorCodes.MethodNotFound,
                                "Method not found: {method}".format(method=method),
                            )
                        result = self.method_callbacks[method](**params["kwargs"])
                        self.send_response(rpc_id, result, None)
                    else:
                        # A notification: no response is expected.
                        if method not in self.notify_callbacks:
                            log.debug(
                                "Notify method not found: {method}.".format(
                                    method=method
                                )
                            )
                        else:
                            self.notify_callbacks[method](params)
                else:
                    # A plain response to one of our earlier requests.
                    self.handle_result(rpc_id, result, error)
            except ResponseError as e:
                self.send_response(rpc_id, None, e)
            except Exception as e:
                self.send_response(
                    rpc_id, None, ResponseError(ErrorCodes.InternalError, str(e))
                )

    def send_message(self, method_name, params, id=None):
        """Send a JSON-RPC 2.0 message to the analyzer.

        Args:
            method_name: Name of the remote method to invoke.
            params: Dict of call arguments; only the ``kwargs`` entry (when
                present) is forwarded, wrapped in a single-element list.
            id: Optional request id; omitted for notifications.
        """
        message_dict = {}
        message_dict["jsonrpc"] = "2.0"
        if id is not None:
            message_dict["id"] = id
        # NOTE(review): capitalized "Method" is non-standard JSON-RPC 2.0
        # (the spec uses lowercase "method") — presumably it matches the
        # analyzer peer's decoder; confirm before changing.
        message_dict["Method"] = method_name
        # Guard against params being None before membership testing, which
        # would otherwise raise TypeError.
        if params and "kwargs" in params:
            message_dict["params"] = [params["kwargs"]]
        self.json_rpc_endpoint.send_request(message_dict)


class AnlayzerRPCEndpoint(JsonRpcEndpoint):
    """JSON-RPC endpoint exchanging raw JSON over stdin/stdout.

    Unlike the base ``JsonRpcEndpoint``, messages are written and read
    without LSP ``Content-Length`` header framing.
    """

    def send_request(self, message):
        """Serialize *message* to JSON and write it to the peer's stdin."""
        json_string = json.dumps(message, cls=MyEncoder)
        log.debug(f"sending data over stdin {repr(json_string)}")
        with self.write_lock:
            self.stdin.buffer.write(json_string.encode())
            self.stdin.flush()

    def recv_response(self):
        """Read the peer's output and parse it as JSON.

        Returns:
            The decoded message dict, or an empty dict when the payload
            cannot be parsed.
        """
        with self.read_lock:
            # NOTE(review): .read() with no size blocks until the peer closes
            # its stdout, consuming everything in one shot — confirm the peer
            # writes exactly one message and then closes the stream.
            jsonrpc_res = self.stdout.buffer.read().decode("utf-8")
            if jsonrpc_res:
                log.debug(f"read data from stdout {repr(jsonrpc_res)}")
            try:
                return json.loads(jsonrpc_res)
            except json.JSONDecodeError:
                # Catch only decode failures — the previous bare `except`
                # (ruff E722) hid unrelated bugs — and report through the
                # module logger instead of print().
                log.error(f"unable to load read data to json: {jsonrpc_res}")
                return {}
20 changes: 4 additions & 16 deletions playpen/client/rpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from typing import Any, Dict, List
from warnings import filterwarnings

from cli import (
from playpen.client.cli import (
generate_fix,
get_config,
get_impacted_files_from_report,
Expand All @@ -35,6 +35,7 @@


class CustomRpcServer(RpcServer):

def run(self):
while not self.shutdown_flag:
try:
Expand Down Expand Up @@ -85,7 +86,7 @@ def __add_header(self, json_string: str):
def send_request(self, message):
json_string = json.dumps(message, cls=MyEncoder)
jsonrpc_req = self.__add_header(json_string)
log.debug(f"sending data over stdin {repr(jsonrpc_req)}")
print(f"sending data over stdin {repr(jsonrpc_req)}")
with self.write_lock:
self.stdin.buffer.write(jsonrpc_req.encode())
self.stdin.flush()
Expand Down Expand Up @@ -121,22 +122,9 @@ def recv_response(self):
raise ResponseError(ErrorCodes.ParseError, "Bad header: missing size")

jsonrpc_res = self.stdout.buffer.read(message_size).decode("utf-8")
log.debug(f"read data from stdout {repr(jsonrpc_res)}")
print(f"read data from stdout {repr(jsonrpc_res)}")
return json.loads(jsonrpc_res)

class BaseRPCEndpoint(JsonRpcEndpoint):
def send_request(self, message):
json_string = json.dumps(message, cls=MyEncoder)
log.debug(f"sending data over stdin {repr(json_string)}")
with self.write_lock:
self.stdin.buffer.write(json_string.encode())
self.stdin.flush()

def recv_response(self):
with self.read_lock:
jsonrpc_res = self.stdout.readline().decode("utf-8")
log.debug(f"read data from stdout {repr(jsonrpc_res)}")
return json.loads(jsonrpc_res)


class RPCParams:
Expand Down
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ dependencies = [
"python-dotenv==1.0.1",
"pyyaml==6.0.1",
"boto3==1.34.157", # Allows Amazon Bedrock to work
"pylspclient==0.1.2", # used for talking to RPC clients over stdin/stdout

# --- Testing dependencies ---
"coverage==7.6.0",
Expand Down Expand Up @@ -75,6 +76,6 @@ Repository = "https://www.github.com/konveyor/kai"

[tool.setuptools.packages.find]
where = ["."]
include = ["kai*"]
include = ["kai*", "playpen*"]
exclude = ["tests*"]
namespaces = true
42 changes: 25 additions & 17 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,13 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anyio==4.4.0
anyio==4.6.0
# via
# httpx
# jupyter-server
# openai
appnope==0.1.4
# via ipykernel
argon2-cffi==23.1.0
# via jupyter-server
argon2-cffi-bindings==21.2.0
Expand Down Expand Up @@ -90,7 +92,7 @@ execnb==0.1.6
# via nbdev
executing==2.1.0
# via stack-data
fastcore==1.7.5
fastcore==1.7.9
# via
# execnb
# ghapi
Expand All @@ -111,14 +113,14 @@ gitpython==3.1.43
# via kai (pyproject.toml)
google-ai-generativelanguage==0.6.6
# via google-generativeai
google-api-core[grpc]==2.19.2
google-api-core[grpc]==2.20.0
# via
# google-ai-generativelanguage
# google-api-python-client
# google-generativeai
google-api-python-client==2.145.0
google-api-python-client==2.146.0
# via google-generativeai
google-auth==2.34.0
google-auth==2.35.0
# via
# google-ai-generativelanguage
# google-api-core
Expand All @@ -133,8 +135,6 @@ googleapis-common-protos==1.65.0
# via
# google-api-core
# grpcio-status
greenlet==3.1.0
# via sqlalchemy
grpcio==1.66.1
# via
# google-api-core
Expand All @@ -161,7 +161,7 @@ httpx-sse==0.3.1
# via ibm-generative-ai
ibm-generative-ai==2.2.0
# via kai (pyproject.toml)
idna==3.8
idna==3.10
# via
# anyio
# httpx
Expand Down Expand Up @@ -219,7 +219,7 @@ jsonschema-specifications==2023.12.1
# via jsonschema
jupyter==1.0.0
# via kai (pyproject.toml)
jupyter-client==8.6.2
jupyter-client==8.6.3
# via
# ipykernel
# jupyter-console
Expand Down Expand Up @@ -274,7 +274,7 @@ langchain-community==0.2.10
# via
# kai (pyproject.toml)
# langchain-experimental
langchain-core==0.2.39
langchain-core==0.2.41
# via
# langchain
# langchain-aws
Expand All @@ -291,7 +291,7 @@ langchain-openai==0.1.23
# via kai (pyproject.toml)
langchain-text-splitters==0.2.4
# via langchain
langsmith==0.1.119
langsmith==0.1.125
# via
# langchain
# langchain-community
Expand All @@ -314,8 +314,12 @@ multidict==6.1.0
# via
# aiohttp
# yarl
mypy==1.11.2
# via pylspclient
mypy-extensions==1.0.0
# via typing-inspect
# via
# mypy
# typing-inspect
nbclient==0.10.0
# via nbconvert
nbconvert==7.16.4
Expand All @@ -342,7 +346,7 @@ numpy==1.26.4
# langchain
# langchain-aws
# langchain-community
openai==1.45.0
openai==1.47.1
# via langchain-openai
orjson==3.10.7
# via langsmith
Expand All @@ -368,9 +372,9 @@ parso==0.8.4
# via jedi
pexpect==4.9.0
# via ipython
platformdirs==4.3.2
platformdirs==4.3.6
# via jupyter-core
prometheus-client==0.20.0
prometheus-client==0.21.0
# via jupyter-server
prompt-toolkit==3.0.47
# via
Expand All @@ -380,7 +384,7 @@ proto-plus==1.24.0
# via
# google-ai-generativelanguage
# google-api-core
protobuf==4.25.4
protobuf==4.25.5
# via
# google-ai-generativelanguage
# google-api-core
Expand Down Expand Up @@ -416,6 +420,7 @@ pydantic==2.8.2
# langsmith
# openai
# pydantic-settings
# pylspclient
# sequoia-diff
pydantic-core==2.20.1
# via pydantic
Expand All @@ -428,6 +433,8 @@ pygments==2.18.0
# kai (pyproject.toml)
# nbconvert
# qtconsole
pylspclient==0.1.2
# via kai (pyproject.toml)
pyparsing==3.1.4
# via httplib2
python-dateutil==2.8.2
Expand Down Expand Up @@ -576,6 +583,7 @@ typing-extensions==4.12.2
# via
# google-generativeai
# langchain-core
# mypy
# openai
# pydantic
# pydantic-core
Expand Down Expand Up @@ -621,5 +629,5 @@ yarl==1.11.1
# vcrpy

# The following packages are considered to be unsafe in a requirements file:
setuptools==74.1.2
setuptools==75.1.0
# via jupyterlab

0 comments on commit 6c34261

Please sign in to comment.