Skip to content

Commit

Permalink
publish next version
Browse files Browse the repository at this point in the history
  • Loading branch information
trisongz committed May 31, 2024
1 parent ffe752d commit f1bd135
Show file tree
Hide file tree
Showing 9 changed files with 587 additions and 15 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Changelogs

#### v0.0.53 (2024-05-31)

- Added support for the new `gpt-4o` model (aliases: `gpt-4-o`, `gpt4o`)
- Added support for external providers
- Updated certain validation behavior of Functions

- TODO: Add batch create support

#### v0.0.52 (2024-02-28)

- Added support for the following parameters in `model_configurations` in `OpenAIManager`:
Expand Down
18 changes: 15 additions & 3 deletions async_openai/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -765,9 +765,21 @@ def register_client_endpoints(self): # sourcery skip: low-code-quality
config['api_base'] = self.settings.proxy.endpoint
_has_proxy = True
c = self.init_api_client(name, is_azure = is_azure, set_as_default = is_default, **config)
msg = f'Registered: `|g|{c.name}|e|` @ `{source_endpoint or c.base_url}` (Azure: {c.is_azure}, Px: {_has_proxy}'
if has_weights: msg += f', Weight: {client_weight}'
msg += ')'
msg = f'Registered: `|g|{c.name}|e|` @ `{source_endpoint or c.base_url}`'
extra_msgs = []
if has_weights:
if isinstance(client_weight, float):
_wp, _wsfx = '|g|', '|e|'
if client_weight <= 0.0:
_wp, _wsfx = '', ''
elif client_weight <= 0.25: _wp = '|r|'
elif client_weight <= 0.45: _wp = '|y|'
extra_msgs.append(f'Weight: {_wp}{client_weight}{_wsfx}')
else:
extra_msgs.append(f'Weight: {client_weight}')
if c.is_azure: extra_msgs.append('Azure')
if _has_proxy: extra_msgs.append('Proxied')
if extra_msgs: msg += f' ({", ".join(extra_msgs)})'
logger.info(msg, colored = True)

# Set the models for inclusion
Expand Down
4 changes: 2 additions & 2 deletions async_openai/types/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -524,7 +524,7 @@ def parse_response(
"""
schema = schema or self.schema
try:
result = schema.model_validate(response.function_results[0].arguments, from_attributes = True)
result = schema.model_validate(response.function_results[0].arguments, from_attributes = True, context = {'source': 'function'})
result._set_values_from_response(response, name = self.name if include_name else None, client_name = client_name)
return result
except IndexError as e:
Expand All @@ -533,7 +533,7 @@ def parse_response(
except Exception as e:
self.autologger.error(f"[{self.name} - {response.model} - {response.usage}] Failed to parse object: {e}\n{response.text}\n{response.function_results[0].arguments}")
try:
result = schema.model_validate(resolve_json(response.function_results[0].arguments), from_attributes = True)
result = schema.model_validate(resolve_json(response.function_results[0].arguments), from_attributes = True, context = {'source': 'function'})
result._set_values_from_response(response, name = self.name if include_name else None)
return result
except Exception as e:
Expand Down
3 changes: 3 additions & 0 deletions async_openai/types/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,7 @@ class FilePurpose(str, Enum):
fine_tune = "fine-tune"
train = "fine-tune-train"
search = "search"
batch = "batch"

@classmethod
def parse_str(cls, value: Union[str, 'FilePurpose'], raise_error: bool = True):
Expand All @@ -206,6 +207,8 @@ def parse_str(cls, value: Union[str, 'FilePurpose'], raise_error: bool = True):
return cls.fine_tune
elif "search" in value:
return cls.search
elif "batch" in value:
return cls.batch
if not raise_error: return None
raise ValueError(f"Cannot convert {value} to FilePurpose")

Expand Down
26 changes: 26 additions & 0 deletions async_openai/types/pricing.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,32 @@ gpt-4-0125-preview:
endpoints:
- chat

gpt-4-turbo-2024-04-09:
aliases:
- gpt-4-turbo-2024
- gpt-4-turbo-2024-04-09
- gpt-4-2024-preview
context_length: 128000
costs:
unit: 1_000_000
input: 10.00
output: 30.00
endpoints:
- chat

gpt-4o:
aliases:
- gpt-4-o
- gpt4o
- gpt-4o-2024-05-13
context_length: 128000
costs:
unit: 1_000_000
input: 5.00
output: 15.00
endpoints:
- chat

gpt-4:
aliases:
- gpt-4-0613
Expand Down
71 changes: 68 additions & 3 deletions async_openai/types/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,13 @@
import json
import aiohttpx
import datetime
import tempfile
import pathlib
from pydantic import ConfigDict
from pydantic.types import ByteSize
from lazyops.types import BaseModel, validator, lazyproperty
from lazyops.types.models import get_pyd_field_names, pyd_parse_obj, get_pyd_dict, _BaseModel
from lazyops.utils import ObjectDecoder
from lazyops.utils import ObjectDecoder, ObjectEncoder
from async_openai.utils.logs import logger
from async_openai.utils.helpers import aparse_stream, parse_stream

Expand Down Expand Up @@ -156,6 +158,29 @@ def create_resource(
resource_obj = resource.parse_obj(resource_kwargs)
return resource_obj, return_kwargs


@staticmethod
def create_batch_resource(
    resource: Type['BaseResource'],
    batch: List[Union[Dict[str, Any], Any]],
    **kwargs
) -> Tuple[List['BaseResource'], Dict]:
    """
    Parses each item in `batch` into an instance of `resource`.

    Keyword arguments whose names match fields declared on `resource`
    are merged into every dict item before parsing; all remaining
    kwargs are returned untouched so the caller can forward them.

    Items that are not dicts are assumed to already be resource
    instances and are passed through unchanged.

    Returns a tuple of (parsed resource objects, leftover kwargs).
    """
    resource_fields = get_pyd_field_names(resource)
    resource_kwargs = {k: v for k, v in kwargs.items() if k in resource_fields}
    return_kwargs = {k: v for k, v in kwargs.items() if k not in resource_fields}
    resource_objs = []
    for item in batch:
        if isinstance(item, dict):
            # Merge into a copy rather than `item.update(...)` so the
            # caller's input dicts are not mutated as a side effect.
            resource_objs.append(resource.parse_obj({**item, **resource_kwargs}))
        else:
            resource_objs.append(item)
    return resource_objs, return_kwargs

@classmethod
def create_many(cls, data: List[Dict]) -> List['BaseResource']:
"""
Expand Down Expand Up @@ -267,7 +292,7 @@ class FileResource(BaseResource):
file_id: Optional[str]
filename: Optional[str] = None
purpose: FilePurpose = FilePurpose.fine_tune
model: Optional[str]
model: Optional[str] = None

@validator("purpose")
def validate_purpose(cls, value):
Expand Down Expand Up @@ -299,4 +324,44 @@ async def async_get_params(self, **kwargs) -> List:
files.append(
("file", (self.filename or file.name, (await file.async_read_bytes() if _has_fileio else file.read_bytes()), "application/octet-stream"))
)
return files
return files

@classmethod
def create_from_batch(
    cls,
    batch: List[Union[Dict[str, Any], str]],
    output_path: Optional[str] = None,
    file_id: Optional[str] = None,
    filename: Optional[str] = None,
    purpose: Optional[FilePurpose] = None,
    **kwargs,
) -> Tuple['FileObject', Dict[str, Any]]:
    """
    Creates a file resource from a batch of items, serialized as JSONL.

    Each dict item is JSON-encoded (via `ObjectEncoder`); string items
    are assumed to already be valid JSON lines. The JSONL payload is
    written to `output_path`, or to a persistent temp file when no path
    is given (`delete = False` so the file survives for the subsequent
    upload — caller is responsible for cleanup).

    Kwargs matching fields on this class are consumed by the
    constructor; the rest are returned for the caller to forward.

    Returns a tuple of (file resource, leftover kwargs).
    """
    # Serialize into a new list rather than rewriting the caller's
    # `batch` entries in place (the original mutated its argument).
    jsonl_lines = [
        json.dumps(b, cls = ObjectEncoder) if isinstance(b, dict) else b
        for b in batch
    ]
    if output_path:
        output = pathlib.Path(output_path)
    else:
        # NamedTemporaryFile is only used to reserve a unique path;
        # it is closed immediately and re-opened below via pathlib.
        tmp = tempfile.NamedTemporaryFile(delete = False)
        tmp.close()
        output = pathlib.Path(tmp.name)

    with output.open('w') as f:
        for line in jsonl_lines:
            f.write(f'{line}\n')
    resource_fields = get_pyd_field_names(cls)
    resource_kwargs = {k: v for k, v in kwargs.items() if k in resource_fields}
    return_kwargs = {k: v for k, v in kwargs.items() if k not in resource_fields}
    return cls(
        file = output,
        purpose = purpose,
        filename = filename,
        file_id = file_id,
        **resource_kwargs
    ), return_kwargs



Loading

0 comments on commit f1bd135

Please sign in to comment.