Skip to content

Commit

Permalink
[Client] Add num_prompt_tokens to the client's CompletionOutputs (#467)
Browse files Browse the repository at this point in the history
* add prompt tokens, untested

* comment

* remove stop_str stuff; it doesn't do anything with the public API, and it breaks on certain frameworks when hosted locally
  • Loading branch information
seanshi-scale committed Mar 12, 2024
1 parent 4b012f0 commit b09c106
Show file tree
Hide file tree
Showing 4 changed files with 12 additions and 3 deletions.
2 changes: 1 addition & 1 deletion clients/python/llmengine/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "0.0.0b26"
__version__ = "0.0.0b27"

import os
from typing import Sequence
Expand Down
9 changes: 9 additions & 0 deletions clients/python/llmengine/data_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,11 @@ class CompletionOutput(BaseModel):
text: str
"""The text of the completion."""

# We're not guaranteed to have `num_prompt_tokens` in the response in all cases, so to be safe, set a default.
# If we send request to api.spellbook.scale.com, we don't get this back.
num_prompt_tokens: Optional[int] = None
"""Number of tokens in the prompt."""

num_completion_tokens: int
"""Number of tokens in the completion."""

Expand Down Expand Up @@ -353,6 +358,10 @@ class CompletionStreamOutput(BaseModel):
finished: bool
"""Whether the completion is finished."""

# We're not guaranteed to have `num_prompt_tokens` in the response in all cases, so to be safe, set a default.
num_prompt_tokens: Optional[int] = None
"""Number of tokens in the prompt."""

num_completion_tokens: Optional[int] = None
"""Number of tokens in the completion."""

Expand Down
2 changes: 1 addition & 1 deletion clients/python/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "scale-llm-engine"
version = "0.0.0.beta26"
version = "0.0.0.beta27"
description = "Scale LLM Engine Python client"
license = "Apache-2.0"
authors = ["Phil Chen <[email protected]>"]
Expand Down
2 changes: 1 addition & 1 deletion clients/python/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,6 @@
setup(
name="scale-llm-engine",
python_requires=">=3.7",
version="0.0.0.beta26",
version="0.0.0.beta27",
packages=find_packages(),
)

0 comments on commit b09c106

Please sign in to comment.