Commit

feat(experimental): update tabby.py to use debug_options.raw_prompt
wsxiaoys committed Oct 22, 2023
1 parent 1d31b33 commit 2dcb559
Showing 2 changed files with 36 additions and 23 deletions.
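In short: rather than letting Tabby build the completion prompt from structured segments, the evaluation script now passes the fully rendered prompt through the completion API's debug options. A minimal sketch of the request shape before and after, assuming the previous code populated the segments field (the Segments model is imported in both versions of the file; the prefix value below is illustrative, not taken from this diff):

    from tabby_python_client.models import CompletionRequest, DebugOptions, Segments

    # Before (assumed): Tabby assembles the final prompt from structured segments.
    old_request = CompletionRequest(
        language="python",
        segments=Segments(prefix="def fib(n):\n    "),
    )

    # After: the eval harness supplies the exact prompt string verbatim,
    # bypassing Tabby's own prompt construction via debug_options.raw_prompt.
    new_request = CompletionRequest(
        language="python",
        debug_options=DebugOptions(raw_prompt="def fib(n):\n    "),
    )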
experimental/eval/output.jsonl — 2 changes: 1 addition & 1 deletion

Large diffs are not rendered by default.
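Although the output.jsonl diff is not rendered, the writer loop at the bottom of tabby.py implies one record per evaluated sample, shaped roughly like this (values illustrative):

    {"prompt": "<crossfile context + prompt>", "label": "<groundtruth>", "prediction": "<model completion>"}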

experimental/eval/tabby.py — 57 changes: 35 additions & 22 deletions
@@ -1,18 +1,12 @@
 from pathlib import Path
 
 import modal
 from modal import Image, Mount, Secret, Stub, asgi_app, gpu, method
 
 GPU_CONFIG = gpu.T4()
 MODEL_ID = "TabbyML/StarCoder-1B"
-LAUNCH_FLAGS = [
-    "serve",
-    "--model",
-    MODEL_ID,
-    "--port",
-    "8000",
-    "--device",
-    "cuda"
-]
+LAUNCH_FLAGS = ["serve", "--model", MODEL_ID, "--port", "8000", "--device", "cuda"]
 
 
 def download_model():
     import subprocess
@@ -28,10 +22,15 @@ def download_model():


 image = (
-    Image.from_registry("tabbyml/tabby:0.3.0", add_python="3.11")
-    .dockerfile_commands("ENTRYPOINT []")
-    .run_function(download_model)
-    .pip_install("git+https://github.com/TabbyML/tabby.git#egg=tabby-python-client&subdirectory=clients/tabby-python-client")
+    Image.from_registry(
+        "tabbyml/tabby@sha256:64d71ec4c7d9ae7269e6301ad4106baad70ee997408691a6af17d7186283a856",
+        add_python="3.11",
+    )
+    .dockerfile_commands("ENTRYPOINT []")
+    .run_function(download_model)
+    .pip_install(
+        "git+https://github.com/TabbyML/tabby.git#egg=tabby-python-client&subdirectory=experimental/eval/tabby-python-client"
+    )
 )
 
 stub = Stub("tabby-" + MODEL_ID.split("/")[-1], image=image)
@@ -49,11 +48,9 @@ def __enter__(self):
         import subprocess
         import time
 
-        from tabby_client import Client
+        from tabby_python_client import Client
 
-        self.launcher = subprocess.Popen(
-            ["/opt/tabby/bin/tabby"] + LAUNCH_FLAGS
-        )
+        self.launcher = subprocess.Popen(["/opt/tabby/bin/tabby"] + LAUNCH_FLAGS)
         self.client = Client("http://127.0.0.1:8000")
 
         # Poll until webserver at 127.0.0.1:8000 accepts connections before running inputs.
@@ -79,15 +76,29 @@ def webserver_ready():
     def __exit__(self, _exc_type, _exc_value, _traceback):
         self.launcher.terminate()
 
+    @method()
+    async def health(self):
+        from tabby_python_client.api.v1 import health
+
+        resp = await health.asyncio(client=self.client)
+        return resp.to_dict()
+
     @method()
     async def complete(self, language: str, prompt: str):
-        from tabby_client.api.v1 import completion
-        from tabby_client.models import CompletionRequest, DebugOptions, CompletionResponse, Segments
+        from tabby_python_client.api.v1 import completion
+        from tabby_python_client.models import (
+            CompletionRequest,
+            DebugOptions,
+            CompletionResponse,
+            Segments,
+        )
 
         request = CompletionRequest(
+            language=language, debug_options=DebugOptions(raw_prompt=prompt)
         )
-        resp: CompletionResponse = await completion.asyncio(client=self.client, json_body=request)
+        resp: CompletionResponse = await completion.asyncio(
+            client=self.client, json_body=request
+        )
         return resp.choices[0].text
 
 
@@ -96,12 +107,14 @@ def main():
     import json
 
     model = Model()
+    print(model.health.remote())
 
     with open("./output.jsonl", "w") as fout:
         with open("./sample.jsonl") as fin:
             for line in fin:
                 x = json.loads(line)
-                prompt = x['crossfile_context']['text'] + x['prompt']
-                label = x['groundtruth']
+                prompt = x["crossfile_context"]["text"] + x["prompt"]
+                label = x["groundtruth"]
                 prediction = model.complete.remote("python", prompt)
 
                 json.dump(dict(prompt=prompt, label=label, prediction=prediction), fout)
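
As an aside, a hypothetical scorer for the regenerated output.jsonl (not part of this commit). Since json.dump itself appends no newline, consecutive records may end up concatenated on one line depending on the rest of the loop; raw_decode copes with either layout:

    import json

    def iter_records(path):
        # Stream JSON objects out of a file whether or not they are newline-separated.
        decoder = json.JSONDecoder()
        with open(path) as f:
            buf = f.read()
        pos = 0
        while pos < len(buf):
            while pos < len(buf) and buf[pos].isspace():
                pos += 1  # skip whitespace between objects
            if pos >= len(buf):
                break
            record, pos = decoder.raw_decode(buf, pos)
            yield record

    matches = total = 0
    for record in iter_records("./output.jsonl"):
        total += 1
        matches += record["prediction"].strip() == record["label"].strip()
    print(f"exact match: {matches}/{total}")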
