Skip to content

Commit 3dcceb9

Browse files
authored
Merge pull request #23 from Forward-Operators/feat/web
web ui
2 parents 27888ca + be1a363 commit 3dcceb9

207 files changed

Lines changed: 54046 additions & 850 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

README.md

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ Everyone is welcome to contribute!
1616

1717
## Features
1818

19-
- Command-line execution of prompts
19+
- Command-line execution of prompts (now with web UI!)
2020
- Quick iteration on prompt design and parameter refinement with `watch` command
2121
- YAML configuration ties prompts to models and their configurations
2222
- Write prompt-scripts with #!/usr/bin/prr shebang and execute them directly
@@ -108,6 +108,16 @@ ELEVEN_LABS_API_KEY="9db0...."
108108
DEFAULT_SERVICE="openai/chat/gpt-3.5-turbo"
109109
```
110110

111+
### Running web user interface
112+
113+
Simply run prr with the 'ui' command and your prompt path (if it doesn't exist, it will be created), like so:
114+
115+
```sh
116+
$ prr ui ~/Desktop/my-prompt
117+
```
118+
119+
A web browser will be launched with the UI connected to your prompt, which you can then use to launch runs.
120+
111121
#### For Google PaLM, you need to install the following dependencies:
112122
You need to install [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) and you need to have access to a Vertex AI with Generative AI enabled.
113123
`prr` assumes you're logged in into your Google Cloud account and have access to the project you want to use.

examples/configured/chihuahua.yaml

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,19 @@
11
version: 1
22
prompt:
3-
# more advanced prompt definition.
4-
# you can use either one of the two options
5-
# - content_file
6-
# - messages
7-
#
8-
# using content_file will make prr read the content
9-
# of that template and render it into simple text to use.
10-
# content_file: '_long_prompt_about_chihuahua'
11-
#
12-
# using 'messages' key instead give you finer control
13-
# over what messages are sent with what roles.
14-
# this mimics https://platform.openai.com/docs/guides/chat
15-
# structures currently
16-
messages:
3+
# more advanced prompt definition.
4+
# you can use either one of the two options
5+
# - content_file
6+
# - messages
7+
#
8+
# using content_file will make prr read the content
9+
# of that template and render it into simple text to use.
10+
# content_file: '_long_prompt_about_chihuahua'
11+
#
12+
# using 'messages' key instead give you finer control
13+
# over what messages are sent with what roles.
14+
# this mimics https://platform.openai.com/docs/guides/chat
15+
# structures currently
16+
messages:
1717
- role: 'system'
1818
content: 'You, Henry, are a little Chihuahua dog. That is all you need to know.'
1919
- role: 'assistant'

poetry.lock

Lines changed: 574 additions & 627 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

prr/__main__.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,16 +2,19 @@
22

33
import argparse
44
import os
5-
import sys
65

76
from prr.commands.run import RunPromptCommand
7+
from prr.commands.ui import UIPromptCommand
88
from prr.commands.watch import WatchPromptCommand
9-
from prr.prompt.model_options import ModelOptions
109
from prr.utils.config import load_config
1110

1211
config = load_config()
1312

1413

14+
def check_if_prompt_exists(prompt_path):
15+
return os.path.exists(prompt_path) or os.path.exists(prompt_path + ".yaml")
16+
17+
1518
def main():
1619
parser = argparse.ArgumentParser(
1720
description="Run a prompt against configured models.",
@@ -30,6 +33,9 @@ def main():
3033
script_parser = sub_parsers.add_parser(
3134
"script", help="prompt script mode for use with #!/usr/bin/prr"
3235
)
36+
ui_parser = sub_parsers.add_parser(
37+
"ui", help="launch a web UI to analyze saved runs"
38+
)
3339

3440
def add_common_args(_parser):
3541
_parser.add_argument(
@@ -89,22 +95,32 @@ def add_common_args(_parser):
8995
action="store_true",
9096
default=False,
9197
)
98+
9299
_parser.add_argument("prompt_path", help="Path to prompt to run")
93100

94101
add_common_args(run_parser)
95102
add_common_args(watch_parser)
96103
add_common_args(script_parser)
97104

105+
ui_parser.add_argument("prompt_path", help="Path to prompt to analyze")
106+
98107
watch_parser.add_argument(
99108
"--cooldown", "-c", type=int, help="How much to wait after a re-run", default=5
100109
)
101110

102111
args, prompt_args = parser.parse_known_args()
103112
parsed_args = vars(args)
104113

114+
if parsed_args["command"] == "ui":
115+
if not check_if_prompt_exists(parsed_args["prompt_path"]):
116+
raise Exception(f"Prompt file {parsed_args['prompt_path']} does not exist")
117+
command = UIPromptCommand(parsed_args)
118+
command.start()
119+
105120
if parsed_args["command"] == "script":
106121
parsed_args["quiet"] = True
107122
parsed_args["abbrev"] = False
123+
108124
command = RunPromptCommand(parsed_args, prompt_args)
109125
command.run_prompt()
110126

prr/commands/run.py

Lines changed: 31 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,13 @@ def __init__(self, args, prompt_args=None):
2626
else:
2727
self.console = Console(log_time=False, log_path=False)
2828

29+
if self.args["log"]:
30+
self.save_run = True
31+
else:
32+
self.save_run = False
33+
2934
self.load_prompt_for_path()
30-
self.runner = Runner(self.prompt_config, self.prompt_args)
35+
self.runner = Runner(self.prompt_config, self.save_run, self.prompt_args)
3136

3237
def print_prompt(self, request):
3338
if self.args["abbrev"]:
@@ -88,26 +93,11 @@ def print_run_results(self, result, run_save_directory):
8893

8994
if run_save_directory:
9095
self.console.log(f"💾 {run_save_directory}")
96+
self.console.log("")
9197

92-
def run_prompt_on_service(self, service_name, save=False):
93-
service_config = self.prompt_config.service_with_name(service_name)
94-
service_config.process_option_overrides(self.args)
95-
options = service_config.options
96-
97-
with self.console.status(
98-
f":robot: [bold green]{service_name}[/bold green]"
99-
) as status:
100-
self.runner.prepare_service_run(service_name, self.args)
101-
102-
request = self.runner.current_run_request()
103-
104-
self.print_run_parameters(service_name, request)
105-
self.print_prompt(request)
106-
107-
status.update(status="running model", spinner="dots8Bit")
108-
result, run_save_directory = self.runner.run(service_name, save)
109-
110-
self.print_run_results(result, run_save_directory)
98+
def on_request(self, service_name, request):
99+
self.print_run_parameters(service_name, request)
100+
self.print_prompt(request)
111101

112102
def load_prompt_for_path(self):
113103
prompt_path = self.args["prompt_path"]
@@ -117,12 +107,12 @@ def load_prompt_for_path(self):
117107
loader = PromptConfigLoader()
118108
self.prompt_config = loader.load_from_path(prompt_path)
119109

120-
def run_prompt(self):
121-
services_to_run = self.prompt_config.configured_services()
110+
def services_to_run(self):
111+
_services = self.prompt_config.configured_services()
122112

123-
if services_to_run == []:
113+
if _services == []:
124114
if self.args["service"]:
125-
services_to_run = [self.args["service"]]
115+
_services = [self.args["service"]]
126116
self.console.log(
127117
f":racing_car: Running service {self.args['service']}."
128118
)
@@ -132,10 +122,22 @@ def run_prompt(self):
132122
)
133123
exit(-1)
134124
else:
135-
self.console.log(f":racing_car: Running services: {services_to_run}")
125+
self.console.log(f":racing_car: Running services: {_services}")
136126

137-
for service_name in services_to_run:
138-
if len(services_to_run) > 1:
139-
self.console.log("")
127+
return _services
140128

141-
self.run_prompt_on_service(service_name, self.args["log"])
129+
def run_prompt(self):
130+
services = self.services_to_run()
131+
132+
self.runner.run_services(
133+
services,
134+
self.args,
135+
{
136+
"on_request": lambda service_name, request: self.on_request(
137+
service_name, request
138+
),
139+
"on_result": lambda service_name, result, run_save_directory: self.print_run_results(
140+
result, run_save_directory
141+
),
142+
},
143+
)

prr/commands/ui.py

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
#!/usr/bin/env python
2+
3+
import os
4+
from io import StringIO
5+
6+
import uvicorn
7+
from rich.console import Console
8+
9+
console = Console(log_time=False, log_path=False)
10+
11+
DEFAULT_TEMPLATE_PATH = os.path.join(
12+
os.path.dirname(__file__), "..", "prompt_template.yaml"
13+
)
14+
15+
16+
class UIPromptCommand:
17+
def __init__(self, args, prompt_args=None):
18+
self.args = args
19+
self.prompt_config = None
20+
self.prompt_path = None
21+
22+
if self.args.get("quiet"):
23+
self.console = Console(file=StringIO())
24+
else:
25+
self.console = Console(log_time=False, log_path=False)
26+
27+
def create_default_config(self, prompt_path):
28+
if os.access(os.path.dirname(prompt_path), os.W_OK):
29+
self.console.log(
30+
f":magnifying_glass_tilted_left: {prompt_path} not found, creating it from template"
31+
)
32+
33+
with open(prompt_path, "w") as dst:
34+
with open(DEFAULT_TEMPLATE_PATH, "r") as src:
35+
dst.write(src.read())
36+
37+
self.prompt_path = prompt_path
38+
else:
39+
raise Exception(f"Cannot create prompt file {prompt_path}")
40+
41+
def prepare_prompt_path(self):
42+
prompt_path = os.path.abspath(self.args["prompt_path"])
43+
44+
if not prompt_path.endswith(".yaml"):
45+
prompt_path = prompt_path + ".yaml"
46+
47+
if os.path.exists(prompt_path):
48+
if os.access(prompt_path, os.R_OK):
49+
self.console.log(
50+
f":magnifying_glass_tilted_left: Reading prompt from {prompt_path}"
51+
)
52+
53+
self.prompt_path = prompt_path
54+
else:
55+
raise Exception(f"Cannot access prompt file {prompt_path}")
56+
else:
57+
self.create_default_config(prompt_path)
58+
59+
def start(self):
60+
self.prepare_prompt_path()
61+
62+
# a vital hack to pass the prompt path to the web ui
63+
os.environ["__PRR_WEB_UI_PROMPT_PATH"] = self.prompt_path
64+
65+
uvicorn.run(
66+
"prr.ui:app", host="localhost", port=8400, reload=False, access_log=False
67+
)

prr/prompt/__init__.py

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,3 @@
1-
import os
2-
3-
import jinja2
4-
import yaml
5-
from jinja2 import meta
6-
7-
from prr.prompt.prompt_config import PromptConfig
8-
from prr.prompt.prompt_template import PromptTemplate
9-
from prr.prompt.service_config import ServiceConfig
10-
11-
121
class Prompt:
132
def __init__(self, content, config=None, args=None):
143
self.content = content

prr/prompt/model_options.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,9 @@ def select(self, option_keys):
2626
return ModelOptions(_options, False)
2727

2828
def update_options(self, options):
29+
if options == None:
30+
return
31+
2932
for key in options.keys():
3033
if options[key] != None:
3134
if key not in self.options_set:

prr/prompt/prompt_config.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -78,13 +78,10 @@ def option_for_service(self, service_name, option_name):
7878
return self.options_for_service(service_name).value(option_name)
7979

8080
def file_dependencies(self):
81-
_dependencies = []
82-
for message in self.template.messages:
83-
for dependency in message.file_dependencies:
84-
if dependency != None and dependency not in _dependencies:
85-
_dependencies.append(dependency)
81+
if self.template:
82+
return self.template.file_dependencies()
8683

87-
return _dependencies
84+
return []
8885

8986
####################################################
9087

prr/prompt/prompt_template.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import os
22

33
import jinja2
4+
from jinja2 import meta
45

56

67
class PromptMessage:

0 commit comments

Comments
 (0)