Skip to content

Commit 9d20e34

Browse files
author
Jonathan Sprauel
committed
correction on commit revert
1 parent 26257f4 commit 9d20e34

3 files changed

Lines changed: 116 additions & 67 deletions

File tree

agent.py

Lines changed: 62 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -78,26 +78,29 @@ async def runAgent(sandbox_id):
7878

7979
messages = conv.get("messages", [])
8080

81+
# Find last user message step for commit association
82+
last_user_msg_idx = len(messages) - 1
83+
for i in range(len(messages) - 1, -1, -1):
84+
if messages[i].get("role") == "user":
85+
last_user_msg_idx = i
86+
break
87+
88+
# Startup commit
89+
commit_sandbox_changes(sandbox_id, last_user_msg_idx, "Agent startup")
90+
8191
# --- URL PROCESSING: Process URLs in the last user message before building context ---
92+
artifact_change = False
8293
if messages:
83-
# Get the last user message
84-
last_user_msg_idx = None
85-
for i in range(len(messages) - 1, -1, -1):
86-
if messages[i].get("role") == "user":
87-
last_user_msg_idx = i
88-
break
89-
90-
if last_user_msg_idx is not None:
94+
if last_user_msg_idx != -1:
9195
last_user_msg = messages[last_user_msg_idx]
9296
content = last_user_msg.get("content", "")
9397

9498
if content:
9599
try:
96100
updated_content, url_results = process_urls_in_prompt(content, sandbox_id)
97101
if url_results:
98-
102+
artifact_change = True
99103
logger.info(f"Processed URLs in user message: {len(url_results)} files imported")
100-
# Optionally, you could yield a status message to the user here
101104
except Exception as e:
102105
logger.error(f"Error processing URLs in message: {e}")
103106

@@ -106,10 +109,14 @@ async def runAgent(sandbox_id):
106109
model_name = config.get("llm", {}).get("model")
107110
conversions = preprocess_sandbox_files(sandbox_id, model_name)
108111
if conversions:
112+
artifact_change = True
109113
logger.info(f"Preprocessed {len(conversions)} files: {conversions}")
110114
except Exception as e:
111115
logger.error(f"Error during file preprocessing: {e}")
112-
# Continue even if preprocessing fails
116+
117+
# Commit after artifacts if any
118+
if artifact_change:
119+
commit_sandbox_changes(sandbox_id, last_user_msg_idx, "Artifacts downloaded and converted")
113120

114121
# --- PERSISTENCE: Save a pending assistant message ---
115122
# This ensures "Working..." is shown even after reload
@@ -123,13 +130,11 @@ async def runAgent(sandbox_id):
123130
# Ensure messages are in OpenAI format
124131
openai_messages = []
125132
# Note: We iterate over messages but SKIP the last one (which is our pending placeholder)
126-
# for the LLM context, because we don't want to feed an empty assistant message to the LLM.
127133
for m in messages[:-1]:
128134
role = m.get("role")
129135
content = m.get("content")
130-
# Map roles if needed, but they should be standard
131136
if role not in ["user", "assistant", "system", "tool"]:
132-
role = "user" # default
137+
role = "user"
133138

134139
msg_obj = {"role": role, "content": content}
135140
if "tool_calls" in m and m["tool_calls"]:
@@ -254,46 +259,50 @@ async def runAgent(sandbox_id):
254259
content = message.get("content")
255260
tool_calls = message.get("tool_calls")
256261

257-
# Append assistant message to history used for next turn
258-
openai_messages.append(message)
262+
# Save assistant message to history (incremental)
263+
msg_to_save = message.copy()
264+
msg_to_save["status"] = "done"
265+
# Remove pending placeholder before adding new Turn
266+
if messages and messages[-1].get("status") == "pending":
267+
messages.pop()
268+
messages.append(msg_to_save)
269+
270+
# Re-add pending placeholder for the next assistant or tool result
271+
messages.append({"role": "assistant", "content": "", "status": "pending"})
272+
conv["messages"] = messages
273+
convs[sandbox_id] = conv
274+
save_all_sandboxes(convs)
259275

260276
# Yield content if any
261277
if content:
262-
# Determine message type: if tools are called, content is usually "thought"
263278
msg_type = "thought" if tool_calls else "content"
264279
yield json.dumps({"type": msg_type, "data": content}) + "\n"
265280

266281
if tool_calls:
267-
# Emit status that we're about to execute tools
268282
yield json.dumps({
269283
"type": "status",
270284
"data": f"Executing {len(tool_calls)} tool(s)..."
271285
}) + "\n"
272286

287+
modifying_tools = ["write_to_file", "append_to_file", "delete_file", "edit_file", "import_outlook_emails"]
288+
273289
for tc in tool_calls:
274290
func_name = tc["function"]["name"]
275291
args_str = tc["function"]["arguments"]
276292
call_id = tc["id"]
277293

278-
# Emit tool call event
279294
yield json.dumps({
280295
"type": "tool_call",
281-
"data": {
282-
"name": func_name,
283-
"arguments": args_str,
284-
"id": call_id
285-
}
296+
"data": {"name": func_name, "arguments": args_str, "id": call_id}
286297
}) + "\n"
287298

288299
try:
289300
args = json.loads(args_str)
290-
args["sandbox_id"] = sandbox_id # Inject sandbox_id
291-
args["model_name"] = config.get("llm", {}).get("model") # Inject model_name
301+
args["sandbox_id"] = sandbox_id
302+
args["model_name"] = config.get("llm", {}).get("model")
292303

293304
result = execute_tool(func_name, args)
294305

295-
# Emit tool result event
296-
# Check for images in result
297306
content_payload = result
298307
is_image = False
299308
try:
@@ -309,7 +318,6 @@ async def runAgent(sandbox_id):
309318
"image_url": {"url": f"data:image/png;base64,{img}"}
310319
})
311320
except Exception as e:
312-
print(f"Error parsing image result: {e}")
313321
pass
314322

315323
yield json.dumps({
@@ -334,15 +342,34 @@ async def runAgent(sandbox_id):
334342
}
335343
}) + "\n"
336344

337-
# Append tool result
338-
openai_messages.append({
345+
# Append tool result to history (incremental)
346+
tool_msg = {
339347
"role": "tool",
340348
"tool_call_id": call_id,
341-
"content": content_payload
342-
})
349+
"name": func_name,
350+
"content": content_payload,
351+
"status": "done"
352+
}
353+
openai_messages.append(tool_msg)
354+
355+
# Update saved state
356+
if messages and messages[-1].get("status") == "pending":
357+
messages.pop()
358+
messages.append(tool_msg)
359+
messages.append({"role": "assistant", "content": "", "status": "pending"})
360+
conv["messages"] = messages
361+
convs[sandbox_id] = conv
362+
save_all_sandboxes(convs)
363+
364+
# Commit if it's a modifying tool
365+
if func_name in modifying_tools:
366+
commit_sandbox_changes(sandbox_id, len(messages)-2, f"Tool: {func_name}")
367+
343368
else:
344369
# No tool calls, we are done
345370
agent_success = True
371+
# Final commit after last response
372+
commit_sandbox_changes(sandbox_id, len(messages)-2, "Agent final response")
346373
break
347374

348375
except Exception as e:
@@ -352,52 +379,29 @@ async def runAgent(sandbox_id):
352379

353380
finally:
354381
# --- FINALLY: Update the pending message ---
355-
# This runs whether success, error, or cancelled (client disconnect)
356382
try:
357383
convs = load_all_sandboxes()
358384
conv = convs.get(sandbox_id)
359385
if conv:
360386
messages = conv.get("messages", [])
361-
# Remove the pending message we added at the start
387+
# Remove the pending message
362388
if messages and messages[-1].get("status") == "pending":
363389
messages.pop()
364390

365-
# Use agent_success flag to determine status
366-
final_status = "done" if agent_success else "error"
367-
368-
# Recover any new messages and append them
369-
new_msgs = openai_messages[initial_openai_count:]
370-
for msg in new_msgs:
371-
msg["status"] = "done"
372-
messages.append(msg)
373-
374391
if not agent_success:
375392
messages.append({"role": "assistant", "content": "Generation interrupted or failed.", "status": "error"})
376393

377-
# We look for the *last* user message.
394+
# Ensure last user message is marked as done
378395
for i in range(len(messages) - 1, -1, -1):
379396
if messages[i].get("role") == "user":
380-
# If it is pending, close it.
381397
if messages[i].get("status") == "pending":
382398
messages[i]["status"] = "done" if agent_success else "error"
383-
# We only need to update the last one that triggered this run
384399
break
385400

386401
conv["messages"] = messages
387402
convs[sandbox_id] = conv
388403
save_all_sandboxes(convs)
389404

390-
# Git Commit
391-
last_user_msg = "Update"
392-
for m in reversed(messages):
393-
if m.get("role") == "user":
394-
last_user_msg = m.get("content", "Update")
395-
break
396-
397-
commit_msg = f"Agent update: {last_user_msg[:30]}..."
398-
399-
sandbox_path = get_sandbox_path(sandbox_id)
400-
commit_sandbox_changes(sandbox_path, conv["messages"], commit_msg)
401405
except Exception as e:
402406
print(f"Critical error saving conversation state: {e}")
403407

api.py

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -101,20 +101,30 @@ async def api_create_sandbox(request: Request):
101101
messages = []
102102

103103
# Handle file copying for forks
104+
original_commits = []
104105
if source_id:
105106
convs = load_all_sandboxes()
106107
if source_id in convs:
108+
source_conv = convs[source_id]
109+
original_commits = source_conv.get("commits", [])
107110
source_path = get_sandbox_path(source_id)
108111
if os.path.exists(source_path):
109112
import shutil
110113
target_path = os.path.join(SANDBOXES_DIR, conv_id)
111114
try:
112115
# Create target directory and copy files
113-
shutil.copytree(source_path, target_path, ignore=shutil.ignore_patterns('.git', 'conversation.json'))
116+
# We copy everything including .git to preserve history
117+
shutil.copytree(source_path, target_path, ignore=shutil.ignore_patterns('conversation.json', '__pycache__', '*.pyc'))
114118
except Exception as e:
115119
print(f"Error copying files during fork: {e}")
116120

117-
conv = {"id": conv_id, "title": title, "read_only": read_only, "messages": messages}
121+
conv = {
122+
"id": conv_id,
123+
"title": title,
124+
"read_only": read_only,
125+
"messages": messages,
126+
"commits": original_commits
127+
}
118128
convs = load_all_sandboxes()
119129
convs[conv_id] = conv
120130
save_all_sandboxes(convs)
@@ -190,7 +200,7 @@ async def api_patch_sandbox(conv_id: str, request: Request):
190200
if update_commit:
191201
commits = conv.get("commits", [])
192202
target_step = len(new_messages) - 1
193-
target_commit = next((c for c in commits if c["step"] == target_step), None)
203+
target_commit = next((c for c in reversed(commits) if c["step"] == target_step), None)
194204
if target_commit:
195205
sandbox_path = get_sandbox_path(conv_id)
196206
revert_sandbox_to_commit(sandbox_path, target_commit["hash"])
@@ -221,7 +231,7 @@ async def api_revert_sandbox(conv_id: str, request: Request):
221231
return JSONResponse(status_code=404, content={"error": "Sandbox not found"})
222232
if step is not None and commit_hash is None:
223233
commits = conv.get("commits", [])
224-
target_commit = next((c for c in commits if c["step"] == step), None)
234+
target_commit = next((c for c in reversed(commits) if c["step"] == step), None)
225235
if not target_commit:
226236
return JSONResponse(status_code=404, content={"error": "Commit for step not found"})
227237
commit_hash = target_commit["hash"]

utils.py

Lines changed: 40 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -109,12 +109,24 @@ def write_conversation_json(sandbox_path: str, messages: list) -> str:
109109
return conversation_path
110110

111111

112-
def commit_sandbox_changes(sandbox_path: str, messages: list, commit_message: str) -> str:
113-
"""Commit all changes in the sandbox folder including conversation.json."""
112+
def commit_sandbox_changes(sandbox_id: str, step: int, commit_message: str) -> str:
113+
"""Commit all changes in the sandbox folder and update sandboxes.json."""
114114
from dulwich import porcelain
115+
from datetime import datetime
116+
117+
sandbox_path = get_sandbox_path(sandbox_id)
115118
repo = init_or_get_repo(sandbox_path)
116119

117-
# Write conversation.json
120+
# Load conversation to get current messages
121+
sandboxes = load_all_sandboxes()
122+
conv = sandboxes.get(sandbox_id)
123+
if not conv:
124+
print(f"Error: Sandbox {sandbox_id} not found when committing.")
125+
return ""
126+
127+
messages = conv.get("messages", [])
128+
129+
# Write conversation.json into the sandbox folder for self-containment
118130
write_conversation_json(sandbox_path, messages)
119131

120132
# Add all files in the sandbox folder
@@ -132,9 +144,32 @@ def commit_sandbox_changes(sandbox_path: str, messages: list, commit_message: st
132144

133145
# Commit changes
134146
try:
135-
commit_hash = porcelain.commit(sandbox_path, commit_message.encode('utf-8'))
136-
return commit_hash.decode('utf-8')
147+
commit_hash_bytes = porcelain.commit(sandbox_path, commit_message.encode('utf-8'))
148+
commit_hash = commit_hash_bytes.decode('utf-8')
149+
150+
# Update commits list in sandboxes.json
151+
commits = conv.setdefault("commits", [])
152+
commit_info = {
153+
"hash": commit_hash,
154+
"step": step,
155+
"message": commit_message,
156+
"timestamp": datetime.now().isoformat()
157+
}
158+
commits.append(commit_info)
159+
160+
# Save updated sandboxes.json
161+
sandboxes[sandbox_id] = conv
162+
save_all_sandboxes(sandboxes)
163+
164+
return commit_hash
137165
except Exception as e:
166+
# porcelain.commit can fail if there's nothing to commit
167+
if "nothing to commit" in str(e).lower():
168+
# If nothing to commit, we might still want to record the step if it's important,
169+
# but usually we only care about changes.
170+
# However, for the user's requirement "commit after each tool call that makes a modification",
171+
# if we call it and nothing changed, it's fine to just return empty.
172+
return ""
138173
print(f"Error committing changes: {e}")
139174
return ""
140175

0 commit comments

Comments
 (0)