Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions docs/sandbox/clients.md
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ For provider-specific setup notes and links for the checked-in extension example
| `E2BSandboxClient` | `openai-agents[e2b]` | [E2B runner](https://github.com/openai/openai-agents-python/blob/main/examples/sandbox/extensions/e2b_runner.py) |
| `ModalSandboxClient` | `openai-agents[modal]` | [Modal runner](https://github.com/openai/openai-agents-python/blob/main/examples/sandbox/extensions/modal_runner.py) |
| `RunloopSandboxClient` | `openai-agents[runloop]` | [Runloop runner](https://github.com/openai/openai-agents-python/blob/main/examples/sandbox/extensions/runloop/runner.py) |
| `TensorlakeSandboxClient` | `openai-agents[tensorlake]` | [Tensorlake runner](https://github.com/openai/openai-agents-python/blob/main/examples/sandbox/extensions/tensorlake_runner.py) |
| `VercelSandboxClient` | `openai-agents[vercel]` | [Vercel runner](https://github.com/openai/openai-agents-python/blob/main/examples/sandbox/extensions/vercel_runner.py) |

</div>
Expand All @@ -113,6 +114,7 @@ Hosted sandbox clients expose provider-specific mount strategies. Choose the bac
| `DaytonaSandboxClient` | Supports rclone-backed cloud storage mounts with `DaytonaCloudBucketMountStrategy`; use it with `S3Mount`, `GCSMount`, `R2Mount`, `AzureBlobMount`, and `BoxMount`. |
| `E2BSandboxClient` | Supports rclone-backed cloud storage mounts with `E2BCloudBucketMountStrategy`; use it with `S3Mount`, `GCSMount`, `R2Mount`, `AzureBlobMount`, and `BoxMount`. |
| `RunloopSandboxClient` | Supports rclone-backed cloud storage mounts with `RunloopCloudBucketMountStrategy`; use it with `S3Mount`, `GCSMount`, `R2Mount`, `AzureBlobMount`, and `BoxMount`. |
| `TensorlakeSandboxClient` | No hosted-specific mount strategy is currently exposed. Use manifest files, repos, or other workspace inputs instead. Tensorlake's native sandbox checkpoint API is available via `workspace_persistence="snapshot"`; prefer this over external bucket mounts for between-run persistence. |
| `VercelSandboxClient` | No hosted-specific mount strategy is currently exposed. Use manifest files, repos, or other workspace inputs instead. |

</div>
Expand All @@ -130,6 +132,7 @@ The table below summarizes which remote storage entries each backend can mount d
| `DaytonaSandboxClient` | ✓ | ✓ | ✓ | ✓ | ✓ | - |
| `E2BSandboxClient` | ✓ | ✓ | ✓ | ✓ | ✓ | - |
| `RunloopSandboxClient` | ✓ | ✓ | ✓ | ✓ | ✓ | - |
| `TensorlakeSandboxClient` | - | - | - | - | - | - |
| `VercelSandboxClient` | - | - | - | - | - | - |

</div>
Expand Down
30 changes: 30 additions & 0 deletions examples/sandbox/extensions/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -243,6 +243,36 @@ export DAYTONA_API_KEY=...
uv run python examples/sandbox/extensions/daytona/daytona_runner.py --stream
```

## Tensorlake

### Setup

Install the repo extra:

```bash
uv sync --extra tensorlake
```

Sign up at [cloud.tensorlake.ai](https://cloud.tensorlake.ai/) (or run `tl login`)
and export the required environment variables:

```bash
export OPENAI_API_KEY=...
export TENSORLAKE_API_KEY=...
```

### Run

```bash
uv run python examples/sandbox/extensions/tensorlake_runner.py --stream
```

Useful flags:

- `--image <name>` to pin a specific Tensorlake registered image.
- `--timeout-secs 600` to override the default sandbox lifetime in seconds.
- `--workspace-persistence snapshot` to verify the native checkpoint round-trip.

## Runloop

### Setup
Expand Down
289 changes: 289 additions & 0 deletions examples/sandbox/extensions/tensorlake_runner.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,289 @@
"""
Minimal Tensorlake-backed sandbox example for manual validation.

This mirrors the other cloud extension examples: it creates a tiny workspace,
verifies stop/resume persistence, then asks a sandboxed agent to inspect the
workspace through one shell tool.
"""

from __future__ import annotations

import argparse
import asyncio
import io
import os
import sys
import tempfile
from pathlib import Path
from typing import Literal, cast

from openai.types.responses import ResponseTextDeltaEvent

from agents import ModelSettings, Runner
from agents.models.openai_provider import OpenAIProvider
from agents.run import RunConfig
from agents.sandbox import LocalSnapshotSpec, Manifest, SandboxAgent, SandboxRunConfig
from agents.sandbox.session import BaseSandboxSession

# When executed directly as a script (no package context), put the repo root
# on sys.path so the `examples.*` imports below resolve.
if __package__ is None or __package__ == "":
    sys.path.insert(0, str(Path(__file__).resolve().parents[3]))

from examples.sandbox.misc.example_support import text_manifest
from examples.sandbox.misc.workspace_shell import WorkspaceShellCapability

# The Tensorlake client ships behind an optional extra; convert the import
# failure into an actionable install hint instead of a raw traceback.
try:
    from agents.extensions.sandbox import (
        TensorlakeSandboxClient,
        TensorlakeSandboxClientOptions,
    )
except Exception as exc:  # pragma: no cover - import path depends on optional extras
    raise SystemExit(
        "Tensorlake sandbox examples require the optional repo extra.\n"
        "Install it with: uv sync --extra tensorlake"
    ) from exc


# Prompt used when the caller does not pass --question.
DEFAULT_QUESTION = "Summarize this cloud sandbox workspace in 2 sentences."
# Sentinel file written before a stop/resume cycle to prove workspace persistence.
SNAPSHOT_CHECK_PATH = Path("snapshot-check.txt")
SNAPSHOT_CHECK_CONTENT = "tensorlake snapshot round-trip ok\n"
# Sentinel file used to verify resuming a still-running sandbox from serialized state.
LIVE_RESUME_CHECK_PATH = Path("live-resume-check.txt")
LIVE_RESUME_CHECK_CONTENT = "tensorlake live resume ok\n"


def _build_manifest() -> Manifest:
    """Create the tiny demo workspace the sandboxed agent will inspect."""
    readme = (
        "# Tensorlake Demo Workspace\n"
        "\n"
        "This workspace exists to validate the Tensorlake sandbox backend manually.\n"
    )
    handoff = (
        "# Handoff\n"
        "\n"
        "- Customer: Northwind Traders.\n"
        "- Goal: validate Tensorlake sandbox exec and persistence flows.\n"
        "- Current status: non-PTY backend slice is wired and under test.\n"
    )
    todo = (
        "# Todo\n"
        "\n"
        "1. Inspect the workspace files.\n"
        "2. Summarize the current status in two sentences.\n"
    )
    files = {
        "README.md": readme,
        "handoff.md": handoff,
        "todo.md": todo,
    }
    return text_manifest(files)


async def _read_text(session: BaseSandboxSession, path: Path) -> str:
    """Read *path* from the sandbox and return its contents as text.

    ``session.read`` yields a file-like object whose ``read()`` may return
    either ``str`` or ``bytes``; bytes are decoded as UTF-8.
    """
    stream = await session.read(path)
    payload = cast(str | bytes, stream.read())
    return payload.decode("utf-8") if isinstance(payload, bytes) else payload


def _require_env(name: str) -> None:
    """Exit with an actionable message unless env var *name* is set and non-empty."""
    if not os.environ.get(name):
        raise SystemExit(f"{name} must be set before running this example.")


async def _verify_stop_resume(
    *,
    manifest: Manifest,
    image: str | None,
    timeout_secs: int | None,
    workspace_persistence: Literal["tar", "snapshot"],
) -> None:
    """Verify that a sentinel file survives a full stop/shutdown/resume cycle.

    Creates a sandbox backed by a local snapshot directory, writes a sentinel
    file, stops and shuts the sandbox down, then resumes from its saved state
    and checks the sentinel content round-tripped intact.

    Raises:
        RuntimeError: if the restored file content does not match what was written.
    """
    client = TensorlakeSandboxClient()
    options = TensorlakeSandboxClientOptions(
        image=image,
        timeout_secs=timeout_secs,
        workspace_persistence=workspace_persistence,
    )
    # The snapshot lives on local disk only for the duration of this check.
    with tempfile.TemporaryDirectory(prefix="tensorlake-snapshot-example-") as snapshot_dir:
        sandbox = await client.create(
            manifest=manifest,
            snapshot=LocalSnapshotSpec(base_path=Path(snapshot_dir)),
            options=options,
        )

        try:
            await sandbox.start()
            await sandbox.write(
                SNAPSHOT_CHECK_PATH,
                io.BytesIO(SNAPSHOT_CHECK_CONTENT.encode("utf-8")),
            )
            # Stop persists the workspace before shutdown releases the sandbox.
            await sandbox.stop()
        finally:
            await sandbox.shutdown()

        # Resume from the state captured at stop time and confirm the write survived.
        resumed_sandbox = await client.resume(sandbox.state)
        try:
            await resumed_sandbox.start()
            restored_text = await _read_text(resumed_sandbox, SNAPSHOT_CHECK_PATH)
            if restored_text != SNAPSHOT_CHECK_CONTENT:
                raise RuntimeError(
                    f"Snapshot resume verification failed for {workspace_persistence!r}: "
                    f"expected {SNAPSHOT_CHECK_CONTENT!r}, got {restored_text!r}"
                )
        finally:
            await resumed_sandbox.aclose()

    print(f"snapshot round-trip ok ({workspace_persistence})")


async def _verify_resume_running_sandbox(
    *,
    manifest: Manifest,
    image: str | None,
    timeout_secs: int | None,
    workspace_persistence: Literal["tar", "snapshot"],
) -> None:
    """Verify that a *running* sandbox can be re-attached from serialized state.

    Unlike `_verify_stop_resume`, the original sandbox is never stopped: its
    state is serialized, deserialized, and resumed while the sandbox is live,
    then a sentinel file written before serialization is read back.

    Raises:
        RuntimeError: if the sentinel content read through the resumed handle
            does not match what was written.
    """
    client = TensorlakeSandboxClient()
    sandbox = await client.create(
        manifest=manifest,
        options=TensorlakeSandboxClientOptions(
            image=image,
            timeout_secs=timeout_secs,
            workspace_persistence=workspace_persistence,
        ),
    )

    try:
        await sandbox.start()
        await sandbox.write(
            LIVE_RESUME_CHECK_PATH,
            io.BytesIO(LIVE_RESUME_CHECK_CONTENT.encode("utf-8")),
        )
        # Round-trip the session state through its serialized form to mimic
        # handing the sandbox off to another process.
        serialized = client.serialize_session_state(sandbox.state)
        resumed_sandbox = await client.resume(client.deserialize_session_state(serialized))
        try:
            restored_text = await _read_text(resumed_sandbox, LIVE_RESUME_CHECK_PATH)
            if restored_text != LIVE_RESUME_CHECK_CONTENT:
                raise RuntimeError(
                    "Running sandbox resume verification failed: "
                    f"expected {LIVE_RESUME_CHECK_CONTENT!r}, got {restored_text!r}"
                )
        finally:
            await resumed_sandbox.aclose()
    finally:
        await sandbox.shutdown()

    print(f"running sandbox resume ok ({workspace_persistence})")


async def main(
    *,
    model: str,
    question: str,
    image: str | None,
    timeout_secs: int | None,
    workspace_persistence: Literal["tar", "snapshot"],
    stream: bool,
) -> None:
    """Run both persistence checks, then ask a sandboxed agent *question*.

    Args:
        model: Model name passed to the agent.
        question: Prompt sent to the agent after the persistence checks pass.
        image: Optional Tensorlake registered image name (None = SDK default).
        timeout_secs: Optional sandbox lifetime in seconds.
        workspace_persistence: Persistence mode exercised by the checks.
        stream: If True, stream text deltas to stdout instead of awaiting the
            final result.
    """
    _require_env("OPENAI_API_KEY")
    _require_env("TENSORLAKE_API_KEY")

    manifest = _build_manifest()

    # Exercise both persistence flows before involving the model at all, so a
    # backend regression fails fast and cheaply.
    await _verify_stop_resume(
        manifest=manifest,
        image=image,
        timeout_secs=timeout_secs,
        workspace_persistence=workspace_persistence,
    )
    await _verify_resume_running_sandbox(
        manifest=manifest,
        image=image,
        timeout_secs=timeout_secs,
        workspace_persistence=workspace_persistence,
    )

    # tool_choice="required" forces the agent to actually inspect the files
    # through the shell capability rather than answering from the prompt alone.
    agent = SandboxAgent(
        name="Tensorlake Sandbox Assistant",
        model=model,
        instructions=(
            "Answer questions about the sandbox workspace. Inspect the files before answering "
            "and keep the response concise. "
            "Do not invent files or statuses that are not present in the workspace. Cite the "
            "file names you inspected."
        ),
        default_manifest=manifest,
        capabilities=[WorkspaceShellCapability()],
        model_settings=ModelSettings(tool_choice="required"),
    )

    client = TensorlakeSandboxClient()
    sandbox = await client.create(
        manifest=manifest,
        options=TensorlakeSandboxClientOptions(
            image=image,
            timeout_secs=timeout_secs,
            workspace_persistence=workspace_persistence,
        ),
    )

    run_config = RunConfig(
        model_provider=OpenAIProvider(),
        sandbox=SandboxRunConfig(session=sandbox),
        tracing_disabled=True,
        workflow_name="Tensorlake sandbox example",
    )

    try:
        # The async context manager handles sandbox start/close around the run.
        async with sandbox:
            if not stream:
                result = await Runner.run(agent, question, run_config=run_config)
                print(result.final_output)
                return

            stream_result = Runner.run_streamed(agent, question, run_config=run_config)
            saw_text_delta = False
            async for event in stream_result.stream_events():
                # Only surface raw text deltas; other event types (tool calls,
                # lifecycle events) are ignored for this minimal example.
                if event.type == "raw_response_event" and isinstance(
                    event.data, ResponseTextDeltaEvent
                ):
                    if not saw_text_delta:
                        # Print the prefix once, before the first delta.
                        print("assistant> ", end="", flush=True)
                        saw_text_delta = True
                    print(event.data.delta, end="", flush=True)

            if saw_text_delta:
                print()
    finally:
        # Always release the remote sandbox, even if the run raised.
        await client.delete(sandbox)


if __name__ == "__main__":
    # CLI entry point: parse flags, then drive the async example to completion.
    parser = argparse.ArgumentParser()
    parser.add_argument("--model", default="gpt-5.5", help="Model name to use.")
    parser.add_argument("--question", default=DEFAULT_QUESTION, help="Prompt to send to the agent.")
    parser.add_argument(
        "--image",
        default=None,
        help="Optional Tensorlake registered image name. Falls back to the SDK default.",
    )
    parser.add_argument(
        "--timeout-secs",
        type=int,
        default=300,
        help="Optional Tensorlake sandbox lifetime in seconds.",
    )
    parser.add_argument(
        "--workspace-persistence",
        choices=("tar", "snapshot"),
        default="tar",
        help="Workspace persistence mode to verify before the agent run.",
    )
    parser.add_argument("--stream", action="store_true", default=False, help="Stream the response.")
    args = parser.parse_args()

    asyncio.run(
        main(
            model=args.model,
            question=args.question,
            image=args.image,
            timeout_secs=args.timeout_secs,
            # argparse already restricts choices; cast narrows the type for mypy.
            workspace_persistence=cast(Literal["tar", "snapshot"], args.workspace_persistence),
            stream=args.stream,
        )
    )
5 changes: 5 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ cloudflare = ["aiohttp>=3.12,<4"]
e2b = ["e2b==2.20.0", "e2b-code-interpreter==2.4.1"]
modal = ["modal==1.3.5"]
runloop = ["runloop_api_client>=1.16.0,<2.0.0"]
tensorlake = ["tensorlake>=0.5.8"]
vercel = ["vercel>=0.5.6,<0.6"]
s3 = ["boto3>=1.34"]
temporal = [
Expand Down Expand Up @@ -156,6 +157,10 @@ ignore_missing_imports = true
module = ["runloop_api_client", "runloop_api_client.*"]
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = ["tensorlake", "tensorlake.*"]
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = ["blaxel", "blaxel.*"]
ignore_missing_imports = true
Expand Down
Loading