Understanding the Plot runtime environment will help you build efficient, reliable twists.
Plot twists run in a sandboxed, serverless environment with the following characteristics:
Twists respond to events:
Each execution has:
Instance variables don't survive between invocations:
// ❌ WRONG - This doesn't work!
// Each invocation runs in a fresh execution context, so instance fields
// are re-initialized every time — nothing set on `this` survives.
class MyTwist extends Twist<MyTwist> {
  private syncToken: string = ""; // Lost after execution!
  async activate() {
    this.syncToken = "abc123"; // Written to in-memory instance state only
  }
  async someMethod() {
    console.log(this.syncToken); // Undefined! Different execution context
  }
}
// ✅ CORRECT - Use Store
// The Store survives across executions, so state written in one
// invocation is readable in any later one.
class MyTwist extends Twist<MyTwist> {
  async activate() {
    // Persist the token instead of keeping it on the instance.
    await this.set("sync_token", "abc123");
  }
  async someMethod() {
    // Read it back in a later, separate execution context.
    const token = await this.get<string>("sync_token");
    console.log(token); // "abc123"
  }
}
Long-running operations must be broken into batches:
// ❌ WRONG - May timeout!
// Processes the entire dataset in one execution — the serial awaits over
// a large list will exceed the serverless time limit.
async syncAllEvents() {
  const events = await fetchAllEvents(); // Could be thousands
  for (const event of events) {
    await this.processEvent(event); // Too slow!
  }
}
// ✅ CORRECT - Batch processing
async startSync() {
  // Seed resumable progress in the Store, then schedule the first batch;
  // each batch reschedules itself until the work is done.
  await this.set("sync_state", { page: 1, total: 0 });
  await this.runTask(await this.callback("syncBatch"));
}
async syncBatch() {
  const state = await this.get<{ page: number; total: number }>("sync_state");
  if (!state) return; // No saved state — sync never started or already finished
  // Process one page
  // NOTE(review): `events` is read both as an array (`.length`) and as a
  // page object (`.hasMore`) — confirm the return shape of fetchEventsPage.
  const events = await fetchEventsPage(state.page);
  await this.processEvents(events);
  // Queue next batch if needed
  if (events.hasMore) {
    await this.set("sync_state", {
      page: state.page + 1,
      total: state.total + events.length
    });
    const callback = await this.callback("syncBatch");
    await this.runTask(callback);
  }
}
Twists cannot read or write files:
// ❌ WRONG - No file system
// The sandbox provides no writable disk; fs calls fail at runtime.
import fs from "fs";
fs.writeFileSync("data.json", JSON.stringify(data));
// ✅ CORRECT - Use Store
await this.set("data", data);
Don't rely on global variables:
// ❌ WRONG - Globals don't persist
// Module-level state is rebuilt from scratch on every invocation.
let globalCache: Map<string, any> = new Map();
// ✅ CORRECT - Use Store with prefixed keys
await this.set("cache:key1", value1);
await this.set("cache:key2", value2);
The Store tool provides persistent key-value storage:
// Save state
await this.set("key", value);
// Retrieve state (typed via the generic parameter)
const value = await this.get<Type>("key");
// Clear state — a single key, or everything this twist has stored
await this.clear("key");
await this.clearAll();
Use prefixes to organize related data:
// Configuration — long-lived settings
await this.set("config:api_key", apiKey);
await this.set("config:workspace_id", workspaceId);
// Sync state — resumable progress markers
await this.set("sync:last_run", new Date().toISOString());
await this.set("sync:page_token", pageToken);
// Cache — entity-keyed data that can be rebuilt if lost
await this.set("cache:user:123", userData);
await this.set("cache:repo:456", repoData);
Clean up state in deactivate:
// Remove persisted state when the twist is deactivated.
// NOTE: the three options are shown together for illustration — in
// practice pick one; after clearAll() the individual clear() calls
// below are no-ops.
async deactivate() {
  // Option 1: Clear all
  await this.clearAll();
  // Option 2: Clear specific keys
  await this.clear("sync:page_token");
  await this.clear("cache:user:123");
  // Option 3: Clear by prefix (manually)
  // Store doesn't have native prefix clearing,
  // so track keys if needed
}
Break long operations into smaller batches to avoid timeouts.
For paginated APIs:
async startSync() {
  // Record where the sync begins so syncPage can resume from it on
  // each subsequent execution.
  const initialState = {
    page: 1,
    totalProcessed: 0,
    startTime: new Date().toISOString()
  };
  await this.set("sync_state", initialState);
  await this.runTask(await this.callback("syncPage"));
}
// Processes exactly one page per execution, then reschedules itself so
// no single run can exceed the serverless time limit.
async syncPage() {
  const state = await this.get<SyncState>("sync_state");
  if (!state) return; // Nothing to resume — never started or already finished
  try {
    // Fetch one page
    const response = await fetch(
      `https://api.example.com/items?page=${state.page}&per_page=50`
    );
    // fetch() only rejects on network failure — HTTP errors must be
    // checked explicitly, otherwise response.json() parses an error body.
    if (!response.ok) {
      throw new Error(`HTTP ${response.status} fetching page ${state.page}`);
    }
    const data = await response.json();
    // Process items
    for (const item of data.items) {
      await this.processItem(item);
    }
    // Update state
    const newState = {
      page: state.page + 1,
      totalProcessed: state.totalProcessed + data.items.length,
      startTime: state.startTime
    };
    // Queue next page if more exist
    if (data.hasMore) {
      await this.set("sync_state", newState);
      const callback = await this.callback("syncPage");
      await this.runTask(callback);
    } else {
      // Sync complete
      console.log(`Sync complete: ${newState.totalProcessed} items`);
      await this.clear("sync_state");
    }
  } catch (error) {
    console.error("Sync error:", error);
    // State is left intact so a retry can resume from the same page.
    // Could implement retry logic here
  }
}
For APIs using continuation tokens:
// Resumable cursor for a token-paginated sync; persisted in the Store
// between executions.
interface SyncState {
  nextToken: string | null; // null means "start from the beginning"
  totalProcessed: number; // running count across all batches
}
async startSync() {
  // A null token tells syncBatch to request the first page; the API
  // returns a continuation token for each subsequent batch.
  const freshState: SyncState = {
    nextToken: null,
    totalProcessed: 0
  };
  await this.set<SyncState>("sync_state", freshState);
  await this.runTask(await this.callback("syncBatch"));
}
// One continuation-token batch per execution; reschedules itself while
// the API keeps returning a nextToken.
async syncBatch() {
  const state = await this.get<SyncState>("sync_state");
  if (!state) return; // Nothing to resume
  // Encode the token — continuation tokens are opaque and may contain
  // characters that would otherwise corrupt the query string.
  const url = state.nextToken
    ? `https://api.example.com/items?token=${encodeURIComponent(state.nextToken)}`
    : "https://api.example.com/items";
  const response = await fetch(url);
  // fetch() resolves on HTTP errors; check the status before parsing.
  if (!response.ok) {
    throw new Error(`HTTP ${response.status} during token sync`);
  }
  const data = await response.json();
  // Process batch
  for (const item of data.items) {
    await this.processItem(item);
  }
  // Update state and continue if needed
  if (data.nextToken) {
    await this.set<SyncState>("sync_state", {
      nextToken: data.nextToken,
      totalProcessed: state.totalProcessed + data.items.length
    });
    const callback = await this.callback("syncBatch");
    await this.runTask(callback);
  } else {
    console.log(`Complete: ${state.totalProcessed + data.items.length} items`);
    await this.clear("sync_state");
  }
}
For processing arrays of items:
async processLargeArray(items: Item[]) {
  // Persist the full work list plus a cursor, then schedule the first
  // batch; processBatch advances the cursor until the list is exhausted.
  await this.set("items_to_process", items);
  await this.set("process_index", 0);
  await this.runTask(await this.callback("processBatch"));
}
// Processes up to 10 items per execution, advancing a persisted cursor.
async processBatch() {
  const items = await this.get<Item[]>("items_to_process");
  const index = await this.get<number>("process_index");
  // `== null` catches both null AND undefined — whichever the Store
  // yields for a missing key (the original `=== null` would miss
  // undefined). A plain truthiness test is wrong here too, because a
  // cursor of 0 is valid.
  if (!items || index == null || index >= items.length) {
    // Done, or state is missing — drop the work list and cursor.
    await this.clear("items_to_process");
    await this.clear("process_index");
    return;
  }
  // Process batch of 10 items
  const batchSize = 10;
  const batch = items.slice(index, index + batchSize);
  for (const item of batch) {
    await this.processItem(item);
  }
  // Update index and continue
  const newIndex = index + batchSize;
  if (newIndex < items.length) {
    await this.set("process_index", newIndex);
    const callback = await this.callback("processBatch");
    await this.runTask(callback);
  } else {
    // Complete — clean up persisted state
    await this.clear("items_to_process");
    await this.clear("process_index");
  }
}
Don't load large datasets into memory:
// ❌ WRONG - Loads everything into memory
async syncAll() {
  const allEvents = await fetchAllEvents(); // Could be 10,000+ events held at once
  for (const event of allEvents) {
    await this.processEvent(event); // Serial processing of the whole set — will also hit the time limit
  }
}
// ✅ CORRECT - Stream/batch processing
async syncBatch() {
  // `??` (not `||`) so only a *missing* key falls back — `||` would also
  // clobber a stored 0. Pages here start at 1, so first run begins at 1
  // either way.
  const page = (await this.get<number>("current_page")) ?? 1;
  const events = await fetchEventsPage(page, 50); // Only 50 at a time
  for (const event of events) {
    await this.processEvent(event);
  }
  // Continue with next batch
}
Store only what's needed:
// ❌ WRONG - Storing full response
const response = await fetch("https://api.example.com/users/123");
const fullData = await response.json();
await this.set("user_data", fullData); // Persists lots of unnecessary data
// ✅ CORRECT - Store only what's needed
const response = await fetch("https://api.example.com/users/123");
const data = await response.json();
await this.set("user_name", data.name);
await this.set("user_email", data.email);
Batch operations where possible:
// ❌ SLOW - Multiple round trips
// One awaited API call per item serializes the whole loop.
for (const item of items) {
  await this.tools.plot.createActivity({
    type: ActivityType.Task,
    title: item.title,
  });
}
// ✅ FAST - Batch create
// A single call carries every activity at once.
await this.tools.plot.createActivities(
  items.map((item) => ({
    type: ActivityType.Task,
    title: item.title,
  }))
);
Run independent operations in parallel:
// ❌ SLOW - Sequential
// Each await blocks the next; total time is the sum of all three calls.
const user = await fetchUser();
const repos = await fetchRepos();
const issues = await fetchIssues();
// ✅ FAST - Parallel
// Independent fetches run concurrently; total time ≈ the slowest call.
const [user, repos, issues] = await Promise.all([
  fetchUser(),
  fetchRepos(),
  fetchIssues()
]);
Cache frequently accessed data:
async getUserData(userId: string): Promise<UserData> {
// Check cache first
const cached = await this.get<UserData>(`cache:user:${userId}`);
if (cached) {
return cached;
}
// Fetch and cache
const data = await fetch(`https://api.example.com/users/${userId}`);
const userData = await data.json();
await this.set(`cache:user:${userId}`, userData);
return userData;
}
Avoid processing duplicate events:
async onWebhook(request: WebhookRequest) {
  const eventId = request.body.id;
  // Idempotency guard: skip events that were already handled.
  // NOTE(review): this check-then-set is not atomic — confirm whether
  // concurrent deliveries of the same event are possible.
  const alreadyHandled = await this.get<boolean>(`processed:${eventId}`);
  if (alreadyHandled) {
    console.log("Event already processed");
    return;
  }
  // Handle the event, then record it as done.
  await this.processEvent(request.body);
  await this.set(`processed:${eventId}`, true);
}
Timeouts manifest as execution termination — your code simply stops running, without throwing an error you can catch.
// Processes items until a time budget is nearly spent, then persists the
// unprocessed tail and reschedules itself.
async longOperation() {
  const start = Date.now();
  const items = await fetchItems();
  // Budget below the execution limit (assumed ~60s — TODO confirm).
  const timeBudgetMs = 55000;
  for (let i = 0; i < items.length; i++) {
    // Check if approaching timeout
    if (Date.now() - start > timeBudgetMs) {
      // Save progress by index. The original `items.indexOf(item)` was
      // O(n) per check AND returns the FIRST occurrence, so duplicate
      // items would make the saved tail too long and reprocess work.
      await this.set("remaining_items", items.slice(i));
      const callback = await this.callback("continueOperation");
      await this.runTask(callback);
      return;
    }
    await this.processItem(items[i]);
  }
}