
Commit a2b60c7

[HAT-429] Go SDK v1 (#2089)
* first version of new syntax and semantics
* lint fix
* add very basic example
* fix err handling
* Run method
* more Go SDK v1 examples
* more v1 Go SDK examples
* everything inside sdks/go
* fix lint
* remove code
* fix cancellations example
* fix child workflows example
* fix durablecontext reflection
* events example
* more examples ported to v1
* RunBulk -> RunMany
* address PR comments
* add link to examples in go doc
1 parent 3650b09 commit a2b60c7

File tree

32 files changed, +5845 −0 lines changed

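The example files added in this commit all follow the same v1 shape: build a client, register typed tasks on a workflow, attach the workflow to a worker, and trigger runs by workflow name. As a rough orientation before the diffs below, here is a minimal sketch assembled only from the calls that appear in these examples (NewClient, NewWorkflow, NewTask, NewWorker, WithWorkflows, Run, StartBlocking); the workflow, worker, and type names such as "greet-workflow" and GreetInput are illustrative, not part of the commit.

package main

import (
	"context"
	"log"
	"time"

	hatchet "github.com/hatchet-dev/hatchet/sdks/go"
)

// Illustrative input/output types, mirroring the typed task signatures below.
type GreetInput struct {
	Name string `json:"name"`
}

type GreetOutput struct {
	Message string `json:"message"`
}

func main() {
	// Build a client, as every example in this commit does.
	client, err := hatchet.NewClient()
	if err != nil {
		log.Fatalf("failed to create hatchet client: %v", err)
	}

	// Register a typed task on a workflow.
	workflow := client.NewWorkflow("greet-workflow")
	workflow.NewTask("greet", func(ctx hatchet.Context, input GreetInput) (GreetOutput, error) {
		return GreetOutput{Message: "Hello, " + input.Name}, nil
	})

	// Attach the workflow to a worker.
	worker, err := client.NewWorker("greet-worker", hatchet.WithWorkflows(workflow))
	if err != nil {
		log.Fatalf("failed to create worker: %v", err)
	}

	// Trigger one run by workflow name, then serve tasks on the worker.
	go func() {
		// Give the worker a moment to register, as the examples below do.
		time.Sleep(2 * time.Second)
		if _, err := client.Run(context.Background(), "greet-workflow", GreetInput{Name: "world"}); err != nil {
			log.Printf("failed to run workflow: %v", err)
		}
	}()

	if err := worker.StartBlocking(); err != nil {
		log.Fatalf("failed to start worker: %v", err)
	}
}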
Lines changed: 101 additions & 0 deletions
@@ -0,0 +1,101 @@
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	hatchet "github.com/hatchet-dev/hatchet/sdks/go"
)

type ProcessInput struct {
	ID      int    `json:"id"`
	Message string `json:"message"`
}

type ProcessOutput struct {
	ID     int    `json:"id"`
	Result string `json:"result"`
}

func main() {
	// Create a new Hatchet client
	client, err := hatchet.NewClient()
	if err != nil {
		log.Fatalf("failed to create hatchet client: %v", err)
	}

	// Create a workflow for bulk processing
	workflow := client.NewWorkflow("bulk-processing-workflow")

	// Define the processing task
	workflow.NewTask("process-item", func(ctx hatchet.Context, input ProcessInput) (ProcessOutput, error) {
		// Simulate some processing work
		time.Sleep(time.Duration(100+input.ID*50) * time.Millisecond)

		log.Printf("Processing item %d: %s", input.ID, input.Message)

		return ProcessOutput{
			ID:     input.ID,
			Result: fmt.Sprintf("Processed item %d: %s", input.ID, input.Message),
		}, nil
	})

	// Create a worker to run the workflow
	worker, err := client.NewWorker("bulk-operations-worker", hatchet.WithWorkflows(workflow))
	if err != nil {
		log.Fatalf("failed to create worker: %v", err)
	}

	// Start the worker in a goroutine
	go func() {
		log.Println("Starting bulk operations worker...")
		if err := worker.StartBlocking(); err != nil {
			log.Printf("worker failed: %v", err)
		}
	}()

	// Wait a moment for the worker to start
	time.Sleep(2 * time.Second)

	// Prepare bulk data
	bulkInputs := make([]ProcessInput, 10)
	for i := 0; i < 10; i++ {
		bulkInputs[i] = ProcessInput{
			ID:      i + 1,
			Message: fmt.Sprintf("Task number %d", i+1),
		}
	}

	log.Printf("Running bulk operations with %d items...", len(bulkInputs))

	// Prepare inputs as []RunManyOpt for bulk run
	inputs := make([]hatchet.RunManyOpt, len(bulkInputs))
	for i, input := range bulkInputs {
		inputs[i] = hatchet.RunManyOpt{
			Input: input,
		}
	}

	// Run workflows in bulk
	ctx := context.Background()
	runIDs, err := client.RunMany(ctx, "bulk-processing-workflow", inputs)
	if err != nil {
		log.Fatalf("failed to run bulk workflows: %v", err)
	}

	log.Printf("Started %d bulk workflows with run IDs: %v", len(runIDs), runIDs)

	// Optionally monitor some of the runs
	for i, runID := range runIDs {
		if i < 3 { // Monitor first 3 runs as examples
			log.Printf("Monitoring run %d with ID: %s", i+1, runID)
		}
	}

	log.Println("All bulk operations started. Press Ctrl+C to stop the worker.")

	// Keep the main function running
	select {}
}
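The bulk entry point used here, client.RunMany with a []hatchet.RunManyOpt slice, is the renamed form of the earlier RunBulk mentioned in the commit message.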
Lines changed: 93 additions & 0 deletions
@@ -0,0 +1,93 @@
package main

import (
	"context"
	"log"
	"time"

	hatchet "github.com/hatchet-dev/hatchet/sdks/go"
)

type CancellationInput struct {
	Message string `json:"message"`
}

type CancellationOutput struct {
	Status    string `json:"status"`
	Completed bool   `json:"completed"`
}

func main() {
	client, err := hatchet.NewClient()
	if err != nil {
		log.Fatalf("failed to create hatchet client: %v", err)
	}

	// Create a workflow that demonstrates cancellation handling
	workflow := client.NewWorkflow("cancellation-demo",
		hatchet.WithWorkflowDescription("Demonstrates workflow cancellation patterns"),
		hatchet.WithWorkflowVersion("1.0.0"),
	)

	// Add a long-running task that can be cancelled
	workflow.NewTask("long-running-task", func(ctx hatchet.Context, input CancellationInput) (CancellationOutput, error) {
		log.Printf("Starting long-running task with message: %s", input.Message)

		// Simulate long-running work with cancellation checking
		for i := 0; i < 10; i++ {
			select {
			case <-ctx.Done():
				log.Printf("Task cancelled after %d seconds", i)
				return CancellationOutput{
					Status:    "cancelled",
					Completed: false,
				}, nil
			default:
				log.Printf("Working... step %d/10", i+1)
				time.Sleep(1 * time.Second)
			}
		}

		log.Println("Task completed successfully")
		return CancellationOutput{
			Status:    "completed",
			Completed: true,
		}, nil
	}, hatchet.WithTimeout(30*time.Second))

	// Create a worker
	worker, err := client.NewWorker("cancellation-worker",
		hatchet.WithWorkflows(workflow),
		hatchet.WithSlots(3),
	)
	if err != nil {
		log.Fatalf("failed to create worker: %v", err)
	}

	// Run workflow instances to demonstrate cancellation
	go func() {
		time.Sleep(2 * time.Second)

		log.Println("Starting workflow instance...")
		_, err := client.Run(context.Background(), "cancellation-demo", CancellationInput{
			Message: "This task will run for 10 seconds and can be cancelled",
		})
		if err != nil {
			log.Printf("failed to run workflow: %v", err)
		}

		// You can demonstrate cancellation by manually cancelling the workflow
		// through the Hatchet UI or API after starting it
	}()

	log.Println("Starting worker for cancellation demo...")
	log.Println("Features demonstrated:")
	log.Println("  - Long-running task with cancellation checking")
	log.Println("  - Context cancellation handling")
	log.Println("  - Graceful shutdown on cancellation")
	log.Println("  - Task timeout configuration")

	if err := worker.StartBlocking(); err != nil {
		log.Fatalf("failed to start worker: %v", err)
	}
}
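Note that the loop above runs for roughly 10 seconds, comfortably inside the 30-second hatchet.WithTimeout, so the cancelled branch is only reached when the run is cancelled externally (for example from the Hatchet UI or API, as the comment in main suggests); the ctx.Done() check is what lets the task return promptly in that case.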
Lines changed: 127 additions & 0 deletions
@@ -0,0 +1,127 @@
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	hatchet "github.com/hatchet-dev/hatchet/sdks/go"
)

type ParentInput struct {
	Count int `json:"count"`
}

type ChildInput struct {
	Value int `json:"value"`
}

type ChildOutput struct {
	Result int `json:"result"`
}

type ParentOutput struct {
	Sum int `json:"sum"`
}

func main() {
	client, err := hatchet.NewClient()
	if err != nil {
		log.Fatalf("failed to create hatchet client: %v", err)
	}

	// Create child workflow
	childWorkflow := client.NewWorkflow("child-workflow",
		hatchet.WithWorkflowDescription("Child workflow that processes a single value"),
		hatchet.WithWorkflowVersion("1.0.0"),
	)

	childWorkflow.NewTask("process-value", func(ctx hatchet.Context, input ChildInput) (ChildOutput, error) {
		log.Printf("Child workflow processing value: %d", input.Value)

		// Simulate some processing
		result := input.Value * 2

		return ChildOutput{
			Result: result,
		}, nil
	})

	// Create parent workflow that spawns multiple child workflows
	parentWorkflow := client.NewWorkflow("parent-workflow",
		hatchet.WithWorkflowDescription("Parent workflow that spawns child workflows"),
		hatchet.WithWorkflowVersion("1.0.0"),
	)

	parentWorkflow.NewTask("spawn-children", func(ctx hatchet.Context, input ParentInput) (ParentOutput, error) {
		log.Printf("Parent workflow spawning %d child workflows", input.Count)

		// Spawn multiple child workflows and collect results
		sum := 0
		for i := 0; i < input.Count; i++ {
			log.Printf("Spawning child workflow %d/%d", i+1, input.Count)

			// Spawn child workflow and wait for result
			childResult, err := childWorkflow.Run(ctx.GetContext(), ChildInput{
				Value: i + 1,
			})
			if err != nil {
				return ParentOutput{}, fmt.Errorf("failed to spawn child workflow %d: %w", i, err)
			}

			log.Printf("Child workflow %d completed with result: %d", i+1, childResult)
		}

		log.Printf("All child workflows completed. Total sum: %d", sum)
		return ParentOutput{
			Sum: sum,
		}, nil
	})

	// Create a worker with both workflows
	worker, err := client.NewWorker("child-workflow-worker",
		hatchet.WithWorkflows(childWorkflow, parentWorkflow),
		hatchet.WithSlots(10), // Allow parallel execution of child workflows
	)
	if err != nil {
		log.Fatalf("failed to create worker: %v", err)
	}

	// Run the parent workflow
	go func() {
		// Wait a bit for worker to start
		for i := 0; i < 3; i++ {
			log.Printf("Starting in %d seconds...", 3-i)
			select {
			case <-context.Background().Done():
				return
			default:
				time.Sleep(1 * time.Second)
			}
		}

		log.Println("Triggering parent workflow...")
		_, err := client.Run(context.Background(), "parent-workflow", ParentInput{
			Count: 5, // Spawn 5 child workflows
		})
		if err != nil {
			log.Printf("failed to run parent workflow: %v", err)
		}
	}()

	log.Println("Starting worker for child workflows demo...")
	log.Println("Features demonstrated:")
	log.Println("  - Parent workflow spawning multiple child workflows")
	log.Println("  - Child workflow execution and result collection")
	log.Println("  - Parallel child workflow processing")
	log.Println("  - Parent-child workflow communication")

	if err := worker.StartBlocking(); err != nil {
		log.Fatalf("failed to start worker: %v", err)
	}
}

func stringPtr(s string) *string {
	return &s
}
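The "spawn-children" task above triggers each child run and waits for it before starting the next one, while the worker is created with hatchet.WithSlots(10) to allow parallel child execution. One possible variation, shown as a sketch rather than code from the commit, spawns the children from goroutines and joins them with a sync.WaitGroup, using only the childWorkflow.Run call that the example already uses; the task name "spawn-children-parallel" and the error-collection channel are illustrative, and the snippet assumes the declarations from the example plus "sync" added to the import block.

	// Sketch only: a parallel variant of the spawn-children task.
	parentWorkflow.NewTask("spawn-children-parallel", func(ctx hatchet.Context, input ParentInput) (ParentOutput, error) {
		var wg sync.WaitGroup
		errs := make(chan error, input.Count)

		for i := 0; i < input.Count; i++ {
			wg.Add(1)
			go func(value int) {
				defer wg.Done()
				// Same call the example uses to run the child workflow.
				if _, err := childWorkflow.Run(ctx.GetContext(), ChildInput{Value: value}); err != nil {
					errs <- fmt.Errorf("failed to run child workflow %d: %w", value, err)
				}
			}(i + 1)
		}

		// Wait for every child run, then surface the first error, if any.
		wg.Wait()
		close(errs)
		for err := range errs {
			return ParentOutput{}, err
		}

		// Result aggregation is omitted in this sketch.
		return ParentOutput{}, nil
	})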
