@@ -16,86 +16,144 @@ from inferencesh import Inference, TaskStatus
  # Create client
  client = Inference(api_key="your-api-key")

- # Simple synchronous usage
- try:
-     task = client.run({
-         "app": "your-app",
-         "input": {"key": "value"},
-         "infra": "cloud",
-         "variant": "default"
-     })
-
-     print(f"Task ID: {task.get('id')}")
-
-     if task.get("status") == TaskStatus.COMPLETED:
-         print("✓ Task completed successfully!")
-         print(f"Output: {task.get('output')}")
-     else:
-         status = task.get("status")
-         status_name = TaskStatus(status).name if status is not None else "UNKNOWN"
-         print(f"✗ Task did not complete. Final status: {status_name}")
-
- except Exception as exc:
-     print(f"Error: {type(exc).__name__}: {exc}")
-     raise  # Re-raise to see full traceback
-
- # Streaming updates (recommended)
- try:
-     for update in client.run(
-         {
-             "app": "your-app",
-             "input": {"key": "value"},
-             "infra": "cloud",
-             "variant": "default"
-         },
-         stream=True  # Enable streaming updates
-     ):
-         status = update.get("status")
-         status_name = TaskStatus(status).name if status is not None else "UNKNOWN"
-         print(f"Status: {status_name}")
-
-         if status == TaskStatus.COMPLETED:
-             print("✓ Task completed!")
-             print(f"Output: {update.get('output')}")
-             break
-         elif status == TaskStatus.FAILED:
-             print(f"✗ Task failed: {update.get('error')}")
-             break
-         elif status == TaskStatus.CANCELLED:
-             print("✗ Task was cancelled")
+ # Simple synchronous usage - waits for completion by default
+ result = client.run({
+     "app": "your-app",
+     "input": {"key": "value"},
+     "infra": "cloud",
+     "variant": "default"
+ })
+
+ print(f"Task ID: {result.get('id')}")
+ print(f"Output: {result.get('output')}")
+ ```
+
+ ### run options
+
+ ```python
+ # Wait for completion (default behavior)
+ result = client.run(params)  # wait=True is default
+
+ # Return immediately without waiting
+ task = client.run(params, wait=False)
+ task_id = task["id"]  # Use this to check status later
+
+ # Stream updates as they happen
+ for update in client.run(params, stream=True):
+     print(f"Status: {TaskStatus(update['status']).name}")
+     if update.get("status") == TaskStatus.COMPLETED:
+         print(f"Output: {update.get('output')}")
+ ```
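The streaming loop added above only reacts to `COMPLETED`; the removed streaming example also stopped on `FAILED` and `CANCELLED`. A minimal sketch of that fuller loop, assuming the same update dict shape shown in the examples:

```python
# Sketch: stop on any terminal status, mirroring the removed example above
for update in client.run(params, stream=True):
    status = update.get("status")
    print(f"Status: {TaskStatus(status).name if status is not None else 'UNKNOWN'}")

    if status == TaskStatus.COMPLETED:
        print(f"Output: {update.get('output')}")
        break
    elif status == TaskStatus.FAILED:
        print(f"Task failed: {update.get('error')}")
        break
    elif status == TaskStatus.CANCELLED:
        print("Task was cancelled")
        break
```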
+
+ ### task management
+
+ ```python
+ # Get current task state
+ task = client.get_task(task_id)
+ print(f"Status: {TaskStatus(task['status']).name}")
+
+ # Cancel a running task
+ client.cancel(task_id)
+
+ # Wait for a task to complete
+ result = client.wait_for_completion(task_id)
+
+ # Stream updates for an existing task
+ with client.stream_task(task_id) as stream:
+     for update in stream:
+         print(f"Status: {TaskStatus(update['status']).name}")
+         if update.get("status") == TaskStatus.COMPLETED:
+             print(f"Result: {update.get('output')}")
              break

- except Exception as exc:
-     print(f"Error: {type(exc).__name__}: {exc}")
-     raise  # Re-raise to see full traceback
+ # Access final result after streaming
+ print(f"Final result: {stream.result}")
+ ```
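These calls compose with `wait=False`: a task submitted without waiting can be picked up later by `get_task`, `wait_for_completion`, or `stream_task`. A small sketch of that flow, using only the calls shown above:

```python
# Sketch: submit without blocking, then attach to the task's update stream later
task = client.run(params, wait=False)
task_id = task["id"]

with client.stream_task(task_id) as stream:
    for update in stream:
        if update.get("status") == TaskStatus.COMPLETED:
            break

print(f"Final result: {stream.result}")
```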

- # Async support
- async def run_async():
-     from inferencesh import AsyncInference
-
+ ### task status values
+
+ ```python
+ from inferencesh import TaskStatus
+
+ TaskStatus.RECEIVED     # 1 - Task received by server
+ TaskStatus.QUEUED       # 2 - Task queued for processing
+ TaskStatus.SCHEDULED    # 3 - Task scheduled to a worker
+ TaskStatus.PREPARING    # 4 - Worker preparing environment
+ TaskStatus.SERVING      # 5 - Model being loaded
+ TaskStatus.SETTING_UP   # 6 - Task setup in progress
+ TaskStatus.RUNNING      # 7 - Task actively running
+ TaskStatus.UPLOADING    # 8 - Uploading results
+ TaskStatus.COMPLETED    # 9 - Task completed successfully
+ TaskStatus.FAILED       # 10 - Task failed
+ TaskStatus.CANCELLED    # 11 - Task was cancelled
+ ```
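`COMPLETED`, `FAILED`, and `CANCELLED` are the terminal values. If you want to poll manually instead of using `wait_for_completion`, a sketch built on `get_task` (the helper and interval below are illustrative, not part of the SDK):

```python
import time

# Statuses after which the task will not change again
TERMINAL = {TaskStatus.COMPLETED, TaskStatus.FAILED, TaskStatus.CANCELLED}

def poll_until_done(client, task_id, interval=2.0):
    """Illustrative helper: poll get_task until a terminal status is reached."""
    while True:
        task = client.get_task(task_id)
        status = TaskStatus(task["status"])
        print(f"Status: {status.name}")
        if status in TERMINAL:
            return task
        time.sleep(interval)
```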
+
+ ### file upload
+
+ ```python
+ from inferencesh import UploadFileOptions
+
+ # Upload from file path
+ file_obj = client.upload_file("/path/to/image.png")
+ print(f"URI: {file_obj['uri']}")
+
+ # Upload from bytes
+ file_obj = client.upload_file(
+     b"raw bytes data",
+     UploadFileOptions(
+         filename="data.bin",
+         content_type="application/octet-stream"
+     )
+ )
+
+ # Upload with options
+ file_obj = client.upload_file(
+     "/path/to/image.png",
+     UploadFileOptions(
+         filename="custom_name.png",
+         content_type="image/png",
+         public=True  # Make publicly accessible
+     )
+ )
+ ```
+
+ Note: Files in task input are automatically uploaded. You only need `upload_file()` for manual uploads.
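For example, a local path referenced in the task input should be uploaded by the client before the task is submitted; the `image` field name below is illustrative, not a required key:

```python
# Hypothetical input field; the local file is uploaded automatically before submission
result = client.run({
    "app": "your-app",
    "input": {"image": "/path/to/image.png"},
    "infra": "cloud",
    "variant": "default"
})
```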
+
+ ## async client
+
+ ```python
+ from inferencesh import AsyncInference, TaskStatus
+
+ async def main():
      client = AsyncInference(api_key="your-api-key")

-     # Simple usage
+     # Simple usage - wait for completion
      result = await client.run({
          "app": "your-app",
          "input": {"key": "value"},
          "infra": "cloud",
          "variant": "default"
      })
+     print(f"Output: {result.get('output')}")
+
+     # Return immediately without waiting
+     task = await client.run(params, wait=False)

      # Stream updates
-     async for update in await client.run(
-         {
-             "app": "your-app",
-             "input": {"key": "value"},
-             "infra": "cloud",
-             "variant": "default"
-         },
-         stream=True
-     ):
-         status = update.get("status")
-         status_name = TaskStatus(status).name if status is not None else "UNKNOWN"
-         print(f"Status: {status_name}")
+     async for update in await client.run(params, stream=True):
+         print(f"Status: {TaskStatus(update['status']).name}")
+         if update.get("status") == TaskStatus.COMPLETED:
+             print(f"Output: {update.get('output')}")
+
+     # Task management
+     task = await client.get_task(task_id)
+     await client.cancel(task_id)
+     result = await client.wait_for_completion(task_id)
+
+     # Stream existing task
+     async with client.stream_task(task_id) as stream:
+         async for update in stream:
+             print(f"Update: {update}")
  ```
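The async example defines `main()` but does not show invoking it; the standard entry point is `asyncio.run` (not part of the snippet above):

```python
import asyncio

# Run the async example's main() coroutine to completion
asyncio.run(main())
```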

  ## file handling