
Commit c382d8f

Revert "llama_cpp server: mark model as required"
This reverts commit e40fcb0.
1 parent d8fddcc commit c382d8f

File tree

1 file changed: +18 −6 lines changed

llama_cpp/server/app.py

Lines changed: 18 additions & 6 deletions
@@ -149,8 +149,15 @@ class CreateCompletionRequest(BaseModel):
         description="The number of logprobs to generate. If None, no logprobs are generated."
     )
 
-    # ignored, but marked as required for the sake of compatibility with openai's api
-    model: str = model_field
+    # ignored or currently unsupported
+    model: Optional[str] = model_field
+    n: Optional[int] = 1
+    logprobs: Optional[int] = Field(None)
+    presence_penalty: Optional[float] = 0
+    frequency_penalty: Optional[float] = 0
+    best_of: Optional[int] = 1
+    logit_bias: Optional[Dict[str, float]] = Field(None)
+    user: Optional[str] = Field(None)
 
     # llama.cpp specific parameters
     top_k: int = top_k_field
@@ -190,11 +197,11 @@ def create_completion(
 
 
 class CreateEmbeddingRequest(BaseModel):
-    # ignored, but marked as required for the sake of compatibility with openai's api
-    model: str = model_field
+    model: Optional[str] = model_field
     input: str = Field(
         description="The input to embed."
     )
+    user: Optional[str]
 
     class Config:
         schema_extra = {
@@ -235,8 +242,13 @@ class CreateChatCompletionRequest(BaseModel):
     stop: Optional[List[str]] = stop_field
     stream: bool = stream_field
 
-    # ignored, but marked as required for the sake of compatibility with openai's api
-    model: str = model_field
+    # ignored or currently unsupported
+    model: Optional[str] = model_field
+    n: Optional[int] = 1
+    presence_penalty: Optional[float] = 0
+    frequency_penalty: Optional[float] = 0
+    logit_bias: Optional[Dict[str, float]] = Field(None)
+    user: Optional[str] = Field(None)
 
     # llama.cpp specific parameters
     top_k: int = top_k_field
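
With this revert, the OpenAI-compatibility fields are optional again rather than required. A minimal sketch of the effect (not part of the commit; it assumes pydantic v1 as the repo used at the time and replaces the module's model_field helper and the other request fields with plain defaults): a request body that omits "model" now validates instead of failing with a missing-field error.

from typing import Dict, Optional

from pydantic import BaseModel, Field


# Hypothetical, simplified stand-in for the server's CreateCompletionRequest:
# only the OpenAI-compatibility fields touched by this commit, with plain
# defaults instead of the module's model_field helper.
class CreateCompletionRequest(BaseModel):
    prompt: str = Field(description="The prompt to complete.")

    # ignored or currently unsupported, kept for OpenAI API compatibility
    model: Optional[str] = None
    n: Optional[int] = 1
    logprobs: Optional[int] = Field(None)
    presence_penalty: Optional[float] = 0
    frequency_penalty: Optional[float] = 0
    best_of: Optional[int] = 1
    logit_bias: Optional[Dict[str, float]] = Field(None)
    user: Optional[str] = Field(None)


# "model" can be omitted entirely; the compatibility fields fall back to
# their defaults instead of raising a validation error.
req = CreateCompletionRequest(prompt="Hello")
print(req.model, req.n, req.best_of)  # None 1 1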
