CISCai committed (verified) · Commit 639a998 · 1 Parent(s): fe2973a

Add grammar example

Files changed (1): README.md (+74 -5)
README.md CHANGED
@@ -160,15 +160,33 @@ pip install llama-cpp-python
 
 ```python
 from llama_cpp import Llama
+from llama_cpp.llama_grammar import LlamaGrammar
+import json
 
 # Chat Completion API
 
+grammar = LlamaGrammar.from_json_schema(json.dumps({
+  "type": "array",
+  "items": {
+    "type": "object",
+    "required": [ "name", "arguments" ],
+    "properties": {
+      "name": {
+        "type": "string"
+      },
+      "arguments": {
+        "type": "object"
+      }
+    }
+  }
+}))
+
 llm = Llama(model_path="./gorilla-openfunctions-v2.IQ3_M.gguf", n_gpu_layers=33, n_ctx=16384, temperature=0.0, repeat_penalty=1.1)
-print(llm.create_chat_completion(
+response = llm.create_chat_completion(
       messages = [
         {
           "role": "user",
-          "content": "What's the weather like in Oslo?"
+          "content": "What's the weather like in Oslo and Stockholm?"
         }
       ],
       tools=[{
@@ -192,12 +210,63 @@ print(llm.create_chat_completion(
           }
         }
       }],
-      tool_choice={
+      grammar = grammar
+)
+print(json.loads(response["choices"][0]["text"]))
+
+print(llm.create_chat_completion(
+      messages = [
+        {
+          "role": "user",
+          "content": "What's the weather like in Oslo and Stockholm?"
+        },
+        { # The tool_calls is from the response to the above with tool_choice active
+          "role": "assistant",
+          "content": None,
+          "tool_calls": [
+            {
+              "id": "call__0_get_current_weather_cmpl-...",
+              "type": "function",
+              "function": {
+                "name": "get_current_weather",
+                "arguments": '{ "location": "Oslo, NO" ,"unit": "celsius"} '
+              }
+            }
+          ]
+        },
+        { # The tool_call_id is from tool_calls and content is the result from the function call you made
+          "role": "tool",
+          "content": 20,
+          "tool_call_id": "call__0_get_current_weather_cmpl-..."
+        }
+      ],
+      tools=[{
         "type": "function",
         "function": {
-          "name": "get_current_weather"
+          "name": "get_current_weather",
+          "description": "Get the current weather in a given location",
+          "parameters": {
+            "type": "object",
+            "properties": {
+              "location": {
+                "type": "string",
+                "description": "The city and state, e.g. San Francisco, CA"
+              },
+              "unit": {
+                "type": "string",
+                "enum": [ "celsius", "fahrenheit" ]
+              }
+            },
+            "required": [ "location" ]
+          }
         }
-      }
+      }],
+      #tool_choice={
+      #  "type": "function",
+      #  "function": {
+      #    "name": "get_current_weather"
+      #  }
+      #}
 ))
 ```
 
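
For context on what the added grammar buys: the JSON schema passed to `LlamaGrammar.from_json_schema` constrains the first completion to a JSON array of `{ "name", "arguments" }` objects, which can be parsed with `json.loads` and dispatched to local functions before the results are fed back as `"role": "tool"` messages. Below is a minimal sketch of that dispatch step, separate from the diff; the `get_current_weather` stub and the example output string are illustrative assumptions, not part of the commit.

```python
import json

# Illustrative stand-in for a real weather lookup (assumption, not from the commit).
def get_current_weather(location: str, unit: str = "celsius") -> float:
    return 20.0

# Example of text a grammar-constrained completion could produce:
# an array of { "name", "arguments" } objects, one element per requested city.
constrained_text = (
    '[{"name": "get_current_weather", "arguments": {"location": "Oslo, NO", "unit": "celsius"}},'
    ' {"name": "get_current_weather", "arguments": {"location": "Stockholm, SE", "unit": "celsius"}}]'
)

local_tools = {"get_current_weather": get_current_weather}

for call in json.loads(constrained_text):
    result = local_tools[call["name"]](**call["arguments"])
    # Each result would go back to the model as a {"role": "tool", "content": ..., "tool_call_id": ...}
    # message, as the second create_chat_completion() call in the diff illustrates.
    print(call["name"], call["arguments"]["location"], "->", result)
```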