Runtime error
fix
llm_backend.py  +1 -5  CHANGED
@@ -111,11 +111,7 @@ class LlmBackend:
         return message_tokens

     def get_system_tokens(self):
-        system_message = {
-            "role": "system",
-            "content": self.SYSTEM_PROMPT
-        }
-        return self.get_message_tokens(self._model, **system_message)
+        return self.get_message_tokens(role="system", content=self.SYSTEM_PROMPT)

     def create_chat_generator_for_saiga(self, messages, parameters):
         print('create_chat_completion called')
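The old call passed self._model positionally and then expanded the same message fields as keyword arguments, which no longer matches a get_message_tokens that takes role and content directly and reads the model from self; that mismatch is the most plausible source of the runtime error this commit fixes. Below is a minimal sketch of how the fixed call site lines up with such a method, assuming a Saiga-style llama.cpp backend; the method body, token ids, and prompt text are illustrative assumptions, and only the get_system_tokens one-liner comes from this commit.

    # Hypothetical sketch; only get_system_tokens is taken from the commit.
    from llama_cpp import Llama

    ROLE_TOKENS = {"system": 1788, "user": 6435, "bot": 9225}  # assumed token ids
    LINEBREAK_TOKEN = 13  # assumed token id for "\n"


    class LlmBackend:
        # Placeholder; the real prompt is defined elsewhere in llm_backend.py.
        SYSTEM_PROMPT = "You are a helpful assistant."

        def __init__(self, model_path):
            # The model lives on the backend, so helpers read it from self.
            self._model = Llama(model_path=model_path)

        def get_message_tokens(self, role, content):
            # Tokenize one chat message and wrap it in role markers.
            message_tokens = self._model.tokenize(content.encode("utf-8"))
            message_tokens.insert(1, ROLE_TOKENS[role])
            message_tokens.insert(2, LINEBREAK_TOKEN)
            message_tokens.append(self._model.token_eos())
            return message_tokens

        def get_system_tokens(self):
            # The fixed one-liner: no explicit model argument, no intermediate dict.
            return self.get_message_tokens(role="system", content=self.SYSTEM_PROMPT)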