Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2025-06-07 06:06:20 -04:00
parent e7ac06c169
commit 62c774bf24
1 changed file with 2 additions and 6 deletions
@@ -146,9 +146,8 @@ class LlamaServer:
         pprint.PrettyPrinter(indent=4, sort_dicts=False).pprint(printable_payload)
         print()
 
-        # Make a request with streaming enabled
-        response = self.session.post(url, json=payload, stream=True)
-        try:
+        # Make a direct request with streaming enabled using a context manager
+        with self.session.post(url, json=payload, stream=True) as response:
+            response.raise_for_status()  # Raise an exception for HTTP errors
 
             full_text = ""
 
@@ -186,9 +185,6 @@ class LlamaServer:
                     print(f"Problematic line: {line}")
                     continue
 
-        finally:
-            response.close()
-
     def generate(self, prompt, state):
         output = ""
         for output in self.generate_with_streaming(prompt, state):