Mirror of https://github.com/oobabooga/text-generation-webui.git
API: Fix a regression
This commit is contained in:
parent
470c822f44
commit
e4d3f4449d
1 changed file with 5 additions and 2 deletions
@@ -146,8 +146,9 @@ class LlamaServer:
             pprint.PrettyPrinter(indent=4, sort_dicts=False).pprint(printable_payload)
             print()

-        # Make a direct request with streaming enabled using a context manager
-        with self.session.post(url, json=payload, stream=True) as response:
+        # Make the generation request
+        response = self.session.post(url, json=payload, stream=True)
+        try:
             response.raise_for_status()  # Raise an exception for HTTP errors

             full_text = ""
@@ -184,6 +185,8 @@ class LlamaServer:
                         print(f"JSON decode error: {e}")
                         print(f"Problematic line: {line}")
                         continue
+        finally:
+            response.close()

     def generate(self, prompt, state):
         output = ""
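In short, the fix drops the `with` context manager around the streaming request and switches to an explicit try/finally, so the response object stays open across the streaming loop and is only closed once processing ends. Below is a minimal, self-contained sketch of that pattern, assuming a llama.cpp-style /completion endpoint that streams "data: {...}" lines; the URL, payload fields, and helper name are illustrative, not taken from the repository.

# Minimal sketch of the try/finally streaming pattern used by this fix.
# The endpoint, payload fields and "data: " prefix handling are assumptions.
import json

import requests


def stream_completion(url, payload):
    """Yield the accumulated text of a streaming completion response."""
    session = requests.Session()

    # Open the streaming request without a context manager so the response
    # stays usable across every yield of this generator.
    response = session.post(url, json=payload, stream=True)

    try:
        response.raise_for_status()  # Raise an exception for HTTP errors

        full_text = ""
        for line in response.iter_lines():
            if not line:
                continue

            text = line.decode("utf-8")
            if text.startswith("data: "):
                text = text[len("data: "):]

            try:
                data = json.loads(text)
            except json.JSONDecodeError as e:
                print(f"JSON decode error: {e}")
                print(f"Problematic line: {line}")
                continue

            full_text += data.get("content", "")
            yield full_text
    finally:
        # Release the connection even if the consumer stops iterating early.
        response.close()


if __name__ == "__main__":
    # Hypothetical local server; prints progressively longer completions.
    for text in stream_completion(
        "http://127.0.0.1:8080/completion",
        {"prompt": "Hello", "n_predict": 32, "stream": True},
    ):
        print(text)

The design point is that the response must outlive each yield of the generator; closing it in the finally block guarantees the connection is released even when the caller abandons the stream early or an error is raised mid-stream.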