New attempt

oobabooga 2025-05-10 19:20:04 -07:00
parent 0c5fa3728e
commit e7ac06c169


@@ -146,8 +146,9 @@ class LlamaServer:
             pprint.PrettyPrinter(indent=4, sort_dicts=False).pprint(printable_payload)
             print()
 
-        # Make a direct request with streaming enabled using a context manager
-        with self.session.post(url, json=payload, stream=True) as response:
+        # Make a request with streaming enabled
+        response = self.session.post(url, json=payload, stream=True)
+        try:
             response.raise_for_status()  # Raise an exception for HTTP errors
 
             full_text = ""
@@ -185,6 +186,9 @@ class LlamaServer:
                         print(f"Problematic line: {line}")
                         continue
+        finally:
+            response.close()
+
 
     def generate(self, prompt, state):
         output = ""
         for output in self.generate_with_streaming(prompt, state):
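
For context, the new code path follows a standard requests streaming pattern: issue the POST with stream=True, consume lines from the response, and release the connection in a finally block so cleanup happens even if the consumer stops iterating early. Below is a minimal runnable sketch of that pattern, assuming the requests library; the function name, URL, and payload are illustrative placeholders, not the project's actual API.

import requests

# Sketch of the try/finally streaming pattern this commit adopts.
# stream_completion, url, and payload are hypothetical stand-ins.
def stream_completion(url, payload):
    session = requests.Session()
    response = session.post(url, json=payload, stream=True)
    try:
        response.raise_for_status()  # Raise an exception for HTTP errors
        for line in response.iter_lines():
            if line:
                yield line.decode("utf-8")
    finally:
        # Release the underlying connection even if the caller abandons
        # the generator before the stream is exhausted.
        response.close()

Because the finally clause of a generator also runs when the generator is closed or garbage-collected, response.close() fires whether the stream completes, raises, or is abandoned mid-iteration.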