Use Mojo::Feed now, because XML::RSS is too strict. Also adding my own OpenAI client that is actually async
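
For context: XML::RSS (built on XML::Parser) bails out entirely on feeds it considers invalid, while Mojo::Feed parses RSS and Atom from a raw body and extracts whatever is present. Below is a rough sketch of the kind of Mojo::Feed usage this commit moves toward; the inline feed body is invented purely for illustration and is not a feed from this repo.

#!/usr/bin/env perl
use v5.38.0;
use Mojo::Feed;

# Made-up feed body, used only to illustrate the lenient parsing
my $body = <<'XML';
<rss version="2.0"><channel>
  <item><title>First post</title><link>https://example.com/1</link></item>
  <item><title>Second post</title><link>https://example.com/2</link></item>
</channel></rss>
XML

# Mojo::Feed doesn't complain about the channel missing its usual metadata;
# it just returns the items it can find.
my $feed = Mojo::Feed->new(body => $body);
say $_->title, " -> ", $_->link for $feed->items->each;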

Ryan Voots 2023-11-12 08:07:56 -05:00
parent 2610608dd0
commit d257f5f05a
7 changed files with 221 additions and 1 deletion


@@ -1000,6 +1000,9 @@ Alien::Util local/lib/perl5/Alien/Util.pm 1;" p
Alien::Util::version_cmp local/lib/perl5/Alien/Util.pm 17;" s
AndExpr local/lib/perl5/XML/XPath/Parser.pm 258;" s
App lib/App.pm 1;" p
AppConfig lib/AppConfig.pm 1;" p
AppConfig::get_config lib/AppConfig.pm 17;" s
AppConfig::load_config lib/AppConfig.pm 21;" s
ArgsObject local/lib/perl5/Type/Params.pm 67;" s
Arguments local/lib/perl5/XML/XPath/Parser.pm 502;" s
ArrayLike local/lib/perl5/Types/TypeTiny.pm 301;" s
@@ -17761,6 +17764,7 @@ get_cipher local/lib/perl5/IO/Socket/SSL.pm 1975;" s
get_class_store local/lib/perl5/namespace/clean.pm 209;" s
get_coercion local/lib/perl5/Type/Library.pm 342;" s
get_coercion local/lib/perl5/Types/TypeTiny.pm 115;" s
get_config lib/AppConfig.pm 17;" s
get_connection local/lib/perl5/Net/Async/HTTP.pm 482;" s
get_connections local/lib/perl5/LWP/ConnCache.pm 152;" s
get_content_handler local/lib/perl5/XML/SAX/Base.pm 2775;" s
@@ -18982,6 +18986,7 @@ load local/lib/perl5/Mojolicious/Sessions.pm 15;" s
load_app local/lib/perl5/Mojo/Server.pm 45;" s
load_class local/lib/perl5/Mojo/Loader.pm 40;" s
load_classes local/lib/perl5/Mojo/Loader.pm 56;" s
load_config lib/AppConfig.pm 21;" s
load_config local/bin/config_data 90;" s
load_cookie local/lib/perl5/HTTP/Cookies/Microsoft.pm 120;" s
load_cookies local/lib/perl5/HTTP/CookieJar.pm 252;" s

bin/test_openai.pl Executable file

@@ -0,0 +1,18 @@
#!/usr/bin/env perl
use v5.38.0;

use lib::relative '../lib';

use AppConfig;
use OpenAIAsync::Client;
use App qw/$loop/;

my $conf = AppConfig->get_config();

my $client = OpenAIAsync::Client->new(
    api_base => $conf->api_services->{base},
    api_key  => "12345", # Same as my luggage
);

$loop->add($client); # the client is an IO::Async::Notifier, so it gets added to the loop
$client->completion("foo")->get(); # completion() returns a Future; ->get() blocks until it resolves

bin/test_rss.pl Executable file

@@ -0,0 +1,10 @@
#!/usr/bin/env perl
use v5.38.0;

use lib::relative '../lib';

use AppConfig;
use Feed;

my $feed = Feed::get_articles();
$feed->get(); # get_articles() is async and returns a Future; block until it finishes


@@ -1,3 +1,6 @@
[api_services]
embedding_base = "http://openai.embeddings-family.brainiac.ai.simcop2387.info/v1"
base = "http://openai.general-family.brainiac.ai.simcop2387.info/v1"
[rss_services]
url = "https://rss.simcop2387.info/tt-rss/public.php?op=rss&id=-3&is_cat=0&q=&key=o38exc654e4a4bc4733"

lib/AppConfig.pm Normal file

@@ -0,0 +1,36 @@
package AppConfig;

use v5.38.0;
use Object::Pad;
use feature 'signatures';

our $config_singleton;

class AppConfig {
    use TOML;
    use Path::Tiny;
    use Carp qw/croak/;

    field $api_services :param :accessor;
    field $rss_services :param :accessor;

    sub get_config($class) {
        return $class->load_config("config.toml");
    }

    sub load_config($class, $file) {
        # Reuse the config if it has already been loaded once
        return $config_singleton if $config_singleton;

        my $toml_data = path($file)->slurp_utf8();
        my ($data, $err) = from_toml($toml_data);

        if ($err) {
            croak "Couldn't parse $file: $err";
        }

        $config_singleton = $class->new($data->%*);
        return $config_singleton;
    }
}

1;


@@ -3,8 +3,28 @@ use v5.38.0;
use OpenAI::API;
use Future::AsyncAwait;
use feature 'signatures';
use Net::Async::HTTP;
use App qw/$loop/;
use AppConfig;
use URI;
use Mojo::Feed;

async sub summarize($content) {
}

my $http = Net::Async::HTTP->new();
$loop->add($http);

my $conf = AppConfig->get_config();

async sub get_articles() {
    my $url = URI->new($conf->rss_services->{url});
    print $url; # debug: show which feed we're fetching

    my $resp = $loop->await($http->do_request(uri => $url))->get();

    my $rss = Mojo::Feed->new(body => $resp->content);

    # debug: dump the parsed feed structure while this is being built out
    use Data::Dumper;
    print Dumper($rss->to_hash);
}

1;

lib/OpenAIAsync/Client.pm Normal file

@@ -0,0 +1,128 @@
use v5.38.0;
use Object::Pad;

use IO::Async::SSL; # We're not directly using it, but I want to enforce that it gets pulled in when detecting dependencies, since OpenAI itself is always https
use Future::AsyncAwait;

class OpenAIAsync::Client :repr(HASH) :isa(IO::Async::Notifier) {
    use JSON::MaybeXS qw/encode_json decode_json/;
    use Net::Async::HTTP;
    use Feature::Compat::Try;
    use URI;
    field $_http_max_in_flight :param(http_max_in_flight) = 2;
    field $_http_max_redirects :param(http_max_redirects) = 3;
    field $_http_max_connections_per_host :param(http_max_connections_per_host) = 2;
    field $_http_timeout :param(http_timeout) = 120;             # My personal server is kinda slow, use a generous default
    field $_http_stall_timeout :param(http_stall_timeout) = 600; # generous for my slow personal server
    field $_http_proxy_host :param(http_proxy_host) = undef;
    field $_http_proxy_port :param(http_proxy_port) = undef;
    field $_http_proxy_path :param(http_proxy_path) = undef;

    field $http;

    field $api_base :param(api_base) = $ENV{OPENAI_API_BASE} // "https://api.openai.com/v1";
    field $api_key  :param(api_key)  = $ENV{OPENAI_API_KEY};
    method configure(%params) {
        # Only parameters explicitly wrapped in io_async_notifier_params get forwarded to IO::Async::Notifier
        my %io_async_params = ($params{io_async_notifier_params} // {})->%*;
        IO::Async::Notifier::configure($self, %io_async_params);
    }
    method __make_http() {
        die "Missing API Key for OpenAI" unless $api_key;

        return Net::Async::HTTP->new(
            user_agent => "SNN OpenAI Client 1.0",
            headers => {
                "Authorization" => "Bearer $api_key",
                "Content-Type"  => "application/json",
            },
            max_redirects            => $_http_max_redirects,
            max_connections_per_host => $_http_max_connections_per_host,
            max_in_flight            => $_http_max_in_flight,
            timeout                  => $_http_timeout,
            stall_timeout            => $_http_stall_timeout,
            # TODO proxy stuff
        );
    }
    ADJUST {
        $http = $self->__make_http;
        $api_base =~ s|/$||; # trim an accidental trailing /, since we append it to the endpoint paths
    }
    async method _make_request($endpoint, $data) {
        my $json = encode_json($data);
        my $url  = URI->new($api_base . $endpoint);

        my $result = await $http->do_request(
            uri          => $url,
            method       => "POST",
            content      => $json,
            content_type => 'application/json',
        );

        if ($result->is_success) {
            my $json = $result->decoded_content;
            my $out_data = decode_json($json);

            return $out_data;
        } else {
            die "Failure in talking to OpenAI service: " . $result->status_line . ": " . $result->decoded_content;
        }
    }
    method _add_to_loop($loop) {
        $loop->add($http);
    }

    method _remove_from_loop($loop) {
        $loop->remove($http);
        $http = $self->__make_http; # overkill? want to make sure we have a clean one
    }
    # This is the legacy completion API
    async method completion($prompt, $model = "gpt-3.5-turbo", $max_tokens = 500, $temperature = 1, $top_p = 0.9, $seed = -1) {
        my $request_body = {
            model       => $model, # ignored by my local service
            prompt      => $prompt,
            max_tokens  => $max_tokens,
            temperature => $temperature,
            top_p       => $top_p,
            seed        => $seed,
        };

        my $res = await $self->_make_request("/completions", $request_body);

        # Debug output until proper response objects exist
        use Data::Dumper;
        print Dumper($res);

        return $res;
    }
    async method chat($prompt) {
        ...
    }

    async method embedding($input) {
        ...
    }

    async method image_generate($input) {
        ...
    }

    async method text_to_speech($text) {
        ...
    }

    async method speech_to_text($sound_data) {
        ...
    }

    async method vision($image, $prompt) {
        ...
    }
}