Mirror of https://github.com/pragma-/pbot.git, synced 2024-11-22 20:09:43 +01:00
Convert HashObject file format to JSON to support newlines in factoid contents, etc.

commit 5a26aca801 (parent 33059a54d9)
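
The motivation for the format change: the old flat-file format stored one "key: value" pair per line, with blank lines separating records, so any value containing a newline broke the parser. In JSON, newlines inside strings are escaped as \n, so multi-line factoid contents survive the round trip. A minimal sketch of that round trip (illustration only, not code from this commit):

    use strict;
    use warnings;
    use JSON;

    # A value with an embedded newline: impossible to store in the old
    # line-based "key: value" format, but fine as a JSON string.
    my $hash = { '#channel' => { 'hello' => { action => "line one\nline two" } } };

    my $json_text = JSON->new->pretty->encode($hash);  # newline is escaped as \n
    my $decoded   = decode_json $json_text;            # round-trips intact

    print $decoded->{'#channel'}{'hello'}{action}, "\n";
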
@@ -54,7 +54,7 @@ sub initialize {
   $self->{changinghost} = {}; # tracks nicks changing hosts/identifying to strongly link them
 
   my $filename = delete $conf{whitelist_file} // $self->{pbot}->{registry}->get_value('general', 'data_dir') . '/whitelist';
-  $self->{whitelist} = PBot::DualIndexHashObject->new(name => 'Whitelist', filename => $filename);
+  $self->{whitelist} = PBot::DualIndexHashObject->new(name => 'Whitelist', filename => $filename, pbot => $self->{pbot});
   $self->{whitelist}->load;
 
   $self->{pbot}->{timer}->register(sub { $self->adjust_offenses }, 60 * 60 * 1);

@@ -405,6 +405,7 @@ sub check_flood {
       }
 
       if ($self->whitelisted($channel, "$nick!$user\@$host", 'antiflood')) {
+        $self->{pbot}->{logger}->log("$nick!$user\@$host anti-flood whitelisted, disgregarding ban\n");
         next;
       }
 

@@ -35,7 +35,7 @@ sub initialize {
   $self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
 
   my $filename = delete $conf{spamkeywords_file} // $self->{pbot}->{registry}->get_value('general', 'data_dir') . '/spam_keywords';
-  $self->{keywords} = PBot::DualIndexHashObject->new(name => 'SpamKeywords', filename => $filename);
+  $self->{keywords} = PBot::DualIndexHashObject->new(name => 'SpamKeywords', filename => $filename, pbot => $self->{pbot});
   $self->{keywords}->load;
 
   $self->{pbot}->{registry}->add_default('text', 'antispam', 'enforce', $conf{enforce_antispam} // 1);

@@ -110,7 +110,6 @@ sub load_blacklist {
   }
 
   $self->{pbot}->{logger}->log(" $i entries in blacklist\n");
-  $self->{pbot}->{logger}->log("Done.\n");
 }
 
 sub save_blacklist {

@@ -46,7 +46,7 @@ sub initialize {
   }
 
   $self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
-  $self->{admins} = PBot::DualIndexHashObject->new(name => 'Admins', filename => $filename);
+  $self->{admins} = PBot::DualIndexHashObject->new(name => 'Admins', filename => $filename, pbot => $self->{pbot});
   $self->{commands} = PBot::BotAdminCommands->new(pbot => $self->{pbot});
   $self->{export_path} = $export_path;
   $self->{export_site} = $export_site;

@@ -125,7 +125,6 @@ sub load_admins {
   }
 
   $self->{pbot}->{logger}->log(" $i admins loaded.\n");
-  $self->{pbot}->{logger}->log("Done.\n");
 }
 
 sub save_admins {

@@ -14,6 +14,7 @@ use warnings;
 use strict;
 
 use Text::Levenshtein qw(fastdistance);
+use JSON;
 use Carp ();
 
 sub new {

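For context on the new import: the JSON module provides both a functional interface (encode_json and decode_json, exported by default) and an OO interface (JSON->new, with tuning methods such as pretty and canonical). The rewritten load below uses the functional decode_json, while save uses the OO encoder. A small sketch of the two styles (not from this commit):

    use JSON;

    my $data    = { alpha => 1 };
    my $compact = encode_json $data;                 # functional: {"alpha":1}
    my $pretty  = JSON->new->pretty->encode($data);  # OO: indented, human-editable
    my $decoded = decode_json $compact;              # back to a hashref
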
@@ -33,21 +34,17 @@ sub initialize {
 
   $self->{name} = delete $conf{name} // 'Dual Index hash object';
   $self->{filename} = delete $conf{filename} // Carp::carp("Missing filename to DualIndexHashObject, will not be able to save to or load from file.");
-  $self->{ignore_duplicates} = delete $conf{ignore_duplicates} // 0;
+  $self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
   $self->{hash} = {};
 }
 
 
-sub load_hash_add {
-  my ($self, $primary_index_key, $secondary_index_key, $hash, $i, $filename) = @_;
+sub hash_add {
+  my ($self, $primary_index_key, $secondary_index_key, $hash) = @_;
 
   if (defined $hash) {
-    if (not $self->{ignore_duplicates} and exists $self->hash->{$primary_index_key}->{$secondary_index_key}) {
-      if ($i) {
-        Carp::croak "Duplicate secondary_index_key '$secondary_index_key' found in $filename around line $i\n";
-      } else {
-        return undef;
-      }
+    if (exists $self->hash->{$primary_index_key}->{$secondary_index_key}) {
+      return undef;
     }
 
     foreach my $key (keys %$hash) {

@@ -68,59 +65,20 @@ sub load {
     return;
   }
 
+  $self->{pbot}->{logger}->log("Loading $self->{name} from $filename ...\n");
+
   if (not open(FILE, "< $filename")) {
     Carp::carp "Skipping loading from file: Couldn't open $filename: $!\n";
     return;
   }
 
-  my ($primary_index_key, $secondary_index_key, $i, $hash);
-  $hash = {};
-
-  foreach my $line (<FILE>) {
-    $i++;
-
-    $line =~ s/^\s+//;
-    $line =~ s/\s+$//;
-
-    if ($line =~ /^\[(.*)\]$/) {
-      $primary_index_key = $1;
-      next;
-    }
-
-    if ($line =~ /^<(.*)>$/) {
-      $secondary_index_key = $1;
-
-      if (not $self->{ignore_duplicates} and exists $self->hash->{$primary_index_key}->{$secondary_index_key}) {
-        Carp::croak "Duplicate secondary_index_key '$secondary_index_key' at line $i of $filename\n";
-      }
-
-      next;
-    }
-
-    if ($line eq '') {
-      # store the old hash
-      $self->load_hash_add($primary_index_key, $secondary_index_key, $hash, $i, $filename);
-
-      # start a new hash
-      $hash = {};
-      next;
-    }
-
-    my ($key, $value) = split /:/, $line, 2;
-
-    $key =~ s/^\s+//;
-    $key =~ s/\s+$//;
-    $value =~ s/^\s+//;
-    $value =~ s/\s+$//;
-
-    if (not length $key or not length $value) {
-      Carp::croak "Missing key or value at line $i of $filename\n";
-    }
-
-    $hash->{$key} = $value;
-  }
-
-  close(FILE);
+  my $contents = do {
+    local $/;
+    <FILE>;
+  };
+
+  $self->{hash} = decode_json $contents;
+  close FILE;
 }
 
 sub save {

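The do { local $/; <FILE> } block in the new load is the standard Perl whole-file slurp: localizing $/ (the input record separator) to undef for the block makes a single readline return the entire file, which decode_json then parses in one step. A standalone equivalent (hypothetical filename, not from this commit):

    open my $fh, '<', 'data/whitelist' or die "Couldn't open: $!";  # hypothetical path
    my $contents = do {
        local $/;   # record separator is undef for this block only
        <$fh>;      # one read returns the whole file
    };
    close $fh;
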
@@ -134,21 +92,14 @@ sub save {
     return;
   }
 
+  $self->{pbot}->{logger}->log("Saving $self->{name} to $filename\n");
+
+  my $json = JSON->new;
+  $json->space_before(0);
+  my $json_text = $json->pretty->encode($self->{hash});
 
   open(FILE, "> $filename") or die "Couldn't open $filename: $!\n";
-  foreach my $primary_index_key (sort keys %{ $self->hash }) {
-    print FILE "[$primary_index_key]\n";
-
-    foreach my $secondary_index_key (sort keys %{ $self->hash->{$primary_index_key} }) {
-      print FILE "<$secondary_index_key>\n";
-
-      foreach my $key (sort keys %{ $self->hash->{$primary_index_key}->{$secondary_index_key} }) {
-        print FILE "$key: " . $self->hash->{$primary_index_key}->{$secondary_index_key}{$key} . "\n";
-      }
-      print FILE "\n";
-    }
-  }
-
+  print FILE "$json_text\n";
   close FILE;
 }
 

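A note on the encoder setup above: JSON's pretty() is shorthand for enabling indent, space_before and space_after, so the earlier space_before(0) call is effectively overridden — consistent with the "key" : "value" spacing visible in the converted data files at the end of this diff. To actually drop the space before the colon, the flag would have to come after pretty (a sketch, not from this commit):

    use JSON;

    my $json = JSON->new->pretty;  # pretty() = indent + space_before + space_after
    $json->space_before(0);        # applied after pretty, so it sticks: "key": "value"
    print $json->encode({ name => 'pbot3' });
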
@@ -314,7 +265,7 @@ sub unset {
 sub add {
   my ($self, $primary_index_key, $secondary_index_key, $hash) = @_;
 
-  if ($self->load_hash_add($primary_index_key, $secondary_index_key, $hash, 0)) {
+  if ($self->hash_add($primary_index_key, $secondary_index_key, $hash)) {
     $self->save();
   } else {
     return "Error occurred adding new $self->{name} object.";

@@ -50,9 +50,9 @@ sub initialize {
   my $export_path = delete $conf{export_path};
   my $export_site = delete $conf{export_site};
 
-  my $pbot = delete $conf{pbot} // Carp::croak("Missing pbot reference to Factoids");
+  my $pbot = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
 
-  $self->{factoids} = PBot::DualIndexHashObject->new(name => 'Factoids', filename => $filename);
+  $self->{factoids} = PBot::DualIndexHashObject->new(name => 'Factoids', filename => $filename, pbot => $pbot);
   $self->{export_path} = $export_path;
   $self->{export_site} = $export_site;
 

@@ -90,8 +90,6 @@ sub load_factoids {
   }
 
   $self->{pbot}->{logger}->log(" " . ($text + $regex + $modules) . " factoids loaded ($text text, $regex regexs, $modules modules).\n");
-  $self->{pbot}->{logger}->log("Done.\n");
-
   $self->add_default_factoids();
 }
 

@@ -15,6 +15,7 @@ use strict;
 
 use Text::Levenshtein qw(fastdistance);
 use Carp ();
+use JSON;
 
 sub new {
   if (ref($_[1]) eq 'HASH') {

@@ -33,20 +34,16 @@ sub initialize {
 
   $self->{name} = delete $conf{name} // 'hash object';
   $self->{filename} = delete $conf{filename} // Carp::carp("Missing filename to HashObject, will not be able to save to or load from file.");
-  $self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to HashObject");
+  $self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
   $self->{hash} = {};
 }
 
-sub load_hash_add {
-  my ($self, $index_key, $hash, $i, $filename) = @_;
+sub hash_add {
+  my ($self, $index_key, $hash) = @_;
 
   if (defined $hash) {
     if (exists $self->hash->{$index_key}) {
-      if ($i) {
-        Carp::croak "Duplicate hash '$index_key' found in $filename around line $i\n";
-      } else {
-        return undef;
-      }
+      return undef;
     }
 
     foreach my $key (keys %$hash) {

@@ -68,54 +65,21 @@ sub load {
     return;
   }
 
-  $self->{pbot}->{logger}->log("Loading $self->{name} objects from $filename ...\n");
+  $self->{pbot}->{logger}->log("Loading $self->{name} from $filename ...\n");
 
   if (not open(FILE, "< $filename")) {
-    Carp::carp "Couldn't open $filename: $!\n";
-    Carp::carp "Skipping loading from file.\n";
+    Carp::carp "Skipping loading from file: Couldn't open $filename: $!\n";
     return;
   }
 
-  my ($hash, $index_key, $i);
-  $hash = {};
-
-  foreach my $line (<FILE>) {
-    $i++;
-
-    $line =~ s/^\s+//;
-    $line =~ s/\s+$//;
-
-    if ($line =~ /^\[(.*)\]$/) {
-      $index_key = $1;
-      next;
-    }
-
-    if ($line eq '') {
-      # store the old hash
-      $self->load_hash_add($index_key, $hash, $i, $filename);
-
-      # start a new hash
-      $hash = {};
-      next;
-    }
-
-    my ($key, $value) = split /\:/, $line, 2;
-
-    if (not defined $key or not defined $value) {
-      Carp::croak "Error around line $i of $filename\n";
-    }
-
-    $key =~ s/^\s+//;
-    $key =~ s/\s+$//;
-    $value =~ s/^\s+//;
-    $value =~ s/\s+$//;
-
-    $hash->{$key} = $value;
-  }
-
-  close(FILE);
-
-  $self->{pbot}->{logger}->log("Done.\n");
+  my $contents = do {
+    local $/;
+    <FILE>;
+  };
+
+  $self->{hash} = decode_json $contents;
+
+  close FILE;
 }
 
 sub save {

||||||
|
|
||||||
sub save {
|
sub save {
|
||||||
@ -129,16 +93,14 @@ sub save {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
$self->{pbot}->{logger}->log("Saving $self->{name} to $filename\n");
|
||||||
|
|
||||||
|
my $json = JSON->new;
|
||||||
|
$json->space_before(0);
|
||||||
|
my $json_text = $json->pretty->encode($self->{hash});
|
||||||
|
|
||||||
open(FILE, "> $filename") or die "Couldn't open $filename: $!\n";
|
open(FILE, "> $filename") or die "Couldn't open $filename: $!\n";
|
||||||
|
print FILE "$json_text\n";
|
||||||
foreach my $index (sort keys %{ $self->hash }) {
|
|
||||||
print FILE "[$index]\n";
|
|
||||||
|
|
||||||
foreach my $key (sort keys %{ ${ $self->hash }{$index} }) {
|
|
||||||
print FILE "$key: ${ $self->hash }{$index}{$key}\n";
|
|
||||||
}
|
|
||||||
print FILE "\n";
|
|
||||||
}
|
|
||||||
close(FILE);
|
close(FILE);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -245,7 +207,7 @@ sub unset {
 sub add {
   my ($self, $index_key, $hash) = @_;
 
-  if ($self->load_hash_add($index_key, $hash, 0)) {
+  if ($self->hash_add($index_key, $hash)) {
     $self->save();
   } else {
     return "Error occurred adding new $self->{name} object.";

@@ -112,7 +112,6 @@ sub load_ignores {
   }
 
   $self->{pbot}->{logger}->log(" $i entries in ignorelist\n");
-  $self->{pbot}->{logger}->log("Done.\n");
 }
 
 sub save_ignores {

@@ -122,6 +122,7 @@ sub load_questions {
     };
     local $/;
     <$fh>;
+    close $fh;
   };
 
   $self->{loaded_filename} = $filename;

|
|||||||
$self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
|
$self->{pbot} = delete $conf{pbot} // Carp::croak("Missing pbot reference to " . __FILE__);
|
||||||
my $filename = delete $conf{filename};
|
my $filename = delete $conf{filename};
|
||||||
|
|
||||||
$self->{registry} = PBot::DualIndexHashObject->new(name => 'Registry', filename => $filename, ignore_duplicates => 1);
|
$self->{registry} = PBot::DualIndexHashObject->new(name => 'Registry', filename => $filename, pbot => $self->{pbot});
|
||||||
$self->{triggers} = {};
|
$self->{triggers} = {};
|
||||||
|
|
||||||
$self->{pbot}->{atexit}->register(sub { $self->save; return; });
|
$self->{pbot}->{atexit}->register(sub { $self->save; return; });
|
||||||
@@ -56,8 +56,6 @@ sub load {
       $self->process_trigger($section, $item, $self->{registry}->hash->{$section}->{$item}->{value});
     }
   }
-
-  $self->{pbot}->{logger}->log("Done.\n");
 }
 
 sub save {

@@ -1,6 +1,10 @@
-[.*]
-<pbot3!stdin@localhost>
-level: 90
-name: pbot3
-password: admin
+{
+   ".*" : {
+      "pbot3!stdin@localhost" : {
+         "password" : "admin",
+         "level" : "90",
+         "name" : "pbot3"
+      }
+   }
+}
 

@@ -1,4 +1,7 @@
-[#pbot2]
-chanop: 0
-enabled: 1
+{
+   "#pbot2" : {
+      "enabled" : "1",
+      "chanop" : "0"
+   }
+}
 

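The two converted data files above show the new on-disk shape: data/admins nests the primary index ".*" over the admin's hostmask (a DualIndexHashObject), while data/channels keeps one level of keys per channel. Decoding maps straight onto the nested Perl hashes the objects already used internally. A quick check against the channels file (sketch, not from this commit):

    use JSON;

    my $channels = decode_json <<'EOF';
    {
       "#pbot2" : {
          "enabled" : "1",
          "chanop" : "0"
       }
    }
    EOF

    print $channels->{'#pbot2'}{enabled}, "\n";  # prints 1
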
data/factoids (105126) — normal file → executable file; file diff suppressed because it is too large.