Commit b0636b40ab (parent 089dce3853) in https://github.com/jlu5/PyLink.git
@@ -6,7 +6,7 @@ import collections
 import threading
 import json
 
-from pylinkirc import utils, conf, world
+from pylinkirc import utils, conf, world, structures
 from pylinkirc.log import log
 from pylinkirc.coremods import permissions
 
@@ -19,56 +19,21 @@ reply = modebot.reply
 
 # Databasing variables.
 dbname = utils.getDatabaseName('automode')
-db = collections.defaultdict(dict)
-exportdb_timer = None
 
 save_delay = conf.conf['bot'].get('save_delay', 300)
+datastore = structures.JSONDataStore('automode', dbname, save_frequency=save_delay, default_db=collections.defaultdict(dict))
+
+db = datastore.store
 
 # The default set of Automode permissions.
 default_permissions = {"$ircop": ['automode.manage.relay_owned', 'automode.sync.relay_owned',
                                   'automode.list']}
 
-def loadDB():
-    """Loads the Automode database, silently creating a new one if this fails."""
-    global db
-    try:
-        with open(dbname, "r") as f:
-            db.update(json.load(f))
-    except (ValueError, IOError, OSError):
-        log.info("Automode: failed to load ACL database %s; creating a new one in "
-                 "memory.", dbname)
-
-def exportDB():
-    """Exports the automode database."""
-
-    log.debug("Automode: exporting database to %s.", dbname)
-    with open(dbname, 'w') as f:
-        # Pretty print the JSON output for better readability.
-        json.dump(db, f, indent=4)
-
-def scheduleExport(starting=False):
-    """
-    Schedules exporting of the Automode database in a repeated loop.
-    """
-    global exportdb_timer
-
-    if not starting:
-        # Export the database, unless this is being called the first
-        # thing after start (i.e. DB has just been loaded).
-        exportDB()
-
-    exportdb_timer = threading.Timer(save_delay, scheduleExport)
-    exportdb_timer.name = 'Automode exportDB Loop'
-    exportdb_timer.start()
-
 def main(irc=None):
     """Main function, called during plugin loading at start."""
 
     # Load the automode database.
-    loadDB()
-
-    # Schedule periodic exports of the automode database.
-    scheduleExport(starting=True)
+    datastore.load()
 
     # Register our permissions.
     permissions.addDefaultPermissions(default_permissions)
@@ -82,14 +47,7 @@ def main(irc=None):
 
 def die(sourceirc):
     """Saves the Automode database and quit."""
-    exportDB()
-
-    # Kill the scheduling for exports.
-    global exportdb_timer
-    if exportdb_timer:
-        log.debug("Automode: cancelling exportDB timer thread %s due to die()", threading.get_ident())
-        exportdb_timer.cancel()
-
+    datastore.die()
     permissions.removeDefaultPermissions(default_permissions)
     utils.unregisterService('automode')
 
@@ -298,8 +256,9 @@ def save(irc, source, args):
 
     Saves the Automode database to disk."""
     permissions.checkPermissions(irc, source, ['automode.savedb'])
-    exportDB()
+    datastore.save()
     reply(irc, 'Done.')
 
 modebot.add_cmd(save)
 
 def syncacc(irc, source, args):
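The Automode hunks above replace the plugin's hand-rolled loadDB()/exportDB()/scheduleExport() machinery with a single module-level structures.JSONDataStore. A minimal sketch of the resulting plugin-side pattern, with the plugin name, filename and function bodies as made-up placeholders rather than anything from this commit:

    import collections

    from pylinkirc import conf, structures

    # Hypothetical plugin following the pattern the Automode diff introduces.
    save_delay = conf.conf['bot'].get('save_delay', 300)

    # default_db makes the store a defaultdict(dict), so indexing an unknown
    # channel creates an empty dict instead of raising KeyError.
    datastore = structures.JSONDataStore('myplugin', 'myplugin.db',
                                         save_frequency=save_delay,
                                         default_db=collections.defaultdict(dict))
    db = datastore.store   # existing code keeps reading and writing `db`

    def main(irc=None):
        datastore.load()   # pull the saved JSON into datastore.store

    def save(irc, source, args):
        datastore.save()   # explicit flush, as the SAVE command above does

    def die(sourceirc):
        datastore.die()    # cancel any pending save timer, then save once more

Command registration and permission checks are left out; the point is that load/save/die on the DataStore replace three plugin-local functions and a global threading.Timer.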
@@ -5,7 +5,7 @@ import threading
 import string
 from collections import defaultdict
 
-from pylinkirc import utils, world, conf
+from pylinkirc import utils, world, conf, structures
 from pylinkirc.log import log
 from pylinkirc.coremods import permissions
 
@@ -15,10 +15,11 @@ relayservers = defaultdict(dict)
 spawnlocks = defaultdict(threading.RLock)
 spawnlocks_servers = defaultdict(threading.RLock)
 
-exportdb_timer = None
 save_delay = conf.conf['bot'].get('save_delay', 300)
-db = {}
 
 dbname = utils.getDatabaseName('pylinkrelay')
+datastore = structures.PickleDataStore('pylinkrelay', dbname, save_frequency=save_delay)
+db = datastore.store
+
 default_permissions = {"*!*@*": ['relay.linked'],
                        "$ircop": ['relay.create', 'relay.linkacl*',
@@ -47,16 +48,8 @@ def initializeAll(irc):
 
 def main(irc=None):
     """Main function, called during plugin loading at start."""
 
-    # Load the relay links database.
-    loadDB()
-
-    log.debug('relay.main: loading links database')
-
-    # Schedule periodic exports of the links database.
-    scheduleExport(starting=True)
-
-    log.debug('relay.main: scheduling export loop')
+    datastore.load()
 
     permissions.addDefaultPermissions(default_permissions)
 
@@ -87,14 +80,8 @@ def die(sourceirc):
     # 3) Unload our permissions.
     permissions.removeDefaultPermissions(default_permissions)
 
-    # 4) Export the relay links database.
-    exportDB()
-
-    # 5) Kill the scheduling for any other exports.
-    global exportdb_timer
-    if exportdb_timer:
-        log.debug("Relay: cancelling exportDB timer thread %s due to die()", threading.get_ident())
-        exportdb_timer.cancel()
+    # 4) Save the database and quit.
+    datastore.die()
 
 allowed_chars = string.digits + string.ascii_letters + '/^|\\-_[]{}`'
 fallback_separator = '|'
@@ -203,39 +190,6 @@ def normalizeHost(irc, host):
 
     return host[:63]  # Limit hosts to 63 chars for best compatibility
 
-def loadDB():
-    """Loads the relay database, creating a new one if this fails."""
-    global db
-    try:
-        with open(dbname, "rb") as f:
-            db = pickle.load(f)
-    except (ValueError, IOError, OSError):
-        log.info("Relay: failed to load links database %s"
-                 ", creating a new one in memory...", dbname)
-        db = {}
-
-def exportDB():
-    """Exports the relay database."""
-
-    log.debug("Relay: exporting links database to %s", dbname)
-    with open(dbname, 'wb') as f:
-        pickle.dump(db, f, protocol=4)
-
-def scheduleExport(starting=False):
-    """
-    Schedules exporting of the relay database in a repeated loop.
-    """
-    global exportdb_timer
-
-    if not starting:
-        # Export the database, unless this is being called the first
-        # thing after start (i.e. DB has just been loaded).
-        exportDB()
-
-    exportdb_timer = threading.Timer(save_delay, scheduleExport)
-    exportdb_timer.name = 'PyLink Relay exportDB Loop'
-    exportdb_timer.start()
-
 def getPrefixModes(irc, remoteirc, channel, user, mlist=None):
     """
     Fetches all prefix modes for a user in a channel that are supported by the
@@ -1942,7 +1896,7 @@ def save(irc, source, args):
 
     Saves the relay database to disk."""
     permissions.checkPermissions(irc, source, ['relay.savedb'])
-    exportDB()
+    datastore.save()
    irc.reply('Done.')
 
 @utils.add_cmd
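Automode above gets a JSONDataStore while relay gets a PickleDataStore. The commit itself doesn't say why, but one plausible reason is the shape of the data: a links database with compound keys and Python-specific container types round-trips through pickle but is rejected by json.dump. An illustrative comparison with made-up sample data (not relay's actual schema):

    import json
    import pickle

    # Invented sample: tuple keys and set values, JSON supports neither.
    links = {('net1', '#channel'): {'links': {('net2', '#channel')}}}

    pickle.dumps(links)        # fine: pickle preserves tuples and sets

    try:
        json.dumps(links)
    except TypeError as err:
        print('JSON cannot store this:', err)   # non-string dict keys are rejected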
structures.py
@@ -6,6 +6,11 @@ This module contains custom data structures that may be useful in various situations.
 
 import collections
+import json
+import pickle
+import os
+import threading
+
+from .log import log
 
 class KeyedDefaultdict(collections.defaultdict):
     """
@@ -19,66 +24,27 @@ class KeyedDefaultdict(collections.defaultdict):
         value = self[key] = self.default_factory(key)
         return value
 
-class JSONDataStore:
-    def load(self):
-        """Loads the database given via JSON."""
-        with self.store_lock:
-            try:
-                with open(self.filename, "r") as f:
-                    self.store.clear()
-                    self.store.update(json.load(f))
-            except (ValueError, IOError, OSError):
-                log.info("(DataStore:%s) failed to load database %s; creating a new one in "
-                         "memory", self.name, self.filename)
-
-    def save(self):
-        """Saves the database given via JSON."""
-        with self.store_lock:
-            with open(self.tmp_filename, 'w') as f:
-                # Pretty print the JSON output for better readability.
-                json.dump(self.store, f, indent=4)
-
-            os.rename(self.tmp_filename, self.filename)
-
-class PickleDataStore:
-    def load(self):
-        """Loads the database given via pickle."""
-        with self.store_lock:
-            try:
-                with open(self.filename, "r") as f:
-                    self.store.clear()
-                    self.store.update(pickle.load(f))
-            except (ValueError, IOError, OSError):
-                log.info("(DataStore:%s) failed to load database %s; creating a new one in "
-                         "memory", self.name, self.filename)
-
-    def save(self):
-        """Saves the database given via pickle."""
-        with self.store_lock:
-            with open(self.tmp_filename, 'w') as f:
-                # Force protocol version 4 as that is the lowest Python 3.4 supports.
-                pickle.dump(db, f, protocol=4)
-
-            os.rename(self.tmp_filename, self.filename)
-
-
 class DataStore:
     """
     Generic database class. Plugins should use a subclass of this such as JSONDataStore or
     PickleDataStore.
     """
-    def __init__(self, name, filename, save_frequency=30):
+    def __init__(self, name, filename, save_frequency=30, default_db=None):
         self.name = name
         self.filename = filename
         self.tmp_filename = filename + '.tmp'
 
-        log.debug('(DataStore:%s) database path set to %s', self.name, self._filename)
+        log.debug('(DataStore:%s) database path set to %s', self.name, self.filename)
 
         self.save_frequency = save_frequency
         log.debug('(DataStore:%s) saving every %s seconds', self.name, self.save_frequency)
 
-        self.store = {}
+        if default_db is not None:
+            self.store = default_db
+        else:
+            self.store = {}
         self.store_lock = threading.Lock()
         self.exportdb_timer = None
 
         self.load()
 
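The new default_db argument shown above is what lets Automode keep its defaultdict behaviour after the switch: whatever object is passed in becomes datastore.store itself, so auto-creation of missing keys still works. A small sketch of the difference, assuming it runs inside a configured PyLink instance (the names 'demo-plain' and 'demo-acl' and the file paths are placeholders):

    import collections

    from pylinkirc import structures

    # Plain dict store: reading an unknown key raises KeyError.
    plain = structures.JSONDataStore('demo-plain', 'demo-plain.db')

    # defaultdict store, as the Automode diff passes in: indexing a new key
    # silently creates an empty dict.
    acls = structures.JSONDataStore('demo-acl', 'demo-acl.db',
                                    default_db=collections.defaultdict(dict))

    acls.store['#newchannel']['*!*@example.host'] = ['op']   # auto-created entry

    try:
        plain.store['#newchannel']
    except KeyError:
        print('plain dict store has no such key')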
@@ -110,3 +76,54 @@ class DataStore:
         and implement this.
         """
         raise NotImplementedError
+
+    def die(self):
+        """
+        Saves the database and stops any save loops.
+        """
+        if self.exportdb_timer:
+            self.exportdb_timer.cancel()
+
+        self.save()
+
+class JSONDataStore(DataStore):
+    def load(self):
+        """Loads the database given via JSON."""
+        with self.store_lock:
+            try:
+                with open(self.filename, "r") as f:
+                    self.store.clear()
+                    self.store.update(json.load(f))
+            except (ValueError, IOError, OSError):
+                log.info("(DataStore:%s) failed to load database %s; creating a new one in "
+                         "memory", self.name, self.filename)
+
+    def save(self):
+        """Saves the database given via JSON."""
+        with self.store_lock:
+            with open(self.tmp_filename, 'w') as f:
+                # Pretty print the JSON output for better readability.
+                json.dump(self.store, f, indent=4)
+
+            os.rename(self.tmp_filename, self.filename)
+
+class PickleDataStore(DataStore):
+    def load(self):
+        """Loads the database given via pickle."""
+        with self.store_lock:
+            try:
+                with open(self.filename, "rb") as f:
+                    self.store.clear()
+                    self.store.update(pickle.load(f))
+            except (ValueError, IOError, OSError):
+                log.info("(DataStore:%s) failed to load database %s; creating a new one in "
+                         "memory", self.name, self.filename)
+
+    def save(self):
+        """Saves the database given via pickle."""
+        with self.store_lock:
+            with open(self.tmp_filename, 'wb') as f:
+                # Force protocol version 4 as that is the lowest Python 3.4 supports.
+                pickle.dump(self.store, f, protocol=4)
+
+            os.rename(self.tmp_filename, self.filename)
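Both save() implementations above write the new contents to tmp_filename and then os.rename() the finished file over the real one, so a crash mid-write leaves the previous database intact instead of a half-written file. A standalone sketch of the same pattern outside PyLink; it uses os.replace(), which behaves like the POSIX rename but also overwrites an existing target on Windows:

    import json
    import os
    import threading

    class TinyJSONStore:
        """Standalone sketch of the DataStore save pattern shown above."""

        def __init__(self, filename):
            self.filename = filename
            self.tmp_filename = filename + '.tmp'
            self.store = {}
            self.store_lock = threading.Lock()

        def save(self):
            with self.store_lock:
                with open(self.tmp_filename, 'w') as f:
                    json.dump(self.store, f, indent=4)

                # Swap the finished file into place in one step, so readers
                # never observe a partially written database.
                os.replace(self.tmp_filename, self.filename)

    store = TinyJSONStore('demo.db')
    store.store['hello'] = 'world'
    store.save()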