mirror of https://github.com/jlu5/PyLink.git, synced 2024-11-01 09:19:23 +01:00
structures: revise DataStore a bit (#303)
commit 089dce3853
parent 377df413ed
171 structures.py
@@ -19,107 +19,94 @@ class KeyedDefaultdict(collections.defaultdict):
         value = self[key] = self.default_factory(key)
         return value
 
-class DataStore:
-    # will come into play with subclassing and db version upgrading
-    initial_version = 1
-
-    def __init__(self, name, filename, db_format='json', save_frequency={'seconds': 30}):
-        self.name = name
-
-        self._filename = os.path.abspath(os.path.expanduser(filename))
-        self._tmp_filename = self._filename + '.tmp'
-        log.debug('(db:{}) database path set to {}'.format(self.name, self._filename))
-
-        self._format = db_format
-        log.debug('(db:{}) format set to {}'.format(self.name, self._format))
-
-        self._save_frequency = timedelta(**save_frequency).total_seconds()
-        log.debug('(db:{}) saving every {} seconds'.format(self.name, self._save_frequency))
-
-    def create_or_load(self):
-        log.debug('(db:{}) creating/loading datastore using {}'.format(self.name, self._format))
-
-        if self._format == 'json':
-            self._store = {}
-            self._store_lock = threading.Lock()
-
-            log.debug('(db:{}) loading json data store from {}'.format(self.name, self._filename))
-            try:
-                self._store = json.loads(open(self._filename, 'r').read())
-            except (ValueError, IOError, FileNotFoundError):
-                log.exception('(db:{}) failed to load existing db, creating new one in memory'.format(self.name))
-                self.put('db.version', self.initial_version)
-        else:
-            raise Exception('(db:{}) Data store format [{}] not recognised'.format(self.name, self._format))
-
-    def save_callback(self, starting=False):
-        """Start the DB save loop."""
-        if self._format == 'json':
-            # don't actually save the first time
-            if not starting:
-                self.save()
-
-            # schedule saving in a loop.
-            self.exportdb_timer = threading.Timer(self._save_frequency, self.save_callback)
-            self.exportdb_timer.name = 'PyLink {} save_callback Loop'.format(self.name)
-            self.exportdb_timer.start()
-        else:
-            raise Exception('(db:{}) Data store format [{}] not recognised'.format(self.name, self._format))
-
-    def save(self):
-        log.debug('(db:{}) saving datastore'.format(self.name))
-        if self._format == 'json':
-            with open(self._tmp_filename, 'w') as store_file:
-                store_file.write(json.dumps(self._store))
-            os.rename(self._tmp_filename, self._filename)
-
-    # single keys
-    def __contains__(self, key):
-        if self._format == 'json':
-            return key in self._store
-
-    def get(self, key, default=None):
-        if self._format == 'json':
-            return self._store.get(key, default)
-
-    def put(self, key, value):
-        if self._format == 'json':
-            # make sure we can serialize the given data
-            # so we don't choke later on saving the db out
-            json.dumps(value)
-
-            self._store[key] = value
-
-            return True
-
-    def delete(self, key):
-        if self._format == 'json':
-            try:
-                with self._store_lock:
-                    del self._store[key]
-            except KeyError:
-                # key is already gone, nothing to do
-                ...
-
-            return True
-
-    # multiple keys
-    def list_keys(self, prefix=None):
-        """Return all key names. If prefix given, return only keys that start with it."""
-        if self._format == 'json':
-            keys = []
-
-            with self._store_lock:
-                for key in self._store:
-                    if prefix is None or key.startswith(prefix):
-                        keys.append(key)
-
-            return keys
-
-    def delete_keys(self, prefix):
-        """Delete all keys with the given prefix."""
-        if self._format == 'json':
-            with self._store_lock:
-                for key in tuple(self._store):
-                    if key.startswith(prefix):
-                        del self._store[key]
+class DataStore:
+    """
+    Generic database class. Plugins should use a subclass of this such as JSONDataStore or
+    PickleDataStore.
+    """
+    def __init__(self, name, filename, save_frequency=30):
+        self.name = name
+        self.filename = filename
+        self.tmp_filename = filename + '.tmp'
+
+        log.debug('(DataStore:%s) database path set to %s', self.name, self.filename)
+
+        self.save_frequency = save_frequency
+        log.debug('(DataStore:%s) saving every %s seconds', self.name, self.save_frequency)
+
+        self.store = {}
+        self.store_lock = threading.Lock()
+
+        self.load()
+
+        if save_frequency > 0:
+            # If autosaving is enabled, start the save_callback loop.
+            self.save_callback(starting=True)
+
+    def load(self):
+        """
+        DataStore load stub. Database implementations should subclass DataStore
+        and implement this.
+        """
+        raise NotImplementedError
+
+    def save_callback(self, starting=False):
+        """Start the DB save loop."""
+        # don't actually save the first time
+        if not starting:
+            self.save()
+
+        # schedule saving in a loop.
+        self.exportdb_timer = threading.Timer(self.save_frequency, self.save_callback)
+        self.exportdb_timer.name = 'DataStore {} save_callback loop'.format(self.name)
+        self.exportdb_timer.start()
+
+    def save(self):
+        """
+        DataStore save stub. Database implementations should subclass DataStore
+        and implement this.
+        """
+        raise NotImplementedError
+
+
+class JSONDataStore(DataStore):
+    def load(self):
+        """Loads the database given via JSON."""
+        with self.store_lock:
+            try:
+                with open(self.filename, "r") as f:
+                    self.store.clear()
+                    self.store.update(json.load(f))
+            except (ValueError, IOError, OSError):
+                log.info("(DataStore:%s) failed to load database %s; creating a new one in "
+                         "memory", self.name, self.filename)
+
+    def save(self):
+        """Saves the database given via JSON."""
+        with self.store_lock:
+            with open(self.tmp_filename, 'w') as f:
+                # Pretty print the JSON output for better readability.
+                json.dump(self.store, f, indent=4)
+
+            os.rename(self.tmp_filename, self.filename)
+
+
+class PickleDataStore(DataStore):
+    def load(self):
+        """Loads the database given via pickle."""
+        with self.store_lock:
+            try:
+                with open(self.filename, "rb") as f:
+                    self.store.clear()
+                    self.store.update(pickle.load(f))
+            except (ValueError, IOError, OSError):
+                log.info("(DataStore:%s) failed to load database %s; creating a new one in "
+                         "memory", self.name, self.filename)
+
+    def save(self):
+        """Saves the database given via pickle."""
+        with self.store_lock:
+            with open(self.tmp_filename, 'wb') as f:
+                # Force protocol version 4 as that is the lowest Python 3.4 supports.
+                pickle.dump(self.store, f, protocol=4)
+
+            os.rename(self.tmp_filename, self.filename)
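After this change, a plugin picks a concrete backend instead of passing a db_format string, and the old get()/put()/delete() helpers are gone: callers work with the store dict directly, taking store_lock themselves when running threaded. A minimal usage sketch, assuming JSONDataStore inherits DataStore's constructor (as the docstring and the load()/save() stubs imply) and that the module is importable as pylinkirc.structures; the plugin name and path are placeholders:

    from pylinkirc.structures import JSONDataStore

    # Placeholder name and database path; save_frequency=0 disables the
    # autosave timer so this example exits cleanly.
    db = JSONDataStore('exampleplugin', 'exampleplugin.db', save_frequency=0)

    # Mutate the shared dict directly, holding store_lock for thread safety.
    with db.store_lock:
        db.store['hello'] = 'world'

    # save() acquires store_lock itself, so call it outside the 'with' block.
    db.save()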
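The autosave loop in save_callback() works by re-arming a threading.Timer that calls itself, so every save after the first runs on a fresh timer thread. The same pattern in isolation; autosave and save_func are made-up names, and the daemon flag is an addition (not in the original) so the loop dies with the process:

    import threading

    def autosave(interval, save_func, starting=False):
        # Skip the save on the very first call, as save_callback(starting=True) does.
        if not starting:
            save_func()

        # Re-arm: each tick schedules the next one on a new timer thread.
        timer = threading.Timer(interval, autosave, args=(interval, save_func))
        timer.daemon = True  # not in the original; lets the process exit mid-wait
        timer.start()
        return timer

    autosave(30, lambda: print('saved'), starting=True)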
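Both save() implementations write to <filename>.tmp and then os.rename() it over the real database, so a crash mid-write leaves the previous copy intact. The pattern reduced to a standalone helper; atomic_json_save and the file names are illustrative, and the one-step replace behaviour of os.rename() assumes a POSIX filesystem:

    import json
    import os

    def atomic_json_save(data, filename):
        """Dump data as JSON via a temporary file, then atomically replace filename."""
        tmp_filename = filename + '.tmp'
        with open(tmp_filename, 'w') as f:
            json.dump(data, f, indent=4)
        # The new contents only become visible once fully written.
        os.rename(tmp_filename, filename)

    atomic_json_save({'db.version': 1}, 'example.db')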