2007-05-10 11:01:32 +02:00
|
|
|
# Copyright (C) 2007, One Laptop Per Child
|
2010-01-20 11:55:56 +01:00
|
|
|
# Copyright (C) 2010, Simon Schampijer
|
2006-12-11 13:55:01 +01:00
|
|
|
#
|
2007-06-24 13:10:53 +02:00
|
|
|
# This library is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
|
|
# License as published by the Free Software Foundation; either
|
|
|
|
# version 2 of the License, or (at your option) any later version.
|
2006-12-11 13:55:01 +01:00
|
|
|
#
|
2007-06-24 13:10:53 +02:00
|
|
|
# This library is distributed in the hope that it will be useful,
|
2006-12-11 13:55:01 +01:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
2007-06-24 13:10:53 +02:00
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
# Lesser General Public License for more details.
|
2006-12-11 13:55:01 +01:00
|
|
|
#
|
2007-06-24 13:10:53 +02:00
|
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
|
|
# License along with this library; if not, write to the
|
|
|
|
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
|
|
|
# Boston, MA 02111-1307, USA.
|
|
|
|
|
2008-10-28 14:19:01 +01:00
|
|
|
"""
|
2010-01-20 11:55:56 +01:00
|
|
|
STABLE
|
2008-10-28 14:19:01 +01:00
|
|
|
"""
|
|
|
|
|
2007-03-02 21:17:03 +01:00
|
|
|
import logging
|
2007-09-10 14:04:43 +02:00
|
|
|
import time
|
2007-07-16 13:01:35 +02:00
|
|
|
from datetime import datetime
|
2007-07-20 19:50:49 +02:00
|
|
|
import os
|
2009-09-04 18:49:03 +02:00
|
|
|
import tempfile
|
2011-11-15 19:29:07 +01:00
|
|
|
from gi.repository import GObject
|
|
|
|
from gi.repository import Gio
|
2010-01-20 11:55:56 +01:00
|
|
|
import dbus
|
2006-12-11 13:55:01 +01:00
|
|
|
|
2011-10-29 15:55:20 +02:00
|
|
|
from sugar3 import env
|
|
|
|
from sugar3 import mime
|
|
|
|
from sugar3 import dispatch
|
2016-04-17 07:57:07 +02:00
|
|
|
from sugar3.profile import get_color
|
2006-12-11 13:55:01 +01:00
|
|
|
|
2010-10-15 21:14:59 +02:00
|
|
|
DS_DBUS_SERVICE = 'org.laptop.sugar.DataStore'
|
|
|
|
DS_DBUS_INTERFACE = 'org.laptop.sugar.DataStore'
|
|
|
|
DS_DBUS_PATH = '/org/laptop/sugar/DataStore'
|
2010-01-20 11:55:56 +01:00
|
|
|
|
|
|
|
_data_store = None
|
|
|
|
|
|
|
|
|
|
|
|
def _get_data_store():
|
|
|
|
global _data_store
|
|
|
|
|
|
|
|
if not _data_store:
|
|
|
|
_bus = dbus.SessionBus()
|
|
|
|
_data_store = dbus.Interface(_bus.get_object(DS_DBUS_SERVICE,
|
|
|
|
DS_DBUS_PATH),
|
|
|
|
DS_DBUS_INTERFACE)
|
2010-01-24 17:53:32 +01:00
|
|
|
_data_store.connect_to_signal('Created', __datastore_created_cb)
|
|
|
|
_data_store.connect_to_signal('Deleted', __datastore_deleted_cb)
|
|
|
|
_data_store.connect_to_signal('Updated', __datastore_updated_cb)
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
return _data_store
|
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2010-01-24 17:53:32 +01:00
|
|
|
def __datastore_created_cb(object_id):
|
|
|
|
metadata = _get_data_store().get_properties(object_id, byte_arrays=True)
|
|
|
|
updated.send(None, object_id=object_id, metadata=metadata)
|
|
|
|
|
|
|
|
|
|
|
|
def __datastore_updated_cb(object_id):
|
|
|
|
metadata = _get_data_store().get_properties(object_id, byte_arrays=True)
|
|
|
|
updated.send(None, object_id=object_id, metadata=metadata)
|
|
|
|
|
|
|
|
|
|
|
|
def __datastore_deleted_cb(object_id):
|
|
|
|
deleted.send(None, object_id=object_id)
|
|
|
|
|
|
|
|
created = dispatch.Signal()
|
|
|
|
deleted = dispatch.Signal()
|
|
|
|
updated = dispatch.Signal()
|
|
|
|
|
|
|
|
|
2011-11-15 19:29:07 +01:00
|
|
|
class DSMetadata(GObject.GObject):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""A representation of the metadata associated with a DS entry."""
|
2006-12-11 13:55:01 +01:00
|
|
|
__gsignals__ = {
|
2011-11-15 19:29:07 +01:00
|
|
|
'updated': (GObject.SignalFlags.RUN_FIRST, None, ([])),
|
2006-12-11 13:55:01 +01:00
|
|
|
}
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
def __init__(self, properties=None):
|
2011-11-15 19:29:07 +01:00
|
|
|
GObject.GObject.__init__(self)
|
2010-01-20 11:55:56 +01:00
|
|
|
if not properties:
|
|
|
|
self._properties = {}
|
2007-06-12 21:57:49 +02:00
|
|
|
else:
|
2010-01-20 11:55:56 +01:00
|
|
|
self._properties = properties
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2007-07-20 13:15:11 +02:00
|
|
|
default_keys = ['activity', 'activity_id',
|
|
|
|
'mime_type', 'title_set_by_user']
|
2007-06-12 21:57:49 +02:00
|
|
|
for key in default_keys:
|
2010-01-20 11:55:56 +01:00
|
|
|
if key not in self._properties:
|
|
|
|
self._properties[key] = ''
|
2007-05-10 11:01:32 +02:00
|
|
|
|
|
|
|
def __getitem__(self, key):
|
2010-01-20 11:55:56 +01:00
|
|
|
return self._properties[key]
|
2007-05-10 11:01:32 +02:00
|
|
|
|
|
|
|
def __setitem__(self, key, value):
|
2010-01-20 11:55:56 +01:00
|
|
|
if key not in self._properties or self._properties[key] != value:
|
|
|
|
self._properties[key] = value
|
2007-05-13 18:21:35 +02:00
|
|
|
self.emit('updated')
|
|
|
|
|
2007-05-20 12:38:08 +02:00
|
|
|
def __delitem__(self, key):
|
2010-01-20 11:55:56 +01:00
|
|
|
del self._properties[key]
|
2007-05-29 15:53:58 +02:00
|
|
|
|
2007-10-02 23:40:39 +02:00
|
|
|
def __contains__(self, key):
|
2010-01-20 11:55:56 +01:00
|
|
|
return self._properties.__contains__(key)
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2007-05-29 15:53:58 +02:00
|
|
|
def has_key(self, key):
|
2010-01-20 11:55:56 +01:00
|
|
|
logging.warning(".has_key() is deprecated, use 'in'")
|
|
|
|
return key in self._properties
|
2007-10-02 23:40:39 +02:00
|
|
|
|
|
|
|
def keys(self):
|
2010-01-20 11:55:56 +01:00
|
|
|
return self._properties.keys()
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2007-05-29 15:53:58 +02:00
|
|
|
def get_dictionary(self):
|
2010-01-20 11:55:56 +01:00
|
|
|
return self._properties
|
2007-05-29 15:53:58 +02:00
|
|
|
|
2007-08-27 19:44:49 +02:00
|
|
|
def copy(self):
|
2010-01-20 11:55:56 +01:00
|
|
|
return DSMetadata(self._properties.copy())
|
2007-08-27 19:44:49 +02:00
|
|
|
|
2007-10-03 21:33:34 +02:00
|
|
|
def get(self, key, default=None):
|
2010-01-20 11:55:56 +01:00
|
|
|
if key in self._properties:
|
|
|
|
return self._properties[key]
|
2007-10-03 21:33:34 +02:00
|
|
|
else:
|
|
|
|
return default
|
|
|
|
|
2010-02-06 23:08:17 +01:00
|
|
|
def update(self, properties):
|
|
|
|
"""Update all of the metadata"""
|
|
|
|
for (key, value) in properties.items():
|
|
|
|
self[key] = value
|
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2007-07-20 19:50:49 +02:00
|
|
|
class DSObject(object):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""A representation of a DS entry."""
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2007-05-29 15:53:58 +02:00
|
|
|
def __init__(self, object_id, metadata=None, file_path=None):
|
2010-03-29 23:48:23 +02:00
|
|
|
self._update_signal_match = None
|
2010-08-12 16:20:14 +02:00
|
|
|
self._object_id = None
|
|
|
|
|
2010-02-13 14:59:05 +01:00
|
|
|
self.set_object_id(object_id)
|
2010-08-12 16:20:14 +02:00
|
|
|
|
2007-05-29 15:53:58 +02:00
|
|
|
self._metadata = metadata
|
|
|
|
self._file_path = file_path
|
2007-07-20 19:50:49 +02:00
|
|
|
self._destroyed = False
|
2008-09-10 17:54:45 +02:00
|
|
|
self._owns_file = False
|
2007-05-20 12:38:08 +02:00
|
|
|
|
2010-02-06 23:08:17 +01:00
|
|
|
def get_object_id(self):
|
|
|
|
return self._object_id
|
|
|
|
|
|
|
|
def set_object_id(self, object_id):
|
2010-10-15 19:45:11 +02:00
|
|
|
if self._update_signal_match is not None:
|
2010-03-29 23:48:23 +02:00
|
|
|
self._update_signal_match.remove()
|
2010-02-06 23:08:17 +01:00
|
|
|
if object_id is not None:
|
2010-03-29 23:48:23 +02:00
|
|
|
self._update_signal_match = _get_data_store().connect_to_signal(
|
2013-05-17 07:24:39 +02:00
|
|
|
'Updated', self.__object_updated_cb, arg0=object_id)
|
2010-03-29 23:48:23 +02:00
|
|
|
|
2010-02-06 23:08:17 +01:00
|
|
|
self._object_id = object_id
|
|
|
|
|
|
|
|
object_id = property(get_object_id, set_object_id)
|
|
|
|
|
|
|
|
def __object_updated_cb(self, object_id):
|
2010-03-29 23:48:23 +02:00
|
|
|
properties = _get_data_store().get_properties(self._object_id,
|
2010-02-06 23:08:17 +01:00
|
|
|
byte_arrays=True)
|
|
|
|
self._metadata.update(properties)
|
|
|
|
|
2007-05-13 18:21:35 +02:00
|
|
|
def get_metadata(self):
|
2014-03-29 20:25:34 +01:00
|
|
|
if self._metadata is None and self.object_id is not None:
|
2010-01-20 11:55:56 +01:00
|
|
|
properties = _get_data_store().get_properties(self.object_id)
|
|
|
|
metadata = DSMetadata(properties)
|
2007-05-29 15:53:58 +02:00
|
|
|
self._metadata = metadata
|
2007-05-13 18:21:35 +02:00
|
|
|
return self._metadata
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2007-05-13 18:21:35 +02:00
|
|
|
def set_metadata(self, metadata):
|
|
|
|
if self._metadata != metadata:
|
|
|
|
self._metadata = metadata
|
|
|
|
|
|
|
|
metadata = property(get_metadata, set_metadata)
|
|
|
|
|
2008-09-11 09:03:05 +02:00
|
|
|
def get_file_path(self, fetch=True):
|
2014-03-29 20:25:34 +01:00
|
|
|
if fetch and self._file_path is None and self.object_id is not None:
|
2010-01-20 11:55:56 +01:00
|
|
|
self.set_file_path(_get_data_store().get_filename(self.object_id))
|
2008-09-10 17:54:45 +02:00
|
|
|
self._owns_file = True
|
2007-05-13 18:21:35 +02:00
|
|
|
return self._file_path
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2007-05-13 18:21:35 +02:00
|
|
|
def set_file_path(self, file_path):
|
|
|
|
if self._file_path != file_path:
|
2008-09-10 17:54:45 +02:00
|
|
|
if self._file_path and self._owns_file:
|
2007-07-20 19:50:49 +02:00
|
|
|
if os.path.isfile(self._file_path):
|
|
|
|
os.remove(self._file_path)
|
2008-09-10 17:54:45 +02:00
|
|
|
self._owns_file = False
|
2007-05-13 18:21:35 +02:00
|
|
|
self._file_path = file_path
|
|
|
|
|
|
|
|
file_path = property(get_file_path, set_file_path)
|
2007-05-10 11:01:32 +02:00
|
|
|
|
2007-07-20 19:50:49 +02:00
|
|
|
def destroy(self):
|
|
|
|
if self._destroyed:
|
|
|
|
logging.warning('This DSObject has already been destroyed!.')
|
|
|
|
return
|
|
|
|
self._destroyed = True
|
2008-09-10 17:54:45 +02:00
|
|
|
if self._file_path and self._owns_file:
|
2007-07-20 19:50:49 +02:00
|
|
|
if os.path.isfile(self._file_path):
|
|
|
|
os.remove(self._file_path)
|
2008-09-10 17:54:45 +02:00
|
|
|
self._owns_file = False
|
2007-07-20 19:50:49 +02:00
|
|
|
self._file_path = None
|
|
|
|
|
|
|
|
def __del__(self):
|
|
|
|
if not self._destroyed:
|
2009-08-25 21:12:40 +02:00
|
|
|
logging.warning('DSObject was deleted without cleaning up first. '
|
2008-04-19 12:04:43 +02:00
|
|
|
'Call DSObject.destroy() before disposing it.')
|
2007-07-20 19:50:49 +02:00
|
|
|
self.destroy()
|
|
|
|
|
2007-08-27 19:44:49 +02:00
|
|
|
def copy(self):
|
|
|
|
return DSObject(None, self._metadata.copy(), self._file_path)
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
|
2009-09-04 18:49:03 +02:00
|
|
|
class RawObject(object):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""A representation for objects not in the DS but
|
|
|
|
in the file system.
|
|
|
|
|
|
|
|
"""
|
2009-09-04 18:49:03 +02:00
|
|
|
|
|
|
|
def __init__(self, file_path):
|
2009-09-06 11:44:52 +02:00
|
|
|
stat = os.stat(file_path)
|
|
|
|
metadata = {
|
2013-05-17 07:24:39 +02:00
|
|
|
'uid': file_path,
|
|
|
|
'title': os.path.basename(file_path),
|
|
|
|
'timestamp': stat.st_mtime,
|
|
|
|
'mime_type': Gio.content_type_guess(file_path, None)[0],
|
|
|
|
'activity': '',
|
|
|
|
'activity_id': '',
|
2016-04-17 07:57:07 +02:00
|
|
|
'icon-color': get_color().to_string(),
|
2013-05-17 07:24:39 +02:00
|
|
|
'description': file_path,
|
|
|
|
}
|
2009-09-06 11:44:52 +02:00
|
|
|
|
2009-09-04 18:49:03 +02:00
|
|
|
self.object_id = file_path
|
2009-09-06 11:44:52 +02:00
|
|
|
self._metadata = DSMetadata(metadata)
|
2009-09-04 18:49:03 +02:00
|
|
|
self._file_path = None
|
|
|
|
self._destroyed = False
|
|
|
|
|
|
|
|
def get_metadata(self):
|
|
|
|
return self._metadata
|
|
|
|
|
|
|
|
metadata = property(get_metadata)
|
|
|
|
|
|
|
|
def get_file_path(self, fetch=True):
|
|
|
|
# we have to create symlink since its a common practice
|
|
|
|
# to create hardlinks to jobject files
|
2009-09-18 12:31:13 +02:00
|
|
|
# and w/o this, it wouldn't work since we have file from mounted device
|
2009-09-04 18:49:03 +02:00
|
|
|
if self._file_path is None:
|
2010-08-09 09:25:18 +02:00
|
|
|
data_path = os.path.join(env.get_profile_path(), 'data')
|
2009-09-04 18:49:03 +02:00
|
|
|
self._file_path = tempfile.mktemp(
|
2013-05-17 07:24:39 +02:00
|
|
|
prefix='rawobject', dir=data_path)
|
2010-08-09 09:25:18 +02:00
|
|
|
if not os.path.exists(data_path):
|
|
|
|
os.makedirs(data_path)
|
2009-09-04 18:49:03 +02:00
|
|
|
os.symlink(self.object_id, self._file_path)
|
|
|
|
return self._file_path
|
|
|
|
|
|
|
|
file_path = property(get_file_path)
|
|
|
|
|
|
|
|
def destroy(self):
|
|
|
|
if self._destroyed:
|
|
|
|
logging.warning('This RawObject has already been destroyed!.')
|
|
|
|
return
|
|
|
|
self._destroyed = True
|
|
|
|
if self._file_path is not None:
|
2009-09-18 12:31:13 +02:00
|
|
|
if os.path.exists(self._file_path):
|
|
|
|
os.remove(self._file_path)
|
2009-09-04 18:49:03 +02:00
|
|
|
self._file_path = None
|
|
|
|
|
|
|
|
def __del__(self):
|
|
|
|
if not self._destroyed:
|
|
|
|
logging.warning('RawObject was deleted without cleaning up. '
|
|
|
|
'Call RawObject.destroy() before disposing it.')
|
|
|
|
self.destroy()
|
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2007-05-10 11:01:32 +02:00
|
|
|
def get(object_id):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""Get the properties of the object with the ID given.
|
|
|
|
|
|
|
|
Keyword arguments:
|
|
|
|
object_id -- unique identifier of the object
|
|
|
|
|
|
|
|
Return: a DSObject
|
|
|
|
|
|
|
|
"""
|
2007-05-10 11:01:32 +02:00
|
|
|
logging.debug('datastore.get')
|
2009-09-04 18:49:03 +02:00
|
|
|
|
|
|
|
if object_id.startswith('/'):
|
|
|
|
return RawObject(object_id)
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
metadata = _get_data_store().get_properties(object_id, byte_arrays=True)
|
2007-05-13 18:21:35 +02:00
|
|
|
|
2007-07-23 13:20:24 +02:00
|
|
|
ds_object = DSObject(object_id, DSMetadata(metadata), None)
|
2007-05-10 11:01:32 +02:00
|
|
|
# TODO: register the object for updates
|
|
|
|
return ds_object
|
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2007-05-10 11:01:32 +02:00
|
|
|
def create():
|
2010-01-20 11:55:56 +01:00
|
|
|
"""Create a new DSObject.
|
|
|
|
|
|
|
|
Return: a DSObject
|
|
|
|
|
|
|
|
"""
|
2007-07-13 13:20:44 +02:00
|
|
|
metadata = DSMetadata()
|
2007-09-10 14:04:43 +02:00
|
|
|
metadata['mtime'] = datetime.now().isoformat()
|
|
|
|
metadata['timestamp'] = int(time.time())
|
2007-07-13 13:20:44 +02:00
|
|
|
return DSObject(object_id=None, metadata=metadata, file_path=None)
|
2007-05-10 11:01:32 +02:00
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
def _update_ds_entry(uid, properties, filename, transfer_ownership=False,
|
2013-05-17 07:24:39 +02:00
|
|
|
reply_handler=None, error_handler=None, timeout=-1):
|
2010-01-20 11:55:56 +01:00
|
|
|
debug_properties = properties.copy()
|
2010-10-15 21:14:59 +02:00
|
|
|
if 'preview' in debug_properties:
|
|
|
|
debug_properties['preview'] = '<omitted>'
|
2010-01-20 11:55:56 +01:00
|
|
|
logging.debug('dbus_helpers.update: %s, %s, %s, %s', uid, filename,
|
2013-05-17 07:24:39 +02:00
|
|
|
debug_properties, transfer_ownership)
|
2010-01-20 11:55:56 +01:00
|
|
|
if reply_handler and error_handler:
|
|
|
|
_get_data_store().update(uid, dbus.Dictionary(properties), filename,
|
2013-05-17 07:24:39 +02:00
|
|
|
transfer_ownership,
|
|
|
|
reply_handler=reply_handler,
|
|
|
|
error_handler=error_handler,
|
|
|
|
timeout=timeout)
|
2010-01-20 11:55:56 +01:00
|
|
|
else:
|
|
|
|
_get_data_store().update(uid, dbus.Dictionary(properties),
|
|
|
|
filename, transfer_ownership)
|
|
|
|
|
|
|
|
|
|
|
|
def _create_ds_entry(properties, filename, transfer_ownership=False):
|
|
|
|
object_id = _get_data_store().create(dbus.Dictionary(properties), filename,
|
2013-05-17 07:24:39 +02:00
|
|
|
transfer_ownership)
|
2010-01-20 11:55:56 +01:00
|
|
|
return object_id
|
|
|
|
|
|
|
|
|
2008-04-19 12:04:43 +02:00
|
|
|
def write(ds_object, update_mtime=True, transfer_ownership=False,
|
|
|
|
reply_handler=None, error_handler=None, timeout=-1):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""Write the DSObject given to the datastore. Creates a new entry if
|
|
|
|
the entry does not exist yet.
|
|
|
|
|
|
|
|
Keyword arguments:
|
|
|
|
update_mtime -- boolean if the mtime of the entry should be regenerated
|
|
|
|
(default True)
|
|
|
|
transfer_ownership -- set it to true if the ownership of the entry should
|
|
|
|
be passed - who is responsible to delete the file
|
|
|
|
when done with it (default False)
|
|
|
|
reply_handler -- will be called with the method's return values as
|
|
|
|
arguments (default None)
|
|
|
|
error_handler -- will be called with an instance of a DBusException
|
|
|
|
representing a remote exception (default None)
|
|
|
|
timeout -- dbus timeout for the caller to wait (default -1)
|
|
|
|
|
|
|
|
"""
|
2007-06-15 18:03:17 +02:00
|
|
|
logging.debug('datastore.write')
|
2007-07-09 14:26:41 +02:00
|
|
|
|
|
|
|
properties = ds_object.metadata.get_dictionary().copy()
|
|
|
|
|
2007-07-16 14:57:07 +02:00
|
|
|
if update_mtime:
|
|
|
|
properties['mtime'] = datetime.now().isoformat()
|
2007-09-10 14:04:43 +02:00
|
|
|
properties['timestamp'] = int(time.time())
|
2007-07-16 13:01:35 +02:00
|
|
|
|
2008-09-11 09:03:05 +02:00
|
|
|
file_path = ds_object.get_file_path(fetch=False)
|
|
|
|
if file_path is None:
|
2008-09-07 22:01:27 +02:00
|
|
|
file_path = ''
|
2007-09-10 18:03:40 +02:00
|
|
|
|
|
|
|
# FIXME: this func will be sync for creates regardless of the handlers
|
|
|
|
# supplied. This is very bad API, need to decide what to do here.
|
2007-05-10 11:01:32 +02:00
|
|
|
if ds_object.object_id:
|
2010-01-20 11:55:56 +01:00
|
|
|
_update_ds_entry(ds_object.object_id,
|
|
|
|
properties,
|
|
|
|
file_path,
|
|
|
|
transfer_ownership,
|
|
|
|
reply_handler=reply_handler,
|
|
|
|
error_handler=error_handler,
|
|
|
|
timeout=timeout)
|
2007-05-10 11:01:32 +02:00
|
|
|
else:
|
2007-09-10 18:03:40 +02:00
|
|
|
if reply_handler or error_handler:
|
2013-05-17 07:24:39 +02:00
|
|
|
logging.warning('datastore.write() cannot currently be called'
|
2008-04-19 12:04:43 +02:00
|
|
|
'async for creates, see ticket 3071')
|
2010-01-20 11:55:56 +01:00
|
|
|
ds_object.object_id = _create_ds_entry(properties, file_path,
|
|
|
|
transfer_ownership)
|
2009-01-04 16:13:59 +01:00
|
|
|
ds_object.metadata['uid'] = ds_object.object_id
|
2007-05-10 11:01:32 +02:00
|
|
|
# TODO: register the object for updates
|
2009-08-24 12:54:02 +02:00
|
|
|
logging.debug('Written object %s to the datastore.', ds_object.object_id)
|
2006-12-11 13:55:01 +01:00
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2007-06-22 17:01:13 +02:00
|
|
|
def delete(object_id):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""Delete the datastore entry with the given uid.
|
|
|
|
|
|
|
|
Keyword arguments:
|
|
|
|
object_id -- uid of the datastore entry
|
|
|
|
|
|
|
|
"""
|
2007-06-22 17:01:13 +02:00
|
|
|
logging.debug('datastore.delete')
|
2010-01-20 11:55:56 +01:00
|
|
|
_get_data_store().delete(object_id)
|
2007-06-22 17:01:13 +02:00
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2008-08-11 01:20:08 +02:00
|
|
|
def find(query, sorting=None, limit=None, offset=None, properties=None,
|
2007-09-10 18:03:40 +02:00
|
|
|
reply_handler=None, error_handler=None):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""Find DS entries that match the query provided.
|
|
|
|
|
|
|
|
Keyword arguments:
|
|
|
|
query -- a dictionary containing metadata key value pairs
|
|
|
|
for a fulltext search use the key 'query' e.g. {'query': 'blue*'}
|
|
|
|
other possible well-known properties are:
|
|
|
|
'activity': 'my.organization.MyActivity'
|
|
|
|
'activity_id': '6f7f3acacca87886332f50bdd522d805f0abbf1f'
|
|
|
|
'title': 'My new project'
|
|
|
|
'title_set_by_user': '0'
|
|
|
|
'keep': '0'
|
|
|
|
'ctime': '1972-05-12T18:41:08'
|
|
|
|
'mtime': '2007-06-16T03:42:33'
|
|
|
|
'timestamp': 1192715145
|
|
|
|
'preview': ByteArray(png file data, 300x225 px)
|
|
|
|
'icon-color': '#ff0000,#ffff00'
|
|
|
|
'mime_type': 'application/x-my-activity'
|
|
|
|
'share-scope': # if shared
|
|
|
|
'buddies': '{}'
|
|
|
|
'description': 'some longer text'
|
|
|
|
'tags': 'one two'
|
|
|
|
sorting -- key to order results by e.g. 'timestamp' (default None)
|
|
|
|
limit -- return only limit results (default None)
|
|
|
|
offset -- return only results starting at offset (default None)
|
|
|
|
properties -- you can specify here a list of metadata you want to be
|
|
|
|
present in the result e.g. ['title, 'keep'] (default None)
|
|
|
|
reply_handler -- will be called with the method's return values as
|
|
|
|
arguments (default None)
|
|
|
|
error_handler -- will be called with an instance of a DBusException
|
|
|
|
representing a remote exception (default None)
|
|
|
|
|
|
|
|
Return: DSObjects matching the query, number of matches
|
|
|
|
|
|
|
|
"""
|
2007-08-01 16:31:33 +02:00
|
|
|
query = query.copy()
|
|
|
|
|
2008-08-11 01:20:08 +02:00
|
|
|
if properties is None:
|
|
|
|
properties = []
|
|
|
|
|
2007-05-22 14:03:31 +02:00
|
|
|
if sorting:
|
|
|
|
query['order_by'] = sorting
|
|
|
|
if limit:
|
|
|
|
query['limit'] = limit
|
|
|
|
if offset:
|
|
|
|
query['offset'] = offset
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
if reply_handler and error_handler:
|
|
|
|
_get_data_store().find(query, properties,
|
|
|
|
reply_handler=reply_handler,
|
|
|
|
error_handler=error_handler,
|
|
|
|
byte_arrays=True)
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
entries, total_count = _get_data_store().find(query, properties,
|
|
|
|
byte_arrays=True)
|
|
|
|
ds_objects = []
|
|
|
|
for entry in entries:
|
|
|
|
object_id = entry['uid']
|
|
|
|
del entry['uid']
|
2009-08-25 19:55:48 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
ds_object = DSObject(object_id, DSMetadata(entry), None)
|
|
|
|
ds_objects.append(ds_object)
|
2007-05-22 14:03:31 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
return ds_objects, total_count
|
2007-05-22 14:03:31 +02:00
|
|
|
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
def copy(ds_object, mount_point):
|
|
|
|
"""Copy a datastore entry
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
Keyword arguments:
|
|
|
|
ds_object -- DSObject to copy
|
|
|
|
mount_point -- mount point of the new datastore entry
|
2007-08-27 19:44:49 +02:00
|
|
|
|
2017-06-01 04:57:34 +02:00
|
|
|
Returns:
|
|
|
|
new_ds_object -- DSObject copied
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
"""
|
|
|
|
new_ds_object = ds_object.copy()
|
|
|
|
new_ds_object.metadata['mountpoint'] = mount_point
|
2007-08-27 19:44:49 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
if 'title' in ds_object.metadata:
|
|
|
|
filename = ds_object.metadata['title']
|
2007-09-20 12:25:12 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
if 'mime_type' in ds_object.metadata:
|
|
|
|
mime_type = ds_object.metadata['mime_type']
|
2007-09-20 12:25:12 +02:00
|
|
|
extension = mime.get_primary_extension(mime_type)
|
|
|
|
if extension:
|
|
|
|
filename += '.' + extension
|
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
new_ds_object.metadata['suggested_filename'] = filename
|
2007-09-20 12:25:12 +02:00
|
|
|
|
2007-08-27 19:44:49 +02:00
|
|
|
# this will cause the file be retrieved from the DS
|
2010-01-20 11:55:56 +01:00
|
|
|
new_ds_object.file_path = ds_object.file_path
|
2007-08-27 19:44:49 +02:00
|
|
|
|
2010-01-20 11:55:56 +01:00
|
|
|
write(new_ds_object)
|
2007-08-27 19:44:49 +02:00
|
|
|
|
2017-06-01 04:57:34 +02:00
|
|
|
return new_ds_object
|
|
|
|
|
2009-08-25 21:12:40 +02:00
|
|
|
|
2007-07-03 17:07:48 +02:00
|
|
|
def get_unique_values(key):
|
2010-01-20 11:55:56 +01:00
|
|
|
"""Retrieve an array of unique values for a field.
|
|
|
|
|
|
|
|
Keyword arguments:
|
|
|
|
key -- only the property activity is currently supported
|
|
|
|
|
|
|
|
Return: list of activities
|
|
|
|
|
|
|
|
"""
|
|
|
|
return _get_data_store().get_uniquevaluesfor(
|
2013-05-17 07:24:39 +02:00
|
|
|
key, dbus.Dictionary({}, signature='ss'))
|