[Sugar-devel] [PATCH sugar-toolkit 3/4] Increase timeout for data store operations (see SL#1936)

Sascha Silbe silbe at activitycentral.com
Sun Feb 19 09:45:52 EST 2012


The default D-Bus timeout (currently 25s [2]) can be too short if the data
store is busy, especially (but not only) on systems with a slow SD card
and/or when writing large files.
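
For illustration only (not part of this patch): with dbus-python, a proxy
call that omits the timeout keyword falls back to the libdbus default
mentioned above, while an explicit per-call value (in seconds) overrides it.
A minimal sketch, assuming the data store is reached via the session bus and
object_id names some existing entry:

import dbus

bus = dbus.SessionBus()
data_store = dbus.Interface(
    bus.get_object('org.laptop.sugar.DataStore',
                   '/org/laptop/sugar/DataStore'),
    'org.laptop.sugar.DataStore')

# Without an explicit timeout, dbus-python falls back to the libdbus
# default (about 25 seconds):
metadata = data_store.get_properties(object_id, byte_arrays=True)

# Per-call override, as used throughout this patch:
metadata = data_store.get_properties(object_id, byte_arrays=True,
                                      timeout=60)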

Address this by increasing the timeout to 1 minute for read operations and
5 minutes for write operations. The values were chosen to roughly match the
patience I believe an average user on a system with slow storage would have.

This still doesn't address the fact that there is no reasonable timeout that
covers _all_ situations, nor does this patch add further support for
asynchronous calls, which would let activities keep their UI responsive while
doing data store operations in the background. The only API change is the
increased default timeout of sugar.datastore.datastore.write().
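
As a usage sketch (again, illustration only, not part of the patch): an
activity updating an existing entry can still pass handlers to get an
asynchronous call, and can override the new 300 s default per call. Here
ds_object is assumed to be an existing DSObject and 600 is an arbitrary
example value:

import logging

from sugar.datastore import datastore

def _write_reply_cb(*args):
    logging.debug('datastore write finished')

def _write_error_cb(error):
    logging.error('datastore write failed: %s', error)

# Asynchronous update of an existing entry (creates are still
# synchronous, see the FIXME further down in the diff).
datastore.write(ds_object,
                reply_handler=_write_reply_cb,
                error_handler=_write_error_cb,
                timeout=600)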

[1] https://bugs.sugarlabs.org/ticket/1936
[2] http://dbus.freedesktop.org/doc/api/html/dbus-connection-internal_8h_source.html#l00045

Signed-off-by: Sascha Silbe <silbe at activitycentral.com>
---
 src/sugar/datastore/datastore.py |   55 +++++++++++++++++++++----------------
 1 files changed, 31 insertions(+), 24 deletions(-)

diff --git a/src/sugar/datastore/datastore.py b/src/sugar/datastore/datastore.py
index ed05634..d09ad93 100644
--- a/src/sugar/datastore/datastore.py
+++ b/src/sugar/datastore/datastore.py
@@ -38,6 +38,8 @@ from sugar import dispatch
 DS_DBUS_SERVICE = 'org.laptop.sugar.DataStore'
 DS_DBUS_INTERFACE = 'org.laptop.sugar.DataStore'
 DS_DBUS_PATH = '/org/laptop/sugar/DataStore'
+DBUS_READ_TIMEOUT = 60
+DBUS_WRITE_TIMEOUT = 300

 _data_store = None

@@ -58,18 +60,21 @@ def _get_data_store():


 def __datastore_created_cb(object_id):
-    metadata = _get_data_store().get_properties(object_id, byte_arrays=True)
+    metadata = _get_data_store().get_properties(object_id, byte_arrays=True,
+                                                timeout=DBUS_READ_TIMEOUT)
     updated.send(None, object_id=object_id, metadata=metadata)


 def __datastore_updated_cb(object_id):
-    metadata = _get_data_store().get_properties(object_id, byte_arrays=True)
+    metadata = _get_data_store().get_properties(object_id, byte_arrays=True,
+                                                timeout=DBUS_READ_TIMEOUT)
     updated.send(None, object_id=object_id, metadata=metadata)


 def __datastore_deleted_cb(object_id):
     deleted.send(None, object_id=object_id)

+
 created = dispatch.Signal()
 deleted = dispatch.Signal()
 updated = dispatch.Signal()
@@ -165,12 +170,13 @@ class DSObject(object):

     def __object_updated_cb(self, object_id):
         properties = _get_data_store().get_properties(self._object_id,
-                                                      byte_arrays=True)
+            byte_arrays=True, timeout=DBUS_READ_TIMEOUT)
         self._metadata.update(properties)

     def get_metadata(self):
         if self._metadata is None and not self.object_id is None:
-            properties = _get_data_store().get_properties(self.object_id)
+            properties = _get_data_store().get_properties(self.object_id,
+                timeout=DBUS_READ_TIMEOUT)
             metadata = DSMetadata(properties)
             self._metadata = metadata
         return self._metadata
@@ -183,7 +189,9 @@ class DSObject(object):

     def get_file_path(self, fetch=True):
         if fetch and self._file_path is None and not self.object_id is None:
-            self.set_file_path(_get_data_store().get_filename(self.object_id))
+            path = _get_data_store().get_filename(self.object_id,
+                                                  timeout=DBUS_READ_TIMEOUT)
+            self.set_file_path(path)
             self._owns_file = True
         return self._file_path

@@ -294,7 +302,8 @@ def get(object_id):
     if object_id.startswith('/'):
         return RawObject(object_id)

-    metadata = _get_data_store().get_properties(object_id, byte_arrays=True)
+    metadata = _get_data_store().get_properties(object_id, byte_arrays=True,
+                                                timeout=DBUS_READ_TIMEOUT)

     ds_object = DSObject(object_id, DSMetadata(metadata), None)
     # TODO: register the object for updates
@@ -313,8 +322,8 @@ def create():
     return DSObject(object_id=None, metadata=metadata, file_path=None)


-def _update_ds_entry(uid, properties, filename, transfer_ownership=False,
-        reply_handler=None, error_handler=None, timeout=-1):
+def _update_ds_entry(uid, properties, filename, transfer_ownership, timeout,
+                     reply_handler=None, error_handler=None):
     debug_properties = properties.copy()
     if 'preview' in debug_properties:
         debug_properties['preview'] = '<omitted>'
@@ -328,17 +337,18 @@ def _update_ds_entry(uid, properties, filename, transfer_ownership=False,
                 timeout=timeout)
     else:
         _get_data_store().update(uid, dbus.Dictionary(properties),
-                                 filename, transfer_ownership)
+                                 filename, transfer_ownership,
+                                 timeout=timeout)


-def _create_ds_entry(properties, filename, transfer_ownership=False):
+def _create_ds_entry(properties, filename, transfer_ownership, timeout):
     object_id = _get_data_store().create(dbus.Dictionary(properties), filename,
-    transfer_ownership)
+                                         transfer_ownership, timeout=timeout)
     return object_id


 def write(ds_object, update_mtime=True, transfer_ownership=False,
-          reply_handler=None, error_handler=None, timeout=-1):
+          reply_handler=None, error_handler=None, timeout=DBUS_WRITE_TIMEOUT):
     """Write the DSObject given to the datastore. Creates a new entry if
     the entry does not exist yet.

@@ -370,19 +380,16 @@ def write(ds_object, update_mtime=True, transfer_ownership=False,
     # FIXME: this func will be sync for creates regardless of the handlers
     # supplied. This is very bad API, need to decide what to do here.
     if ds_object.object_id:
-        _update_ds_entry(ds_object.object_id,
-                         properties,
-                         file_path,
-                         transfer_ownership,
+        _update_ds_entry(ds_object.object_id, properties, file_path,
+                         transfer_ownership, timeout,
                          reply_handler=reply_handler,
-                         error_handler=error_handler,
-                         timeout=timeout)
+                         error_handler=error_handler)
     else:
         if reply_handler or error_handler:
             logging.warning('datastore.write() cannot currently be called' \
                             'async for creates, see ticket 3071')
         ds_object.object_id = _create_ds_entry(properties, file_path,
-                                               transfer_ownership)
+                                               transfer_ownership, timeout)
         ds_object.metadata['uid'] = ds_object.object_id
         # TODO: register the object for updates
     logging.debug('Written object %s to the datastore.', ds_object.object_id)
@@ -396,7 +403,7 @@ def delete(object_id):

     """
     logging.debug('datastore.delete')
-    _get_data_store().delete(object_id)
+    _get_data_store().delete(object_id, timeout=DBUS_WRITE_TIMEOUT)


 def find(query, sorting=None, limit=None, offset=None, properties=None,
@@ -451,11 +458,11 @@ def find(query, sorting=None, limit=None, offset=None, properties=None,
         _get_data_store().find(query, properties,
                                reply_handler=reply_handler,
                                error_handler=error_handler,
-                               byte_arrays=True)
+                               byte_arrays=True, timeout=DBUS_READ_TIMEOUT)
         return
     else:
         entries, total_count = _get_data_store().find(query, properties,
-                                                      byte_arrays=True)
+            byte_arrays=True, timeout=DBUS_READ_TIMEOUT)
     ds_objects = []
     for entry in entries:
         object_id = entry['uid']
@@ -545,5 +552,5 @@ def get_unique_values(key):
     Return: list of activities

     """
-    return _get_data_store().get_uniquevaluesfor(
-                                key, dbus.Dictionary({}, signature='ss'))
+    return _get_data_store().get_uniquevaluesfor(key,
+        dbus.Dictionary({}, signature='ss'), timeout=DBUS_WRITE_TIMEOUT)
--
1.7.9


