[Sugar-devel] [PATCH] Share Journal entries over external devices #1636

simon at schampijer.de simon at schampijer.de
Thu Nov 25 04:51:16 EST 2010


From: Simon Schampijer <simon at schampijer.de>

This adds the ability to share journal entries over an external device. The
metadata and preview of an entry are stored in hidden files. For example
when copying an entry homework.pdf to the device a file called
.homework.pdf.metadata and .homework.pdf.preview will be stored on the
device as well. Those will be read in when copying an entry from the
external device to a Journal.

The approach has been discussed in the ticket #1636 and has been reviewed.
The 0.84 version has been tested as working by the OLPC QA as well;
see OLPC #9657.

This patch should go into master and 0.90 to allow for an update from
0.84 to 0.90.

|TestCase|

* drag a Journal item (one that contains a file, e.g. a Paint drawing) to
the external device - show the content of the external device

* rename the entry on the external device - that should work

* check the contents on the removable device on another
machine (hidden files enabled) - there should be the file
itself, a file .[filename].metadata, and a file .[filename].preview

* insert the device again and copy the entry to your Journal
(drag on the Journal icon) - make sure that this works, the entry
is "resumable" and there is a preview
---
 src/jarabe/journal/model.py |  153 +++++++++++++++++++++++++++++++++++++++---
 1 files changed, 142 insertions(+), 11 deletions(-)

diff --git a/src/jarabe/journal/model.py b/src/jarabe/journal/model.py
index 0773446..8e5bd96 100644
--- a/src/jarabe/journal/model.py
+++ b/src/jarabe/journal/model.py
@@ -19,9 +19,11 @@ import os
 from datetime import datetime
 import time
 import shutil
+import tempfile
 from stat import S_IFMT, S_IFDIR, S_IFREG
 import re
 from operator import itemgetter
+import json
 
 import gobject
 import dbus
@@ -289,8 +291,10 @@ class InplaceResultSet(BaseResultSet):
         files = self._file_list[offset:offset + limit]
 
         entries = []
-        for file_path, stat, mtime_, size_ in files:
-            metadata = _get_file_metadata(file_path, stat)
+        for file_path, stat, mtime_, size_, metadata in files:
+            if metadata is None:
+                # FIXME: the find should fetch metadata
+                metadata = _get_file_metadata(file_path, stat)
             metadata['mountpoint'] = self._mount_point
             entries.append(metadata)
 
@@ -331,11 +335,20 @@ class InplaceResultSet(BaseResultSet):
 
                 elif S_IFMT(stat.st_mode) == S_IFREG:
                     add_to_list = True
+                    metadata = None
 
                     if self._regex is not None and \
                             not self._regex.match(full_path):
                         add_to_list = False
-
+                        metadata = _get_file_metadata_from_json( \
+                            dir_path, entry, preview=False)
+                        if metadata is not None:
+                            for f in ['fulltext', 'title',
+                                      'description', 'tags']:
+                                if f in metadata and \
+                                        self._regex.match(metadata[f]):
+                                    add_to_list = True
+                                    break
                     if None not in [self._date_start, self._date_end] and \
                             (stat.st_mtime < self._date_start or
                              stat.st_mtime > self._date_end):
@@ -348,7 +361,7 @@ class InplaceResultSet(BaseResultSet):
 
                     if add_to_list:
                         file_info = (full_path, stat, int(stat.st_mtime),
-                                     stat.st_size)
+                                     stat.st_size, metadata)
                         self._file_list.append(file_info)
 
                     self.progress.send(self)
@@ -358,6 +371,17 @@ class InplaceResultSet(BaseResultSet):
 
 
 def _get_file_metadata(path, stat):
+    """Returns the metadata from the corresponding file
+    on the external device or does create the metadata
+    based on the file properties.
+
+    """
+    filename = os.path.basename(path)
+    dir_path = os.path.dirname(path)
+    metadata = _get_file_metadata_from_json(dir_path, filename, preview=True)
+    if metadata:
+        return metadata
+
     client = gconf.client_get_default()
     return {'uid': path,
             'title': os.path.basename(path),
@@ -370,6 +394,37 @@ def _get_file_metadata(path, stat):
             'description': path}
 
 
+def _get_file_metadata_from_json(dir_path, filename, preview=False):
+    """Returns the metadata from the json file and the preview
+    stored on the external device.
+
+    """
+    metadata = None
+    metadata_path = os.path.join(dir_path,
+                                 '.' + filename + '.metadata')
+    if os.path.exists(metadata_path):
+        try:
+            metadata = json.load(open(metadata_path))
+        except ValueError:
+            logging.debug("Could not read metadata for file %r on" \
+                              "external device.", filename)
+        else:
+            metadata['uid'] = os.path.join(dir_path, filename)
+    if preview:
+        preview_path = os.path.join(dir_path,
+                                    '.' + filename + '.preview')
+        if os.path.exists(preview_path):
+            try:
+                metadata['preview'] = dbus.ByteArray(open(preview_path).read())
+            except:
+                logging.debug("Could not read preview for file %r on" \
+                                  "external device.", filename)
+    else:
+        if metadata and 'preview' in metadata:
+            del(metadata['preview'])
+    return metadata
+
+
 def _get_datastore():
     global _datastore
     if _datastore is None:
@@ -476,6 +531,16 @@ def delete(object_id):
     """
     if os.path.exists(object_id):
         os.unlink(object_id)
+        dir_path = os.path.dirname(object_id)
+        filename = os.path.basename(object_id)
+        old_files = [os.path.join(dir_path, '.' + filename + '.metadata'),
+                     os.path.join(dir_path, '.' + filename + '.preview')]
+        for old_file in old_files:
+            if os.path.exists(old_file):
+                try:
+                    os.unlink(old_file)
+                except:
+                    pass
         deleted.send(None, object_id=object_id)
     else:
         _get_datastore().delete(object_id)
@@ -513,17 +578,83 @@ def write(metadata, file_path='', update_mtime=True, transfer_ownership=True):
                                                  file_path,
                                                  transfer_ownership)
     else:
-        if not os.path.exists(file_path):
-            raise ValueError('Entries without a file cannot be copied to '
-                             'removable devices')
+        object_id = _write_entry_on_external_device(metadata, file_path)
+
+    return object_id
 
-        file_name = _get_file_name(metadata['title'], metadata['mime_type'])
-        file_name = _get_unique_file_name(metadata['mountpoint'], file_name)
 
+def _write_entry_on_external_device(metadata, file_path):
+    """This creates and updates an entry copied from the
+    DS to external storage device. Besides copying the
+    associated file a hidden file for the preview and one
+    for the metadata are stored. We make sure that the
+    metadata and preview file are in the same directory
+    as the data file.
+
+    This function handles renames of an entry on the
+    external device and avoids name collisions. Renames are
+    handled failsafe.
+
+    """
+    if 'uid' in metadata and os.path.exists(metadata['uid']):
+        file_path = metadata['uid']
+
+    if not file_path or not os.path.exists(file_path):
+        raise ValueError('Entries without a file cannot be copied to '
+                         'removable devices')
+
+    file_name = _get_file_name(metadata['title'], metadata['mime_type'])
+
+    destination_path = os.path.join(metadata['mountpoint'], file_name)
+    if destination_path != file_path:
+        file_name = _get_unique_file_name(metadata['mountpoint'], file_name)
         destination_path = os.path.join(metadata['mountpoint'], file_name)
+        clean_name, extension_ = os.path.splitext(file_name)
+        metadata['title'] = clean_name
+
+    metadata_copy = metadata.copy()
+    del metadata_copy['mountpoint']
+    if 'uid' in metadata_copy:
+        del metadata_copy['uid']
+
+    if 'preview' in metadata_copy:
+        preview = metadata_copy['preview']
+        preview_fname = '.' + file_name + '.preview'
+        preview_path = os.path.join(metadata['mountpoint'], preview_fname)
+        metadata_copy['preview'] = preview_fname
+
+        (fh, fn) = tempfile.mkstemp(dir=metadata['mountpoint'])
+        os.write(fh, preview)
+        os.close(fh)
+        os.rename(fn, preview_path)
+
+    metadata_path = os.path.join(metadata['mountpoint'],
+                                 '.' + file_name + '.metadata')
+    (fh, fn) = tempfile.mkstemp(dir=metadata['mountpoint'])
+    os.write(fh, json.dumps(metadata_copy))
+    os.close(fh)
+    os.rename(fn, metadata_path)
+
+    if os.path.dirname(destination_path) == os.path.dirname(file_path):
+        old_file_path = file_path
+        if old_file_path != destination_path:
+            os.rename(file_path, destination_path)
+            old_fname = os.path.basename(file_path)
+            old_files = [os.path.join(metadata['mountpoint'],
+                                      '.' + old_fname + '.metadata'),
+                         os.path.join(metadata['mountpoint'],
+                                      '.' + old_fname + '.preview')]
+            for ofile in old_files:
+                if os.path.exists(ofile):
+                    try:
+                        os.unlink(ofile)
+                    except:
+                        pass
+    else:
         shutil.copy(file_path, destination_path)
-        object_id = destination_path
-        created.send(None, object_id=object_id)
+        
+    object_id = destination_path
+    created.send(None, object_id=object_id)
 
     return object_id
 
-- 
1.7.2.3



More information about the Sugar-devel mailing list