Merge branch '8784-dir-listings'
[arvados.git] / services / fuse / arvados_fuse / fusefile.py
index efe31c387c09432f3905144c19b5417161d6a1a7..8189a19742b3311f1c720a241e55fa2a30846395 100644 (file)
@@ -1,6 +1,12 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+import json
+import llfuse
 import logging
 import re
-import json
+import time
 
 from fresh import FreshBase, convertTime
 
@@ -18,32 +24,55 @@ class File(FreshBase):
     def size(self):
         return 0
 
-    def readfrom(self, off, size):
+    def readfrom(self, off, size, num_retries=0):
         return ''
 
+    def writeto(self, off, buf, num_retries=0):
+        raise Exception("Not writable")
+
     def mtime(self):
         return self._mtime
 
-    def clear(self, force=False):
-        return True
+    def clear(self):
+        pass
 
+    def writable(self):
+        return False
 
-class StreamReaderFile(File):
-    """Wraps a StreamFileReader as a file."""
+    def flush(self):
+        pass
 
-    def __init__(self, parent_inode, reader, _mtime):
-        super(StreamReaderFile, self).__init__(parent_inode, _mtime)
-        self.reader = reader
+
+class FuseArvadosFile(File):
+    """Wraps a ArvadosFile."""
+
+    def __init__(self, parent_inode, arvfile, _mtime):
+        super(FuseArvadosFile, self).__init__(parent_inode, _mtime)
+        self.arvfile = arvfile
 
     def size(self):
-        return self.reader.size()
+        with llfuse.lock_released:
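+            # The global llfuse lock is released while calling into the
+            # Arvados SDK, so other FUSE requests aren't blocked on I/O;
+            # it is reacquired when the block exits.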
+            return self.arvfile.size()
+
+    def readfrom(self, off, size, num_retries=0):
+        with llfuse.lock_released:
+            return self.arvfile.readfrom(off, size, num_retries, exact=True)
 
-    def readfrom(self, off, size):
-        return self.reader.readfrom(off, size)
+    def writeto(self, off, buf, num_retries=0):
+        with llfuse.lock_released:
+            return self.arvfile.writeto(off, buf, num_retries)
 
     def stale(self):
         return False
 
+    def writable(self):
+        return self.arvfile.writable()
+
+    def flush(self):
+        with llfuse.lock_released:
+            if self.writable():
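+                # Arvados persists changes at the collection level, so
+                # flushing this file saves the entire root collection.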
+                self.arvfile.parent.root_collection().save()
+
 
 class StringFile(File):
     """Wrap a simple string as a file"""
@@ -54,7 +83,7 @@ class StringFile(File):
     def size(self):
         return len(self.contents)
 
-    def readfrom(self, off, size):
+    def readfrom(self, off, size, num_retries=0):
         return self.contents[off:(off+size)]
 
 
@@ -63,9 +92,55 @@ class ObjectFile(StringFile):
 
     def __init__(self, parent_inode, obj):
         super(ObjectFile, self).__init__(parent_inode, "", 0)
-        self.uuid = obj['uuid']
+        self.object_uuid = obj['uuid']
         self.update(obj)
 
-    def update(self, obj):
+    def uuid(self):
+        return self.object_uuid
+
+    def update(self, obj=None):
+        if obj is None:
+            # TODO: retrieve the current record for self.object_uuid
+            # from the server. For now, at least don't crash when
+            # someone tells us it's a good time to update but doesn't
+            # pass us a fresh obj. See #8345
+            return
         self._mtime = convertTime(obj['modified_at']) if 'modified_at' in obj else 0
         self.contents = json.dumps(obj, indent=4, sort_keys=True) + "\n"
+
+    def persisted(self):
+        return True
+
+
+class FuncToJSONFile(StringFile):
+    """File content is the return value of a given function, encoded as JSON.
+
+    The function is called at the time the file is read. The result is
+    cached until invalidate() is called.
+    """
+    def __init__(self, parent_inode, func):
+        super(FuncToJSONFile, self).__init__(parent_inode, "", 0)
+        self.func = func
+
+        # invalidate_inode() and invalidate_entry() are asynchronous
+        # with no callback to wait for. In order to guarantee
+        # userspace programs don't get stale data that was generated
+        # before the last invalidate(), we must disallow dirent
+        # caching entirely.
+        self.allow_dirent_cache = False
+
+    def size(self):
+        self._update()
+        return super(FuncToJSONFile, self).size()
+
+    def readfrom(self, *args, **kwargs):
+        self._update()
+        return super(FuncToJSONFile, self).readfrom(*args, **kwargs)
+
+    def _update(self):
+        if not self.stale():
+            return
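+        # Regenerate the contents and mark this entry fresh so reads are
+        # served from cache until the next invalidate().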
+        self._mtime = time.time()
+        obj = self.func()
+        self.contents = json.dumps(obj, indent=4, sort_keys=True) + "\n"
+        self.fresh()