X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/d2f68bd1e108c3f2dda2322c427050d019b17e04..33e4f1574f5bd14784c65863739478ff864732d2:/services/fuse/arvados_fuse/fusefile.py

diff --git a/services/fuse/arvados_fuse/fusefile.py b/services/fuse/arvados_fuse/fusefile.py
index d3c13f3a16..45d3db16fe 100644
--- a/services/fuse/arvados_fuse/fusefile.py
+++ b/services/fuse/arvados_fuse/fusefile.py
@@ -1,14 +1,24 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+from __future__ import absolute_import
+from builtins import bytes
+import json
+import llfuse
 import logging
 import re
-import json
+import time
 
-from fresh import FreshBase, convertTime
+from .fresh import FreshBase, convertTime
 
 _logger = logging.getLogger('arvados.arvados_fuse')
 
 class File(FreshBase):
     """Base for file objects."""
 
+    __slots__ = ("inode", "parent_inode", "_mtime")
+
     def __init__(self, parent_inode, _mtime=0):
         super(File, self).__init__()
         self.inode = None
@@ -27,8 +37,8 @@ class File(FreshBase):
     def mtime(self):
         return self._mtime
 
-    def clear(self, force=False):
-        return True
+    def clear(self):
+        pass
 
     def writable(self):
         return False
@@ -36,31 +46,39 @@ class File(FreshBase):
     def flush(self):
         pass
 
+
 class FuseArvadosFile(File):
     """Wraps a ArvadosFile."""
 
-    def __init__(self, parent_inode, arvfile, _mtime):
+    __slots__ = ('arvfile', '_enable_write')
+
+    def __init__(self, parent_inode, arvfile, _mtime, enable_write):
         super(FuseArvadosFile, self).__init__(parent_inode, _mtime)
         self.arvfile = arvfile
+        self._enable_write = enable_write
 
     def size(self):
-        return self.arvfile.size()
+        with llfuse.lock_released:
+            return self.arvfile.size()
 
     def readfrom(self, off, size, num_retries=0):
-        return self.arvfile.readfrom(off, size, num_retries, exact=True)
+        with llfuse.lock_released:
+            return self.arvfile.readfrom(off, size, num_retries, exact=True)
 
     def writeto(self, off, buf, num_retries=0):
-        return self.arvfile.writeto(off, buf, num_retries)
+        with llfuse.lock_released:
+            return self.arvfile.writeto(off, buf, num_retries)
 
     def stale(self):
         return False
 
     def writable(self):
-        return self.arvfile.writable()
+        return self._enable_write and self.arvfile.writable()
 
     def flush(self):
-        if self.writable():
-            self.arvfile.parent.root_collection().save()
+        with llfuse.lock_released:
+            if self.writable():
+                self.arvfile.parent.root_collection().save()
 
 
 class StringFile(File):
@@ -73,7 +91,7 @@ class StringFile(File):
         return len(self.contents)
 
     def readfrom(self, off, size, num_retries=0):
-        return self.contents[off:(off+size)]
+        return bytes(self.contents[off:(off+size)], encoding='utf-8')
 
 
 class ObjectFile(StringFile):
@@ -81,9 +99,54 @@ class ObjectFile(StringFile):
 
     def __init__(self, parent_inode, obj):
         super(ObjectFile, self).__init__(parent_inode, "", 0)
-        self.uuid = obj['uuid']
+        self.object_uuid = obj['uuid']
         self.update(obj)
 
-    def update(self, obj):
+    def uuid(self):
+        return self.object_uuid
+
+    def update(self, obj=None):
+        if obj is None:
+            # TODO: retrieve the current record for self.object_uuid
+            # from the server. For now, at least don't crash when
+            # someone tells us it's a good time to update but doesn't
+            # pass us a fresh obj. See #8345
+            return
         self._mtime = convertTime(obj['modified_at']) if 'modified_at' in obj else 0
         self.contents = json.dumps(obj, indent=4, sort_keys=True) + "\n"
+
+    def persisted(self):
+        return True
+
+
+class FuncToJSONFile(StringFile):
+    """File content is the return value of a given function, encoded as JSON.
+
+    The function is called at the time the file is read. The result is
+    cached until invalidate() is called.
+    """
+    def __init__(self, parent_inode, func):
+        super(FuncToJSONFile, self).__init__(parent_inode, "", 0)
+        self.func = func
+
+        # invalidate_inode() is asynchronous with no callback to wait for. In
+        # order to guarantee userspace programs don't get stale data that was
+        # generated before the last invalidate(), we must disallow inode
+        # caching entirely.
+        self.allow_attr_cache = False
+
+    def size(self):
+        self._update()
+        return super(FuncToJSONFile, self).size()
+
+    def readfrom(self, *args, **kwargs):
+        self._update()
+        return super(FuncToJSONFile, self).readfrom(*args, **kwargs)
+
+    def _update(self):
+        if not self.stale():
+            return
+        self._mtime = time.time()
+        obj = self.func()
+        self.contents = json.dumps(obj, indent=4, sort_keys=True) + "\n"
+        self.fresh()
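For reference, the sketch below is a self-contained, simplified illustration of the read-path caching pattern that the new FuncToJSONFile class implements: the JSON content is regenerated from the supplied function only when the cached copy has been invalidated. It is not the Arvados code itself; the SimpleFuncToJSONFile class, its _stale flag, and the example status() producer are hypothetical stand-ins for the stale()/fresh()/invalidate() bookkeeping that FreshBase provides to the real class.

```python
import json
import time


class SimpleFuncToJSONFile:
    """Minimal stand-in for FuncToJSONFile: content is func()'s return value as JSON.

    The result is cached until invalidate() is called, mirroring the
    stale()/fresh() bookkeeping that FreshBase supplies in the real code.
    """

    def __init__(self, func):
        self.func = func
        self.contents = ""
        self._mtime = 0
        self._stale = True  # hypothetical flag; FreshBase tracks freshness for real

    def invalidate(self):
        self._stale = True

    def _update(self):
        # Regenerate only when the cached copy has been invalidated.
        if not self._stale:
            return
        self._mtime = time.time()
        obj = self.func()
        self.contents = json.dumps(obj, indent=4, sort_keys=True) + "\n"
        self._stale = False

    def size(self):
        self._update()
        return len(self.contents)

    def readfrom(self, off, size):
        self._update()
        return bytes(self.contents[off:(off + size)], encoding='utf-8')


if __name__ == "__main__":
    # Hypothetical producer standing in for whatever callable the caller passes.
    state = {"calls": 0}

    def status():
        state["calls"] += 1
        return {"calls": state["calls"]}

    f = SimpleFuncToJSONFile(status)
    print(f.readfrom(0, f.size()))  # generates content once
    print(f.readfrom(0, f.size()))  # served from cache; status() not called again
    f.invalidate()
    print(f.readfrom(0, f.size()))  # regenerated after invalidate()
```

The same trade-off motivates the `allow_attr_cache = False` line in the diff: because kernel-side invalidation is asynchronous, the class refuses attribute caching entirely rather than risk serving data generated before the last invalidate().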