
Added caching to FileStorage

Adolfo Gómez García 2016-04-06 06:02:51 +02:00
parent baf4a677dd
commit 9c70fb3caf


@@ -30,15 +30,18 @@
 '''
 @author: Adolfo Gómez, dkmaster at dkmon dot com
 '''
-# pylint: disable=no-name-in-module,import-error
+# pylint: disable=no-name-in-module,import-error, maybe-no-member
 from __future__ import unicode_literals
 
+from django.core.cache import caches
 from django.core.files import File
 from django.core.files.storage import Storage
-from uds.models.DBFile import DBFile
 from django.conf import settings
 from six.moves.urllib import parse as urlparse  # @UnresolvedImport
+from uds.models.DBFile import DBFile
+from .tools import DictAsObj
 
 import six
 import os
 import logging
@@ -52,16 +55,66 @@ class FileStorage(Storage):
         if self._base_url[-1] != '/':
             self._base_url += '/'
 
+        cacheName = getattr(settings, 'FILE_CACHE', 'memory')
+
+        try:
+            cache = caches[cacheName]
+        except:
+            cache = None
+
+        self.cache = cache
+
         Storage.__init__(self, *args, **kwargs)
 
     def get_valid_name(self, name):
         return name.replace('\\', os.path.sep)
 
     def _file(self, name):
         return DBFile.objects.get(name=self.get_valid_name(name))
 
+    def _getKey(self, name):
+        '''
+        We have only a few files on db, and we are running on a 64-bit system;
+        memcached does not allow keys bigger than 250 chars, so we are going to
+        use hash() to get a key for this name.
+        '''
+        return 'fstor' + six.text_type(hash(self.get_valid_name(name)))
+
+    def _dbFileForReadOnly(self, name):
+        # If we have a cache, and the cache contains the object, return the cached copy
+        if self.cache is not None:
+            dbf = self.cache.get(self._getKey(name))
+            if dbf is not None:
+                return dbf
+        return self._dbFileForReadWrite(name)
+
+    def _dbFileForReadWrite(self, name):
+        f = DBFile.objects.get(name=self.get_valid_name(name))
+        self._storeInCache(f)
+        return f
+
+    def _storeInCache(self, f):
+        if self.cache is None:
+            return
+        dbf = DictAsObj({
+            'name': f.name,
+            'uuid': f.uuid,
+            'size': f.size,
+            'data': f.data,
+            'created': f.created,
+            'modified': f.modified
+        })
+        self.cache.set(self._getKey(f.name), dbf, 3600)  # Cache defaults to one hour
+
+    def _removeFromCache(self, name):
+        if self.cache is None:
+            return
+        self.cache.delete(self._getKey(name))
+
     def _open(self, name, mode='rb'):
-        f = six.BytesIO(self._file(name).data)
+        f = six.BytesIO(self._dbFileForReadOnly(name).data)
         f.name = name
         f.mode = mode
         return File(f)
@@ -69,36 +122,41 @@ class FileStorage(Storage):
     def _save(self, name, content):
         name = self.get_valid_name(name)
 
         try:
-            file = self._file(name)
+            f = self._dbFileForReadWrite(name)
         except DBFile.DoesNotExist:
-            file = DBFile.objects.create(name=name)
+            f = DBFile.objects.create(name=name)
 
-        file.data = content.read()
-        file.save()
+        f.data = content.read()
+        f.save()
+
+        # Store on cache also
+        self._storeInCache(f)
 
         return name
 
     def accessed_time(self, name):
         raise NotImplementedError
 
     def created_time(self, name):
-        return self._file(name).created
+        return self._dbFileForReadOnly(name).created
 
     def modified_time(self, name):
-        return self._file(name).modified
+        return self._dbFileForReadOnly(name).modified
 
     def size(self, name):
-        return self._file(name).size
+        return self._dbFileForReadOnly(name).size
 
     def delete(self, name):
-        self._file(name).delete()
+        self._dbFileForReadWrite(name).delete()
+        self._removeFromCache(name)
 
     def exists(self, name):
         try:
-            self._file(name)
+            self._dbFileForReadOnly(name)
             return True
         except DBFile.DoesNotExist:
             return False
 
     def url(self, name):
-        uuid = self._file(name).uuid
+        uuid = self._dbFileForReadWrite(name).uuid
         return urlparse.urljoin(self._base_url, uuid)
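
For context, a brief sketch of how the new cache hook is expected to be wired up on the Django side. The constructor looks up getattr(settings, 'FILE_CACHE', 'memory') in django.core.cache.caches; if no cache alias with that name is configured, the exception is swallowed, self.cache stays None, and every call falls through to DBFile exactly as before this commit. The settings excerpt and the usage notes below are hypothetical (the alias names, backend choice, file names and import path are assumptions, not part of this commit):

# Hypothetical Django settings excerpt -- NOT part of this commit.
# FileStorage reads getattr(settings, 'FILE_CACHE', 'memory') and looks the
# alias up in django.core.cache.caches; if the alias is missing, the
# constructor silently disables caching (self.cache is None).

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
    # Alias consumed by FileStorage ('memory' is the default alias name)
    'memory': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        # Note: _storeInCache() passes an explicit 3600s timeout to cache.set(),
        # so no per-alias TIMEOUT is required here.
    },
}

FILE_CACHE = 'memory'  # optional; only needed to point FileStorage at another alias

# Hypothetical usage through the standard Storage API (import path and file
# name are assumptions):
#
#   from django.core.files.base import ContentFile
#   from uds.core.util.FileStorage import FileStorage
#
#   fs = FileStorage()
#   name = fs.save('images/logo.png', ContentFile(b'data'))  # _save() also fills the cache
#   data = fs.open(name).read()   # read path hits the cache via _dbFileForReadOnly()
#   fs.delete(name)               # removes the DB row and evicts the cache entry

Cached entries are plain DictAsObj snapshots of the DBFile row with a one-hour TTL, so the write paths (_save, delete, url) go through _dbFileForReadWrite and _removeFromCache to keep the cache and the database in sync.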