Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • 8420
  • 8813_refactor_doc_id_rev
  • 8908
  • 8942
  • 8944
  • 8948
  • 8979
  • 8995-8996-8997
  • 9004
  • 9006
  • 9010
  • 9023
  • 9028
  • allow-offline
  • attachments-api
  • blobs-upload-download
  • feature/add-host-hostname-to-benchmarking-machine-info
  • feature/avoid-starting-up-if-not-migrated
  • feature/encrypt-doc-id
  • feature/gen-perf-stats
  • feature/improve-missing-couch-config-doc-error-logging
  • feature/migrate-cleans-up-transactions-from-documents
  • fix-db-copying
  • fix-fs-backend-benchmarks
  • fix-logging
  • improve-server-startup
  • master
  • move-sphinx-up
  • tests/perf-stats
  • 0.2.1
  • 0.2.2
  • 0.2.3
  • 0.3.0
  • 0.3.1
  • 0.3.2
  • 0.4.0
  • 0.4.1
  • 0.4.2
  • 0.4.3
  • 0.4.4
  • 0.4.5
  • 0.5.0
  • 0.5.1
  • 0.5.2
  • 0.6.0
  • 0.6.1
  • 0.6.2
  • 0.6.3
  • 0.6.4
  • 0.6.5
  • 0.7.0
  • 0.7.1
  • 0.7.2
  • 0.7.3
  • 0.7.4
  • 0.8.0
  • 0.8.1
  • 0.9.0
  • 0.9.0rc1
  • 0.9.1
  • 0.9.2
  • 0.9.3
  • 0.9.5
  • 0.9.6
  • 0.9.6post1
  • 0.9.6post2
66 results

Target

Select target project
  • drebs/soledad
  • shyba/soledad
  • kali/soledad
  • micah/soledad
  • efkin/soledad
  • vdegou/soledad
  • cyberdrudge/soledad
  • jrabbit/soledad
8 results
Select Git revision
  • 8420
  • 8813_refactor_doc_id_rev
  • 8908
  • 8942
  • 8944
  • 8948
  • 8979
  • 8995-8996-8997
  • 9004
  • 9006
  • 9010
  • 9023
  • 9028
  • allow-offline
  • attachments-api
  • blobs-upload-download
  • feature/add-host-hostname-to-benchmarking-machine-info
  • feature/avoid-starting-up-if-not-migrated
  • feature/encrypt-doc-id
  • feature/gen-perf-stats
  • feature/improve-missing-couch-config-doc-error-logging
  • feature/migrate-cleans-up-transactions-from-documents
  • fix-db-copying
  • fix-fs-backend-benchmarks
  • fix-logging
  • improve-server-startup
  • master
  • move-sphinx-up
  • tests/perf-stats
  • 0.2.1
  • 0.2.2
  • 0.2.3
  • 0.3.0
  • 0.3.1
  • 0.3.2
  • 0.4.0
  • 0.4.1
  • 0.4.2
  • 0.4.3
  • 0.4.4
  • 0.4.5
  • 0.5.0
  • 0.5.1
  • 0.5.2
  • 0.6.0
  • 0.6.1
  • 0.6.2
  • 0.6.3
  • 0.6.4
  • 0.6.5
  • 0.7.0
  • 0.7.1
  • 0.7.2
  • 0.7.3
  • 0.7.4
  • 0.8.0
  • 0.8.1
  • 0.9.0
  • 0.9.0rc1
  • 0.9.1
  • 0.9.2
  • 0.9.3
  • 0.9.5
  • 0.9.6
  • 0.9.6post1
  • 0.9.6post2
66 results
Show changes
Commits on Source (2)
......@@ -12,6 +12,8 @@ from urlparse import urljoin
from leap.soledad.client import Soledad
from leap.soledad.common.couch import CouchDatabase
from stats import timed
# we have to manually setup the events server in order to be able to signal
# events. This is usually done by the enclosing application using soledad
......@@ -195,3 +197,20 @@ def soledad_client(tmpdir, soledad_server, user_db, soledad_dbs):
cert_file=None,
auth_token=token,
defer_encryption=True)
#
# timeit fixture
#
@pytest.fixture
def timeit(monkeypatch):
    """
    A fixture to time the execution of arbitrary attributes of objects.
    """
    def _patch_with_timer(name, obj, attrname):
        # wrap the current attribute with a timing decorator and install
        # it via monkeypatch so it is restored after the test finishes
        original = getattr(obj, attrname)
        monkeypatch.setattr(obj, attrname, timed(name, original))
    return _patch_with_timer
# stats.py
"""
Statistics related functions: timing, graphite publishing, etc.
"""
import functools
import os
import socket
import subprocess
import time
from twisted.logger import Logger
log = Logger()
#
# timed
#
def timed(name, f):
    """
    A wrapper for timing the execution of an arbitrary function `f`.

    :param name: The stat name under which the elapsed time is published.
    :type name: str
    :param f: The callable to be timed.
    :type f: callable

    :return: A wrapper around `f` that measures its wall-clock run time,
             publishes it via `publish_stats`, and returns `f`'s result.
    :rtype: callable
    """
    # functools.wraps preserves f's name and docstring so the wrapped
    # callable remains recognizable in logs, debuggers and monkeypatching.
    @functools.wraps(f)
    def _wrapper(*args, **kwargs):
        start = time.time()
        res = f(*args, **kwargs)
        elapsed = time.time() - start
        # timing is published only on success, matching timed_deferred
        publish_stats(name, elapsed)
        return res
    return _wrapper
#
# timed deferred
#
#
# timed deferred
#
def timed_deferred(name, d):
    """
    A wrapper for timing the execution of an arbitrary deferred `d`.

    The start/end/elapsed times are stashed on the deferred itself and
    the elapsed time is published under `name` when the deferred fires
    successfully. Errors pass through untimed.
    """
    d._start_time = time.time()

    def _record(result):
        d._end_time = time.time()
        d._elapsed_time = d._end_time - d._start_time
        publish_stats(name, d._elapsed_time)
        return result

    # only runs on the success path; failures propagate unchanged
    d.addCallback(_record)
    return d
#
# stats publishing helper functions
#
# Address of the graphite/carbon plaintext listener that `publish_stats`
# writes to. Assumes a collector running on the local machine -- TODO
# confirm against the benchmarking deployment.
GRAPHITE_HOST = 'localhost'
GRAPHITE_PORT = 2003  # carbon plaintext protocol default port
def _get_git_branch_name(path):
    """
    Return the name of the git branch currently checked out in `path`.

    Falls back to the literal string 'error' on any failure so the
    graphite namespace never ends up with a missing component.
    """
    cmd = ['git', '-C', path, 'rev-parse', '--abbrev-ref', 'HEAD']
    try:
        return subprocess.check_output(cmd).strip()
    except Exception as e:
        log.error("error getting git branch name: %s" % e)
        return 'error'  # avoid messing up the namespace
def _get_git_commit_id(path):
    """
    Return the id of the git commit currently checked out in `path`.

    Falls back to the literal string "error" on any failure so the
    graphite namespace never ends up with a missing component.
    """
    cmd = ['git', '-C', path, 'rev-parse', 'HEAD']
    try:
        return subprocess.check_output(cmd).strip()
    except Exception as e:
        log.error("error getting git commit id: %s" % e)
        return "error"  # avoid messing up the namespace
def _add_graphite_namespace(name):
    """
    Add the application namespace to the given stat `name`.

    The namespace encodes the short hostname plus the git branch and
    commit of this repository checkout, so graphite series from
    different machines/revisions stay separate.
    """
    # short hostname only (everything before the first dot)
    hostname = socket.gethostname().split('.')[0]
    repo_path = os.path.dirname(os.path.realpath(__file__))
    return 'soledad.client.perf.%s.%s@%s.%s' % (
        hostname,
        _get_git_branch_name(repo_path),
        _get_git_commit_id(repo_path),
        name)
def publish_stats(name, value):
    """
    Publishes a value to graphite, under the application namespace.

    Sending is best-effort: any network error is swallowed so that
    stats publishing never breaks the instrumented code.

    :param name: The stat name (namespaced before sending).
    :type name: str
    :param value: The value to publish.
    :type value: int or float
    """
    log.info('[stats] {name!s}: {value!r}', name=name, value=value)
    name = _add_graphite_namespace(name)
    timestamp = int(time.time())
    msg = '%s %s %d\n' % (name, value, timestamp)
    sock = socket.socket()
    try:
        sock.connect((GRAPHITE_HOST, GRAPHITE_PORT))
        # sendall retries until the whole message is written; plain
        # send may transmit only a prefix of the message
        sock.sendall(msg)
    except socket.error:
        pass  # give up in case of any error. maybe log?
    finally:
        # always release the socket, even when connect/send fails;
        # previously a failed connect leaked the file descriptor
        sock.close()
import json
import os
import time
from leap.soledad.client import crypto
from stats import publish_stats
# test params
content = ' ' * 10000000  # ~10 MB payload (1 byte per char)
repeat = 100  # number of encrypt/decrypt iterations per benchmark
# crypto params and objects
key = os.urandom(32)  # 256 bits long
secret = os.urandom(64)  # 512 bits long
# plaintext document and its encrypted dict form, built once at import
# time and reused by both benchmarks below
docstr = json.dumps({'content': content})
doc_dict = json.loads(
    crypto.encrypt_docstr(docstr, 'an-id', 'a-rev', key, secret))
def test_encrypt(soledad_client, request, timeit):
    """
    Benchmark document encryption and publish the mean time per run.
    """
    total = 0
    for _ in xrange(repeat):
        begin = time.time()
        crypto.encrypt_docstr(docstr, 'an-id', 'a-rev', key, secret)
        total += time.time() - begin
    publish_stats('encrypt_time', total / repeat)
def test_decrypt(soledad_client, request, timeit):
    """
    Benchmark document decryption and publish the mean time per run.
    """
    total = 0
    for _ in xrange(repeat):
        begin = time.time()
        crypto.decrypt_doc_dict(doc_dict, 'an-id', 'a-rev', key, secret)
        total += time.time() - begin
    publish_stats('decrypt_time', total / repeat)
......@@ -4,13 +4,20 @@ from twisted.internet.defer import gatherResults
from leap.soledad.common.couch import CouchDatabase
from leap.soledad.common.document import ServerDocument
from leap.soledad.client import encdecpool
from stats import timed_deferred
content = ' ' * 10000
content = ' ' * 10000 # 10 Kb
@pytest.inlineCallbacks
def test_upload(soledad_client, request):
def test_upload(soledad_client, request, timeit):
# time specific methods
timeit("encrypt_doc_time", encdecpool, 'encrypt_doc_task')
timeit("decrypt_doc_time", encdecpool, 'decrypt_doc_task')
# create a bunch of local documents
uploads = request.config.option.num_docs
deferreds = []
......@@ -20,7 +27,7 @@ def test_upload(soledad_client, request):
yield gatherResults(deferreds)
# synchronize
yield soledad_client.sync()
yield timed_deferred('upload_time', soledad_client.sync())
# check that documents reached the remote database
url = request.config.getoption('--couch-url')
......@@ -30,18 +37,25 @@ def test_upload(soledad_client, request):
@pytest.inlineCallbacks
def test_download(soledad_client, request):
def test_download(soledad_client, request, timeit):
# time specific methods
timeit("encrypt_doc_time", encdecpool, 'encrypt_doc_task')
timeit("decrypt_doc_time", encdecpool, 'decrypt_doc_task')
# create a bunch of remote documents
downloads = request.config.option.num_docs
url = request.config.getoption('--couch-url')
remote = CouchDatabase(url, 'user-0')
for i in xrange(downloads):
doc = ServerDocument('doc-%d' % i, 'replica:1')
# TODO: encrypt documents before saving, otherwise the client
# decryption functions will not be triggered and we will not be
# accounting for decryption time in this test.
doc.content = {'download': True, 'content': content}
remote.save_document(None, doc, i)
# synchronize
yield soledad_client.sync()
yield timed_deferred('download_time', soledad_client.sync())
# check that documents reached the local database
local_count, docs = yield soledad_client.get_all_docs()
......