wlauto.workloads.caffeinemark package

Module contents

class wlauto.workloads.caffeinemark.Caffeinemark(device, **kwargs)[source]

Bases: wlauto.common.android.workload.AndroidUiAutoBenchmark

activity = '.Application'
aliases = AC([])
artifacts = AC([])
core_modules = []
description =

    CaffeineMark is a series of tests that measure the speed of Java
    programs running in various hardware and software configurations.

    http://www.benchmarkhq.ru/cm30/info.html

    From the website:

    CaffeineMark scores roughly correlate with the number of Java
    instructions executed per second, and do not depend significantly on
    the amount of memory in the system or on the speed of a computer's
    disk drives or internet connection.

    The following is a brief description of what each test does:

    - Sieve: The classic Sieve of Eratosthenes finds prime numbers.
    - Loop: The loop test uses sorting and sequence generation to measure
      compiler optimization of loops.
    - Logic: Tests the speed with which the virtual machine executes
      decision-making instructions.
    - Method: The Method test executes recursive function calls to see how
      well the VM handles method calls.
    - Float: Simulates a 3D rotation of objects around a point.
    - Graphics: Draws random rectangles and lines.
    - Image: Draws a sequence of three graphics repeatedly.
    - Dialog: Writes a set of values into labels and editboxes on a form.

    The overall CaffeineMark score is the geometric mean of the individual
    scores, i.e., it is the 9th root of the product of all the scores.
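To make the aggregation concrete, the following is a minimal Python sketch of the geometric-mean calculation described above. The sub-test scores are hypothetical placeholder values, not results produced by the benchmark.

    from functools import reduce
    import operator

    # Hypothetical sub-test scores; real values come from a benchmark run.
    subtest_scores = {
        'Sieve': 11500, 'Loop': 23000, 'Logic': 18000, 'Method': 16000,
        'Float': 14000, 'Graphics': 9000, 'Image': 21000, 'Dialog': 8000,
    }

    # Geometric mean: the n-th root of the product of the n scores.
    product = reduce(operator.mul, subtest_scores.values(), 1)
    overall_score = product ** (1.0 / len(subtest_scores))
    print('OverallScore: %.0f' % overall_score)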
finalize(*args, **kwargs)
initialize(*args, **kwargs)
kind = 'workload'
name = 'caffeinemark'
package = 'com.flexycore.caffeinemark'
parameters = AC(["Param({'kind': <type 'list'>, 'mandatory': None, 'name': 'modules', 'constraint': None, 'default': None, 'allowed_values': None, 'global_alias': None, 'override': False})", "Param({'kind': <function integer>, 'mandatory': None, 'name': 'install_timeout', 'constraint': None, 'default': 300, 'allowed_values': None, 'global_alias': None, 'override': False})", "Param({'kind': <function boolean>, 'mandatory': None, 'name': 'check_apk', 'constraint': None, 'default': True, 'allowed_values': None, 'global_alias': None, 'override': False})", "Param({'kind': <function boolean>, 'mandatory': None, 'name': 'force_install', 'constraint': None, 'default': False, 'allowed_values': None, 'global_alias': None, 'override': False})", "Param({'kind': <function boolean>, 'mandatory': None, 'name': 'uninstall_apk', 'constraint': None, 'default': False, 'allowed_values': None, 'global_alias': None, 'override': False})", "Param({'kind': <function boolean>, 'mandatory': None, 'name': 'exact_abi', 'constraint': None, 'default': False, 'allowed_values': None, 'global_alias': None, 'override': False})", "Param({'kind': <function boolean>, 'mandatory': None, 'name': 'clear_data_on_reset', 'constraint': None, 'default': True, 'allowed_values': None, 'global_alias': None, 'override': False})"])
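For orientation, the snippet below is a hedged sketch of how parameters of this kind are declared in a wlauto extension. Most of the entries above are inherited from the Android workload base classes, so this is an illustration of the declaration style rather than a copy of this workload's source; the class name ExampleWorkload and the import paths are assumptions.

    from wlauto import Workload, Parameter
    from wlauto.utils.types import boolean, integer  # assumed import path


    class ExampleWorkload(Workload):  # illustrative only, not the real class

        name = 'example'

        parameters = [
            Parameter('install_timeout', kind=integer, default=300,
                      description='Time (seconds) to wait for APK installation.'),
            Parameter('check_apk', kind=boolean, default=True),
            Parameter('force_install', kind=boolean, default=False),
            Parameter('uninstall_apk', kind=boolean, default=False),
        ]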
regex = <compiled regular expression pattern>
summary_metrics = ['OverallScore']
update_result(context)[source]
validate(*args, **kwargs)
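The regex attribute and update_result(context) together turn benchmark output into WA metrics, with OverallScore reported as the summary metric. The snippet below is a minimal, self-contained sketch of that extraction pattern, assuming the scores appear as lines in a captured logcat dump; the log-line format and the regular expression are assumptions, not taken from the workload's actual source.

    import re

    # Hypothetical logcat line format; the real pattern is the compiled
    # `regex` attribute listed above.
    SCORE_REGEX = re.compile(r'(?P<metric>[A-Za-z]+) test score:\s+(?P<value>\d+)')

    def extract_scores(logcat_text):
        """Return (metric, value) pairs found in a logcat dump."""
        return [(m.group('metric'), int(m.group('value')))
                for m in SCORE_REGEX.finditer(logcat_text)]

    sample = ('I/CaffeineMark: Sieve test score:  11500\n'
              'I/CaffeineMark: OverallScore test score:  14200\n')
    for metric, value in extract_scores(sample):
        print(metric, value)  # would be reported via context.result.add_metric()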