wlauto.workloads.telemetry package

Module contents

class wlauto.workloads.telemetry.Telemetry(device, **kwargs)[source]

Bases: wlauto.core.workload.Workload

aliases = AC([])
artifacts = AC([])
build_command()[source]
core_modules = []
description = "\n Executes Google's Telemetery benchmarking framework\n\n Url: https://www.chromium.org/developers/telemetry\n\n From the web site:\n\n Telemetry is Chrome's performance testing framework. It allows you to\n perform arbitrary actions on a set of web pages and report metrics about\n it. The framework abstracts:\n\n - Launching a browser with arbitrary flags on any platform.\n - Opening a tab and navigating to the page under test.\n - Fetching data via the Inspector timeline and traces.\n - Using Web Page Replay to cache real-world websites so they don't\n change when used in benchmarks.\n\n Design Principles\n\n - Write one performance test that runs on all platforms - Windows, Mac,\n Linux, Chrome OS, and Android for both Chrome and ContentShell.\n - Runs on browser binaries, without a full Chromium checkout, and without\n having to build the browser yourself.\n - Use WebPageReplay to get repeatable test results.\n - Clean architecture for writing benchmarks that keeps measurements and\n use cases separate.\n - Run on non-Chrome browsers for comparative studies.\n\n This instrument runs telemetry via its ``run_benchmark`` script (which\n must be in PATH or specified using ``run_benchmark_path`` parameter) and\n parses metrics from the resulting output.\n\n **device setup**\n\n The device setup will depend on whether you're running a test image (in\n which case little or no setup should be necessary)\n\n\n "
finalize(*args, **kwargs)
initialize(*args, **kwargs)
kind = 'workload'
name = 'telemetry'
parameters:

    modules               (list,    default: None)
    run_benchmark_path    (str,     default: None)
    test                  (str,     default: 'page_cycler.top_10_mobile')
    run_benchmark_params  (str,     default: '')
    run_timeout           (integer, default: 900)
    extract_fps           (boolean, default: False)
    target_config         (str,     default: None)

    (See the sketch after this class listing for how these parameters might be
    combined into a ``run_benchmark`` invocation.)
run(context)[source]
setup(context)[source]
supported_platforms = ['android', 'chromeos']
update_result(context)[source]
validate(*args, **kwargs)
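
As a rough illustration of how the parameters above fit together, the sketch
below assembles and runs a ``run_benchmark`` command line. This is a minimal,
hypothetical approximation rather than the workload's actual setup()/run()
implementation; the helper name and the direct use of subprocess are
assumptions made for illustration only.

    import shlex
    import subprocess

    def build_run_benchmark_command(run_benchmark_path, test, run_benchmark_params):
        # Assumption: fall back to a run_benchmark found on PATH when no explicit
        # path is configured, then append the test name and any extra parameters.
        binary = run_benchmark_path or 'run_benchmark'
        return [binary, test] + shlex.split(run_benchmark_params)

    # Example using the documented defaults (test and run_timeout listed above).
    cmd = build_run_benchmark_command(None, 'page_cycler.top_10_mobile', '')
    proc = subprocess.run(cmd, capture_output=True, text=True, timeout=900)
    print(proc.stdout)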
class wlauto.workloads.telemetry.TelemetryResult(kind=None, url=None, values=None, units=None)[source]

Bases: object

average
rows
std
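
The attributes above suggest a simple container keyed by metric kind and page
URL, with ``average`` and ``std`` derived from the collected values. The
following is a minimal sketch under that assumption; it is not the actual
TelemetryResult implementation, and the ``rows`` layout shown is hypothetical.

    from math import sqrt

    class TelemetryResultSketch(object):
        # Illustrative stand-in for TelemetryResult: kind/url/values/units mirror
        # the documented constructor; average and std are derived from values.
        def __init__(self, kind=None, url=None, values=None, units=None):
            self.kind = kind
            self.url = url
            self.values = values or []
            self.units = units

        @property
        def average(self):
            return sum(self.values) / len(self.values) if self.values else 0

        @property
        def std(self):
            if not self.values:
                return 0
            mean = self.average
            return sqrt(sum((v - mean) ** 2 for v in self.values) / len(self.values))

        @property
        def rows(self):
            # Assumption: one row per raw value, suitable for tabular output.
            return [[self.kind, self.url, value, self.units] for value in self.values]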
wlauto.workloads.telemetry.parse_telemetry_results(filepath)[source]
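
A hedged usage sketch: assuming parse_telemetry_results returns an iterable of
TelemetryResult objects extracted from captured ``run_benchmark`` output, the
per-metric summary values could be printed as below. The file path is a
placeholder for illustration only.

    from wlauto.workloads.telemetry import parse_telemetry_results

    # Placeholder path to previously captured run_benchmark output.
    for result in parse_telemetry_results('/tmp/telemetry/run_benchmark_output.txt'):
        print('{} [{}]: avg={} std={} {}'.format(
            result.kind, result.url, result.average, result.std, result.units or ''))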