68.14.0 - testing

parent 8d3bf7e486
commit 2da2788848
@@ -11,6 +11,7 @@
 [TestCasting]
 [TestCeilingFloor]
 [TestCheckedInt]
+[TestCompactPair]
 [TestCountPopulation]
 [TestCountZeroes]
 [TestDefineEnum]
@@ -40,7 +41,6 @@ skip-if = os != 'win'
 [TestNotNull]
 [TestParseFTPList]
 [TestPLDHash]
-[TestPair]
 [TestPoisonArea]
 skip-if = os == 'android' # Bug 1147630
 [TestRange]
@@ -34,7 +34,7 @@ class RemoteGTests(object):
     def __init__(self):
         self.device = None

-    def build_environment(self, shuffle, test_filter):
+    def build_environment(self, shuffle, test_filter, enable_webrender):
         """
         Create and return a dictionary of all the appropriate env variables
         and values.
@@ -54,11 +54,15 @@ class RemoteGTests(object):
             env["GTEST_SHUFFLE"] = "True"
         if test_filter:
             env["GTEST_FILTER"] = test_filter
+        if enable_webrender:
+            env["MOZ_WEBRENDER"] = "1"
+        else:
+            env["MOZ_WEBRENDER"] = "0"

         return env

     def run_gtest(self, test_dir, shuffle, test_filter, package, adb_path, device_serial,
-                  remote_test_root, libxul_path, symbols_path):
+                  remote_test_root, libxul_path, symbols_path, enable_webrender):
         """
         Launch the test app, run gtest, collect test results and wait for completion.
         Return False if a crash or other failure is detected, else True.
@@ -94,7 +98,7 @@ class RemoteGTests(object):
             if not os.path.isdir(f):
                 self.device.push(f, self.remote_profile)

-        env = self.build_environment(shuffle, test_filter)
+        env = self.build_environment(shuffle, test_filter, enable_webrender)
         args = ["-unittest", "--gtest_death_test_style=threadsafe",
                 "-profile %s" % self.remote_profile]
         if 'geckoview' in self.package:
@@ -349,6 +353,11 @@ class remoteGtestOptions(OptionParser):
         self.add_option("--tests-path",
                         default=None,
                         help="Path to gtest directory containing test support files.")
+        self.add_option("--enable-webrender",
+                        action="store_true",
+                        dest="enable_webrender",
+                        default=False,
+                        help="Enable the WebRender compositor in Gecko.")


 def update_mozinfo():
@@ -383,7 +392,7 @@ def main():
             options.shuffle, test_filter, options.package,
             options.adb_path, options.device_serial,
             options.remote_test_root, options.libxul_path,
-            options.symbols_path)
+            options.symbols_path, options.enable_webrender)
     except KeyboardInterrupt:
         log.info("gtest | Received keyboard interrupt")
     except Exception as e:
@@ -26,7 +26,7 @@ class GTests(object):
     TEST_PROC_NO_OUTPUT_TIMEOUT = 300

     def run_gtest(self, prog, xre_path, cwd, symbols_path=None,
-                  utility_path=None):
+                  utility_path=None, enable_webrender=False):
         """
         Run a single C++ unit test program.

@@ -44,7 +44,7 @@ class GTests(object):
         Return True if the program exits with a zero status, False otherwise.
         """
         self.xre_path = xre_path
-        env = self.build_environment()
+        env = self.build_environment(enable_webrender)
         log.info("Running gtest")

         if cwd and not os.path.isdir(cwd):
@@ -110,7 +110,7 @@ class GTests(object):

         return env

-    def build_environment(self):
+    def build_environment(self, enable_webrender):
         """
         Create and return a dictionary of all the appropriate env variables
         and values. On a remote system, we overload this to set different
@@ -149,6 +149,12 @@ class GTests(object):
                 # This should be |testFail| instead of |info|. See bug 1050891.
                 log.info("gtest | Failed to find ASan symbolizer at %s", llvmsym)

+        if enable_webrender:
+            env["MOZ_WEBRENDER"] = "1"
+            env["MOZ_ACCELERATED"] = "1"
+        else:
+            env["MOZ_WEBRENDER"] = "0"
+
         return env


@@ -175,6 +181,11 @@ class gtestOptions(OptionParser):
                         dest="utility_path",
                         default=None,
                         help="path to a directory containing utility program binaries")
+        self.add_option("--enable-webrender",
+                        action="store_true",
+                        dest="enable_webrender",
+                        default=False,
+                        help="Enable the WebRender compositor in Gecko.")


 def update_mozinfo():
@@ -209,7 +220,8 @@ def main():
         result = tester.run_gtest(prog, options.xre_path,
                                   options.cwd,
                                   symbols_path=options.symbols_path,
-                                  utility_path=options.utility_path)
+                                  utility_path=options.utility_path,
+                                  enable_webrender=options.enable_webrender)
     except Exception as e:
         log.error(str(e))
         result = False
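Both the remote and desktop gtest runners now expose the same switch; a hypothetical local desktop invocation (binary and paths illustrative, not taken from the commit) would look like:

    python rungtests.py --xre-path=obj-dir/dist/bin --enable-webrender obj-dir/dist/bin/gtest/gtest

With the flag present the harness exports MOZ_WEBRENDER=1 (plus MOZ_ACCELERATED=1 on desktop) into the browser's environment; without it, MOZ_WEBRENDER=0 is set explicitly so a machine-level default cannot leak into the test run.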
@@ -4,8 +4,6 @@
 "use strict";

 /* global Pipe, ScriptableInputStream */

 const CC = Components.Constructor;

 const { EventEmitter } = ChromeUtils.import(
@@ -11,7 +11,7 @@
 // This file uses ContentTask & frame scripts, where these are available.
 /* global addEventListener, removeEventListener, sendAsyncMessage,
-          addMessageListener, removeMessageListener, privateNoteIntentionalCrash */
+          addMessageListener, removeMessageListener, ContentTaskUtils */

 "use strict";

@@ -747,6 +747,25 @@ var BrowserTestUtils = {
     }
   },

+  /**
+   * Maybe create a preloaded browser and ensure it's finished loading.
+   *
+   * @param gBrowser (<xul:tabbrowser>)
+   *        The tabbrowser in which to preload a browser.
+   */
+  async maybeCreatePreloadedBrowser(gBrowser) {
+    let win = gBrowser.ownerGlobal;
+    win.NewTabPagePreloading.maybeCreatePreloadedBrowser(win);
+
+    // We cannot use the regular BrowserTestUtils helper for waiting here, since that
+    // would try to insert the preloaded browser, which would only break things.
+    await ContentTask.spawn(gBrowser.preloadedBrowser, null, async () => {
+      await ContentTaskUtils.waitForCondition(() => {
+        return content.document && content.document.readyState == "complete";
+      });
+    });
+  },
+
   /**
    * @param win (optional)
    *        The window we should wait to have "domwindowopened" sent through
@@ -1,5 +1,6 @@
 <!DOCTYPE html>
 <html>
+<meta http-equiv="Content-Security-Policy" content="default-src 'none'"></meta>
 <title>This is a dummy page</title>
 <meta charset="utf-8">
 <body>This is a dummy page</body>
@@ -67,11 +67,11 @@ def read_ini(fp, variables=None, default='DEFAULT', defaults_only=False,
         while comment_start == sys.maxsize and inline_prefixes:
             next_prefixes = {}
             for prefix, index in inline_prefixes.items():
-                index = line.find(prefix, index+1)
+                index = stripped.find(prefix, index+1)
                 if index == -1:
                     continue
                 next_prefixes[prefix] = index
-                if index == 0 or (index > 0 and line[index-1].isspace()):
+                if index == 0 or (index > 0 and stripped[index-1].isspace()):
                     comment_start = min(comment_start, index)
             inline_prefixes = next_prefixes
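This fix matters because the surrounding parser scans the whitespace-stripped copy of the line; taking indices from the raw line and applying them to the stripped one misplaces the inline-comment marker. A minimal illustration (values hypothetical):

    line = "  key = value  # comment"
    stripped = line.strip()
    line.find("#")      # 15 -- an index into the raw line
    stripped[15]        # 'c', not '#': raw indices don't apply to `stripped`
    stripped.find("#")  # 13 -- the index the comment scan actually needs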
@@ -0,0 +1,8 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from .mitm import *
@@ -196,7 +196,51 @@ class Mitmproxy(Playback):
         command = [mitmdump_path]

         if "playback_tool_args" in self.config:
+            LOG.info("Starting Proxy using provided command line!")
             command.extend(self.config["playback_tool_args"])
+        elif "playback_files" in self.config:
+            script = os.path.join(
+                os.path.dirname(os.path.realpath(__file__)), "scripts",
+                "alternate-server-replay-{}.py".format(
+                    self.config["playback_version"]))
+            recording_paths = self.config["playback_files"]
+            # this part is platform-specific
+            if mozinfo.os == "win":
+                script = script.replace("\\", "\\\\\\")
+                recording_paths = [recording_path.replace("\\", "\\\\\\")
+                                   for recording_path in recording_paths]
+
+            if self.config["playback_version"] == "2.0.2":
+                args = [
+                    "--replay-kill-extra",
+                    "-v",
+                    "--script",
+                    '""{} {}""'.format(script, " ".join(recording_paths)),
+                ]
+
+                if not self.config["playback_upstream_cert"]:
+                    LOG.info("No upstream certificate sniffing")
+                    args.insert(0, "--no-upstream-cert")
+                self.playback.config["playback_tool_args"] = args
+            elif self.config["playback_version"] == "4.0.4":
+                args = [
+                    "-v",
+                    "--set",
+                    "websocket=false",
+                    "--set",
+                    "server_replay_files={}".format(" ".join(recording_paths)),
+                    "--scripts",
+                    script,
+                ]
+                if not self.config["playback_upstream_cert"]:
+                    LOG.info("No upstream certificate sniffing")
+                    args = ["--set", "upstream_cert=false"] + args
+                command.extend(args)
+            else:
+                raise Exception("Mitmproxy version is unknown!")
+
+        else:
+            raise Exception("Mitmproxy can't start playback! Playback settings missing.")

         LOG.info("Starting mitmproxy playback using env path: %s" % env["PATH"])
         LOG.info("Starting mitmproxy playback using command: %s" % " ".join(command))
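For reference, with playback_version 4.0.4 and upstream-cert sniffing disabled, the assembled command line comes out roughly as follows (recording path illustrative):

    mitmdump --set upstream_cert=false -v --set websocket=false --set server_replay_files=/recordings/amazon.mp --scripts scripts/alternate-server-replay-4.0.4.py

Note that the upstream_cert option is prepended ahead of the replay arguments, matching the order in which the code builds `args`.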
@@ -6,10 +6,12 @@
 # * best-match response handling is used to improve success rates
 from __future__ import absolute_import, print_function

+import os
 import hashlib
 import sys
 import urllib
 from collections import defaultdict
+import json

 from mitmproxy import ctx
 from mitmproxy import exceptions
@@ -18,9 +20,81 @@ from mitmproxy import io
 from typing import Any  # noqa
 from typing import List  # noqa

+# PATCHING AREA - ALLOWS HTTP/2 WITH NO CERT SNIFFING
+from mitmproxy.proxy.protocol import tls
+from mitmproxy.proxy.protocol.http2 import Http2Layer, SafeH2Connection
+
+_PROTO = {}
+
+
+@property
+def _alpn(self):
+    proto = _PROTO.get(self.server_sni)
+    if proto is None:
+        return self.server_conn.get_alpn_proto_negotiated()
+    if proto.startswith(b"HTTP/2"):
+        return b"h2"
+    elif proto.startswith(b"HTTP/1"):
+        return b"h1"
+    return b""
+
+
+tls.TlsLayer.alpn_for_client_connection = _alpn
+
+
+def _server_conn(self):
+    if not self.server_conn.connected() and self.server_conn not in self.connections:
+        # we can't use ctx.log in this layer
+        print("Ignored CONNECT call on upstream server")
+        return
+    if self.server_conn.connected():
+        import h2.config
+
+        config = h2.config.H2Configuration(
+            client_side=True,
+            header_encoding=False,
+            validate_outbound_headers=False,
+            validate_inbound_headers=False,
+        )
+        self.connections[self.server_conn] = SafeH2Connection(
+            self.server_conn, config=config
+        )
+    self.connections[self.server_conn].initiate_connection()
+    self.server_conn.send(self.connections[self.server_conn].data_to_send())
+
+
+Http2Layer._initiate_server_conn = _server_conn
+
+
+def _remote_settings_changed(self, event, other_conn):
+    if other_conn not in self.connections:
+        # we can't use ctx.log in this layer
+        print("Ignored remote settings upstream")
+        return True
+    new_settings = dict(
+        [(key, cs.new_value) for (key, cs) in event.changed_settings.items()]
+    )
+    self.connections[other_conn].safe_update_settings(new_settings)
+    return True
+
+
+Http2Layer._handle_remote_settings_changed = _remote_settings_changed
+# END OF PATCHING
+
+
 class ServerPlayback:
     def __init__(self, replayfiles):
+        if len(replayfiles) > 0:
+            for path in replayfiles:
+                proto = os.path.splitext(path)[0] + ".json"
+                if os.path.exists(proto):
+                    ctx.log.info("Loading proto info from %s" % proto)
+                    with open(proto) as f:
+                        p = json.loads(f.read()).get('http_protocol')
+                        if p is not None:
+                            _PROTO.update(p)
+
         self.options = None
         self.replayfiles = replayfiles
         self.flowmap = {}
@@ -46,7 +120,7 @@ class ServerPlayback:

         content = None
         formdata = None
-        if r.raw_content != b'':
+        if r.raw_content != b"":
             if r.multipart_form:
                 formdata = r.multipart_form
             elif r.urlencoded_form:
@@ -68,9 +142,7 @@ class ServerPlayback:
         if len(queries):
             key.append("?")

-        return hashlib.sha256(
-            repr(key).encode("utf8", "surrogateescape")
-        ).digest()
+        return hashlib.sha256(repr(key).encode("utf8", "surrogateescape")).digest()

     def _match(self, request_a, request_b):
         """
@@ -134,23 +206,32 @@ class ServerPlayback:
         # if it's an exact match, great!
         if len(flows) == 1:
             candidate = flows[0]
-            if (candidate.request.url == request.url and
-                    candidate.request.raw_content == request.raw_content):
-                ctx.log.info("For request {} found exact replay match".format(request.url))
+            if (
+                candidate.request.url == request.url
+                and candidate.request.raw_content == request.raw_content
+            ):
+                ctx.log.info(
+                    "For request {} found exact replay match".format(request.url)
+                )
                 return candidate

         # find the best match between the request and the available flow candidates
         match = -1
         flow = None
-        ctx.log.debug("Candiate flows for request: {}".format(request.url))
+        ctx.log.debug("Candidate flows for request: {}".format(request.url))
         for candidate_flow in flows:
             candidate_match = self._match(candidate_flow.request, request)
-            ctx.log.debug(" score={} url={}".format(candidate_match, candidate_flow.request.url))
+            ctx.log.debug(
+                " score={} url={}".format(candidate_match, candidate_flow.request.url)
+            )
             if candidate_match >= match:
                 match = candidate_match
                 flow = candidate_flow
-        ctx.log.info("For request {} best match {} with score=={}".format(request.url,
-                     flow.request.url, match))
+        ctx.log.info(
+            "For request {} best match {} with score=={}".format(
+                request.url, flow.request.url, match
+            )
+        )
         return flow

     def configure(self, options, updated):
@@ -177,7 +258,9 @@ class ServerPlayback:
                     f.request.url
                 )
             )
-            f.response = http.HTTPResponse.make(404, b'', {'content-type': 'text/plain'})
+            f.response = http.HTTPResponse.make(
+                404, b"", {"content-type": "text/plain"}
+            )


 def start():
@@ -0,0 +1,196 @@
+# This file was copied from mitmproxy/mitmproxy/addons/serverplayback.py release tag 4.0.4
+# and modified by Florin Strugariu
+
+# Altered features:
+# * returns 404 rather than dropping the whole HTTP/2 connection on the floor
+# * remove the replay packages that don't have any content in their response package
+from __future__ import absolute_import, print_function
+
+import os
+import json
+import hashlib
+import urllib
+
+import typing
+from urllib import parse
+
+from mitmproxy import ctx, http
+from mitmproxy import exceptions
+from mitmproxy import io
+
+# PATCHING AREA - ALLOWS HTTP/2 WITH NO CERT SNIFFING
+from mitmproxy.proxy.protocol import tls
+from mitmproxy.proxy.protocol.http2 import Http2Layer, SafeH2Connection
+
+_PROTO = {}
+
+
+@property
+def _alpn(self):
+    proto = _PROTO.get(self.server_sni)
+    if proto is None:
+        return self.server_conn.get_alpn_proto_negotiated()
+    if proto.startswith("HTTP/2"):
+        return b"h2"
+    elif proto.startswith("HTTP/1"):
+        return b"h1"
+    return b""
+
+
+tls.TlsLayer.alpn_for_client_connection = _alpn
+
+
+def _server_conn(self):
+    if not self.server_conn.connected() and self.server_conn not in self.connections:
+        # we can't use ctx.log in this layer
+        print("Ignored CONNECT call on upstream server")
+        return
+    if self.server_conn.connected():
+        import h2.config
+
+        config = h2.config.H2Configuration(
+            client_side=True,
+            header_encoding=False,
+            validate_outbound_headers=False,
+            validate_inbound_headers=False,
+        )
+        self.connections[self.server_conn] = SafeH2Connection(
+            self.server_conn, config=config
+        )
+    self.connections[self.server_conn].initiate_connection()
+    self.server_conn.send(self.connections[self.server_conn].data_to_send())
+
+
+Http2Layer._initiate_server_conn = _server_conn
+
+
+def _remote_settings_changed(self, event, other_conn):
+    if other_conn not in self.connections:
+        # we can't use ctx.log in this layer
+        print("Ignored remote settings upstream")
+        return True
+    new_settings = dict(
+        [(key, cs.new_value) for (key, cs) in event.changed_settings.items()]
+    )
+    self.connections[other_conn].safe_update_settings(new_settings)
+    return True
+
+
+Http2Layer._handle_remote_settings_changed = _remote_settings_changed
+# END OF PATCHING
+
+
+class AlternateServerPlayback:
+    def __init__(self):
+        ctx.master.addons.remove(ctx.master.addons.get("serverplayback"))
+        self.flowmap = {}
+        self.configured = False
+
+    def load(self, loader):
+        loader.add_option(
+            "server_replay_files",
+            typing.Sequence[str],
+            [],
+            "Replay server responses from a saved file.",
+        )
+
+    def load_flows(self, flows):
+        """
+        Replay server responses from flows.
+        """
+        self.flowmap = {}
+        for i in flows:
+            if i.type == 'websocket':
+                ctx.log.info(
+                    "Request is a WebSocketFlow. Removing from request list as WebSockets"
+                    " are disabled "
+                )
+            elif i.response:
+                l = self.flowmap.setdefault(self._hash(i), [])
+                l.append(i)
+            else:
+                ctx.log.info(
+                    "Request %s has no response. Removing from request list"
+                    % i.request.url
+                )
+        ctx.master.addons.trigger("update", [])
+
+    def load_files(self, paths):
+        for path in paths:
+            ctx.log.info("Loading flows from %s" % path)
+            try:
+                flows = io.read_flows_from_paths([path])
+            except exceptions.FlowReadException as e:
+                raise exceptions.CommandError(str(e))
+            self.load_flows(flows)
+            proto = os.path.splitext(path)[0] + ".json"
+            if os.path.exists(proto):
+                ctx.log.info("Loading proto info from %s" % proto)
+                with open(proto) as f:
+                    recording_info = json.loads(f.read())
+                    ctx.log.info(
+                        "Replaying file {} recorded on {}".format(
+                            os.path.basename(path), recording_info["recording_date"]
+                        )
+                    )
+                    _PROTO.update(recording_info["http_protocol"])
+
+    def _hash(self, flow):
+        """
+        Calculates a loose hash of the flow request.
+        """
+        r = flow.request
+
+        # unquote url
+        # See Bug 1509835
+        _, _, path, _, query, _ = urllib.parse.urlparse(parse.unquote(r.url))
+        queriesArray = urllib.parse.parse_qsl(query, keep_blank_values=True)
+
+        key = [str(r.port), str(r.scheme), str(r.method), str(path)]
+        key.append(str(r.raw_content))
+        key.append(r.host)
+
+        for p in queriesArray:
+            key.append(p[0])
+            key.append(p[1])
+
+        return hashlib.sha256(repr(key).encode("utf8", "surrogateescape")).digest()
+
+    def next_flow(self, request):
+        """
+        Returns the next flow object, or None if no matching flow was
+        found.
+        """
+        hsh = self._hash(request)
+        if hsh in self.flowmap:
+            return self.flowmap[hsh][-1]
+
+    def configure(self, updated):
+        if not self.configured and ctx.options.server_replay_files:
+            self.configured = True
+            self.load_files(ctx.options.server_replay_files)
+
+    def request(self, f):
+        if self.flowmap:
+            rflow = self.next_flow(f)
+            if rflow:
+                response = rflow.response.copy()
+                response.is_replay = True
+                # Refresh server replay responses by adjusting date, expires and
+                # last-modified headers, as well as adjusting cookie expiration.
+                response.refresh()
+
+                f.response = response
+            else:
+                # returns 404 rather than dropping the whole HTTP/2 connection
+                ctx.log.warn(
+                    "server_playback: killed non-replay request {}".format(
+                        f.request.url
+                    )
+                )
+                f.response = http.HTTPResponse.make(
+                    404, b"", {"content-type": "text/plain"}
+                )
+
+
+addons = [AlternateServerPlayback()]
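The `_hash` above is deliberately loose: it keys a replay lookup on port, scheme, method, path, body, host, and query pairs while ignoring headers entirely, so a recorded response still matches when cookies or user-agent strings differ between record and replay. A small sketch of the key it builds (request values hypothetical):

    from urllib import parse

    url = "https://www.amazon.com/s?k=laptop&ref=nb"
    _, _, path, _, query, _ = parse.urlparse(parse.unquote(url))
    key = ["443", "https", "GET", path, "b''", "www.amazon.com"]
    for name, value in parse.parse_qsl(query, keep_blank_values=True):
        key.extend([name, value])
    # key == ['443', 'https', 'GET', '/s', "b''", 'www.amazon.com',
    #         'k', 'laptop', 'ref', 'nb']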
@@ -1,3 +1,5 @@
+from __future__ import absolute_import, print_function
+
 import base64
 import hashlib
 import re
@@ -31,13 +31,15 @@ class Process:
 @mock.patch("mozproxy.backends.mitm.tooltool_download", new=mock.DEFAULT)
 @mock.patch("mozproxy.backends.mitm.Mitmproxy.check_proxy", lambda x: True)
 def test_mitm(*args):
-    bin_name = "mitmproxy-rel-bin-{platform}.manifest"
-    pageset_name = "mitmproxy-recordings-raptor-paypal.manifest"
+    bin_name = "mitmproxy-rel-bin-4.0.4-{platform}.manifest"
+    pageset_name = "mitm4-linux-firefox-amazon.manifest"

     config = {
         "playback_tool": "mitmproxy",
         "playback_binary_manifest": bin_name,
         "playback_pageset_manifest": pageset_name,
+        "playback_upstream_cert": 'false',
+        "playback_version": '4.0.4',
         "platform": mozinfo.os,
         "playback_recordings": os.path.join(here, "paypal.mp"),
         "run_local": True,
@@ -49,7 +51,7 @@ def test_mitm(*args):
     with tempdir() as obj_path:
         config["obj_path"] = obj_path
         playback = get_playback(config)
-
+        playback.config['playback_files'] = config['playback_recordings']
         assert playback is not None
         try:
             playback.start()
@@ -70,13 +72,15 @@ def test_playback_setup_failed(*args):

         return _s

-    bin_name = "mitmproxy-rel-bin-{platform}.manifest"
-    pageset_name = "mitmproxy-recordings-raptor-paypal.manifest"
+    bin_name = "mitmproxy-rel-bin-4.0.4-{platform}.manifest"
+    pageset_name = "mitm4-linux-firefox-amazon.manifest"

     config = {
         "playback_tool": "mitmproxy",
         "playback_binary_manifest": bin_name,
         "playback_pageset_manifest": pageset_name,
+        "playback_upstream_cert": 'false',
+        "playback_version": '4.0.4',
         "platform": mozinfo.os,
         "playback_recordings": os.path.join(here, "paypal.mp"),
         "run_local": True,
@@ -93,6 +97,7 @@ def test_playback_setup_failed(*args):
     with mock.patch(prefix + "stop_mitmproxy_playback") as p:
         try:
             pb = get_playback(config)
+            pb.config['playback_files'] = config['playback_recordings']
             pb.start()
         except SetupFailed:
             assert p.call_count == 1
@@ -18,8 +18,9 @@ import mozharness

 from mozharness.base.errors import PythonErrorList
 from mozharness.base.log import OutputParser, DEBUG, ERROR, CRITICAL, INFO
-from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
 from mozharness.mozilla.testing.android import AndroidMixin
+from mozharness.mozilla.testing.errors import HarnessErrorList
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
 from mozharness.base.vcs.vcsbase import MercurialScript
 from mozharness.mozilla.testing.codecoverage import (
     CodeCoverageMixin,
@@ -30,12 +31,11 @@ scripts_path = os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__)))
 external_tools_path = os.path.join(scripts_path, 'external_tools')
 here = os.path.abspath(os.path.dirname(__file__))

-RaptorErrorList = PythonErrorList + [
+RaptorErrorList = PythonErrorList + HarnessErrorList + [
     {'regex': re.compile(r'''run-as: Package '.*' is unknown'''), 'level': DEBUG},
     {'substr': r'''FAIL: Busted:''', 'level': CRITICAL},
     {'substr': r'''FAIL: failed to cleanup''', 'level': ERROR},
     {'substr': r'''erfConfigurator.py: Unknown error''', 'level': CRITICAL},
     {'substr': r'''raptorError''', 'level': CRITICAL},
     {'substr': r'''raptorDebug''', 'level': DEBUG},
     {'regex': re.compile(r'''^raptor[a-zA-Z-]*( - )?( )?(?i)error(:)?'''), 'level': ERROR},
     {'regex': re.compile(r'''^raptor[a-zA-Z-]*( - )?( )?(?i)critical(:)?'''), 'level': CRITICAL},
     {'regex': re.compile(r'''No machine_name called '.*' can be found'''), 'level': CRITICAL},
     {'substr': r"""No such file or directory: 'browser_output.txt'""",
      'level': CRITICAL,
@@ -86,7 +86,7 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
         "action": "store_true",
         "dest": "enable_webrender",
         "default": False,
-        "help": "Tries to enable the WebRender compositor.",
+        "help": "Enable the WebRender compositor in Gecko.",
     }],
     [["--geckoProfile"], {
         "dest": "gecko_profile",
@@ -157,6 +157,12 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
         "default": False,
         "help": "Use Raptor to measure memory usage.",
     }],
+    [["--cpu-test"], {
+        "dest": "cpu_test",
+        "action": "store_true",
+        "default": False,
+        "help": "Use Raptor to measure CPU usage"
+    }],
     [["--debug-mode"], {
         "dest": "debug_mode",
         "action": "store_true",
@@ -254,6 +260,7 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
             self.host = os.environ['HOST_IP']
         self.power_test = self.config.get('power_test')
         self.memory_test = self.config.get('memory_test')
+        self.cpu_test = self.config.get('cpu_test')
         self.is_release_build = self.config.get('is_release_build')
         self.debug_mode = self.config.get('debug_mode', False)
         self.firefox_android_browsers = ["fennec", "geckoview", "refbrow", "fenix"]
@@ -389,6 +396,10 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
             options.extend(['--power-test'])
         if self.config.get('memory_test', False):
             options.extend(['--memory-test'])
+        if self.config.get('cpu_test', False):
+            options.extend(['--cpu-test'])
+        if self.config.get('enable_webrender', False):
+            options.extend(['--enable-webrender'])
         for key, value in kw_options.items():
             options.extend(['--%s' % key, value])

@@ -520,6 +531,8 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
             expected_perfherder += 1
         if self.config.get('memory_test', None):
             expected_perfherder += 1
+        if self.config.get('cpu_test', None):
+            expected_perfherder += 1
         if len(parser.found_perf_data) != expected_perfherder:
             self.critical("PERFHERDER_DATA was seen %d times, expected %d."
                           % (len(parser.found_perf_data), expected_perfherder))
@@ -575,12 +588,6 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
         else:
             env['PYTHONPATH'] = self.raptor_path

-        # if running in production on a quantum_render build
-        if self.config['enable_webrender']:
-            self.info("webrender is enabled so setting MOZ_WEBRENDER=1 and MOZ_ACCELERATED=1")
-            env['MOZ_WEBRENDER'] = '1'
-            env['MOZ_ACCELERATED'] = '1'
-
         # mitmproxy needs path to mozharness when installing the cert, and tooltool
         env['SCRIPTSPATH'] = scripts_path
         env['EXTERNALTOOLSPATH'] = external_tools_path
@@ -659,6 +666,10 @@ class Raptor(TestingMixin, MercurialScript, CodeCoverageMixin, AndroidMixin):
             src = os.path.join(self.query_abs_dirs()['abs_work_dir'], 'raptor-memory.json')
             self._artifact_perf_data(src, dest)

+        if self.cpu_test:
+            src = os.path.join(self.query_abs_dirs()['abs_work_dir'], 'raptor-cpu.json')
+            self._artifact_perf_data(src, dest)
+
         src = os.path.join(self.query_abs_dirs()['abs_work_dir'], 'screenshots.html')
         if os.path.exists(src):
             dest = os.path.join(env['MOZ_UPLOAD_DIR'], 'screenshots.html')
@@ -69,9 +69,6 @@ user_pref("media.navigator.permission.disabled", true);
 user_pref("media.peerconnection.enabled", true);
 // Disable speculative connections so they aren't reported as leaking when they're hanging around.
 user_pref("network.http.speculative-parallel-limit", 0);
-user_pref("network.proxy.http", "localhost");
-user_pref("network.proxy.http_port", 80);
-user_pref("network.proxy.type", 1);
 // Set places maintenance far in the future (the maximum time possible in an
 // int32_t) to avoid it kicking in during tests. The maintenance can take a
 // relatively long time which may cause unnecessary intermittents and slow down
testing/profiles/raptor-android/extensions/README.txt (new file)
@@ -0,0 +1,2 @@
+Dropping extensions here will get them installed in all test harnesses
+that make use of this profile.

testing/profiles/raptor-android/user.js (new file)
@@ -0,0 +1,5 @@
+// Preferences file used by the raptor harness exclusively on android
+/* globals user_pref */
+
+// disk cache smart size is enabled in shipped apps
+user_pref("browser.cache.disk.smart_size.enabled", true);
@@ -1,14 +1,15 @@
 "use strict";

 module.exports = {
     "env": {
         "webextensions": true,
     },

     globals: {
         "browser": [],
         "chrome": [],
-        "getTestConfig": true,
-        "startMark": [],
-        "endMark": [],
-        "name": "",
+        "getTestConfig": false,
+        "startMark": true,
+        "endMark": true,
+        "name": true,
     },

     "plugins": [
testing/raptor/logger/__init__.py (new, empty file)

testing/raptor/logger/logger.py (new file)
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from mozlog.proxy import ProxyLogger
+
+
+class RaptorLogger(ProxyLogger):
+
+    def __init__(self, component=None):
+        self.logger = ProxyLogger(component)
+
+    def exception(self, message):
+        self.critical(message)
+
+    def debug(self, message):
+        return self.logger.debug("Debug: {}".format(message))
+
+    def info(self, message):
+        return self.logger.info("Info: {}".format(message))
+
+    def warning(self, message):
+        return self.logger.warning("Warning: {}".format(message))
+
+    def error(self, message):
+        return self.logger.error("Error: {}".format(message))
+
+    def critical(self, message):
+        return self.logger.critical("Critical: {}".format(message))
+
+    def log_raw(self, message):
+        return self.logger.log_raw(message)
+
+    def process_output(self, *args, **kwargs):
+        return self.logger.process_output(*args, **kwargs)
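RaptorLogger simply prefixes each message with its severity before delegating to mozlog's ProxyLogger, which is why the modules changed below can swap it in for get_proxy_logger without touching their call sites. A quick usage sketch (component name illustrative):

    from logger.logger import RaptorLogger

    LOG = RaptorLogger(component='raptor-example')
    LOG.info('starting run')    # emitted as "Info: starting run"
    LOG.error('bad manifest')   # emitted as "Error: bad manifest"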
@@ -8,33 +8,38 @@

 from __future__ import absolute_import, print_function, unicode_literals

-import os
-import sys
 import json
+import os
 import shutil
 import socket
 import subprocess
+import sys

 import mozfile
-from mach.decorators import CommandProvider, Command
+from mach.decorators import Command, CommandProvider
 from mozboot.util import get_state_dir
-from mozbuild.base import MozbuildObject, MachCommandBase
-from mozbuild.base import MachCommandConditions as conditions
+from mozbuild.base import MachCommandBase, MozbuildObject
+from mozbuild.base import MachCommandConditions as Conditions

 HERE = os.path.dirname(os.path.realpath(__file__))

 BENCHMARK_REPOSITORY = 'https://github.com/mozilla/perf-automation'
 BENCHMARK_REVISION = '2720cdc790828952964524bb44ce8b4c14670e90'

+FIREFOX_ANDROID_BROWSERS = ["fennec", "geckoview", "refbrow", "fenix"]
+

 class RaptorRunner(MozbuildObject):

     def run_test(self, raptor_args, kwargs):
-        """
+        """Set up and run mozharness.

         We want to do a few things before running Raptor:

         1. Clone mozharness
         2. Make the config for Raptor mozharness
         3. Run mozharness
         """
         self.init_variables(raptor_args, kwargs)
         self.setup_benchmarks()
         self.make_config()
@@ -43,25 +48,32 @@ class RaptorRunner(MozbuildObject):
         return self.run_mozharness()

     def init_variables(self, raptor_args, kwargs):
-        self.raptor_dir = os.path.join(self.topsrcdir, 'testing', 'raptor')
-        self.mozharness_dir = os.path.join(self.topsrcdir, 'testing',
-                                           'mozharness')
-        self.config_file_path = os.path.join(self._topobjdir, 'testing',
-                                             'raptor-in_tree_conf.json')
-        self.binary_path = self.get_binary_path() if kwargs['app'] not in \
-            ['geckoview', 'fennec', 'refbrow', 'fenix'] else None
-        self.virtualenv_script = os.path.join(self.topsrcdir, 'third_party', 'python',
-                                              'virtualenv', 'virtualenv.py')
-        self.virtualenv_path = os.path.join(self._topobjdir, 'testing',
-                                            'raptor-venv')
-        self.python_interp = sys.executable
         self.raptor_args = raptor_args
-        if kwargs.get('host', None) == 'HOST_IP':
+
+        if kwargs.get('host') == 'HOST_IP':
             kwargs['host'] = os.environ['HOST_IP']
         self.host = kwargs['host']
-        self.power_test = kwargs['power_test']
-        self.memory_test = kwargs['memory_test']
         self.is_release_build = kwargs['is_release_build']
+        self.memory_test = kwargs['memory_test']
+        self.power_test = kwargs['power_test']
+        self.cpu_test = kwargs['cpu_test']
+
+        if Conditions.is_android(self) or kwargs["app"] in FIREFOX_ANDROID_BROWSERS:
+            self.binary_path = None
+        else:
+            self.binary_path = kwargs.get("binary") or self.get_binary_path()
+
+        self.python = sys.executable
+
+        self.raptor_dir = os.path.join(self.topsrcdir, 'testing', 'raptor')
+        self.mozharness_dir = os.path.join(self.topsrcdir, 'testing', 'mozharness')
+        self.config_file_path = os.path.join(
+            self._topobjdir, 'testing', 'raptor-in_tree_conf.json')
+
+        self.virtualenv_script = os.path.join(
+            self.topsrcdir, 'third_party', 'python', 'virtualenv', 'virtualenv.py')
+        self.virtualenv_path = os.path.join(
+            self._topobjdir, 'testing', 'raptor-venv')

     def setup_benchmarks(self):
         """Make sure benchmarks are linked to the proper location in the objdir.
@@ -69,11 +81,11 @@ class RaptorRunner(MozbuildObject):
         Benchmarks can either live in-tree or in an external repository. In the latter
         case also clone/update the repository if necessary.
         """
-        print("Updating external benchmarks from {}".format(BENCHMARK_REPOSITORY))
-
         # Set up the external repo
         external_repo_path = os.path.join(get_state_dir(), 'performance-tests')

+        print("Updating external benchmarks from {}".format(BENCHMARK_REPOSITORY))
+        print("Cloning the benchmarks to {}".format(external_repo_path))
+
         try:
             subprocess.check_output(['git', '--version'])
         except Exception as ex:
@@ -132,8 +144,8 @@ class RaptorRunner(MozbuildObject):
             'pypi_url': 'http://pypi.org/simple',
             'base_work_dir': self.mozharness_dir,
             'exes': {
-                'python': self.python_interp,
-                'virtualenv': [self.python_interp, self.virtualenv_script],
+                'python': self.python,
+                'virtualenv': [self.python, self.virtualenv_script],
             },
             'title': socket.gethostname(),
             'default_actions': default_actions,
@@ -141,6 +153,7 @@ class RaptorRunner(MozbuildObject):
             'host': self.host,
             'power_test': self.power_test,
             'memory_test': self.memory_test,
+            'cpu_test': self.cpu_test,
             'is_release_build': self.is_release_build,
         }

@@ -180,12 +193,12 @@ class MachRaptor(MachCommandBase):
              description='Run raptor performance tests.',
              parser=create_parser)
     def run_raptor_test(self, **kwargs):
-
         build_obj = MozbuildObject.from_environment(cwd=HERE)

-        firefox_android_browsers = ["fennec", "geckoview", "refbrow", "fenix"]
+        is_android = Conditions.is_android(build_obj) or \
+            kwargs['app'] in FIREFOX_ANDROID_BROWSERS

-        if conditions.is_android(build_obj) or kwargs['app'] in firefox_android_browsers:
+        if is_android:
             from mozrunner.devices.android_device import verify_android_device
             from mozdevice import ADBAndroid, ADBHost
             if not verify_android_device(build_obj, install=True, app=kwargs['binary'],
@@ -199,10 +212,10 @@ class MachRaptor(MachCommandBase):
         raptor = self._spawn(RaptorRunner)

         try:
-            if kwargs['app'] in firefox_android_browsers and kwargs['power_test']:
+            if is_android and kwargs['power_test']:
                 device = ADBAndroid(verbose=True)
                 adbhost = ADBHost(verbose=True)
-                device_serial = "%s:5555" % device.get_ip_address()
+                device_serial = "{}:5555".format(device.get_ip_address())
                 device.command_output(["tcpip", "5555"])
                 raw_input("Please disconnect your device from USB then press Enter/return...")
                 adbhost.command_output(["connect", device_serial])
@@ -217,7 +230,7 @@ class MachRaptor(MachCommandBase):
             return 1
         finally:
             try:
-                if kwargs['app'] in firefox_android_browsers and kwargs['power_test']:
+                if is_android and kwargs['power_test']:
                     raw_input("Connect device via USB and press Enter/return...")
                     device = ADBAndroid(device=device_serial, verbose=True)
                     device.command_output(["usb"])
@@ -8,11 +8,10 @@ import os
 import shutil
 import socket

-from mozlog import get_proxy_logger
-
+from logger.logger import RaptorLogger
 from wptserve import server, handlers

-LOG = get_proxy_logger(component="raptor-benchmark")
+LOG = RaptorLogger(component='raptor-benchmark')
 here = os.path.abspath(os.path.dirname(__file__))
@@ -26,7 +26,7 @@ APPS = {
     FENNEC: {
         "long_name": "Firefox Fennec on Android"},
     GECKOVIEW: {
-        "long_name": "Firefox Geckoview on Android",
+        "long_name": "Firefox GeckoView on Android",
         "default_activity": "org.mozilla.geckoview_example.GeckoViewActivity",
         "default_intent": "android.intent.action.MAIN"},
     REFBROW: {
@@ -64,33 +64,36 @@ def create_parser(mach_interface=False):
     add_arg = parser.add_argument

     add_arg('-t', '--test', required=True, dest='test',
-            help="name of raptor test to run (can be a top-level suite name i.e. "
+            help="Name of Raptor test to run (can be a top-level suite name i.e. "
                  "'--test raptor-speedometer','--test raptor-tp6-1', or for page-load "
                  "tests a suite sub-test i.e. '--test raptor-tp6-google-firefox')")
     add_arg('--app', default='firefox', dest='app',
-            help="name of the application we are testing (default: firefox)",
+            help="Name of the application we are testing (default: firefox)",
             choices=APPS.keys())
     add_arg('-b', '--binary', dest='binary',
             help="path to the browser executable that we are testing")
     add_arg('-a', '--activity', dest='activity', default=None,
-            help="Name of android activity used to launch the android app."
+            help="Name of Android activity used to launch the Android app."
                  "i.e.: %s" % print_all_activities())
     add_arg('-i', '--intent', dest='intent', default=None,
-            help="Name of android intent action used to launch the android app."
+            help="Name of Android intent action used to launch the Android app."
                  "i.e.: %s" % print_all_intents())
     add_arg('--host', dest='host',
-            help="Hostname from which to serve urls, defaults to 127.0.0.1. "
+            help="Hostname from which to serve URLs; defaults to 127.0.0.1. "
                  "The value HOST_IP will cause the value of host to be "
                  "loaded from the environment variable HOST_IP.",
             default='127.0.0.1')
     add_arg('--power-test', dest="power_test", action="store_true",
-            help="Use Raptor to measure power usage. Currently supported for Geckoview. "
+            help="Use Raptor to measure power usage. Supported across GeckoView, "
+                 "Fenix, Firefox (Fennec), and Reference Browsers."
                  "The host ip address must be specified via the --host command line argument.")
     add_arg('--memory-test', dest="memory_test", action="store_true",
             help="Use Raptor to measure memory usage.")
+    add_arg('--cpu-test', dest="cpu_test", action="store_true",
+            help="Use Raptor to measure CPU usage. Currently supported for Android only.")
     add_arg('--is-release-build', dest="is_release_build", default=False,
             action='store_true',
-            help="Whether the build is a release build which requires work arounds "
+            help="Whether the build is a release build which requires workarounds "
                  "using MOZ_DISABLE_NONLOCAL_CONNECTIONS to support installing unsigned "
                  "webextensions. Defaults to False.")
     add_arg('--geckoProfile', action="store_true", dest="gecko_profile",
@@ -100,10 +103,10 @@ def create_parser(mach_interface=False):
     add_arg('--geckoProfileEntries', dest="gecko_profile_entries", type=int,
             help=argparse.SUPPRESS)
     add_arg('--gecko-profile', action="store_true", dest="gecko_profile",
             help="Profile the run and output the results in $MOZ_UPLOAD_DIR. "
                  "After talos is finished, profiler.firefox.com will be launched in Firefox "
                  "so you can analyze the local profiles. To disable auto-launching of "
-                 "profiler.firefox.com set the DISABLE_PROFILE_LAUNCH=1 env var.")
+                 "profiler.firefox.com, set the DISABLE_PROFILE_LAUNCH=1 env var.")
     add_arg('--gecko-profile-entries', dest="gecko_profile_entries", type=int,
             help='How many samples to take with the profiler')
     add_arg('--gecko-profile-interval', dest='gecko_profile_interval', type=int,
@@ -114,7 +117,7 @@ def create_parser(mach_interface=False):
             help="Path to the symbols for the build we are testing")
     add_arg('--page-cycles', dest="page_cycles", type=int,
             help="How many times to repeat loading the test page (for page load tests); "
-                 "for benchmark tests this is how many times the benchmark test will be run")
+                 "for benchmark tests, this is how many times the benchmark test will be run")
     add_arg('--page-timeout', dest="page_timeout", type=int,
             help="How long to wait (ms) for one page_cycle to complete, before timing out")
     add_arg('--post-startup-delay',
@@ -131,11 +134,13 @@ def create_parser(mach_interface=False):
             help="Run Raptor in debug mode (open browser console, limited page-cycles, etc.)")
     add_arg('--disable-e10s', dest="e10s", action="store_false", default=True,
             help="Run without multiple processes (e10s).")
+    add_arg('--enable-webrender', dest="enable_webrender", action="store_true", default=False,
+            help="Enable the WebRender compositor in Gecko.")
     if not mach_interface:
         add_arg('--run-local', dest="run_local", default=False, action="store_true",
-                help="Flag that indicates if raptor is running locally or in production")
+                help="Flag which indicates if Raptor is running locally or in production")
         add_arg('--obj-path', dest="obj_path", default=None,
-                help="Browser build obj_path (received when running in production)")
+                help="Browser-build obj_path (received when running in production)")

     add_logging_group(parser)
     return parser
@@ -153,15 +158,21 @@ def verify_options(parser, args):

     # if geckoProfile specified but not running on Firefox, not supported
     if args.gecko_profile is True and args.app != "firefox":
-        parser.error("Gecko profiling is only supported when running raptor on Firefox!")
+        parser.error("Gecko profiling is only supported when running Raptor on Firefox!")

     # if --power-test specified, must be on geckoview/android with --host specified.
     if args.power_test:
         if args.app not in ["fennec", "geckoview", "refbrow", "fenix"] \
                 or args.host in ('localhost', '127.0.0.1'):
-            parser.error("Power test is only supported when running raptor on Firefox Android "
+            parser.error("Power test is only supported when running Raptor on Firefox Android "
                          "browsers when host is specified!")

+    # if --enable-webrender specified, must be on desktop firefox or geckoview-based browser.
+    if args.enable_webrender:
+        if args.app not in ["firefox", "geckoview", "refbrow", "fenix"]:
+            parser.error("WebRender is only supported when running Raptor on Firefox Desktop "
+                         "or GeckoView-based Android browsers!")
+
     # if running on geckoview/refbrow/fenix, we need an activity and intent
     if args.app in ["geckoview", "refbrow", "fenix"]:
         if not args.activity:
@@ -170,14 +181,14 @@ def verify_options(parser, args):
                 args.activity = APPS[args.app]['default_activity']
             else:
                 # otherwise fail out
-                parser.error("--activity command line argument is required!")
+                parser.error("--activity command-line argument is required!")
         if not args.intent:
             # if we have a default intent specified in APPS above, use that
             if APPS[args.app].get("default_intent", None) is not None:
                 args.intent = APPS[args.app]['default_intent']
             else:
                 # otherwise fail out
-                parser.error("--intent command line argument is required!")
+                parser.error("--intent command-line argument is required!")


 def parse_args(argv=None):
@@ -219,7 +230,7 @@ class _PrintTests(_StopAction):
                 filters=[self.filter_app],
                 **info)
             if len(available_tests) == 0:
-                # none for that app, skip to next
+                # none for that app; skip to next
                 continue

             # print in readable format
@@ -237,14 +248,14 @@ class _PrintTests(_StopAction):

             for next_test in available_tests:
                 if next_test.get("name", None) is None:
-                    # no test name, skip it
+                    # no test name; skip it
                     continue

                 suite = os.path.basename(next_test['manifest'])[:-4]
                 if suite not in test_list:
                     test_list[suite] = {'type': None, 'subtests': []}

-                # for page-load tests we want to list every subtest, so we
+                # for page-load tests, we want to list every subtest, so we
                 # can see which pages are available in which tp6-* sets
                 if next_test.get("type", None) is not None:
                     test_list[suite]['type'] = next_test['type']
@@ -255,7 +266,7 @@ class _PrintTests(_StopAction):
                         subtest = "{0} ({1})".format(subtest, measure)
                     test_list[suite]['subtests'].append(subtest)

-            # print the list in a nice readable format
+            # print the list in a nice, readable format
             for key in sorted(test_list.iterkeys()):
                 print("\n%s" % key)
                 print("  type: %s" % test_list[key]['type'])
@@ -14,9 +14,9 @@ import socket
 import threading
 import time

-from mozlog import get_proxy_logger
+from logger.logger import RaptorLogger

-LOG = get_proxy_logger(component='raptor-control-server')
+LOG = RaptorLogger(component='raptor-control-server')

 here = os.path.abspath(os.path.dirname(__file__))
testing/raptor/raptor/cpu.py (new file)
@@ -0,0 +1,56 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+
+
+def get_app_cpu_usage(raptor):
+    # If we don't find the browser running, we default to 0 usage
+    cpu_usage = 0
+    app_name = raptor.config['binary']
+    verbose = raptor.device._verbose
+    raptor.device._verbose = False
+
+    '''
+    There are two ways to get CPU usage information:
+
+    1. By using the 'top' command and parsing details
+    2. By using 'dumpsys cpuinfo' and parsing the details
+
+    'top' is our first choice if it is available, but the
+    parameters we use are only available in Android 8 or
+    greater; otherwise we fall back to using dumpsys.
+    '''
+    if raptor.device.version >= 8:
+        cpuinfo = raptor.device.shell_output("top -O %CPU -n 1").split("\n")
+        raptor.device._verbose = verbose
+        for line in cpuinfo:
+            # 14781 u0_a83  0  92.8  12.4  64:53.04 org.mozilla.geckoview_example
+            data = line.split()
+            if data[-1] == app_name:
+                cpu_usage = float(data[3])
+    else:
+        cpuinfo = raptor.device.shell_output("dumpsys cpuinfo | grep %s" % app_name).split("\n")
+        for line in cpuinfo:
+            # 34% 14781/org.mozilla.geckoview_example: 26% user + 7.5% kernel
+            data = line.split()
+            cpu_usage = float(data[0].strip('%'))
+
+    return cpu_usage
+
+
+def generate_android_cpu_profile(raptor, test_name):
+    if not raptor.device or not raptor.config['cpu_test']:
+        return
+
+    result = get_app_cpu_usage(raptor)
+
+    cpuinfo_data = {
+        u'type': u'cpu',
+        u'test': test_name,
+        u'unit': u'%',
+        u'values': {
+            u'browser_cpu_usage': result
+        }
+    }
+    raptor.control_server.submit_supporting_data(cpuinfo_data)
|
||||
import zipfile
|
||||
|
||||
import mozfile
|
||||
from mozlog import get_proxy_logger
|
||||
|
||||
from logger.logger import RaptorLogger
|
||||
from profiler import symbolication, profiling
|
||||
|
||||
here = os.path.dirname(os.path.realpath(__file__))
|
||||
LOG = get_proxy_logger()
|
||||
LOG = RaptorLogger(component='raptor-gecko-profile')
|
||||
|
||||
|
||||
class GeckoProfile(object):
|
||||
|
@@ -5,12 +5,12 @@ from __future__ import absolute_import

 import os

-from mozlog import get_proxy_logger
+from logger.logger import RaptorLogger


 here = os.path.abspath(os.path.dirname(__file__))
 webext_dir = os.path.join(os.path.dirname(here), 'webext', 'raptor')
-LOG = get_proxy_logger(component="raptor-gen-test-config")
+LOG = RaptorLogger(component='raptor-gen-test-config')


 def gen_test_config(browser, test, cs_port, post_startup_delay,
@@ -6,14 +6,14 @@ from __future__ import absolute_import
 import json
 import os

+from logger.logger import RaptorLogger
 from manifestparser import TestManifest
-from mozlog import get_proxy_logger
 from utils import transform_platform

 here = os.path.abspath(os.path.dirname(__file__))
 raptor_ini = os.path.join(here, 'raptor.ini')
 tests_dir = os.path.join(here, 'tests')
-LOG = get_proxy_logger(component="raptor-manifest")
+LOG = RaptorLogger(component='raptor-manifest')

 LIVE_SITE_TIMEOUT_MULTIPLIER = 1.2

@@ -93,7 +93,7 @@ def validate_test_ini(test_details):
         if setting == "page-cycles" and test_details.get('browser_cycles') is not None:
             continue
         valid_settings = False
-        LOG.error("ERROR: setting '%s' is required but not found in %s"
+        LOG.error("setting '%s' is required but not found in %s"
                   % (setting, test_details['manifest']))

     test_details.setdefault("page_timeout", 30000)
@@ -103,7 +103,7 @@ def validate_test_ini(test_details):
     for setting in playback_settings:
         if test_details.get(setting) is None:
             valid_settings = False
-            LOG.error("ERROR: setting '%s' is required but not found in %s"
+            LOG.error("setting '%s' is required but not found in %s"
                       % (setting, test_details['manifest']))

     # if 'alert-on' is specified, we need to make sure that the value given is valid
@@ -117,7 +117,7 @@ def validate_test_ini(test_details):
     # now make sure each alert_on value provided is valid
     for alert_on_value in test_details['alert_on']:
         if alert_on_value not in test_details['measure']:
-            LOG.error("ERROR: The 'alert_on' value of '%s' is not valid because "
+            LOG.error("The 'alert_on' value of '%s' is not valid because "
                       "it doesn't exist in the 'measure' test setting!"
                       % alert_on_value)
             valid_settings = False
|
||||
import json
|
||||
import os
|
||||
|
||||
from mozlog import get_proxy_logger
|
||||
from logger.logger import RaptorLogger
|
||||
|
||||
LOG = get_proxy_logger(component="raptor-output")
|
||||
LOG = RaptorLogger(component='raptor-output')
|
||||
|
||||
|
||||
class Output(object):
|
||||
@ -43,7 +43,7 @@ class Output(object):
|
||||
|
||||
# check if we actually have any results
|
||||
if len(self.results) == 0:
|
||||
LOG.error("error: no raptor test results found for %s" %
|
||||
LOG.error("no raptor test results found for %s" %
|
||||
', '.join(test_names))
|
||||
return
|
||||
|
||||
@ -821,7 +821,7 @@ class Output(object):
|
||||
screenshot_path = os.path.join(os.getcwd(), 'screenshots.html')
|
||||
|
||||
if self.summarized_results == {}:
|
||||
LOG.error("error: no summarized raptor results found for %s" %
|
||||
LOG.error("no summarized raptor results found for %s" %
|
||||
', '.join(test_names))
|
||||
else:
|
||||
with open(results_path, 'w') as f:
|
||||
@ -870,7 +870,7 @@ class Output(object):
|
||||
from the actual Raptor test that was ran when the supporting data was gathered.
|
||||
'''
|
||||
if len(self.summarized_supporting_data) == 0:
|
||||
LOG.error("error: no summarized supporting data found for %s" %
|
||||
LOG.error("no summarized supporting data found for %s" %
|
||||
', '.join(test_names))
|
||||
return False
|
||||
|
||||
|
@@ -7,10 +7,10 @@ from __future__ import absolute_import

 import json

-from mozlog import get_proxy_logger
+from logger.logger import RaptorLogger


-LOG = get_proxy_logger(component='raptor-output-handler')
+LOG = RaptorLogger(component='raptor-output-handler')


 class OutputHandler(object):
@ -1,133 +0,0 @@
# This file was copied from mitmproxy/mitmproxy/addons/serverplayback.py release tag 4.0.4
# and modified by Florin Strugariu

# Altered features:
# * returns 404 rather than dropping the whole HTTP/2 connection on the floor
# * remove the replay packages that don't have any content in their response package

import hashlib
import urllib

import typing
from urllib import parse
from mitmproxy import command
from mitmproxy import ctx, http
from mitmproxy import exceptions
from mitmproxy import io


class AlternateServerPlayback:

def __init__(self):
ctx.master.addons.remove(ctx.master.addons.get("serverplayback"))
self.flowmap = {}
self.configured = False

def load(self, loader):
ctx.log.info("load options")
loader.add_option(
"server_replay", typing.Sequence[str], [],
"Replay server responses from a saved file."
)

@command.command("replay.server")
def load_flows(self, flows):
"""
Replay server responses from flows.
"""
self.flowmap = {}
for i in flows:
# Check that response has data.content. If response has no content a
# HttpException("Cannot assemble flow with missing content") will get raised
if i.response and i.response.data.content:
l = self.flowmap.setdefault(self._hash(i), [])
l.append(i)
else:
ctx.log.info(
"Request %s has no response data content. Removing from request list" %
i.request.url)
ctx.master.addons.trigger("update", [])

@command.command("replay.server.file")
def load_file(self, path):
try:
flows = io.read_flows_from_paths([path])
except exceptions.FlowReadException as e:
raise exceptions.CommandError(str(e))
self.load_flows(flows)

@command.command("replay.server.stop")
def clear(self):
"""
Stop server replay.
"""
self.flowmap = {}
ctx.master.addons.trigger("update", [])

@command.command("replay.server.count")
def count(self):
return sum([len(i) for i in self.flowmap.values()])

def _hash(self, flow):
"""
Calculates a loose hash of the flow request.
"""
r = flow.request

# unquote url
# See Bug 1509835
_, _, path, _, query, _ = urllib.parse.urlparse(parse.unquote(r.url))
queriesArray = urllib.parse.parse_qsl(query, keep_blank_values=True)

key = [str(r.port), str(r.scheme), str(r.method), str(path)]
key.append(str(r.raw_content))
key.append(r.host)

for p in queriesArray:
key.append(p[0])
key.append(p[1])

return hashlib.sha256(
repr(key).encode("utf8", "surrogateescape")
).digest()

def next_flow(self, request):
"""
Returns the next flow object, or None if no matching flow was
found.
"""
hsh = self._hash(request)
if hsh in self.flowmap:
return self.flowmap[hsh][-1]

def configure(self, updated):
if not self.configured and ctx.options.server_replay:
self.configured = True
try:
flows = io.read_flows_from_paths(ctx.options.server_replay)
except exceptions.FlowReadException as e:
raise exceptions.OptionsError(str(e))
self.load_flows(flows)

def request(self, f):
if self.flowmap:
rflow = self.next_flow(f)
if rflow:
response = rflow.response.copy()
response.is_replay = True
# Refresh server replay responses by adjusting date, expires and
# last-modified headers, as well as adjusting cookie expiration.
response.refresh()

f.response = response
else:
# returns 404 rather than dropping the whole HTTP/2 connection
ctx.log.warn(
"server_playback: killed non-replay request {}".format(
f.request.url
)
)
f.response = http.HTTPResponse.make(404, b'', {'content-type': 'text/plain'})


addons = [AlternateServerPlayback()]
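Note on the addon removed above: its _hash method keys each recorded flow by a loose fingerprint of the request (port, scheme, method, unquoted path, body, host, and query pairs), so replay lookups tolerate percent-encoding differences in URLs. A minimal standalone sketch of that behaviour, with a namedtuple standing in for mitmproxy's flow.request (an assumption for illustration only):

# Standalone sketch of the loose request hash; Request is a stand-in,
# not a real mitmproxy type.
import hashlib
from collections import namedtuple
from urllib import parse

Request = namedtuple("Request", "port scheme method url raw_content host")

def loose_hash(r):
    # unquote first so percent-encoded and literal URLs land in the same bucket
    _, _, path, _, query, _ = parse.urlparse(parse.unquote(r.url))
    key = [str(r.port), str(r.scheme), str(r.method), str(path),
           str(r.raw_content), r.host]
    for name, value in parse.parse_qsl(query, keep_blank_values=True):
        key.extend([name, value])
    return hashlib.sha256(repr(key).encode("utf8", "surrogateescape")).digest()

a = Request(443, "https", "GET", "https://example.com/a%20b?x=1", b"", "example.com")
b = Request(443, "https", "GET", "https://example.com/a b?x=1", b"", "example.com")
assert loose_hash(a) == loose_hash(b)  # quoting differences do not matter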
@ -0,0 +1,10 @@
[
{
"size": 87375104,
"visibility": "public",
"digest": "b281ee5f9ca3c7b8fba30b29714f752880ae8d92aaa97fab70edb5dfb69742882276b7df332de6a76fb7b6d9aebc50185bddbcc080a9d531e5f08e92b1a2c593",
"algorithm": "sha512",
"filename": "mitm4-linux-firefox-netflix.zip",
"unpack": true
}
]
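For reference, a tooltool entry like the one above can be checked locally: per the declared algorithm, the digest field is the sha512 hex digest of the archive, and size is its byte count. A small verification sketch (the filename is the one from the manifest):

import hashlib
import os

def tooltool_check(path, expected_digest, expected_size):
    h = hashlib.sha512()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(1 << 20), b''):
            h.update(block)
    return h.hexdigest() == expected_digest and os.path.getsize(path) == expected_size

# tooltool_check('mitm4-linux-firefox-netflix.zip', 'b281ee5f9ca3...', 87375104)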
@ -7,11 +7,16 @@ from __future__ import absolute_import
import os
import re

from logger.logger import RaptorLogger


LOG = RaptorLogger(component='raptor-power')


def init_android_power_test(raptor):
upload_dir = os.getenv("MOZ_UPLOAD_DIR")
if not upload_dir:
raptor.log.critical(
LOG.critical(
"%s power test ignored; MOZ_UPLOAD_DIR unset" % raptor.config["app"]
)
return
@ -85,7 +90,7 @@ def init_android_power_test(raptor):
def finish_android_power_test(raptor, test_name):
upload_dir = os.getenv("MOZ_UPLOAD_DIR")
if not upload_dir:
raptor.log.critical(
LOG.critical(
"%s power test ignored because MOZ_UPLOAD_DIR was not set" % test_name
)
return
@ -110,6 +115,13 @@ def finish_android_power_test(raptor, test_name):
batterystats = raptor.device.shell_output("dumpsys batterystats")
output.write(batterystats)
raptor.device._verbose = verbose

# Get the android version
android_version = raptor.device.shell_output(
"getprop ro.build.version.release"
).strip()
major_android_version = int(android_version.split('.')[0])

estimated_power = False
uid = None
total = cpu = wifi = smearing = screen = proportional = 0
@ -174,13 +186,13 @@ def finish_android_power_test(raptor, test_name):
screen = full_screen if screen == 0 else screen
wifi = full_wifi if wifi is None else wifi

raptor.log.info(
LOG.info(
"power data for uid: %s, cpu: %s, wifi: %s, screen: %s, proportional: %s"
% (uid, cpu, wifi, screen, proportional)
)

# send power data directly to the control-server results handler,
# so it can be formatted and out-put for perfherder ingestion
# Send power data directly to the control-server results handler
# so it can be formatted and output for perfherder ingestion

power_data = {
"type": "power",
@ -190,13 +202,15 @@ def finish_android_power_test(raptor, test_name):
"cpu": float(cpu),
"wifi": float(wifi),
"screen": float(screen),
"proportional": float(proportional),
},
}

raptor.log.info("submitting power data via control server directly")
LOG.info("submitting power data via control server directly")
if major_android_version >= 8:
power_data['values']['proportional'] = float(proportional)

raptor.control_server.submit_supporting_data(power_data)

# generate power bugreport zip
raptor.log.info("generating power bugreport zip")
# Generate power bugreport zip
LOG.info("generating power bugreport zip")
raptor.device.command_output(["bugreport", upload_dir])
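The hunks above read per-component power numbers (cpu, wifi, screen, and on Android 8+ the smearing-based proportional value) out of dumpsys batterystats. The exact batterystats layout varies by Android version, so the following is a hedged sketch of pulling those fields from an assumed "Estimated power use" line, not the shipped parser:

import re

def parse_power_components(line):
    # assumed line shape, e.g.:
    #   Uid u0a83: 12.3 ( cpu=5.0 wifi=1.2 screen=3.1 proportional=2.0 )
    wanted = ('cpu', 'wifi', 'screen', 'smearing', 'proportional')
    found = dict(re.findall(r'(\w+)=([\d.]+)', line))
    return {k: float(found[k]) for k in wanted if k in found}

sample = "Uid u0a83: 12.3 ( cpu=5.0 wifi=1.2 screen=3.1 proportional=2.0 )"
print(parse_power_components(sample))  # {'cpu': 5.0, 'wifi': 1.2, ...}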
@ -19,8 +19,9 @@ import requests

import mozcrash
import mozinfo
from logger.logger import RaptorLogger
from mozdevice import ADBDevice
from mozlog import commandline, get_default_logger
from mozlog import commandline
from mozprofile import create_profile
from mozproxy import get_playback
from mozrunner import runners
@ -54,10 +55,12 @@ from gen_test_config import gen_test_config
from outputhandler import OutputHandler
from manifest import get_raptor_test_list
from memory import generate_android_memory_profile
from mozproxy import get_playback
from power import init_android_power_test, finish_android_power_test
from results import RaptorResultsHandler
from utils import view_gecko_profile
from cpu import generate_android_cpu_profile

LOG = RaptorLogger(component='raptor-main')


class SignalHandler:
@ -79,9 +82,9 @@ class Raptor(object):

def __init__(self, app, binary, run_local=False, obj_path=None, profile_class=None,
gecko_profile=False, gecko_profile_interval=None, gecko_profile_entries=None,
symbols_path=None, host=None, power_test=False, memory_test=False,
symbols_path=None, host=None, power_test=False, cpu_test=False, memory_test=False,
is_release_build=False, debug_mode=False, post_startup_delay=None,
interrupt_handler=None, e10s=True, **kwargs):
interrupt_handler=None, e10s=True, enable_webrender=False, **kwargs):

# Override the magic --host HOST_IP with the value of the environment variable.
if host == 'HOST_IP':
@ -101,13 +104,15 @@ class Raptor(object):
'host': host,
'power_test': power_test,
'memory_test': memory_test,
'cpu_test': cpu_test,
'is_release_build': is_release_build,
'enable_control_server_wait': memory_test,
'e10s': e10s,
'enable_webrender': enable_webrender,
}

self.raptor_venv = os.path.join(os.getcwd(), 'raptor-venv')
self.log = get_default_logger(component='raptor-main')
self.raptor_webext = None
self.control_server = None
self.playback = None
self.benchmark = None
@ -125,10 +130,10 @@ class Raptor(object):
# if running debug-mode reduce the pause after browser startup
if self.debug_mode:
self.post_startup_delay = min(self.post_startup_delay, 3000)
self.log.info("debug-mode enabled, reducing post-browser startup pause to %d ms"
% self.post_startup_delay)
LOG.info("debug-mode enabled, reducing post-browser startup pause to %d ms"
% self.post_startup_delay)

self.log.info("main raptor init, config is: %s" % str(self.config))
LOG.info("main raptor init, config is: %s" % str(self.config))

# create results holder
self.results_handler = RaptorResultsHandler()
@ -148,9 +153,9 @@ class Raptor(object):
raise NotImplementedError

def run_test_setup(self, test):
self.log.info("starting raptor test: %s" % test['name'])
self.log.info("test settings: %s" % str(test))
self.log.info("raptor config: %s" % str(self.config))
LOG.info("starting raptor test: %s" % test['name'])
LOG.info("test settings: %s" % str(test))
LOG.info("raptor config: %s" % str(self.config))

if test.get('type') == "benchmark":
self.serve_benchmark_source(test)
@ -212,7 +217,7 @@ class Raptor(object):
if not self.debug_mode:
elapsed_time += 1
if elapsed_time > (timeout) - 5: # stop 5 seconds early
self.log.info("application timed out after {} seconds".format(timeout))
LOG.info("application timed out after {} seconds".format(timeout))
self.control_server.wait_for_quit()
break

@ -228,12 +233,12 @@ class Raptor(object):
if self.config['gecko_profile'] is True:
self.gecko_profiler.symbolicate()
# clean up the temp gecko profiling folders
self.log.info("cleaning up after gecko profiling")
LOG.info("cleaning up after gecko profiling")
self.gecko_profiler.clean()

def set_browser_test_prefs(self, raw_prefs):
# add test specific preferences
self.log.info("setting test-specific Firefox preferences")
LOG.info("setting test-specific Firefox preferences")
self.profile.set_preferences(json.loads(raw_prefs))

def build_browser_profile(self):
@ -245,7 +250,7 @@ class Raptor(object):

for profile in base_profiles:
path = os.path.join(self.profile_data_dir, profile)
self.log.info("Merging profile: {}".format(path))
LOG.info("Merging profile: {}".format(path))
self.profile.merge(path)

# add profile dir to our config
@ -270,12 +275,18 @@ class Raptor(object):
'playback_binary_manifest': test.get('playback_binary_manifest'),
'playback_pageset_manifest': test.get('playback_pageset_manifest'),
})
# By default we are connecting to upstream. In the future we might want
# to flip that default to false so all tests will stop connecting to
# the upstream server.
upstream = test.get("playback_upstream_cert", "true")
self.config["playback_upstream_cert"] = upstream.lower() in ("true", "1")

for key in ('playback_pageset_manifest', 'playback_pageset_zip'):
if self.config.get(key) is None:
continue
self.config[key] = os.path.join(playback_dir, self.config[key])

self.log.info("test uses playback tool: %s " % self.config['playback_tool'])
LOG.info("test uses playback tool: %s " % self.config['playback_tool'])

def serve_benchmark_source(self, test):
# benchmark-type tests require the benchmark test to be served out
@ -288,7 +299,7 @@ class Raptor(object):
# note: for chrome the addon is just a list of paths that ultimately are added
# to the chromium command line '--load-extension' argument
self.raptor_webext = os.path.join(webext_dir, 'raptor')
self.log.info("installing webext %s" % self.raptor_webext)
LOG.info("installing webext %s" % self.raptor_webext)
self.profile.addons.install(self.raptor_webext)

# on firefox we can get an addon id; chrome addon actually is just cmd line arg
@ -299,7 +310,11 @@ class Raptor(object):

def remove_raptor_webext(self):
# remove the raptor webext; as it must be reloaded with each subtest anyway
self.log.info("removing webext %s" % self.raptor_webext)
if not self.raptor_webext:
LOG.info("raptor webext not installed - not attempting removal")
return

LOG.info("removing webext %s" % self.raptor_webext)
if self.config['app'] in ['firefox', 'geckoview', 'fennec', 'refbrow', 'fenix']:
self.profile.addons.remove_addon(self.webext_id)

@ -308,48 +323,48 @@ class Raptor(object):
if self.config['app'] in chrome_apps:
self.profile.addons.remove(self.raptor_webext)

def get_proxy_command_for_mitm(self, test, version):
# Generate Mitmproxy playback args
script = os.path.join(here, "playback", "alternate-server-replay-{}.py".format(version))
recordings = test.get("playback_recordings")
if recordings:
recording_paths = []
proxy_dir = self.playback.mozproxy_dir
for recording in recordings.split():
if not recording:
continue
recording_paths.append(os.path.join(proxy_dir, recording))

# this part is platform-specific
if mozinfo.os == "win":
script = script.replace("\\", "\\\\\\")
recording_paths = [recording_path.replace("\\", "\\\\\\")
for recording_path in recording_paths]

if version == "2.0.2":
self.playback.config['playback_tool_args'] = ["--replay-kill-extra",
"--script",
'""{} {}""'.
format(script,
" ".join(recording_paths))]
elif version == "4.0.4":
self.playback.config['playback_tool_args'] = ["--scripts", script,
"--set",
"server_replay={}".
format(" ".join(recording_paths))]
else:
raise Exception("Mitmproxy version is unknown!")
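Put together, the 4.0.4 branch above makes mozproxy launch mitmdump with the replay script and the recordings wired through --set. Roughly, with illustrative paths (the real ones come from mozproxy_dir and the test INI):

# Illustrative only; paths are made up for the example.
playback_tool_args = [
    "--scripts", "/path/to/raptor/playback/alternate-server-replay-4.0.4.py",
    "--set", "server_replay=/path/to/recordings/netflix.mp",
]
# i.e. mitmdump --scripts .../alternate-server-replay-4.0.4.py \
#               --set server_replay=.../netflix.mp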
def start_playback(self, test):
# creating the playback tool
self.get_playback_config(test)
self.playback = get_playback(self.config, self.device)

self.get_proxy_command_for_mitm(test, self.config['playback_version'])
self.playback.config['playback_files'] = self.get_recording_paths(test)

# let's start it!
self.playback.start()

self.log_recording_dates(test)

def get_recording_paths(self, test):
recordings = test.get("playback_recordings")

if recordings:
recording_paths = []
proxy_dir = self.playback.mozproxy_dir

for recording in recordings.split():
if not recording:
continue
recording_paths.append(os.path.join(proxy_dir, recording))

return recording_paths

def log_recording_dates(self, test):
for r in self.get_recording_paths(test):
json_path = '{}.json'.format(r.split('.')[0])

if os.path.exists(json_path):
with open(json_path) as f:
recording_date = json.loads(f.read()).get('recording_date')

if recording_date is not None:
LOG.info('Playback recording date: {} '.
format(recording_date.split(' ')[0]))
else:
LOG.info('Playback recording date not available')
else:
LOG.info('Playback recording information not available')
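log_recording_dates above assumes each recording .mp file ships next to a small JSON sidecar, of which only the recording_date key is consumed. A sketch of the assumed shape and the same lookup logic (the filename and date value are made up for illustration):

import json
import os

json_path = 'netflix.json'  # assumed sidecar: {"recording_date": "2019-05-06 12:34:56"}
if os.path.exists(json_path):
    with open(json_path) as f:
        recording_date = json.loads(f.read()).get('recording_date')
    print(recording_date.split(' ')[0] if recording_date else 'not available')
else:
    print('Playback recording information not available')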
def delete_proxy_settings_from_profile(self):
# Must delete the proxy settings from the profile if running
# the test with a host different from localhost.
@ -361,10 +376,10 @@ class Raptor(object):
userjsfile.writelines(prefs)

def _init_gecko_profiling(self, test):
self.log.info("initializing gecko profiler")
LOG.info("initializing gecko profiler")
upload_dir = os.getenv('MOZ_UPLOAD_DIR')
if not upload_dir:
self.log.critical("Profiling ignored because MOZ_UPLOAD_DIR was not set")
LOG.critical("Profiling ignored because MOZ_UPLOAD_DIR was not set")
else:
self.gecko_profiler = GeckoProfile(upload_dir,
self.config,
@ -393,7 +408,7 @@ class Raptor(object):
self.control_server_wait_clear('all')

self.control_server.stop()
self.log.info("finished")
LOG.info("finished")

def control_server_wait_set(self, state):
response = requests.post("http://127.0.0.1:%s/" % self.control_server.port,
@ -427,7 +442,7 @@ class RaptorDesktop(Raptor):
super(RaptorDesktop, self).__init__(*args, **kwargs)

# create the desktop browser runner
self.log.info("creating browser runner using mozrunner")
LOG.info("creating browser runner using mozrunner")
self.output_handler = OutputHandler()
process_args = {
'processOutputLine': [self.output_handler],
@ -437,6 +452,12 @@ class RaptorDesktop(Raptor):
self.config['binary'], profile=self.profile, process_args=process_args,
symbols_path=self.config['symbols_path'])

if self.config['enable_webrender']:
self.runner.env['MOZ_WEBRENDER'] = '1'
self.runner.env['MOZ_ACCELERATED'] = '1'
else:
self.runner.env['MOZ_WEBRENDER'] = '0'
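The new block above gates WebRender purely through the child process environment. A minimal sketch of the same toggle applied to a plain subprocess, assuming an illustrative binary path:

import os
import subprocess

def browser_env(enable_webrender):
    env = os.environ.copy()
    if enable_webrender:
        env['MOZ_WEBRENDER'] = '1'
        env['MOZ_ACCELERATED'] = '1'  # also force hardware compositing on
    else:
        env['MOZ_WEBRENDER'] = '0'
    return env

# subprocess.Popen(['/path/to/firefox', '-profile', '/tmp/prof'],
#                  env=browser_env(True))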
def launch_desktop_browser(self, test):
raise NotImplementedError

@ -477,13 +498,13 @@ class RaptorDesktop(Raptor):

The default will be to run in warm mode; unless 'cold = true' is set in the test INI.
'''
self.log.info("test %s is running in cold mode; browser WILL be restarted between "
"page cycles" % test['name'])
LOG.info("test %s is running in cold mode; browser WILL be restarted between "
"page cycles" % test['name'])

for test['browser_cycle'] in range(1, test['expected_browser_cycles'] + 1):

self.log.info("begin browser cycle %d of %d for test %s"
% (test['browser_cycle'], test['expected_browser_cycles'], test['name']))
LOG.info("begin browser cycle %d of %d for test %s"
% (test['browser_cycle'], test['expected_browser_cycles'], test['name']))

self.run_test_setup(test)

@ -541,7 +562,7 @@ class RaptorDesktop(Raptor):
else:
# in debug mode, and running locally, leave the browser running
if self.config['run_local']:
self.log.info("* debug-mode enabled - please shutdown the browser manually...")
LOG.info("* debug-mode enabled - please shutdown the browser manually...")
self.runner.wait(timeout=None)

super(RaptorDesktop, self).run_test_teardown()
@ -564,16 +585,16 @@ class RaptorDesktopFirefox(RaptorDesktop):
# For Firefox we need to set MOZ_DISABLE_NONLOCAL_CONNECTIONS=1 env var before startup
# when testing release builds from mozilla-beta/release. This is because of restrictions
# on release builds that require webextensions to be signed unless this env var is set
self.log.info("setting MOZ_DISABLE_NONLOCAL_CONNECTIONS=1")
LOG.info("setting MOZ_DISABLE_NONLOCAL_CONNECTIONS=1")
os.environ['MOZ_DISABLE_NONLOCAL_CONNECTIONS'] = "1"

def enable_non_local_connections(self):
# pageload tests need to be able to access non-local connections via mitmproxy
self.log.info("setting MOZ_DISABLE_NONLOCAL_CONNECTIONS=0")
LOG.info("setting MOZ_DISABLE_NONLOCAL_CONNECTIONS=0")
os.environ['MOZ_DISABLE_NONLOCAL_CONNECTIONS'] = "0"

def launch_desktop_browser(self, test):
self.log.info("starting %s" % self.config['app'])
LOG.info("starting %s" % self.config['app'])
if self.config['is_release_build']:
self.disable_non_local_connections()

@ -612,7 +633,7 @@ class RaptorDesktopChrome(RaptorDesktop):
self.runner.cmdargs.extend(chrome_args)

def launch_desktop_browser(self, test):
self.log.info("starting %s" % self.config['app'])
LOG.info("starting %s" % self.config['app'])
# some chromium-specific cmd line opts required
self.runner.cmdargs.extend(['--use-mock-keychain', '--no-default-browser-check'])

@ -627,7 +648,7 @@ class RaptorDesktopChrome(RaptorDesktop):

def set_browser_test_prefs(self, raw_prefs):
# add test-specific preferences
self.log.info("preferences were configured for the test, however \
LOG.info("preferences were configured for the test, however \
we currently do not install them on non-Firefox browsers.")

@ -651,15 +672,15 @@ class RaptorAndroid(Raptor):
def set_reverse_ports(self, is_benchmark=False):
# Make services running on the host available to the device
if self.config['host'] in ('localhost', '127.0.0.1'):
self.log.info("making the raptor control server port available to device")
LOG.info("making the raptor control server port available to device")
self.set_reverse_port(self.control_server.port)

if self.config['host'] in ('localhost', '127.0.0.1'):
self.log.info("making the raptor playback server port available to device")
LOG.info("making the raptor playback server port available to device")
self.set_reverse_port(8080)

if is_benchmark and self.config['host'] in ('localhost', '127.0.0.1'):
self.log.info("making the raptor benchmarks server port available to device")
LOG.info("making the raptor benchmarks server port available to device")
self.set_reverse_port(self.benchmark_port)

def setup_adb_device(self):
@ -667,7 +688,7 @@ class RaptorAndroid(Raptor):
self.device = ADBDevice(verbose=True)
self.tune_performance()

self.log.info("creating remote root folder for raptor: %s" % self.remote_test_root)
LOG.info("creating remote root folder for raptor: %s" % self.remote_test_root)
self.device.rm(self.remote_test_root, force=True, recursive=True)
self.device.mkdir(self.remote_test_root)
self.device.chmod(self.remote_test_root, recursive=True, root=True)
@ -679,10 +700,10 @@ class RaptorAndroid(Raptor):

For more information, see https://bugzilla.mozilla.org/show_bug.cgi?id=1547135.
"""
self.log.info("tuning android device performance")
LOG.info("tuning android device performance")
self.set_svc_power_stayon()
if (self.device._have_su or self.device._have_android_su):
self.log.info("executing additional tuning commands requiring root")
LOG.info("executing additional tuning commands requiring root")
device_name = self.device.shell_output('getprop ro.product.model')
# all commands require root shell from here on
self.set_scheduler()
@ -692,22 +713,22 @@ class RaptorAndroid(Raptor):
self.set_gpu_performance_parameters(device_name)
self.set_kernel_performance_parameters()
self.device.clear_logcat()
self.log.info("android device performance tuning complete")
LOG.info("android device performance tuning complete")

def _set_value_and_check_exitcode(self, file_name, value, root=False):
self.log.info('setting {} to {}'.format(file_name, value))
LOG.info('setting {} to {}'.format(file_name, value))
process = self.device.shell(' '.join(['echo', str(value), '>', str(file_name)]), root=root)
if process.exitcode == 0:
self.log.info('successfully set {} to {}'.format(file_name, value))
LOG.info('successfully set {} to {}'.format(file_name, value))
else:
self.log.warning('command failed with exitcode {}'.format(str(process.exitcode)))
LOG.warning('command failed with exitcode {}'.format(str(process.exitcode)))

def set_svc_power_stayon(self):
self.log.info('set device to stay awake on usb')
LOG.info('set device to stay awake on usb')
self.device.shell('svc power stayon usb')

def set_scheduler(self):
self.log.info('setting scheduler to noop')
LOG.info('setting scheduler to noop')
scheduler_location = '/sys/block/sda/queue/scheduler'

self._set_value_and_check_exitcode(scheduler_location, 'noop')
@ -719,18 +740,18 @@ class RaptorAndroid(Raptor):
'thermald',
]
for service in services:
self.log.info(' '.join(['turning off service:', service]))
LOG.info(' '.join(['turning off service:', service]))
self.device.shell(' '.join(['stop', service]), root=True)

services_list_output = self.device.shell_output('service list')
for service in services:
if service not in services_list_output:
self.log.info(' '.join(['successfully terminated:', service]))
LOG.info(' '.join(['successfully terminated:', service]))
else:
self.log.warning(' '.join(['failed to terminate:', service]))
LOG.warning(' '.join(['failed to terminate:', service]))

def disable_animations(self):
self.log.info('disabling animations')
LOG.info('disabling animations')
commands = {
'animator_duration_scale': 0.0,
'transition_animation_scale': 0.0,
@ -739,12 +760,12 @@ class RaptorAndroid(Raptor):

for key, value in commands.items():
command = ' '.join(['settings', 'put', 'global', key, str(value)])
self.log.info('setting {} to {}'.format(key, value))
LOG.info('setting {} to {}'.format(key, value))
self.device.shell(command)

def restore_animations(self):
# animation settings are not restored to default by reboot
self.log.info('restoring animations')
LOG.info('restoring animations')
commands = {
'animator_duration_scale': 1.0,
'transition_animation_scale': 1.0,
@ -756,7 +777,7 @@ class RaptorAndroid(Raptor):
self.device.shell(command)

def set_virtual_memory_parameters(self):
self.log.info('setting virtual memory parameters')
LOG.info('setting virtual memory parameters')
commands = {
'/proc/sys/vm/swappiness': 0,
'/proc/sys/vm/dirty_ratio': 85,
@ -767,7 +788,7 @@ class RaptorAndroid(Raptor):
self._set_value_and_check_exitcode(key, value, root=True)

def set_cpu_performance_parameters(self, device_name):
self.log.info('setting cpu performance parameters')
LOG.info('setting cpu performance parameters')
commands = {}

if device_name == 'Pixel 2':
@ -799,7 +820,7 @@ class RaptorAndroid(Raptor):
self._set_value_and_check_exitcode(key, value, root=True)

def set_gpu_performance_parameters(self, device_name):
self.log.info('setting gpu performance parameters')
LOG.info('setting gpu performance parameters')
commands = {
'/sys/class/kgsl/kgsl-3d0/bus_split': '0',
'/sys/class/kgsl/kgsl-3d0/force_bus_on': '1',
@ -835,7 +856,7 @@ class RaptorAndroid(Raptor):
self._set_value_and_check_exitcode(key, value, root=True)

def set_kernel_performance_parameters(self):
self.log.info('setting kernel performance parameters')
LOG.info('setting kernel performance parameters')
commands = {
'/sys/kernel/debug/msm-bus-dbg/shell-client/update_request': '1',
'/sys/kernel/debug/msm-bus-dbg/shell-client/mas': '1',
@ -845,8 +866,16 @@ class RaptorAndroid(Raptor):
for key, value in commands.items():
self._set_value_and_check_exitcode(key, value, root=True)

def build_browser_profile(self):
super(RaptorAndroid, self).build_browser_profile()

# Merge in the android profile
path = os.path.join(self.profile_data_dir, 'raptor-android')
LOG.info("Merging profile: {}".format(path))
self.profile.merge(path)

def clear_app_data(self):
self.log.info("clearing %s app data" % self.config['binary'])
LOG.info("clearing %s app data" % self.config['binary'])
self.device.shell("pm clear %s" % self.config['binary'])

def copy_profile_to_device(self):
@ -855,21 +884,21 @@ class RaptorAndroid(Raptor):
raise Exception('%s is not installed' % self.config['binary'])

try:
self.log.info("copying profile to device: %s" % self.remote_profile)
LOG.info("copying profile to device: %s" % self.remote_profile)
self.device.rm(self.remote_profile, force=True, recursive=True)
# self.device.mkdir(self.remote_profile)
self.device.push(self.profile.profile, self.remote_profile)
self.device.chmod(self.remote_profile, recursive=True, root=True)

except Exception:
self.log.error("Unable to copy profile to device.")
LOG.error("Unable to copy profile to device.")
raise

def turn_on_android_app_proxy(self):
# for geckoview/android pageload playback we can't use a policy to turn on the
# proxy; we need to set prefs instead; note that the 'host' may be different
# than '127.0.0.1' so we must set the prefs accordingly
self.log.info("setting profile prefs to turn on the android app proxy")
LOG.info("setting profile prefs to turn on the android app proxy")
proxy_prefs = {}
proxy_prefs["network.proxy.type"] = 1
proxy_prefs["network.proxy.http"] = self.config['host']
@ -880,11 +909,12 @@ class RaptorAndroid(Raptor):
self.profile.set_preferences(proxy_prefs)

def launch_firefox_android_app(self, test_name):
self.log.info("starting %s" % self.config['app'])
LOG.info("starting %s" % self.config['app'])

extra_args = ["-profile", self.remote_profile,
"--es", "env0", "LOG_VERBOSE=1",
"--es", "env1", "R_LOG_LEVEL=6"]
"--es", "env1", "R_LOG_LEVEL=6",
"--es", "env2", "MOZ_WEBRENDER=%d" % self.config['enable_webrender']]

try:
# make sure the android app is not already running
@ -918,8 +948,8 @@ class RaptorAndroid(Raptor):
raise Exception("Error launching %s. App did not start properly!" %
self.config['binary'])
except Exception as e:
self.log.error("Exception launching %s" % self.config['binary'])
self.log.error("Exception: %s %s" % (type(e).__name__, str(e)))
LOG.error("Exception launching %s" % self.config['binary'])
LOG.error("Exception: %s %s" % (type(e).__name__, str(e)))
if self.config['power_test']:
finish_android_power_test(self, test_name)
raise
@ -935,10 +965,10 @@ class RaptorAndroid(Raptor):
_source = os.path.join(source_dir, next_file)
_dest = os.path.join(target_dir, next_file)
if os.path.exists(_source):
self.log.info("copying %s to %s" % (_source, _dest))
LOG.info("copying %s to %s" % (_source, _dest))
shutil.copyfile(_source, _dest)
else:
self.log.critical("unable to find ssl cert db file: %s" % _source)
LOG.critical("unable to find ssl cert db file: %s" % _source)

def run_tests(self, tests, test_names):
self.setup_adb_device()
@ -952,7 +982,7 @@ class RaptorAndroid(Raptor):
self.set_reverse_ports(is_benchmark=is_benchmark)

def run_test_teardown(self):
self.log.info('removing reverse socket connections')
LOG.info('removing reverse socket connections')
self.device.remove_socket_connections('reverse')

super(RaptorAndroid, self).run_test_teardown()
@ -972,7 +1002,6 @@ class RaptorAndroid(Raptor):
finally:
if self.config['power_test']:
finish_android_power_test(self, test['name'])

self.run_test_teardown()

def run_test_cold(self, test, timeout=None):
@ -999,16 +1028,16 @@ class RaptorAndroid(Raptor):

The default will be to run in warm mode; unless 'cold = true' is set in the test INI.
'''
self.log.info("test %s is running in cold mode; browser WILL be restarted between "
"page cycles" % test['name'])
LOG.info("test %s is running in cold mode; browser WILL be restarted between "
"page cycles" % test['name'])

if self.config['power_test']:
init_android_power_test(self)

for test['browser_cycle'] in range(1, test['expected_browser_cycles'] + 1):

self.log.info("begin browser cycle %d of %d for test %s"
% (test['browser_cycle'], test['expected_browser_cycles'], test['name']))
LOG.info("begin browser cycle %d of %d for test %s"
% (test['browser_cycle'], test['expected_browser_cycles'], test['name']))

self.run_test_setup(test)

@ -1022,7 +1051,7 @@ class RaptorAndroid(Raptor):
# an ssl cert db has now been created in the profile; copy it out so we
# can use the same cert db in future test cycles / browser restarts
local_cert_db_dir = tempfile.mkdtemp()
self.log.info("backing up browser ssl cert db that was created via certutil")
LOG.info("backing up browser ssl cert db that was created via certutil")
self.copy_cert_db(self.config['local_profile_dir'], local_cert_db_dir)

if self.config['host'] not in ('localhost', '127.0.0.1'):
@ -1039,7 +1068,7 @@ class RaptorAndroid(Raptor):
if test.get('playback') is not None:
# get cert db from previous cycle profile and copy into new clean profile
# this saves us from having to start playback again / recreate cert db etc.
self.log.info("copying existing ssl cert db into new browser profile")
LOG.info("copying existing ssl cert db into new browser profile")
self.copy_cert_db(local_cert_db_dir, self.config['local_profile_dir'])

self.run_test_setup(test)
@ -1052,6 +1081,10 @@ class RaptorAndroid(Raptor):
# now start the browser/app under test
self.launch_firefox_android_app(test['name'])

# If we are measuring CPU, let's grab a snapshot
if self.config['cpu_test']:
generate_android_cpu_profile(self, test['name'])

# set our control server flag to indicate we are running the browser/app
self.control_server._finished = False

@ -1059,7 +1092,7 @@ class RaptorAndroid(Raptor):

# in debug mode, and running locally, leave the browser running
if self.debug_mode and self.config['run_local']:
self.log.info("* debug-mode enabled - please shutdown the browser manually...")
LOG.info("* debug-mode enabled - please shutdown the browser manually...")
self.runner.wait(timeout=None)

# break test execution if an exception is present
@ -1067,8 +1100,8 @@ class RaptorAndroid(Raptor):
break

def run_test_warm(self, test, timeout=None):
self.log.info("test %s is running in warm mode; browser will NOT be restarted between "
"page cycles" % test['name'])
LOG.info("test %s is running in warm mode; browser will NOT be restarted between "
"page cycles" % test['name'])
if self.config['power_test']:
init_android_power_test(self)

@ -1089,6 +1122,10 @@ class RaptorAndroid(Raptor):
# now start the browser/app under test
self.launch_firefox_android_app(test['name'])

# If we are collecting CPU info, let's grab the details
if self.config['cpu_test']:
generate_android_cpu_profile(self, test['name'])

# set our control server flag to indicate we are running the browser/app
self.control_server._finished = False

@ -1096,7 +1133,7 @@ class RaptorAndroid(Raptor):

# in debug mode, and running locally, leave the browser running
if self.debug_mode and self.config['run_local']:
self.log.info("* debug-mode enabled - please shutdown the browser manually...")
LOG.info("* debug-mode enabled - please shutdown the browser manually...")
self.runner.wait(timeout=None)

def check_for_crashes(self):
@ -1112,18 +1149,18 @@ class RaptorAndroid(Raptor):
dump_dir = tempfile.mkdtemp()
remote_dir = posixpath.join(self.remote_profile, 'minidumps')
if not self.device.is_dir(remote_dir):
self.log.error("No crash directory (%s) found on remote device" % remote_dir)
LOG.error("No crash directory (%s) found on remote device" % remote_dir)
return
self.device.pull(remote_dir, dump_dir)
mozcrash.log_crashes(self.log, dump_dir, self.config['symbols_path'])
mozcrash.log_crashes(LOG, dump_dir, self.config['symbols_path'])
finally:
try:
shutil.rmtree(dump_dir)
except Exception:
self.log.warning("unable to remove directory: %s" % dump_dir)
LOG.warning("unable to remove directory: %s" % dump_dir)

def clean_up(self):
self.log.info("removing test folder for raptor: %s" % self.remote_test_root)
LOG.info("removing test folder for raptor: %s" % self.remote_test_root)
self.device.rm(self.remote_test_root, force=True, recursive=True)

super(RaptorAndroid, self).clean_up()
@ -1132,7 +1169,6 @@ class RaptorAndroid(Raptor):
def main(args=sys.argv[1:]):
args = parse_args()
commandline.setup_logging('raptor', args, {'tbpl': sys.stdout})
LOG = get_default_logger(component='raptor-main')

LOG.info("raptor-start")

@ -1172,6 +1208,7 @@ def main(args=sys.argv[1:]):
symbols_path=args.symbols_path,
host=args.host,
power_test=args.power_test,
cpu_test=args.cpu_test,
memory_test=args.memory_test,
is_release_build=args.is_release_build,
debug_mode=args.debug_mode,
@ -1179,6 +1216,7 @@ def main(args=sys.argv[1:]):
activity=args.activity,
intent=args.intent,
interrupt_handler=SignalHandler(),
enable_webrender=args.enable_webrender,
)

success = raptor.run_tests(raptor_test_list, raptor_test_names)
@ -6,10 +6,10 @@
# received from the raptor control server
from __future__ import absolute_import

from mozlog import get_proxy_logger
from logger.logger import RaptorLogger
from output import Output

LOG = get_proxy_logger(component='results-handler')
LOG = RaptorLogger(component='raptor-results-handler')


class RaptorResultsHandler():
@ -31,6 +31,18 @@ playback_pageset_manifest = mitmproxy-recordings-raptor-yahoo-news.manifest
playback_recordings = yahoo-news.mp
measure = fnbpaint, fcp, dcf, loadtime

[raptor-tp6-netflix-firefox]
apps = firefox
test_url = https://www.netflix.com/title/80117263
playback_binary_manifest = mitmproxy-rel-bin-4.0.4-{platform}.manifest
playback_pageset_manifest = mitm4-linux-firefox-netflix.manifest
playback_recordings = netflix.mp
playback_version = 4.0.4
playback_upstream_cert = false
preferences = {"media.autoplay.default": 1,
"media.eme.enabled": true}
measure = fnbpaint, fcp, dcf, loadtime

[raptor-tp6-reddit-chrome]
apps = chrome
test_url = https://www.reddit.com/r/technology/comments/9sqwyh/we_posed_as_100_senators_to_run_ads_on_facebook/
@ -44,6 +56,16 @@ playback_pageset_manifest = mitmproxy-recordings-raptor-yahoo-news.manifest
playback_recordings = yahoo-news.mp
measure = fcp, loadtime

[raptor-tp6-netflix-chrome]
apps = chrome
test_url = https://www.netflix.com/title/80117263
playback_binary_manifest = mitmproxy-rel-bin-4.0.4-{platform}.manifest
playback_pageset_manifest = mitm4-linux-firefox-netflix.manifest
playback_recordings = netflix.mp
playback_version = 4.0.4
playback_upstream_cert = False
measure = fcp, loadtime

[raptor-tp6-reddit-chromium]
apps = chromium
test_url = https://www.reddit.com/r/technology/comments/9sqwyh/we_posed_as_100_senators_to_run_ads_on_facebook/
@ -56,3 +78,13 @@ test_url = https://www.yahoo.com/lifestyle/police-respond-noise-complaint-end-pl
playback_pageset_manifest = mitmproxy-recordings-raptor-yahoo-news.manifest
playback_recordings = yahoo-news.mp
measure = fcp, loadtime

[raptor-tp6-netflix-chromium]
apps = chromium
test_url = https://www.netflix.com/title/80117263
playback_binary_manifest = mitmproxy-rel-bin-4.0.4-{platform}.manifest
playback_pageset_manifest = mitm4-linux-firefox-netflix.manifest
playback_recordings = netflix.mp
playback_version = 4.0.4
playback_upstream_cert = False
measure = fcp, loadtime
@ -22,8 +22,6 @@ lower_is_better = true
unit = score
subtest_lower_is_better = true
subtest_unit = score
# TODO: Allow the host / port option in the manifest (Bug 1547932)
preferences = {"network.proxy.type": 0}

[raptor-youtube-playback-firefox]
apps = firefox
@ -10,9 +10,9 @@ import subprocess
import sys
import time

from mozlog import get_proxy_logger, get_default_logger
from logger.logger import RaptorLogger

LOG = get_proxy_logger(component="raptor-utils")
LOG = RaptorLogger(component='raptor-utils')
here = os.path.dirname(os.path.realpath(__file__))

external_tools_path = os.environ.get('EXTERNALTOOLSPATH', None)
@ -50,30 +50,31 @@ def transform_platform(str_to_transform, config_platform, config_processor=None)

def view_gecko_profile(ffox_bin):
# automatically load the latest talos gecko-profile archive in profiler.firefox.com
LOG = get_default_logger(component='raptor-view-gecko-profile')
LOG_GECKO = RaptorLogger(component='raptor-view-gecko-profile')

if sys.platform.startswith('win') and not ffox_bin.endswith(".exe"):
ffox_bin = ffox_bin + ".exe"

if not os.path.exists(ffox_bin):
LOG.info("unable to find Firefox bin, cannot launch view-gecko-profile")
LOG_GECKO.info("unable to find Firefox bin, cannot launch view-gecko-profile")
return

profile_zip = os.environ.get('RAPTOR_LATEST_GECKO_PROFILE_ARCHIVE', None)
if profile_zip is None or not os.path.exists(profile_zip):
LOG.info("No local talos gecko profiles were found so not launching profiler.firefox.com")
LOG_GECKO.info("No local talos gecko profiles were found so not "
"launching profiler.firefox.com")
return

# need the view-gecko-profile tool, it's in repo/testing/tools
repo_dir = os.environ.get('MOZ_DEVELOPER_REPO_DIR', None)
if repo_dir is None:
LOG.info("unable to find MOZ_DEVELOPER_REPO_DIR, can't launch view-gecko-profile")
LOG_GECKO.info("unable to find MOZ_DEVELOPER_REPO_DIR, can't launch view-gecko-profile")
return

view_gp = os.path.join(repo_dir, 'testing', 'tools',
'view_gecko_profile', 'view_gecko_profile.py')
if not os.path.exists(view_gp):
LOG.info("unable to find the view-gecko-profile tool, cannot launch it")
LOG_GECKO.info("unable to find the view-gecko-profile tool, cannot launch it")
return

command = ['python',
@ -81,8 +82,8 @@ def view_gecko_profile(ffox_bin):
'-b', ffox_bin,
'-p', profile_zip]

LOG.info('Auto-loading this profile in perfhtml.io: %s' % profile_zip)
LOG.info(command)
LOG_GECKO.info('Auto-loading this profile in perfhtml.io: %s' % profile_zip)
LOG_GECKO.info(command)

# if the view-gecko-profile tool fails to launch for some reason, we don't
# want to crash talos! just dump error and finish up talos as usual
@ -92,12 +93,12 @@ def view_gecko_profile(ffox_bin):
stderr=subprocess.PIPE)
# that will leave it running in own instance and let talos finish up
except Exception as e:
LOG.info("failed to launch view-gecko-profile tool, exception: %s" % e)
LOG_GECKO.info("failed to launch view-gecko-profile tool, exception: %s" % e)
return

time.sleep(5)
ret = view_profile.poll()
if ret is None:
LOG.info("view-gecko-profile successfully started as pid %d" % view_profile.pid)
LOG_GECKO.info("view-gecko-profile successfully started as pid %d" % view_profile.pid)
else:
LOG.error('view-gecko-profile process failed to start, poll returned: %s' % ret)
LOG_GECKO.error('view-gecko-profile process failed to start, poll returned: %s' % ret)
testing/raptor/test/files/top-info.txt (new file, 41 lines)
@ -0,0 +1,41 @@
Tasks: 142 total, 1 running, 140 sleeping, 0 stopped, 1 zombie
Mem: 1548824k total, 1234756k used, 314068k free, 37080k buffers
Swap: 0k total, 0k used, 0k free, 552360k cached
200%cpu 122%user 9%nice 50%sys 13%idle 0%iow 0%irq 6%sirq 0%host
PID USER [%CPU]%CPU %MEM TIME+ ARGS
17504 u0_a83 93.7 93.7 14.2 0:12.12 org.mozilla.geckoview_example
17529 u0_a83 43.7 43.7 19.3 0:11.80 org.mozilla.geckoview_example:tab
7030 u0_a54 28.1 28.1 5.6 0:05.47 com.google.android.tts
1598 root 9.3 9.3 0.1 0:13.73 dhcpclient -i eth0
1667 system 6.2 6.2 9.6 16:10.78 system_server
1400 system 6.2 6.2 0.2 8:15.20 android.hardware.sensors@1.0-service
17729 shell 3.1 3.1 0.1 0:00.02 top -O %CPU -n 1
1411 system 3.1 3.1 0.7 23:06.11 surfaceflinger
17497 shell 0.0 0.0 0.1 0:00.01 sh -
17321 root 0.0 0.0 0.0 0:00.13 [kworker/0:1]
17320 root 0.0 0.0 0.0 0:00.15 [kworker/u4:1]
17306 root 0.0 0.0 0.0 0:00.21 [kworker/u5:1]
16545 root 0.0 0.0 0.0 0:00.17 [kworker/0:0]
16543 root 0.0 0.0 0.0 0:00.15 [kworker/u4:2]
16411 root 0.0 0.0 0.0 0:00.41 [kworker/u5:2]
15827 root 0.0 0.0 0.0 0:00.04 [kworker/1:2]
14998 root 0.0 0.0 0.0 0:00.03 [kworker/1:1]
14996 root 0.0 0.0 0.0 0:00.38 [kworker/0:2]
14790 root 0.0 0.0 0.0 0:01.04 [kworker/u5:0]
14167 root 0.0 0.0 0.0 0:01.32 [kworker/u4:0]
11922 u0_a50 0.0 0.0 6.9 0:00.80 com.google.android.apps.docs
11906 u0_a67 0.0 0.0 5.0 0:00.25 com.google.android.apps.photos
11887 u0_a11 0.0 0.0 4.3 0:00.25 com.android.documentsui
11864 u0_a6 0.0 0.0 3.3 0:00.19 com.android.defcontainer
10866 u0_a15 0.0 0.0 3.3 0:00.04 com.google.android.partnersetup
8956 u0_a1 0.0 0.0 3.7 0:00.40 com.android.providers.calendar
8070 u0_a10 0.0 0.0 6.7 0:01.21 com.google.android.gms.unstable
6638 u0_a10 0.0 0.0 7.4 0:12.89 com.google.android.gms
2291 u0_a30 0.0 0.0 9.0 5:45.93 com.google.android.googlequicksearchbox:search
2230 u0_a10 0.0 0.0 3.9 0:02.00 com.google.process.gapps
2213 u0_a22 0.0 0.0 7.2 4:12.95 com.google.android.apps.nexuslauncher
2195 u0_a30 0.0 0.0 4.1 0:00.37 com.google.android.googlequicksearchbox:interactor
2163 u0_a10 0.0 0.0 8.2 1:49.32 com.google.android.gms.persistent
1882 radio 0.0 0.0 5.1 0:53.61 com.android.phone
1875 wifi 0.0 0.0 0.4 0:02.25 wpa_supplicant -Dnl80211 -iwlan0 -c/vendor/etc/wifi/wpa_supplicant.conf -g@android:wpa_wla+
1828 webview_zyg+ 0.0 0.0 3.0 0:00.45 webview_zygote32
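This fixture feeds test_cpu.py below, which expects a browser_cpu_usage of 93.7 for org.mozilla.geckoview_example. A minimal sketch (not the shipped parser) of extracting that figure from the `top -O %CPU -n 1` output above, run next to the fixture:

def browser_cpu_usage(top_output, app_name):
    for line in top_output.splitlines():
        if line.strip().endswith(app_name):
            # columns: PID USER [%CPU] %CPU %MEM TIME+ ARGS
            return float(line.split()[2])
    return 0.0

with open('top-info.txt') as f:
    print(browser_cpu_usage(f.read(), 'org.mozilla.geckoview_example'))  # 93.7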
@ -9,3 +9,4 @@ skip-if = python == 3
[test_playback.py]
[test_print_tests.py]
[test_raptor.py]
[test_cpu.py]
@ -17,7 +17,9 @@ def test_verify_options(filedir):
page_timeout=60000,
debug='True',
power_test=False,
memory_test=False)
cpu_test=False,
memory_test=False,
enable_webrender=False)
parser = ArgumentParser()

with pytest.raises(SystemExit):
@ -34,7 +36,9 @@ def test_verify_options(filedir):
is_release_build=False,
host='sophie',
power_test=False,
memory_test=False)
cpu_test=False,
memory_test=False,
enable_webrender=False)
verify_options(parser, args) # assert no exception

args = Namespace(app='refbrow',
@ -45,7 +49,9 @@ def test_verify_options(filedir):
is_release_build=False,
host='sophie',
power_test=False,
memory_test=False)
cpu_test=False,
memory_test=False,
enable_webrender=False)
verify_options(parser, args) # assert no exception

args = Namespace(app='fenix',
@ -56,7 +62,22 @@ def test_verify_options(filedir):
is_release_build=False,
host='sophie',
power_test=False,
memory_test=False)
cpu_test=False,
memory_test=False,
enable_webrender=False)
verify_options(parser, args) # assert no exception

args = Namespace(app='geckoview',
binary='org.mozilla.geckoview_example',
activity='org.mozilla.geckoview_example.GeckoViewActivity',
intent='android.intent.action.MAIN',
gecko_profile='False',
is_release_build=False,
host='sophie',
power_test=False,
cpu_test=True,
memory_test=False,
enable_webrender=False)
verify_options(parser, args) # assert no exception

args = Namespace(app='refbrow',
@ -67,7 +88,9 @@ def test_verify_options(filedir):
is_release_build=False,
host='sophie',
power_test=False,
memory_test=False)
cpu_test=False,
memory_test=False,
enable_webrender=False)
parser = ArgumentParser()

verify_options(parser, args) # also will work as uses default activity
testing/raptor/test/test_cpu.py (new file, 125 lines)
@ -0,0 +1,125 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, unicode_literals

import mozunit
import os
import mock

from raptor import cpu
from raptor.raptor import RaptorAndroid


def test_no_device():
raptor = RaptorAndroid('geckoview', 'org.mozilla.org.mozilla.geckoview_example', cpu_test=True)
raptor.device = None
resp = cpu.generate_android_cpu_profile(raptor, 'no_control_server_device')

assert resp is None


def test_usage_with_invalid_data_returns_zero():
with mock.patch('mozdevice.adb.ADBDevice') as device:
with mock.patch('raptor.raptor.RaptorControlServer') as control_server:
# Create a device that returns invalid data
device.shell_output.return_value = 'geckoview'
device.version = 8
device._verbose = True

# Create a control server
control_server.cpu_test = True
control_server.device = device
raptor = RaptorAndroid('geckoview', 'org.mozilla.geckoview_example', cpu_test=True)
raptor.config['cpu_test'] = True
raptor.control_server = control_server
raptor.device = device

# Verify the call to submit data was made
cpuinfo_data = {
'type': 'cpu',
'test': 'usage_with_invalid_data_returns_zero',
'unit': '%',
'values': {
'browser_cpu_usage': float(0)
}
}
cpu.generate_android_cpu_profile(
raptor,
"usage_with_invalid_data_returns_zero")
control_server.submit_supporting_data.assert_called_once_with(cpuinfo_data)


def test_usage_with_output():
with mock.patch('mozdevice.adb.ADBDevice') as device:
with mock.patch('raptor.raptor.RaptorControlServer') as control_server:
# Override the shell output with sample CPU usage details
filepath = os.path.abspath(os.path.dirname(__file__)) + '/files/'
f = open(filepath + 'top-info.txt', 'r')
device.shell_output.return_value = f.read()
device._verbose = True
device.version = 8

# Create a control server
control_server.cpu_test = True
control_server.test_name = 'cpuunittest'
control_server.device = device
control_server.app_name = 'org.mozilla.geckoview_example'
raptor = RaptorAndroid('geckoview', 'org.mozilla.geckoview_example', cpu_test=True)
raptor.device = device
raptor.config['cpu_test'] = True
raptor.control_server = control_server

# Verify the response contains our expected CPU % of 93.7
cpuinfo_data = {
u'type': u'cpu',
u'test': u'usage_with_integer_cpu_info_output',
u'unit': u'%',
u'values': {
u'browser_cpu_usage': float(93.7)
}
}
cpu.generate_android_cpu_profile(
raptor,
"usage_with_integer_cpu_info_output")
control_server.submit_supporting_data.assert_called_once_with(cpuinfo_data)


def test_usage_with_fallback():
with mock.patch('mozdevice.adb.ADBDevice') as device:
with mock.patch('raptor.raptor.RaptorControlServer') as control_server:
# We set the version to be less than Android 8
device.version = 7
device._verbose = True

# Return what our shell call to dumpsys would give us
shell_output = ' 34% 14781/org.mozilla.geckoview_example: 26% user + 7.5% kernel'
device.shell_output.return_value = shell_output

# Create a control server
control_server.cpu_test = True
control_server.test_name = 'cpuunittest'
control_server.device = device
control_server.app_name = 'org.mozilla.geckoview_example'
raptor = RaptorAndroid('geckoview', 'org.mozilla.geckoview_example', cpu_test=True)
raptor.device = device
raptor.config['cpu_test'] = True
raptor.control_server = control_server

# Verify the response contains our expected CPU % of 34
cpuinfo_data = {
u'type': u'cpu',
u'test': u'usage_with_fallback',
u'unit': u'%',
u'values': {
u'browser_cpu_usage': float(34)
}
}
cpu.generate_android_cpu_profile(
raptor,
"usage_with_fallback")
control_server.submit_supporting_data.assert_called_once_with(cpuinfo_data)


if __name__ == '__main__':
mozunit.main()
@ -1,6 +1,7 @@
from __future__ import absolute_import, unicode_literals

import os
import time

import mozinfo
import mozunit
@ -27,17 +28,24 @@ def test_get_playback(get_binary):
return
config['obj_path'] = os.path.dirname(get_binary('firefox'))
config['playback_tool'] = 'mitmproxy'
config['playback_binary_manifest'] = 'mitmproxy-rel-bin-osx.manifest'
config['playback_pageset_manifest'] = 'mitmproxy-recordings-raptor-tp6-1.manifest'
config['playback_version'] = '4.0.4'
config['playback_upstream_cert'] = 'false'
config['playback_binary_manifest'] = 'mitmproxy-rel-bin-4.0.4-{platform}.manifest'
config['playback_pageset_manifest'] = os.path.join(
os.path.dirname(os.path.abspath(os.path.dirname(__file__))), "raptor", "playback",
'mitm4-linux-firefox-amazon.manifest')
config['playback_recordings'] = 'amazon.mp'
config['binary'] = get_binary('firefox')
config['run_local'] = run_local
config['app'] = 'firefox'
config['host'] = 'example.com'
config['host'] = 'https://www.amazon.com/s?k=laptop&ref=nb_sb_noss_1'

playback = get_playback(config)
playback.config['playback_files'] = [os.path.join(playback.mozproxy_dir,
config['playback_recordings'])]
assert isinstance(playback, MitmproxyDesktop)
playback.start()
time.sleep(1)
playback.stop()
@ -34,6 +34,7 @@
"*://*.linkedin.com/*",
"*://*.live.com/*",
"*://*.microsoft.com/*",
"*://*.netflix.com/*",
"*://*.paypal.com/*",
"*://*.pinterest.com/*",
"*://*.reddit.com/*",
@ -105,9 +105,7 @@ var Profiler;
        profiler_entries,
        profiler_interval,
        ["js", "leaf", "stackwalk", "threads"],
        4,
        profiler_threadsArray,
        profiler_threadsArray.length
        profiler_threadsArray
      );
      if (_profiler.PauseSampling) {
        _profiler.PauseSampling();
@ -104,9 +104,7 @@ var Profiler;
        profiler_entries,
        profiler_interval,
        ["js", "leaf", "stackwalk", "threads"],
        4,
        profiler_threadsArray,
        profiler_threadsArray.length
        profiler_threadsArray
      );
      if (_profiler.PauseSampling) {
        _profiler.PauseSampling();
@ -120,9 +120,7 @@ TalosPowersService.prototype = {
      data.entries,
      data.interval,
      ["js", "leaf", "stackwalk", "threads"],
      4,
      data.threadsArray,
      data.threadsArray.length
      data.threadsArray
    );

    Services.profiler.PauseSampling();
@ -178,10 +178,8 @@ function selectSource(dbg, url) {
  return waitForState(
    dbg,
    state => {
      const { source, content } = dbg.selectors.getSelectedSourceWithContent(
        state
      );
      if (!content) {
      const source = dbg.selectors.getSelectedSourceWithContent(state);
      if (!source || !source.content) {
        return false;
      }
@ -168,7 +168,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -169,7 +169,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -167,7 +167,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -167,7 +167,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -167,7 +167,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -170,7 +170,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -48,7 +48,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -78,7 +78,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -105,7 +105,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -77,7 +77,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -54,7 +54,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -54,7 +54,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -41,7 +41,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -41,7 +41,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -65,7 +65,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -65,7 +65,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -63,7 +63,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -64,7 +64,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -65,7 +65,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
@ -65,7 +65,7 @@ endTest();
</head>
<body>
<div class="head">
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="http://www.w3.org/Icons/w3c_home" width=72></a>
<p><a href="http://www.w3.org/"><img height=48 alt=W3C src="w3c_home.png" width=72></a>

<h1 id="title">Selectors</h1>
BIN testing/talos/talos/tests/dromaeo/tests/w3c_home.png Normal file
Binary file not shown.
After Width: | Height: | Size: 1.9 KiB
@ -104,9 +104,7 @@ var Profiler;
        profiler_entries,
        profiler_interval,
        ["js", "leaf", "stackwalk", "threads"],
        4,
        profiler_threadsArray,
        profiler_threadsArray.length
        profiler_threadsArray
      );
      if (_profiler.PauseSampling) {
        _profiler.PauseSampling();
@ -104,9 +104,7 @@ var Profiler;
        profiler_entries,
        profiler_interval,
        ["js", "leaf", "stackwalk", "threads"],
        4,
        profiler_threadsArray,
        profiler_threadsArray.length
        profiler_threadsArray
      );
      if (_profiler.PauseSampling) {
        _profiler.PauseSampling();
@ -1,9 +1,10 @@
[
  {
    "filename": "tp5n.zip",
    "size": 81753814,
    "digest": "fa74bb06abc4c097ab2cd013f1a7b5f629cb4b2943244aa1547a2d37eb5e4622671174baa22cee194ab3550870aa5d9f1443e61506519c0875f9ee0a1ad0d737",
    "size": 82228254,
    "visibility": "public",
    "digest": "992153bcc549ce0c5aae9e422f3d65ff2f8f8162ec713b13d77719d6fdfff788213ac898dfa120d69d142aeb308444310545e8a5fe46fdc73cd5dbb1c115e9f0",
    "algorithm": "sha512",
    "filename": "tp5n.zip",
    "unpack": false
  }
]
@ -1,9 +0,0 @@
[idb-explicit-commit-throw.any.worker.html]
  [Any errors in callbacks that run after an explicit commit will not stop the commit from being processed.]
    expected: FAIL


[idb-explicit-commit-throw.any.html]
  [Any errors in callbacks that run after an explicit commit will not stop the commit from being processed.]
    expected: FAIL
@ -1,69 +0,0 @@
[idb-explicit-commit.any.worker.html]
  [Calling commit on a committed transaction throws.]
    expected: FAIL

  [Explicitly committed data can be read back out.]
    expected: FAIL

  [Calling txn.commit() when txn is inactive should throw.]
    expected: FAIL

  [commit() on a version change transaction does not cause errors.]
    expected: FAIL

  [Puts issued after commit are not fulfilled.]
    expected: FAIL

  [Calling abort on a committed transaction throws and does not prevent persisting the data.]
    expected: FAIL

  [A committed transaction becomes inactive immediately.]
    expected: FAIL

  [A committed transaction is inactive in future request callbacks.]
    expected: FAIL

  [Transactions that explicitly commit and have errors should abort.]
    expected: FAIL

  [Transactions with same scope should stay in program order, even if one calls commit.]
    expected: FAIL

  [Transactions that handle all errors properly should behave as expected when an explicit commit is called in an onerror handler.]
    expected: FAIL


[idb-explicit-commit.any.html]
  [Calling commit on a committed transaction throws.]
    expected: FAIL

  [Explicitly committed data can be read back out.]
    expected: FAIL

  [Calling txn.commit() when txn is inactive should throw.]
    expected: FAIL

  [commit() on a version change transaction does not cause errors.]
    expected: FAIL

  [Puts issued after commit are not fulfilled.]
    expected: FAIL

  [Calling abort on a committed transaction throws and does not prevent persisting the data.]
    expected: FAIL

  [A committed transaction becomes inactive immediately.]
    expected: FAIL

  [A committed transaction is inactive in future request callbacks.]
    expected: FAIL

  [Transactions that explicitly commit and have errors should abort.]
    expected: FAIL

  [Transactions with same scope should stay in program order, even if one calls commit.]
    expected: FAIL

  [Transactions that handle all errors properly should behave as expected when an explicit commit is called in an onerror handler.]
    expected: FAIL
@ -0,0 +1,9 @@
[class-string-iterator-prototype-object.any.html]
  [Object.prototype.toString applied after nulling the prototype]
    expected: FAIL


[class-string-iterator-prototype-object.any.worker.html]
  [Object.prototype.toString applied after nulling the prototype]
    expected: FAIL
@ -0,0 +1,9 @@
[class-string-named-properties-object.window.html]
  expected: ERROR

  [Object.prototype.toString applied after modifying @@toStringTag]
    expected: FAIL

  [Object.prototype.toString applied after deleting @@toStringTag]
    expected: NOTRUN
@ -0,0 +1,174 @@
[compression-bad-chunks.tentative.any.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL


[compression-bad-chunks.tentative.any.worker.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL


[compression-bad-chunks.tentative.any.serviceworker.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL


[compression-bad-chunks.tentative.any.sharedworker.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL
@ -0,0 +1,78 @@
[compression-including-empty-chunk.tentative.any.sharedworker.html]
  [the result of compressing [Hello,,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,,Hello\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with gzip should be 'HelloHello']
    expected: FAIL


[compression-including-empty-chunk.tentative.any.html]
  [the result of compressing [Hello,,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,,Hello\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with gzip should be 'HelloHello']
    expected: FAIL


[compression-including-empty-chunk.tentative.any.serviceworker.html]
  [the result of compressing [Hello,,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,,Hello\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with gzip should be 'HelloHello']
    expected: FAIL


[compression-including-empty-chunk.tentative.any.worker.html]
  [the result of compressing [Hello,,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with deflate should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,,Hello\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [Hello,Hello,\] with gzip should be 'HelloHello']
    expected: FAIL

  [the result of compressing [,Hello,Hello\] with gzip should be 'HelloHello']
    expected: FAIL
@ -0,0 +1,366 @@
[compression-multiple-chunks.tentative.any.worker.html]
  [compressing 2 chunks with gzip should work]
    expected: FAIL

  [compressing 5 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with gzip should work]
    expected: FAIL

  [compressing 3 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with gzip should work]
    expected: FAIL

  [compressing 2 chunks with deflate should work]
    expected: FAIL

  [compressing 7 chunks with gzip should work]
    expected: FAIL

  [compressing 10 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with deflate should work]
    expected: FAIL

  [compressing 11 chunks with deflate should work]
    expected: FAIL

  [compressing 3 chunks with gzip should work]
    expected: FAIL

  [compressing 12 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with deflate should work]
    expected: FAIL

  [compressing 6 chunks with deflate should work]
    expected: FAIL

  [compressing 13 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with deflate should work]
    expected: FAIL

  [compressing 5 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with deflate should work]
    expected: FAIL

  [compressing 12 chunks with gzip should work]
    expected: FAIL

  [compressing 13 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with gzip should work]
    expected: FAIL

  [compressing 6 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with deflate should work]
    expected: FAIL

  [compressing 10 chunks with gzip should work]
    expected: FAIL

  [compressing 11 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with gzip should work]
    expected: FAIL

  [compressing 7 chunks with deflate should work]
    expected: FAIL


[compression-multiple-chunks.tentative.any.sharedworker.html]
  [compressing 2 chunks with gzip should work]
    expected: FAIL

  [compressing 5 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with gzip should work]
    expected: FAIL

  [compressing 3 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with gzip should work]
    expected: FAIL

  [compressing 2 chunks with deflate should work]
    expected: FAIL

  [compressing 7 chunks with gzip should work]
    expected: FAIL

  [compressing 10 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with deflate should work]
    expected: FAIL

  [compressing 11 chunks with deflate should work]
    expected: FAIL

  [compressing 3 chunks with gzip should work]
    expected: FAIL

  [compressing 12 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with deflate should work]
    expected: FAIL

  [compressing 6 chunks with deflate should work]
    expected: FAIL

  [compressing 13 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with deflate should work]
    expected: FAIL

  [compressing 5 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with deflate should work]
    expected: FAIL

  [compressing 12 chunks with gzip should work]
    expected: FAIL

  [compressing 13 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with gzip should work]
    expected: FAIL

  [compressing 6 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with deflate should work]
    expected: FAIL

  [compressing 10 chunks with gzip should work]
    expected: FAIL

  [compressing 11 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with gzip should work]
    expected: FAIL

  [compressing 7 chunks with deflate should work]
    expected: FAIL


[compression-multiple-chunks.tentative.any.html]
  [compressing 2 chunks with gzip should work]
    expected: FAIL

  [compressing 5 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with gzip should work]
    expected: FAIL

  [compressing 3 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with gzip should work]
    expected: FAIL

  [compressing 2 chunks with deflate should work]
    expected: FAIL

  [compressing 7 chunks with gzip should work]
    expected: FAIL

  [compressing 10 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with deflate should work]
    expected: FAIL

  [compressing 11 chunks with deflate should work]
    expected: FAIL

  [compressing 3 chunks with gzip should work]
    expected: FAIL

  [compressing 12 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with deflate should work]
    expected: FAIL

  [compressing 6 chunks with deflate should work]
    expected: FAIL

  [compressing 13 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with deflate should work]
    expected: FAIL

  [compressing 5 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with deflate should work]
    expected: FAIL

  [compressing 12 chunks with gzip should work]
    expected: FAIL

  [compressing 13 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with gzip should work]
    expected: FAIL

  [compressing 6 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with deflate should work]
    expected: FAIL

  [compressing 10 chunks with gzip should work]
    expected: FAIL

  [compressing 11 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with gzip should work]
    expected: FAIL

  [compressing 7 chunks with deflate should work]
    expected: FAIL


[compression-multiple-chunks.tentative.any.serviceworker.html]
  [compressing 2 chunks with gzip should work]
    expected: FAIL

  [compressing 5 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with gzip should work]
    expected: FAIL

  [compressing 3 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with gzip should work]
    expected: FAIL

  [compressing 2 chunks with deflate should work]
    expected: FAIL

  [compressing 7 chunks with gzip should work]
    expected: FAIL

  [compressing 10 chunks with deflate should work]
    expected: FAIL

  [compressing 14 chunks with deflate should work]
    expected: FAIL

  [compressing 9 chunks with deflate should work]
    expected: FAIL

  [compressing 11 chunks with deflate should work]
    expected: FAIL

  [compressing 3 chunks with gzip should work]
    expected: FAIL

  [compressing 12 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with deflate should work]
    expected: FAIL

  [compressing 6 chunks with deflate should work]
    expected: FAIL

  [compressing 13 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with deflate should work]
    expected: FAIL

  [compressing 5 chunks with gzip should work]
    expected: FAIL

  [compressing 16 chunks with deflate should work]
    expected: FAIL

  [compressing 12 chunks with gzip should work]
    expected: FAIL

  [compressing 13 chunks with deflate should work]
    expected: FAIL

  [compressing 15 chunks with gzip should work]
    expected: FAIL

  [compressing 6 chunks with gzip should work]
    expected: FAIL

  [compressing 4 chunks with deflate should work]
    expected: FAIL

  [compressing 10 chunks with gzip should work]
    expected: FAIL

  [compressing 11 chunks with gzip should work]
    expected: FAIL

  [compressing 8 chunks with gzip should work]
    expected: FAIL

  [compressing 7 chunks with deflate should work]
    expected: FAIL
@ -0,0 +1,30 @@
[compression-output-length.tentative.any.html]
  [the length of gzipped data should be shorter than that of the original data]
    expected: FAIL

  [the length of deflated data should be shorter than that of the original data]
    expected: FAIL


[compression-output-length.tentative.any.worker.html]
  [the length of gzipped data should be shorter than that of the original data]
    expected: FAIL

  [the length of deflated data should be shorter than that of the original data]
    expected: FAIL


[compression-output-length.tentative.any.serviceworker.html]
  [the length of gzipped data should be shorter than that of the original data]
    expected: FAIL

  [the length of deflated data should be shorter than that of the original data]
    expected: FAIL


[compression-output-length.tentative.any.sharedworker.html]
  [the length of gzipped data should be shorter than that of the original data]
    expected: FAIL

  [the length of deflated data should be shorter than that of the original data]
    expected: FAIL
@ -0,0 +1,90 @@
[compression-stream.tentative.any.html]
  [gzipped empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated small amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped small amount data should be reinflated back to its origin]
    expected: FAIL

  [deflated large amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped large amount data should be reinflated back to its origin]
    expected: FAIL

  [CompressionStream constructor should throw on invalid format]
    expected: FAIL


[compression-stream.tentative.any.worker.html]
  [gzipped empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated small amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped small amount data should be reinflated back to its origin]
    expected: FAIL

  [deflated large amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped large amount data should be reinflated back to its origin]
    expected: FAIL

  [CompressionStream constructor should throw on invalid format]
    expected: FAIL


[compression-stream.tentative.any.serviceworker.html]
  [gzipped empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated small amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped small amount data should be reinflated back to its origin]
    expected: FAIL

  [deflated large amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped large amount data should be reinflated back to its origin]
    expected: FAIL

  [CompressionStream constructor should throw on invalid format]
    expected: FAIL


[compression-stream.tentative.any.sharedworker.html]
  [gzipped empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated empty data should be reinflated back to its origin]
    expected: FAIL

  [deflated small amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped small amount data should be reinflated back to its origin]
    expected: FAIL

  [deflated large amount data should be reinflated back to its origin]
    expected: FAIL

  [gzipped large amount data should be reinflated back to its origin]
    expected: FAIL

  [CompressionStream constructor should throw on invalid format]
    expected: FAIL
@ -0,0 +1,222 @@
[decompression-bad-chunks.tentative.any.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL


[decompression-bad-chunks.tentative.any.worker.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL


[decompression-bad-chunks.tentative.any.serviceworker.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL


[decompression-bad-chunks.tentative.any.sharedworker.html]
  [chunk of type array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for deflate]
    expected: FAIL

  [chunk of type array should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for deflate]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for gzip]
    expected: FAIL

  [chunk of type undefined should error the stream for gzip]
    expected: FAIL

  [chunk of type SharedArrayBuffer should error the stream for gzip]
    expected: FAIL

  [chunk of type numeric should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for gzip]
    expected: FAIL

  [chunk of type object, not BufferSource should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for gzip]
    expected: FAIL

  [chunk of type invalid gzip bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type undefined should error the stream for deflate]
    expected: FAIL

  [chunk of type shared Uint8Array should error the stream for deflate]
    expected: FAIL

  [chunk of type invalid deflate bytes should error the stream for deflate]
    expected: FAIL

  [chunk of type null should error the stream for gzip]
    expected: FAIL
@ -0,0 +1,270 @@
[decompression-buffersource.tentative.any.serviceworker.html]
  [chunk of type Int8Array should work for gzip]
    expected: FAIL

  [chunk of type Float64Array should work for deflate]
    expected: FAIL

  [chunk of type ArrayBuffer should work for deflate]
    expected: FAIL

  [chunk of type Float64Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for deflate]
    expected: FAIL

  [chunk of type Int16Array should work for gzip]
    expected: FAIL

  [chunk of type Float32Array should work for deflate]
    expected: FAIL

  [chunk of type DataView should work for gzip]
    expected: FAIL

  [chunk of type Uint8ClambedArray should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for gzip]
    expected: FAIL

  [chunk of type Uint32Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for gzip]
    expected: FAIL

  [chunk of type Int16Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for deflate]
    expected: FAIL

  [chunk of type Float32Array should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for deflate]
    expected: FAIL

  [chunk of type Int8Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for gzip]
    expected: FAIL

  [chunk of type ArrayBuffer should work for gzip]
    expected: FAIL

  [chunk of type DataView should work for deflate]
    expected: FAIL

  [chunk of type Uint8ClampedArray should work for deflate]
    expected: FAIL

  [chunk of type Uint32Array should work for deflate]
    expected: FAIL


[decompression-buffersource.tentative.any.sharedworker.html]
  [chunk of type Int8Array should work for gzip]
    expected: FAIL

  [chunk of type Float64Array should work for deflate]
    expected: FAIL

  [chunk of type ArrayBuffer should work for deflate]
    expected: FAIL

  [chunk of type Float64Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for deflate]
    expected: FAIL

  [chunk of type Int16Array should work for gzip]
    expected: FAIL

  [chunk of type Float32Array should work for deflate]
    expected: FAIL

  [chunk of type DataView should work for gzip]
    expected: FAIL

  [chunk of type Uint8ClambedArray should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for gzip]
    expected: FAIL

  [chunk of type Uint32Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for gzip]
    expected: FAIL

  [chunk of type Int16Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for deflate]
    expected: FAIL

  [chunk of type Float32Array should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for deflate]
    expected: FAIL

  [chunk of type Int8Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for gzip]
    expected: FAIL

  [chunk of type ArrayBuffer should work for gzip]
    expected: FAIL

  [chunk of type DataView should work for deflate]
    expected: FAIL

  [chunk of type Uint8ClampedArray should work for deflate]
    expected: FAIL

  [chunk of type Uint32Array should work for deflate]
    expected: FAIL


[decompression-buffersource.tentative.any.html]
  [chunk of type Int8Array should work for gzip]
    expected: FAIL

  [chunk of type Float64Array should work for deflate]
    expected: FAIL

  [chunk of type ArrayBuffer should work for deflate]
    expected: FAIL

  [chunk of type Float64Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for deflate]
    expected: FAIL

  [chunk of type Int16Array should work for gzip]
    expected: FAIL

  [chunk of type Float32Array should work for deflate]
    expected: FAIL

  [chunk of type DataView should work for gzip]
    expected: FAIL

  [chunk of type Uint8ClambedArray should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for gzip]
    expected: FAIL

  [chunk of type Uint32Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for gzip]
    expected: FAIL

  [chunk of type Int16Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for deflate]
    expected: FAIL

  [chunk of type Float32Array should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for deflate]
    expected: FAIL

  [chunk of type Int8Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for gzip]
    expected: FAIL

  [chunk of type ArrayBuffer should work for gzip]
    expected: FAIL

  [chunk of type DataView should work for deflate]
    expected: FAIL

  [chunk of type Uint8ClampedArray should work for deflate]
    expected: FAIL

  [chunk of type Uint32Array should work for deflate]
    expected: FAIL


[decompression-buffersource.tentative.any.worker.html]
  [chunk of type Int8Array should work for gzip]
    expected: FAIL

  [chunk of type Float64Array should work for deflate]
    expected: FAIL

  [chunk of type ArrayBuffer should work for deflate]
    expected: FAIL

  [chunk of type Float64Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for deflate]
    expected: FAIL

  [chunk of type Int16Array should work for gzip]
    expected: FAIL

  [chunk of type Float32Array should work for deflate]
    expected: FAIL

  [chunk of type DataView should work for gzip]
    expected: FAIL

  [chunk of type Uint8ClambedArray should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for gzip]
    expected: FAIL

  [chunk of type Uint32Array should work for gzip]
    expected: FAIL

  [chunk of type Int32Array should work for gzip]
    expected: FAIL

  [chunk of type Int16Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for deflate]
    expected: FAIL

  [chunk of type Float32Array should work for gzip]
    expected: FAIL

  [chunk of type Uint8Array should work for deflate]
    expected: FAIL

  [chunk of type Int8Array should work for deflate]
    expected: FAIL

  [chunk of type Uint16Array should work for gzip]
    expected: FAIL

  [chunk of type ArrayBuffer should work for gzip]
    expected: FAIL

  [chunk of type DataView should work for deflate]
    expected: FAIL

  [chunk of type Uint8ClampedArray should work for deflate]
    expected: FAIL

  [chunk of type Uint32Array should work for deflate]
    expected: FAIL
@ -0,0 +1,42 @@
[decompression-constructor-error.tentative.any.serviceworker.html]
  [non-string input should cause the constructor to throw]
    expected: FAIL

  ["a" should cause the constructor to throw]
    expected: FAIL

  [no input should cause the constructor to throw]
    expected: FAIL


[decompression-constructor-error.tentative.any.sharedworker.html]
  [non-string input should cause the constructor to throw]
    expected: FAIL

  ["a" should cause the constructor to throw]
    expected: FAIL

  [no input should cause the constructor to throw]
    expected: FAIL


[decompression-constructor-error.tentative.any.worker.html]
  [non-string input should cause the constructor to throw]
    expected: FAIL

  ["a" should cause the constructor to throw]
    expected: FAIL

  [no input should cause the constructor to throw]
    expected: FAIL


[decompression-constructor-error.tentative.any.html]
  [non-string input should cause the constructor to throw]
    expected: FAIL

  ["a" should cause the constructor to throw]
    expected: FAIL

  [no input should cause the constructor to throw]
    expected: FAIL
@ -0,0 +1,30 @@
[decompression-correct-input.tentative.any.serviceworker.html]
  [decompressing gzip input should work]
    expected: FAIL

  [decompressing deflated input should work]
    expected: FAIL


[decompression-correct-input.tentative.any.sharedworker.html]
  [decompressing gzip input should work]
    expected: FAIL

  [decompressing deflated input should work]
    expected: FAIL


[decompression-correct-input.tentative.any.worker.html]
  [decompressing gzip input should work]
    expected: FAIL

  [decompressing deflated input should work]
    expected: FAIL


[decompression-correct-input.tentative.any.html]
  [decompressing gzip input should work]
    expected: FAIL

  [decompressing deflated input should work]
    expected: FAIL
@ -0,0 +1,152 @@
[decompression-corrupt-input.any.html]
  [format 'gzip' field OS should be success for 128]
    expected: FAIL

  [the unchanged input for 'deflate' should decompress successfully]
    expected: FAIL

  [format 'gzip' field FLG should be error for 2]
    expected: FAIL

  [trailing junk for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field FLG should be success for 218]
    expected: FAIL

  [the unchanged input for 'gzip' should decompress successfully]
    expected: FAIL

  [format 'gzip' field DATA should be error for 3]
    expected: FAIL

  [format 'deflate' field CMF should be error for 0]
    expected: FAIL

  [format 'gzip' field XFL should be success for 255]
    expected: FAIL

  [format 'gzip' field DATA should be success for 4]
    expected: FAIL

  [format 'gzip' field MTIME should be success for 255]
    expected: FAIL

  [truncating the input for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field ADLER should be error for 255]
    expected: FAIL

  [format 'deflate' field FLG should be error for 157]
    expected: FAIL

  [format 'deflate' field DATA should be error for 5]
    expected: FAIL

  [format 'gzip' field CM should be error for 0]
    expected: FAIL

  [format 'gzip' field ID should be error for 255]
    expected: FAIL

  [format 'gzip' field FLG should be success for 1]
    expected: FAIL

  [format 'deflate' field FLG should be success for 94]
    expected: FAIL

  [format 'deflate' field FLG should be success for 1]
    expected: FAIL

  [format 'gzip' field ISIZE should be error for 1]
    expected: FAIL

  [trailing junk for 'gzip' should give an error]
    expected: FAIL

  [truncating the input for 'gzip' should give an error]
    expected: FAIL

  [format 'gzip' field CRC should be error for 0]
    expected: FAIL

  [format 'deflate' field DATA should be success for 4]
    expected: FAIL


[decompression-corrupt-input.any.worker.html]
  [format 'gzip' field OS should be success for 128]
    expected: FAIL

  [the unchanged input for 'deflate' should decompress successfully]
    expected: FAIL

  [format 'gzip' field FLG should be error for 2]
    expected: FAIL

  [trailing junk for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field FLG should be success for 218]
    expected: FAIL

  [the unchanged input for 'gzip' should decompress successfully]
    expected: FAIL

  [format 'gzip' field DATA should be error for 3]
    expected: FAIL

  [format 'deflate' field CMF should be error for 0]
    expected: FAIL

  [format 'gzip' field XFL should be success for 255]
    expected: FAIL

  [format 'gzip' field DATA should be success for 4]
    expected: FAIL

  [format 'gzip' field MTIME should be success for 255]
    expected: FAIL

  [truncating the input for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field ADLER should be error for 255]
    expected: FAIL

  [format 'deflate' field FLG should be error for 157]
    expected: FAIL

  [format 'deflate' field DATA should be error for 5]
    expected: FAIL

  [format 'gzip' field CM should be error for 0]
    expected: FAIL

  [format 'gzip' field ID should be error for 255]
    expected: FAIL

  [format 'gzip' field FLG should be success for 1]
    expected: FAIL

  [format 'deflate' field FLG should be success for 94]
    expected: FAIL

  [format 'deflate' field FLG should be success for 1]
    expected: FAIL

  [format 'gzip' field ISIZE should be error for 1]
    expected: FAIL

  [trailing junk for 'gzip' should give an error]
    expected: FAIL

  [truncating the input for 'gzip' should give an error]
    expected: FAIL

  [format 'gzip' field CRC should be error for 0]
    expected: FAIL

  [format 'deflate' field DATA should be success for 4]
    expected: FAIL
@@ -0,0 +1,152 @@
[decompression-corrupt-input.tentative.any.html]
  [format 'gzip' field OS should be success for 128]
    expected: FAIL

  [the unchanged input for 'deflate' should decompress successfully]
    expected: FAIL

  [format 'gzip' field FLG should be error for 2]
    expected: FAIL

  [trailing junk for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field FLG should be success for 218]
    expected: FAIL

  [the unchanged input for 'gzip' should decompress successfully]
    expected: FAIL

  [format 'gzip' field DATA should be error for 3]
    expected: FAIL

  [format 'deflate' field CMF should be error for 0]
    expected: FAIL

  [format 'gzip' field XFL should be success for 255]
    expected: FAIL

  [format 'gzip' field DATA should be success for 4]
    expected: FAIL

  [format 'gzip' field MTIME should be success for 255]
    expected: FAIL

  [truncating the input for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field ADLER should be error for 255]
    expected: FAIL

  [format 'deflate' field FLG should be error for 157]
    expected: FAIL

  [format 'deflate' field DATA should be error for 5]
    expected: FAIL

  [format 'gzip' field CM should be error for 0]
    expected: FAIL

  [format 'gzip' field ID should be error for 255]
    expected: FAIL

  [format 'gzip' field FLG should be success for 1]
    expected: FAIL

  [format 'deflate' field FLG should be success for 94]
    expected: FAIL

  [format 'deflate' field FLG should be success for 1]
    expected: FAIL

  [format 'gzip' field ISIZE should be error for 1]
    expected: FAIL

  [trailing junk for 'gzip' should give an error]
    expected: FAIL

  [truncating the input for 'gzip' should give an error]
    expected: FAIL

  [format 'gzip' field CRC should be error for 0]
    expected: FAIL

  [format 'deflate' field DATA should be success for 4]
    expected: FAIL


[decompression-corrupt-input.tentative.any.worker.html]
  [format 'gzip' field OS should be success for 128]
    expected: FAIL

  [the unchanged input for 'deflate' should decompress successfully]
    expected: FAIL

  [format 'gzip' field FLG should be error for 2]
    expected: FAIL

  [trailing junk for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field FLG should be success for 218]
    expected: FAIL

  [the unchanged input for 'gzip' should decompress successfully]
    expected: FAIL

  [format 'gzip' field DATA should be error for 3]
    expected: FAIL

  [format 'deflate' field CMF should be error for 0]
    expected: FAIL

  [format 'gzip' field XFL should be success for 255]
    expected: FAIL

  [format 'gzip' field DATA should be success for 4]
    expected: FAIL

  [format 'gzip' field MTIME should be success for 255]
    expected: FAIL

  [truncating the input for 'deflate' should give an error]
    expected: FAIL

  [format 'deflate' field ADLER should be error for 255]
    expected: FAIL

  [format 'deflate' field FLG should be error for 157]
    expected: FAIL

  [format 'deflate' field DATA should be error for 5]
    expected: FAIL

  [format 'gzip' field CM should be error for 0]
    expected: FAIL

  [format 'gzip' field ID should be error for 255]
    expected: FAIL

  [format 'gzip' field FLG should be success for 1]
    expected: FAIL

  [format 'deflate' field FLG should be success for 94]
    expected: FAIL

  [format 'deflate' field FLG should be success for 1]
    expected: FAIL

  [format 'gzip' field ISIZE should be error for 1]
    expected: FAIL

  [trailing junk for 'gzip' should give an error]
    expected: FAIL

  [truncating the input for 'gzip' should give an error]
    expected: FAIL

  [format 'gzip' field CRC should be error for 0]
    expected: FAIL

  [format 'deflate' field DATA should be success for 4]
    expected: FAIL
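Both the plain and .tentative corrupt-input variants pin down which header bytes matter: each subtest takes a valid payload, overwrites a single field (gzip: ID, CM, FLG, MTIME, XFL, OS, DATA, CRC, ISIZE; deflate: CMF, FLG, DATA, ADLER), and asserts whether decompression must reject or must still succeed. A sketch of the assertion, assuming the standard API (expectError is an invented helper):

```js
// Returns how the stream reacts to (possibly corrupted) input bytes.
async function expectError(format, bytes) {
  const stream = new Blob([bytes])
    .stream()
    .pipeThrough(new DecompressionStream(format));
  try {
    await new Response(stream).arrayBuffer();
    return 'decompressed ok';
  } catch (e) {
    return 'rejected: ' + e.constructor.name;
  }
}
// e.g. the gzip magic is 0x1f 0x8b, so setting byte 0 (ID) to 255 must
// reject, while byte 9 (OS) is informational and 128 must still succeed.
```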
@@ -0,0 +1,30 @@
[decompression-empty-input.tentative.any.serviceworker.html]
  [decompressing gzip empty input should work]
    expected: FAIL

  [decompressing deflate empty input should work]
    expected: FAIL


[decompression-empty-input.tentative.any.sharedworker.html]
  [decompressing gzip empty input should work]
    expected: FAIL

  [decompressing deflate empty input should work]
    expected: FAIL


[decompression-empty-input.tentative.any.html]
  [decompressing gzip empty input should work]
    expected: FAIL

  [decompressing deflate empty input should work]
    expected: FAIL


[decompression-empty-input.tentative.any.worker.html]
  [decompressing gzip empty input should work]
    expected: FAIL

  [decompressing deflate empty input should work]
    expected: FAIL
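The empty-input subtests assert that the compressed form of zero bytes decompresses back to zero bytes. A self-contained round-trip sketch under the standard API (the helper is invented, and the real tests presumably embed fixed compressed-empty payloads rather than generating them):

```js
// Compress an empty payload, decompress it again, and report the size.
async function emptyRoundTrip(format) {
  const out = new Blob([]).stream()
    .pipeThrough(new CompressionStream(format))
    .pipeThrough(new DecompressionStream(format));
  return (await new Response(out).arrayBuffer()).byteLength; // expected: 0
}
// await emptyRoundTrip('gzip');    -> 0
// await emptyRoundTrip('deflate'); -> 0
```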
@@ -0,0 +1,366 @@
[decompression-split-chunk.tentative.any.html]
  [decompressing splitted chunk into pieces of size 1 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 1 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in gzip]
    expected: FAIL


[decompression-split-chunk.tentative.any.worker.html]
  [decompressing splitted chunk into pieces of size 1 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 1 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in gzip]
    expected: FAIL


[decompression-split-chunk.tentative.any.sharedworker.html]
  [decompressing splitted chunk into pieces of size 1 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 1 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in gzip]
    expected: FAIL


[decompression-split-chunk.tentative.any.serviceworker.html]
  [decompressing splitted chunk into pieces of size 1 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 5 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 8 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 12 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 9 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 11 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 13 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 14 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 3 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 1 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 2 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 7 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 6 should work in gzip]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 15 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 4 should work in deflate]
    expected: FAIL

  [decompressing splitted chunk into pieces of size 10 should work in gzip]
    expected: FAIL
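All 120 of these subtests ("splitted" is the wording of the upstream test names) are one parameterised check: slice a single compressed payload into pieces of size 1 through 15, write the pieces sequentially, and require the reassembled output to match the original. A sketch of that loop under the standard API (the helper is invented; the suite embeds its own payloads):

```js
// Write `compressed` (a Uint8Array) into the stream `size` bytes at a time.
async function decompressInPieces(format, compressed, size) {
  const ds = new DecompressionStream(format);
  const writer = ds.writable.getWriter();
  const writing = (async () => {
    for (let i = 0; i < compressed.length; i += size) {
      await writer.write(compressed.slice(i, i + size));
    }
    await writer.close();
  })();
  const text = await new Response(ds.readable).text(); // reads concurrently
  await writing;
  return text; // expected: identical for every chunk size
}
```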
@@ -0,0 +1,30 @@
[decompression-uint8array-output.tentative.any.worker.html]
  [decompressing gzip output should give Uint8Array chunks]
    expected: FAIL

  [decompressing deflated output should give Uint8Array chunks]
    expected: FAIL


[decompression-uint8array-output.tentative.any.serviceworker.html]
  [decompressing gzip output should give Uint8Array chunks]
    expected: FAIL

  [decompressing deflated output should give Uint8Array chunks]
    expected: FAIL


[decompression-uint8array-output.tentative.any.html]
  [decompressing gzip output should give Uint8Array chunks]
    expected: FAIL

  [decompressing deflated output should give Uint8Array chunks]
    expected: FAIL


[decompression-uint8array-output.tentative.any.sharedworker.html]
  [decompressing gzip output should give Uint8Array chunks]
    expected: FAIL

  [decompressing deflated output should give Uint8Array chunks]
    expected: FAIL
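These subtests constrain the output type rather than the output bytes: whatever chunking the implementation picks, every chunk read from the readable side must be a Uint8Array. A sketch with an invented helper:

```js
// Collect the constructor name of every chunk the stream emits.
async function outputChunkTypes(format, compressed) {
  const ds = new DecompressionStream(format);
  new Blob([compressed]).stream().pipeTo(ds.writable); // drained below
  const reader = ds.readable.getReader();
  const types = [];
  for (let r = await reader.read(); !r.done; r = await reader.read()) {
    types.push(r.value.constructor.name); // expected: "Uint8Array" only
  }
  return types;
}
```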
@@ -1,4 +1,6 @@
[idlharness.window.html]
  expected:
    if debug and (os == "win") and not webrender and (processor == "x86_64"): ["OK", "CRASH"]

  [HTMLIFrameElement interface: attribute csp]
    expected: FAIL