0

Move to mozlog 7.1.0 in .vpython

This brings in the latest wptrunner code (normally imported via the
WPT importer), along with upgrading mozlog as necessary for recent
changes in wptrunner. This should unblock the WPT importer.

Bug: 1161212, 1161210
Change-Id: Ic89ddf17128ec454e4560e359d788e17f649464d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2614858
Reviewed-by: Dirk Pranke <dpranke@google.com>
Reviewed-by: Jason Chase <chasej@chromium.org>
Commit-Queue: Stephen McGruer <smcgruer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#841193}
This commit is contained in:
Stephen McGruer
2021-01-07 21:29:29 +00:00
committed by Chromium LUCI CQ
parent 54b704a56c
commit ce44555099
47 changed files with 1601 additions and 138 deletions

@ -317,7 +317,7 @@ wheel: <
>
wheel: <
name: "infra/python/wheels/mozlog-py2_py3"
version: "version:5.0"
version: "version:7.1.0"
>
wheel: <
name: "infra/python/wheels/mozprocess-py2_py3"

@ -45904,6 +45904,19 @@
],
{}
]
],
"one-element-transition.html": [
"51d3a968c735f78c3ba2b3f0213ec0d2a989ae15",
[
null,
[
[
"/css/css-backgrounds/animations/one-element-animation-ref.html",
"=="
]
],
{}
]
]
},
"background-334.html": [
@ -52819,6 +52832,32 @@
{}
]
],
"out-of-flow-in-multicolumn-012.html": [
"349d059b7a77f9b70046d99bc4a5e26c7c298bad",
[
null,
[
[
"/css/reference/ref-filled-green-100px-square.xht",
"=="
]
],
{}
]
],
"out-of-flow-in-multicolumn-013.html": [
"3bf06dfc1a779e3034122987a456fb33b2feb36e",
[
null,
[
[
"/css/reference/ref-filled-green-100px-square.xht",
"=="
]
],
{}
]
],
"overflowed-block-with-no-room-after-000.html": [
"084e16fb38de072fb83f92ba01302a2e404cdd97",
[
@ -120017,7 +120056,7 @@
]
],
"transform-interpolation-rotate-slerp.html": [
"ac11801bd40137c8a7aa46ecbcef407cc32f65e4",
"73d4bd8a1f8da143fac0183aca3c4acd8c15b6df",
[
null,
[
@ -120026,7 +120065,23 @@
"=="
]
],
{}
{
"fuzzy": [
[
null,
[
[
0,
160
],
[
0,
500
]
]
]
]
}
]
],
"transform-interpolation-rotate.html": [
@ -177322,7 +177377,7 @@
[]
],
"address-space.window-expected.txt": [
"ec4009fa9fc7f5e418ffe5881e4f67c8b8a0ccb7",
"d796fb9ad2812bbff17bad1693a769baf0df5b35",
[]
],
"resources": {
@ -222674,11 +222729,23 @@
"b47f0f274ef43148d3be418e2c0628a567f1c3c1",
[]
],
"sandbox-inherited-from-initiator-response-helper.html": [
"29c7f12441f7473d8f7dc5adb9046c764daa7745",
[]
],
"sandbox-inherited-from-initiator-response-helper.html.headers": [
"82e8023d0ba61851af5747ee2ccba154193d1875",
[]
],
"sandbox-javascript-window-open.html": [
"909956a54ff52153756f863c3489f4c792b5aaff",
[]
]
},
"sandbox-inherited-from-required-csp-expected.txt": [
"9e875fecf3e2fdd3aa2705d2cfc5901b539567e9",
[]
],
"sandbox-navigation-timing-iframe.tentative.html": [
"43726e7720065d9fe3c7d2ea3f3563f5f80315bb",
[]
@ -232804,6 +232871,40 @@
[]
]
},
"import-assertions": {
"empty-assertion-clause-expected.txt": [
"8139c2d954064ceb5c431cd3d8bdd1982a5a43bc",
[]
],
"empty-assertion-clause.js": [
"6913dd61dffe682063eb887a08c90242ca1b5f42",
[]
],
"empty-type-assertion.js": [
"5bb9b1ddb8e648c7165a3b29ab425fc97cb81037",
[]
],
"hello.js": [
"2f348444606435700656725dbcd9a08e8e10775f",
[]
],
"invalid-type-assertion-error-expected.txt": [
"e73d0699b1740a82751f8fb3f61807c51ec184be",
[]
],
"invalid-type-assertion.js": [
"e28c0176d5c3e444051f7b21070ac62a86f0ff0f",
[]
],
"unsupported-assertion-expected.txt": [
"aa0317aba4c060fd2bb0a0eb371b8de75a5e46e6",
[]
],
"unsupported-assertion.js": [
"45f6d60c9dae25f26640d6b79b08fc9a009fc06b",
[]
]
},
"is-module-goal.mjs": [
"b533fc2e906b4b9a0a912044b19ee523ae5eef7a",
[]
@ -235424,7 +235525,7 @@
[]
],
"OWNERS": [
"0df202b1edbe15295f368da5bdde3cdfd5687251",
"c4ad7cc6cae03b1453674f2b659eac0fb16b2ad4",
[]
],
"README.md": [
@ -237947,6 +238048,10 @@
"4fff9d9fba4c81f953826ffea010a75be626b95d",
[]
],
"randomized-breakdown.tentative.https.window.js.headers": [
"4fff9d9fba4c81f953826ffea010a75be626b95d",
[]
],
"redirect.client.tentative.https.window.js.headers": [
"4fff9d9fba4c81f953826ffea010a75be626b95d",
[]
@ -237957,7 +238062,7 @@
],
"resources": {
"common.js": [
"11b40d7d3d17ff0708c457a43d66b360b7e0bb07",
"63d5eac7c9b807b1faf67a1adc60cdfeca97c901",
[]
],
"iframe.redirect.sub.html": [
@ -240977,14 +241082,6 @@
"2c8f43716403bfd4a8bd8a7bdecaf888a2bcd697",
[]
],
"pointerevent_auxclick_is_a_pointerevent.html.ini": [
"e4f7b0ee92d0e4d3b4bdf95c83223be85ca0b0a5",
[]
],
"pointerevent_click_is_a_pointerevent.html.ini": [
"3994b407af65d66547f2820b6a72e96f99ed86a7",
[]
],
"pointerevent_coalesced_events_attributes-expected.txt": [
"e9ecdaa4bfe35a4e711c10c960d3d7124c389537",
[]
@ -245659,6 +245756,10 @@
"ff333bd97da4c6949a3142ac8789cb2e45d41b86",
[]
],
"create-blob-url-worker.js": [
"57e4882c24f7273e3469b9b60649ffa4b4f39617",
[]
],
"echo-content.py": [
"70ae4b60254cf7971cdd92dc2b1e382ef1a6196b",
[]
@ -246243,10 +246344,22 @@
"8539b40066dd91bbfaf7ef240b8104dcb2ab3b27",
[]
],
"nested-blob-url-worker-created-from-worker.html": [
"fc048e288e903e6c558c5518d133f2c2ec96223e",
[]
],
"nested-blob-url-workers.html": [
"f0eafcd3e01c3f1857eee7fced78a3a494a8afb4",
[]
],
"nested-iframe-parent.html": [
"115ab26e1221024b30313569f484942b10f6dba0",
[]
],
"nested-worker-created-from-blob-url-worker.html": [
"3fad2c9228ceb1573a09a18cba480fdd6747cdc4",
[]
],
"nested_load_worker.js": [
"ef0ed8fc704ceaa81f1eef8d4ed75fd041baae6d",
[]
@ -246387,6 +246500,10 @@
"f088ad127804297cf981e61acb1c33ea7b0620c4",
[]
],
"postmessage-fetched-text.js": [
"9fc67171d05dd28d6317e9e2ab986b0b68a7fc15",
[]
],
"postmessage-msgport-to-client-worker.js": [
"7af935f4f8fc01756980e46dcce5018ddc620e67",
[]
@ -248638,11 +248755,11 @@
},
"tools": {
"META.yml": [
"3eb893f4e65c3c0e5567688bd30725bc21eb40cb",
"bb8ed039dc39ed4105de306302398be25e037f07",
[]
],
"OWNERS": [
"1cfc2ba5907a71ed1007031053a855bde4ebeea3",
"83511f437a1dc1719d18b7b87df9f6369673a56c",
[]
],
"__init__.py": [
@ -248738,7 +248855,7 @@
[]
],
"safari-technology-preview.rb": [
"c78bcef5c454c0e023c411f09753aedb1b90df0b",
"628c5f60914cffa26da7d26d2d9a0502e1be735f",
[]
],
"system_info.yml": [
@ -248883,7 +249000,7 @@
}
},
"website_build.sh": [
"f6b26310aabce200fc0d77844f5f3ad28de0bb7f",
"fc255ae83f2219d3e32a859f0d63745ed682b436",
[]
]
},
@ -248964,7 +249081,7 @@
]
},
"localpaths.py": [
"87eafccf4d23367d765cffd9754543c72aec38a7",
"a79acb82a50c55ece58bb6040f6b9a19808c8a2a",
[]
],
"manifest": {
@ -248999,11 +249116,11 @@
[]
],
"log.py": [
"9e2ad74081e4a1b38b9c3c3ba61d49be2e8151fd",
"6551c2b5f7cc0f61a85ef5ffb4bbc466a585b1ab",
[]
],
"manifest.py": [
"14d3844d461cd7e0d7a071dd957ad2829885a3c6",
"1b4f407b9626be61fca132cd7d0124b4b3d69d7e",
[]
],
"sourcefile.py": [
@ -249019,7 +249136,7 @@
[]
],
"update.py": [
"296533a6babf176e4514bcf3f29e163fdf9a9fdf",
"a86ef7c8aa28f1f209f10b7254d1ca07cb17df51",
[]
],
"utils.py": [
@ -249092,7 +249209,7 @@
[]
],
"requirements_mypy.txt": [
"65100d70376cf614b4396dd242181394453efa49",
"d5e04b94d534cbb6491aa0a04950c56434725ff2",
[]
],
"runner": {
@ -249159,7 +249276,7 @@
[]
],
"serve.py": [
"8fdbdc6bc671090107a5e6479cf16ebda076d084",
"0a5f16ef57e4740138f14f2e5c1a5fb797af915a",
[]
],
"test_functional.py": [
@ -255247,6 +255364,96 @@
[]
]
}
},
"zipp": {
"CHANGES.rst": [
"a464a6324b7bea813db8846bd6c69fe9cdec433f",
[]
],
"LICENSE": [
"5e795a61f302ff5d425eac2281ff13c15b21ed1b",
[]
],
"PKG-INFO": [
"33ef1cf01c4b232648a70a0f2fa9733a4232b498",
[]
],
"README.rst": [
"ce128a32ba3113f118830a1fb975879b7dd1038b",
[]
],
"appveyor.yml": [
"f35aa27d684244ed59c1dedfd84d758aedab4cd2",
[]
],
"conftest.py": [
"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
[]
],
"docs": {
"conf.py": [
"41b53557fb2e372683899c51a6a5892b80bc5935",
[]
],
"history.rst": [
"8e217503ba1a68826af8e1e9db4bd16ce7a21857",
[]
],
"index.rst": [
"ff49bf9dc706d62c9e8524f346db1bdd63d026bb",
[]
]
},
"pyproject.toml": [
"3afc8c33b78baa7666d30f38ee522c84a21bfb7f",
[]
],
"setup.cfg": [
"ef4abd248a435a9f990f78401172db9f6f3650e3",
[]
],
"setup.py": [
"827e955fcd6c2af63ad1ad4cb42e41e29e62074f",
[]
],
"skeleton.md": [
"52b97f09b418894679951fb9867b5b0dc6618f1b",
[]
],
"test_zipp.py": [
"810d10bd68fc27f897dff2d411582de979962451",
[]
],
"tox.ini": [
"cb542c136c38eaf330fd2e9b3231d1010fbe1aac",
[]
],
"zipp.egg-info": {
"PKG-INFO": [
"33ef1cf01c4b232648a70a0f2fa9733a4232b498",
[]
],
"SOURCES.txt": [
"845b342cefc40058134ac715dc8fa806e40b6e95",
[]
],
"dependency_links.txt": [
"8b137891791fe96927ad78e64b0aad7bded08bdc",
[]
],
"requires.txt": [
"90bab46ac4930d35f39e82172aadf0e9a5bdd8a2",
[]
],
"top_level.txt": [
"e82f676f82a3381fa909d1e6578c7a22044fafca",
[]
]
},
"zipp.py": [
"892205834abaee2b4c1d3bb4507609de0ad1c2f6",
[]
]
}
},
"tox.ini": [
@ -255939,7 +256146,7 @@
[]
],
"requirements.txt": [
"a0b9b2df9f7dc4654f25e510498da706cff9cda8",
"1dd417465bb883a1f4802978a85c7a0732b90616",
[]
],
"requirements_android_webview.txt": [
@ -256061,11 +256268,11 @@
[]
],
"firefox.py": [
"9104fedb5f9f45b6c8e9640159315a62e93a37c7",
"b8f1c7d8286fdef935ce8dbf3e2ed513f202d04f",
[]
],
"firefox_android.py": [
"ca6ea96013b67ed548f5487520253ca61e0ae32c",
"5e9de564963f63e013a3fb63fdede1a97a1f7386",
[]
],
"ie.py": [
@ -256106,7 +256313,7 @@
[]
],
"environment.py": [
"2527d26d391d99683779392c035ceffa30fe5bc2",
"d98e1203930cab51ab7468037a0665b85126e1e5",
[]
],
"executors": {
@ -256119,7 +256326,7 @@
[]
],
"base.py": [
"51d752cdebc61976066a15c5dd9e778f9378ff00",
"539c791fbd9c9d1db4598246ff460742e514974a",
[]
],
"executorchrome.py": [
@ -256139,7 +256346,7 @@
[]
],
"executormarionette.py": [
"dc9e3e164e40251b9e0b91b900e2ae6651776447",
"e5d7683facd7880d13a7d30451230b23833c3cee",
[]
],
"executoropera.py": [
@ -256151,7 +256358,7 @@
[]
],
"executorselenium.py": [
"0228922134a704afdde5f37e4edec31dc540fc2f",
"6070007b21a7af0dd4b83cae283f853eecde576f",
[]
],
"executorservo.py": [
@ -256159,11 +256366,11 @@
[]
],
"executorservodriver.py": [
"2cd1dbf3ad06f41b9555dad64d6d0c0a86537260",
"039bcd14e2be57680089e785e60c74cddee285b4",
[]
],
"executorwebdriver.py": [
"43fc910c5151554df38d2a11608040442754d962",
"e6c5f9f1a206cb4f9f4d4fc3d2b72f5dd20e1fbc",
[]
],
"executorwebkit.py": [
@ -256269,6 +256476,10 @@
"ab8d4740f3fcc2b186a68d57758d46db47ad97ab",
[]
],
"mpcontext.py": [
"daade1054cf41689981a50b7ec5d944be1681424",
[]
],
"print_reftest_runner.html": [
"3ce18d4dd823e042e986ce94f2f858977a0ddb19",
[]
@ -256310,11 +256521,11 @@
[]
],
"testloader.py": [
"4a3fa4273654a8a86d5224ef6b0a9daed73b01d7",
"e57619b45fefda53305b43ff3001cdd4b078b47a",
[]
],
"testrunner.py": [
"e5d7042c73b30d8d6280c2e231d9d81b9d0d8975",
"8dd93418dd8f37fc220306f7a4e6d85e7fb9b55a",
[]
],
"update": {
@ -256356,7 +256567,7 @@
[]
],
"wptcommandline.py": [
"15d2322494dbe5c281cb77db7a1c0dd754e7db2a",
"bfd6491b2d66693e225d91b84846b5c0a38e1aba",
[]
],
"wptlogging.py": [
@ -256426,7 +256637,7 @@
}
},
"wptrunner.py": [
"b02805ff7e3df6645722e63fbc2d9715959c1ce6",
"3fc9e397a92a0927ac2c85ca27453754f78e0771",
[]
],
"wpttest.py": [
@ -256520,7 +256731,7 @@
]
},
"stash.py": [
"6b351847491bfee1f8e17a42127c88be97a62923",
"535355a828796196ee6be950c5ef9a0802236826",
[]
],
"utils.py": [
@ -292751,6 +292962,13 @@
{}
]
],
"content-visibility-080.html": [
"d3cea5fb83767ddfc236850097387644e0f74c8e",
[
null,
{}
]
],
"inheritance.html": [
"e1ae8164de00467a450511da9ca4bf7f138e9c71",
[
@ -346961,6 +347179,27 @@
{}
]
],
"sandbox-inherited-from-initiator-frame.html": [
"ab87fce5e0ce82bfed5ac88bcbf9890e8c39a4ce",
[
null,
{}
]
],
"sandbox-inherited-from-initiator-response.html": [
"638f1ba783985c309759cdc63ec332d1b5ad5a58",
[
null,
{}
]
],
"sandbox-inherited-from-required-csp.html": [
"d1bc4d1e04d01177943b91893156d715ec2ed15a",
[
null,
{}
]
],
"sandbox-initial-empty-document-toward-same-origin.html": [
"d1306c970322682f3979c497a5decd78218ba845",
[
@ -377539,6 +377778,29 @@
{}
]
],
"import-assertions": {
"empty-assertion-clause.html": [
"3a7c371189c9e847327ac14f49d4ee5153f35295",
[
null,
{}
]
],
"invalid-type-assertion-error.html": [
"d3399f085cede5afa124523b43ac6a301b6cb080",
[
null,
{}
]
],
"unsupported-assertion.html": [
"edda2d737a3cfa4c39f717f527ed6ad6da6b338d",
[
null,
{}
]
]
},
"json-module": {
"invalid-content-type.tentative.html": [
"e6da2db7ebc03ac7753b19dce4e9c3fc38a30014",
@ -387777,6 +388039,29 @@
}
]
],
"randomized-breakdown.tentative.https.window.js": [
"e3a65c1ba2a6a90c142b9c2da3368fc949c874b7",
[
"measure-memory/randomized-breakdown.tentative.https.window.html",
{
"script_metadata": [
[
"script",
"/common/get-host-info.sub.js"
],
[
"script",
"./resources/common.js"
],
[
"timeout",
"long"
]
],
"timeout": "long"
}
]
],
"redirect.client.tentative.https.window.js": [
"36d9a9d44cd1734572c2b3b930cec7ff0b3eeee2",
[
@ -397589,7 +397874,7 @@
]
],
"pointerevent_auxclick_is_a_pointerevent.html": [
"000d0df764e53f8d1cfa629ebfcccfb768a7a4bf",
"aac73db9eb7c5f5a7c71207e52eab30bf53f3526",
[
null,
{
@ -397652,7 +397937,7 @@
]
],
"pointerevent_click_is_a_pointerevent.html": [
"dfea148898e4eed29f25e9bf04067a6e7e251b25",
"2f5dfd4dbfdd81e45868517632a41998aed8f21d",
[
null,
{
@ -415749,6 +416034,13 @@
{}
]
],
"nested-blob-url-workers.https.html": [
"7269cbb701fabc2faa746f6ed47fba8fa9d423c8",
[
null,
{}
]
],
"next-hop-protocol.https.html": [
"7a907438d5d9f0b3ba1e198e048ea0b2e86a2415",
[

@ -1,5 +1,4 @@
suggested_reviewers:
- jgraham
- Hexcles
- stephenmcgruer
- LukeZielinski

@ -1,10 +1,10 @@
cask "safari-technology-preview" do
if MacOS.version <= :catalina
version "117,001-86197-20201209-2fd0fe89-0c37-412d-99e2-4e288519e886"
sha256 "739630b43a8f021cc246b2c8f610b759e82a6336830d34dac1f388763e7cceea"
version "118,001-92142-20210105-a1c7713a-1f38-411e-85e3-c650a62d5c06"
sha256 "8ffd7f83166106992cfc65a9760efe61578b55e1d8a1c960d56867f2048bd953"
else
version "117,001-86222-20201209-8021be92-32b3-403f-b9a7-630ab8e91afb"
sha256 "2dc42342e1e3fdd95086dfbd3a8588f1e2d181fbd4f8bb279994be0c2e57ff6f"
version "118,001-92171-20210105-8d3c22a7-e518-4758-8df4-fe87c4fa078a"
sha256 "98c60037f4dace62ca78d5bc3ab6974c9ca078f60fe2a82062bbc8e3cbcfc55a"
end
url "https://secure-appldnld.apple.com/STP/#{version.after_comma}/SafariTechnologyPreview.dmg"

@ -14,7 +14,7 @@ remote_url=https://${DEPLOY_TOKEN}@github.com/web-platform-tests/wpt.git
function json_property {
cat ${1} | \
python -c "import json, sys; print json.load(sys.stdin).get(\"${2}\", \"\")"
python -c "import json, sys; print(json.load(sys.stdin).get(\"${2}\", \"\"))"
}
function is_pull_request {

@ -10,6 +10,7 @@ sys.path.insert(0, os.path.join(here, "third_party", "atomicwrites"))
sys.path.insert(0, os.path.join(here, "third_party", "attrs", "src"))
sys.path.insert(0, os.path.join(here, "third_party", "funcsigs"))
sys.path.insert(0, os.path.join(here, "third_party", "html5lib"))
sys.path.insert(0, os.path.join(here, "third_party", "zipp"))
sys.path.insert(0, os.path.join(here, "third_party", "more-itertools"))
sys.path.insert(0, os.path.join(here, "third_party", "packaging"))
sys.path.insert(0, os.path.join(here, "third_party", "pathlib2"))

@ -1,15 +1,10 @@
import logging
import sys
logger = logging.getLogger("manifest")
def setup():
def enable_debug_logging():
# type: () -> None
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter(logging.BASIC_FORMAT, None)
handler.setFormatter(formatter)
logger.addHandler(handler)
def get_logger():
# type: () -> logging.Logger

@ -1,6 +1,7 @@
import io
import itertools
import os
import sys
from atomicwrites import atomic_write
from copy import deepcopy
from multiprocessing import Pool, cpu_count
@ -173,6 +174,8 @@ class Manifest(object):
constructed in the case we are not updating a path, but the absence of an item from
the iterator may be used to remove defunct entries from the manifest."""
logger = get_logger()
changed = False
# Create local variable references to these dicts so we avoid the
@ -221,20 +224,33 @@ class Manifest(object):
to_update.append(source_file)
if to_update:
logger.debug("Computing manifest update for %s items" % len(to_update))
changed = True
# 25 items was derived experimentally (2020-01) to be approximately the
# point at which it is quicker to create a Pool and parallelize update.
if parallel and len(to_update) > 25 and cpu_count() > 1:
# 25 derived experimentally (2020-01) to be approximately
# the point at which it is quicker to create Pool and
# parallelize this
pool = Pool()
# On Python 3 on Windows, using >= MAXIMUM_WAIT_OBJECTS processes
# causes a crash in the multiprocessing module. Whilst this enum
# can technically have any value, it is usually 64. For safety,
# restrict manifest regeneration to 48 processes on Windows.
#
# See https://bugs.python.org/issue26903 and https://bugs.python.org/issue40263
processes = cpu_count()
if sys.platform == "win32" and processes > 48:
processes = 48
pool = Pool(processes)
# chunksize set > 1 when more than 10000 tests, because
# chunking is a net-gain once we get to very large numbers
# of items (again, experimentally, 2020-01)
chunksize = max(1, len(to_update) // 10000)
logger.debug("Doing a multiprocessed update. CPU count: %s, "
"processes: %s, chunksize: %s" % (cpu_count(), processes, chunksize))
results = pool.imap_unordered(compute_manifest_items,
to_update,
chunksize=max(1, len(to_update) // 10000)
chunksize=chunksize
) # type: Iterator[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]]
elif PY3:
results = map(compute_manifest_items, to_update)
@ -444,6 +460,7 @@ def _load_and_update(tests_root, # type: Text
update = True
if rebuild or update:
logger.info("Updating manifest")
for retry in range(2):
try:
tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,

@ -4,7 +4,7 @@ import os
from . import manifest
from . import vcs
from .log import get_logger
from .log import get_logger, enable_debug_logging
from .download import download_from_github
here = os.path.dirname(__file__)
@ -64,6 +64,9 @@ def abs_path(path):
def create_parser():
# type: () -> argparse.ArgumentParser
parser = argparse.ArgumentParser()
parser.add_argument(
"-v", "--verbose", dest="verbose", action="store_true", default=False,
help="Turn on verbose logging")
parser.add_argument(
"-p", "--path", type=abs_path, help="Path to manifest file.")
parser.add_argument(
@ -90,6 +93,8 @@ def run(*args, **kwargs):
# type: (*Any, **Any) -> None
if kwargs["path"] is None:
kwargs["path"] = os.path.join(kwargs["tests_root"], "MANIFEST.json")
if kwargs["verbose"]:
enable_debug_logging()
update_from_cli(**kwargs)

@ -1,3 +1,3 @@
mypy==0.790
mypy-extensions==0.4.3
typed-ast==1.4.1
typed-ast==1.4.2

@ -6,6 +6,7 @@ import abc
import argparse
import json
import logging
import multiprocessing
import os
import platform
import signal
@ -19,7 +20,6 @@ from six.moves import urllib
import uuid
from collections import defaultdict, OrderedDict
from itertools import chain, product
from multiprocessing import Process, Event
from localpaths import repo_root
from six.moves import reload_module
@ -407,18 +407,19 @@ def get_route_builder(aliases, config=None):
class ServerProc(object):
def __init__(self, scheme=None):
def __init__(self, mp_context, scheme=None):
self.proc = None
self.daemon = None
self.stop = Event()
self.mp_context = mp_context
self.stop = mp_context.Event()
self.scheme = scheme
def start(self, init_func, host, port, paths, routes, bind_address, config, **kwargs):
self.proc = Process(target=self.create_daemon,
args=(init_func, host, port, paths, routes, bind_address,
config),
name='%s on port %s' % (self.scheme, port),
kwargs=kwargs)
self.proc = self.mp_context.Process(target=self.create_daemon,
args=(init_func, host, port, paths, routes, bind_address,
config),
name='%s on port %s' % (self.scheme, port),
kwargs=kwargs)
self.proc.daemon = True
self.proc.start()
@ -470,7 +471,7 @@ class ServerProc(object):
return self.proc.is_alive()
def check_subdomains(config, routes):
def check_subdomains(config, routes, mp_context):
paths = config.paths
bind_address = config.bind_address
@ -478,7 +479,7 @@ def check_subdomains(config, routes):
port = get_port()
logger.debug("Going to use port %d to check subdomains" % port)
wrapper = ServerProc()
wrapper = ServerProc(mp_context)
wrapper.start(start_http_server, host, port, paths, routes,
bind_address, config)
@ -530,7 +531,8 @@ def make_hosts_file(config, host):
return "".join(rv)
def start_servers(host, ports, paths, routes, bind_address, config, **kwargs):
def start_servers(host, ports, paths, routes, bind_address, config,
mp_context, **kwargs):
servers = defaultdict(list)
for scheme, ports in ports.items():
assert len(ports) == {"http": 2, "https": 2}.get(scheme, 1)
@ -551,7 +553,7 @@ def start_servers(host, ports, paths, routes, bind_address, config, **kwargs):
"wss": start_wss_server,
"quic-transport": start_quic_transport_server}[scheme]
server_proc = ServerProc(scheme=scheme)
server_proc = ServerProc(mp_context, scheme=scheme)
server_proc.start(init_func, host, port, paths, routes, bind_address,
config, **kwargs)
servers[scheme].append((port, server_proc))
@ -781,7 +783,7 @@ def start_quic_transport_server(host, port, paths, routes, bind_address, config,
startup_failed(log=False)
def start(config, routes, **kwargs):
def start(config, routes, mp_context, **kwargs):
host = config["server_host"]
ports = config.ports
paths = config.paths
@ -789,7 +791,7 @@ def start(config, routes, **kwargs):
logger.debug("Using ports: %r" % ports)
servers = start_servers(host, ports, paths, routes, bind_address, config, **kwargs)
servers = start_servers(host, ports, paths, routes, bind_address, config, mp_context, **kwargs)
return servers
@ -966,9 +968,20 @@ def get_parser():
return parser
def run(config_cls=ConfigBuilder, route_builder=None, **kwargs):
class MpContext(object):
def __getattr__(self, name):
return getattr(multiprocessing, name)
def run(config_cls=ConfigBuilder, route_builder=None, mp_context=None, **kwargs):
received_signal = threading.Event()
if mp_context is None:
if hasattr(multiprocessing, "get_context"):
mp_context = multiprocessing.get_context()
else:
mp_context = MpContext()
with build_config(os.path.join(repo_root, "config.json"),
config_cls=config_cls,
**kwargs) as config:
@ -998,7 +1011,7 @@ def run(config_cls=ConfigBuilder, route_builder=None, **kwargs):
routes = route_builder(config.aliases, config).get_routes()
if config["check_subdomains"]:
check_subdomains(config, routes)
check_subdomains(config, routes, mp_context)
stash_address = None
if bind_address:
@ -1006,7 +1019,7 @@ def run(config_cls=ConfigBuilder, route_builder=None, **kwargs):
logger.debug("Going to use port %d for stash" % stash_address[1])
with stash.StashServer(stash_address, authkey=str(uuid.uuid4())):
servers = start(config, routes, **kwargs)
servers = start(config, routes, mp_context, **kwargs)
signal.signal(signal.SIGTERM, handle_signal)
signal.signal(signal.SIGINT, handle_signal)

@ -0,0 +1,100 @@
v1.2.0
======
#44: ``zipp.Path.open()`` now supports a compatible signature
as ``pathlib.Path.open()``, accepting text (default) or binary
modes and soliciting keyword parameters passed through to
``io.TextIOWrapper`` (encoding, newline, etc). The stream is
opened in text-mode by default now. ``open`` no
longer accepts ``pwd`` as a positional argument and does not
accept the ``force_zip64`` parameter at all. This change is
a backward-incompatible change for that single function.
v1.1.1
======
#43: Restored performance of implicit dir computation.
v1.1.0
======
#32: For read-only zip files, complexity of ``.exists`` and
``joinpath`` is now constant time instead of ``O(n)``, preventing
quadratic time in common use-cases and rendering large
zip files unusable for Path. Big thanks to Benjy Weinberger
for the bug report and contributed fix (#33).
v1.0.0
======
Re-release of 0.6 to correspond with release as found in
Python 3.8.
v0.6.0
======
#12: When adding implicit dirs, ensure that ancestral directories
are added and that duplicates are excluded.
The library now relies on
`more_itertools <https://pypi.org/project/more_itertools>`_.
v0.5.2
======
#7: Parent of a directory now actually returns the parent.
v0.5.1
======
Declared package as backport.
v0.5.0
======
Add ``.joinpath()`` method and ``.parent`` property.
Now a backport release of the ``zipfile.Path`` class.
v0.4.0
======
#4: Add support for zip files with implied directories.
v0.3.3
======
#3: Fix issue where ``.name`` on a directory was empty.
v0.3.2
======
#2: Fix TypeError on Python 2.7 when classic division is used.
v0.3.1
======
#1: Fix TypeError on Python 3.5 when joining to a path-like object.
v0.3.0
======
Add support for constructing a ``zipp.Path`` from any path-like
object.
``zipp.Path`` is now a new-style class on Python 2.7.
v0.2.1
======
Fix issue with ``__str__``.
v0.2.0
======
Drop reliance on future-fstrings.
v0.1.0
======
Initial release with basic functionality.

@ -0,0 +1,7 @@
Copyright Jason R. Coombs
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,39 @@
Metadata-Version: 2.1
Name: zipp
Version: 1.2.0
Summary: Backport of pathlib-compatible object wrapper for zip files
Home-page: https://github.com/jaraco/zipp
Author: Jason R. Coombs
Author-email: jaraco@jaraco.com
License: UNKNOWN
Description: .. image:: https://img.shields.io/pypi/v/zipp.svg
:target: https://pypi.org/project/zipp
.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
:target: https://travis-ci.org/jaraco/zipp
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/ambv/black
:alt: Code style: Black
.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
:target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
A pathlib-compatible Zipfile object wrapper. A backport of the
`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Requires-Python: >=2.7
Provides-Extra: testing
Provides-Extra: docs

@ -0,0 +1,21 @@
.. image:: https://img.shields.io/pypi/v/zipp.svg
:target: https://pypi.org/project/zipp
.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
:target: https://travis-ci.org/jaraco/zipp
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/ambv/black
:alt: Code style: Black
.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
:target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
A pathlib-compatible Zipfile object wrapper. A backport of the
`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.

@ -0,0 +1,24 @@
environment:
APPVEYOR: true
matrix:
- PYTHON: "C:\\Python36-x64"
- PYTHON: "C:\\Python27-x64"
install:
# symlink python from a directory with a space
- "mklink /d \"C:\\Program Files\\Python\" %PYTHON%"
- "SET PYTHON=\"C:\\Program Files\\Python\""
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
build: off
cache:
- '%LOCALAPPDATA%\pip\Cache'
test_script:
- "python -m pip install -U tox tox-venv virtualenv"
- "tox"
version: '{build}'

@ -0,0 +1,26 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
master_doc = "index"
link_files = {
'../CHANGES.rst': dict(
using=dict(GH='https://github.com'),
replace=[
dict(
pattern=r'(Issue #|\B#)(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
)
}

@ -0,0 +1,8 @@
:tocdepth: 2
.. _changes:
History
*******
.. include:: ../CHANGES (links).rst

@ -0,0 +1,22 @@
Welcome to zipp documentation!
========================================
.. toctree::
:maxdepth: 1
history
.. automodule:: zipp
:members:
:undoc-members:
:show-inheritance:
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

@ -0,0 +1,6 @@
[build-system]
requires = ["setuptools>=34.4", "wheel", "setuptools_scm>=1.15"]
build-backend = "setuptools.build_meta"
[tool.black]
skip-string-normalization = true

@ -0,0 +1,45 @@
[bdist_wheel]
universal = 1
[metadata]
license_file = LICENSE
name = zipp
author = Jason R. Coombs
author_email = jaraco@jaraco.com
description = Backport of pathlib-compatible object wrapper for zip files
long_description = file:README.rst
url = https://github.com/jaraco/zipp
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
[options]
py_modules = zipp
packages = find:
include_package_data = true
python_requires = >=2.7
install_requires =
contextlib2; python_version < "3.4"
setup_requires = setuptools_scm >= 1.15.0
[options.extras_require]
testing =
pathlib2
unittest2
jaraco.itertools
func-timeout
docs =
sphinx
jaraco.packaging >= 3.2
rst.linker >= 1.9
[options.entry_points]
[egg_info]
tag_build =
tag_date = 0

@ -0,0 +1,6 @@
#!/usr/bin/env python
import setuptools
if __name__ == "__main__":
setuptools.setup(use_scm_version=True)

@ -0,0 +1,137 @@
# Overview
This project is merged with [skeleton](https://github.com/jaraco/skeleton). What is skeleton? It's the scaffolding of a Python project jaraco [introduced in his blog](https://blog.jaraco.com/a-project-skeleton-for-python-projects/). It seeks to provide a means to re-use techniques and inherit advances when managing projects for distribution.
## An SCM Managed Approach
While maintaining dozens of projects in PyPI, jaraco derives best practices for project distribution and publishes them in the [skeleton repo](https://github.com/jaraco/skeleton), a git repo capturing the evolution and culmination of these best practices.
It's intended to be used by a new or existing project to adopt these honed and proven practices and techniques. Adopters are encouraged to use the project directly and maintain only a small deviation from the technique, to make their own fork for more substantial changes unique to their environment or preferences, or simply to adopt the skeleton once and abandon it thereafter.
The primary advantage to using an SCM for maintaining these techniques is that those tools help facilitate the merge between the template and its adopting projects.
Another advantage to using an SCM-managed approach is that tools like GitHub recognize that a change in the skeleton is the _same change_ across all projects that merge with that skeleton. Without the ancestry, with a traditional copy/paste approach, a [commit like this](https://github.com/jaraco/skeleton/commit/12eed1326e1bc26ce256e7b3f8cd8d3a5beab2d5) would produce notifications in the upstream project issue for each and every application, but because it's centralized, GitHub provides just the one notification when the change is added to the skeleton.
# Usage
## new projects
To use skeleton for a new project, simply pull the skeleton into a new project:
```
$ git init my-new-project
$ cd my-new-project
$ git pull gh://jaraco/skeleton
```
Now customize the project to suit your individual project needs.
## existing projects
If you have an existing project, you can still incorporate the skeleton by merging it into the codebase.
```
$ git merge skeleton --allow-unrelated-histories
```
The `--allow-unrelated-histories` is necessary because the history from the skeleton was previously unrelated to the existing codebase. Resolve any merge conflicts and commit to the master, and now the project is based on the shared skeleton.
## Updating
Whenever a change is needed or desired for the general technique for packaging, it can be made in the skeleton project and then merged into each of the derived projects as needed, recommended before each release. As a result, features and best practices for packaging are centrally maintained and readily trickle into a whole suite of packages. This technique lowers the amount of tedious work necessary to create or maintain a project, and coupled with other techniques like continuous integration and deployment, lowers the cost of creating and maintaining refined Python projects to just a few, familiar git operations.
Thereafter, the target project can make whatever customizations it deems relevant to the scaffolding. The project may even at some point decide that the divergence is too great to merit renewed merging with the original skeleton. This approach applies maximal guidance while creating minimal constraints.
# Features
The features/techniques employed by the skeleton include:
- PEP 517/518 based build relying on setuptools as the build tool
- setuptools declarative configuration using setup.cfg
- tox for running tests
- A README.rst as reStructuredText with some popular badges, but with readthedocs and appveyor badges commented out
- A CHANGES.rst file intended for publishing release notes about the project
- Use of [black](https://black.readthedocs.io/en/stable/) for code formatting (disabled on unsupported Python 3.5 and earlier)
## Packaging Conventions
A pyproject.toml is included to enable PEP 517 and PEP 518 compatibility and declares the requirements necessary to build the project on setuptools (a minimum version compatible with setup.cfg declarative config).
The setup.cfg file implements the following features:
- Assumes universal wheel for release
- Advertises the project's LICENSE file (MIT by default)
- Reads the README.rst file into the long description
- Some common Trove classifiers
- Includes all packages discovered in the repo
- Data files in the package are also included (not just Python files)
- Declares the required Python versions
- Declares install requirements (empty by default)
- Declares setup requirements for legacy environments
- Supplies two 'extras':
- testing: requirements for running tests
- docs: requirements for building docs
- these extras split the declaration into "upstream" (requirements as declared by the skeleton) and "local" (those specific to the local project); these markers help avoid merge conflicts
- Placeholder for defining entry points
Additionally, the setup.py file declares `use_scm_version` which relies on [setuptools_scm](https://pypi.org/project/setuptools_scm) to do two things:
- derive the project version from SCM tags
- ensure that all files committed to the repo are automatically included in releases
## Running Tests
The skeleton assumes the developer has [tox](https://pypi.org/project/tox) installed. The developer is expected to run `tox` to run tests on the current Python version using [pytest](https://pypi.org/project/pytest).
Other environments (invoked with `tox -e {name}`) supplied include:
- a `build-docs` environment to build the documentation
- a `release` environment to publish the package to PyPI
A pytest.ini is included to define common options around running tests. In particular:
- rely on default test discovery in the current directory
- avoid recursing into common directories not containing tests
- run doctests on modules and invoke flake8 tests
- in doctests, allow unicode literals and regular literals to match, allowing for doctests to run on Python 2 and 3. Also enable ELLIPSES, a default that would be undone by supplying the prior option.
- filters out known warnings caused by libraries/functionality included by the skeleton
Relies on a .flake8 file to correct some default behaviors:
- disable mutually incompatible rules W503 and W504
- support for black format
## Continuous Integration
The project is pre-configured to run tests in [Travis-CI](https://travis-ci.org) (.travis.yml). Any new project must be enabled either through their web site or with the `travis enable` command.
Features include:
- test against Python 2 and 3
- run on Ubuntu Xenial
- correct for broken IPv6
Also provided is a minimal template for running under Appveyor (Windows).
### Continuous Deployments
In addition to running tests, an additional deploy stage is configured to automatically release tagged commits to PyPI using [API tokens](https://pypi.org/help/#apitoken). The release process expects an authorized token to be configured with Travis as the TWINE_PASSWORD environment variable. After the Travis project is created, configure the token through the web UI or with a command like the following (bash syntax):
```
TWINE_PASSWORD={token} travis env copy TWINE_PASSWORD
```
## Building Documentation
Documentation is automatically built by [Read the Docs](https://readthedocs.org) when the project is registered with it, by way of the .readthedocs.yml file. To test the docs build manually, a tox env may be invoked as `tox -e build-docs`. Both techniques rely on the dependencies declared in `setup.cfg/options.extras_require.docs`.
In addition to building the sphinx docs scaffolded in `docs/`, the docs build a `history.html` file that first injects release dates and hyperlinks into the CHANGES.rst before incorporating it as history in the docs.
## Cutting releases
By default, tagged commits are released through the continuous integration deploy stage.
Releases may also be cut manually by invoking the tox environment `release` with the PyPI token set as the TWINE_PASSWORD:
```
TWINE_PASSWORD={token} tox -e release
```

@ -0,0 +1,245 @@
# coding: utf-8
from __future__ import division, unicode_literals
import io
import zipfile
import contextlib
import tempfile
import shutil
import string
try:
import pathlib
except ImportError:
import pathlib2 as pathlib
if not hasattr(contextlib, 'ExitStack'):
import contextlib2
contextlib.ExitStack = contextlib2.ExitStack
try:
import unittest
unittest.TestCase.subTest
except AttributeError:
import unittest2 as unittest
import jaraco.itertools
import func_timeout
import zipp
__metaclass__ = type
consume = tuple
def add_dirs(zf):
    """
    Given a writable zip file zf, inject directory entries for
    any directories implied by the presence of children.

    Returns the same zip file for chaining.
    """
    implied = zipp.CompleteDirs._implied_dirs(zf.namelist())
    consume(zf.writestr(dirname, b"") for dirname in implied)
    return zf
def build_alpharep_fixture():
    """
    Create a zip file with this structure:

    .
    ├── a.txt
    ├── b
    │   ├── c.txt
    │   ├── d
    │   │   └── e.txt
    │   └── f.txt
    └── g
        └── h
            └── i.txt

    This fixture has the following key characteristics:
    - a file at the root (a)
    - a file two levels deep (b/d/e)
    - multiple files in a directory (b/c, b/f)
    - a directory containing only a directory (g/h)

    "alpha" because it uses alphabet
    "rep" because it's a representative example
    """
    contents = [
        ("a.txt", b"content of a"),
        ("b/c.txt", b"content of c"),
        ("b/d/e.txt", b"content of e"),
        ("b/f.txt", b"content of f"),
        ("g/h/i.txt", b"content of i"),
    ]
    buffer = io.BytesIO()
    zf = zipfile.ZipFile(buffer, "w")
    for arcname, payload in contents:
        zf.writestr(arcname, payload)
    zf.filename = "alpharep.zip"
    return zf
@contextlib.contextmanager
def temp_dir():
    """Yield a pathlib.Path to a fresh temporary directory,
    removing the whole tree when the context exits."""
    created = tempfile.mkdtemp()
    try:
        yield pathlib.Path(created)
    finally:
        shutil.rmtree(created)
class TestPath(unittest.TestCase):
    """Behavioral tests for zipp.Path over the alpharep fixture."""
    def setUp(self):
        # Per-test ExitStack: fixtures (e.g. temp dirs) register their
        # teardown here; addCleanup closes it after each test.
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)
    def zipfile_alpharep(self):
        """Yield the fixture twice: as written, then with explicit
        directory entries injected (see add_dirs)."""
        with self.subTest():
            yield build_alpharep_fixture()
        with self.subTest():
            yield add_dirs(build_alpharep_fixture())
    def zipfile_ondisk(self):
        """Yield filesystem paths to on-disk copies of each variant."""
        tmpdir = pathlib.Path(self.fixtures.enter_context(temp_dir()))
        for alpharep in self.zipfile_alpharep():
            # Grab the underlying BytesIO before closing, then persist
            # its contents to a real file.
            buffer = alpharep.fp
            alpharep.close()
            path = tmpdir / alpharep.filename
            with path.open("wb") as strm:
                strm.write(buffer.getvalue())
            yield path
    def test_iterdir_and_types(self):
        # iterdir yields children in namelist order: files then dirs
        # as they were written into the fixture.
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            assert root.is_dir()
            a, b, g = root.iterdir()
            assert a.is_file()
            assert b.is_dir()
            assert g.is_dir()
            c, f, d = b.iterdir()
            assert c.is_file() and f.is_file()
            e, = d.iterdir()
            assert e.is_file()
            h, = g.iterdir()
            i, = h.iterdir()
            assert i.is_file()
    def test_open(self):
        # Default open() is text mode.
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            a, b, g = root.iterdir()
            with a.open() as strm:
                data = strm.read()
            assert data == "content of a"
    def test_read(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            a, b, g = root.iterdir()
            assert a.read_text() == "content of a"
            assert a.read_bytes() == b"content of a"
    def test_joinpath(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            a = root.joinpath("a")
            assert a.is_file()
            e = root.joinpath("b").joinpath("d").joinpath("e.txt")
            assert e.read_text() == "content of e"
    def test_traverse_truediv(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            a = root / "a"
            assert a.is_file()
            e = root / "b" / "d" / "e.txt"
            assert e.read_text() == "content of e"
    def test_traverse_simplediv(self):
        """
        Disable the __future__.division when testing traversal.
        """
        # dont_inherit=True keeps this module's __future__ imports out of
        # the compiled expression, so classic `/` (__div__) is exercised.
        for alpharep in self.zipfile_alpharep():
            code = compile(
                source="zipp.Path(alpharep) / 'a'",
                filename="(test)",
                mode="eval",
                dont_inherit=True,
            )
            eval(code)
    def test_pathlike_construction(self):
        """
        zipp.Path should be constructable from a path-like object
        """
        for zipfile_ondisk in self.zipfile_ondisk():
            pathlike = pathlib.Path(str(zipfile_ondisk))
            zipp.Path(pathlike)
    def test_traverse_pathlike(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            root / pathlib.Path("a")
    def test_parent(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            assert (root / 'a').parent.at == ''
            assert (root / 'a' / 'b').parent.at == 'a/'
    def test_dir_parent(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            assert (root / 'b').parent.at == ''
            assert (root / 'b/').parent.at == ''
    def test_missing_dir_parent(self):
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            assert (root / 'missing dir/').parent.at == ''
    def test_mutability(self):
        """
        If the underlying zipfile is changed, the Path object should
        reflect that change.
        """
        for alpharep in self.zipfile_alpharep():
            root = zipp.Path(alpharep)
            a, b, g = root.iterdir()
            alpharep.writestr('foo.txt', b'foo')
            alpharep.writestr('bar/baz.txt', b'baz')
            assert any(
                child.name == 'foo.txt'
                for child in root.iterdir())
            assert (root / 'foo.txt').read_text() == 'foo'
            baz, = (root / 'bar').iterdir()
            assert baz.read_text() == 'baz'
    # Large enough that accidental quadratic behavior would be noticeable.
    HUGE_ZIPFILE_NUM_ENTRIES = 2 ** 13
    def huge_zipfile(self):
        """Create a read-only zipfile with a huge number of entries."""
        strm = io.BytesIO()
        zf = zipfile.ZipFile(strm, "w")
        for entry in map(str, range(self.HUGE_ZIPFILE_NUM_ENTRIES)):
            zf.writestr(entry, entry)
        # Flip to read mode so zipp selects the caching FastLookup class.
        zf.mode = 'r'
        return zf
    def test_joinpath_constant_time(self):
        """
        Ensure joinpath on items in zipfile is linear time.
        """
        root = zipp.Path(self.huge_zipfile())
        entries = jaraco.itertools.Counter(root.iterdir())
        for entry in entries:
            entry.joinpath('suffix')
        # Check the file iterated all items
        assert entries.count == self.HUGE_ZIPFILE_NUM_ENTRIES
    @func_timeout.func_set_timeout(3)
    def test_implied_dirs_performance(self):
        # 10k deeply-nested names must be processed within the 3s timeout.
        data = ['/'.join(string.ascii_lowercase + str(n)) for n in range(10000)]
        zipp.CompleteDirs._implied_dirs(data)

@ -0,0 +1,36 @@
[tox]
envlist = python
minversion = 3.2
# https://github.com/jaraco/skeleton/issues/6
tox_pip_extensions_ext_venv_update = true
[testenv]
deps =
setuptools>=31.0.1
commands =
python -m unittest discover
usedevelop = True
extras = testing
[testenv:build-docs]
extras =
docs
testing
changedir = docs
commands =
python -m sphinx . {toxinidir}/build/html
[testenv:release]
skip_install = True
deps =
pep517>=0.5
twine>=1.13
path.py
passenv =
TWINE_PASSWORD
setenv =
TWINE_USERNAME = {env:TWINE_USERNAME:__token__}
commands =
python -c "import path; path.Path('dist').rmtree_p()"
python -m pep517.build .
python -m twine upload dist/*

@ -0,0 +1,39 @@
Metadata-Version: 2.1
Name: zipp
Version: 1.2.0
Summary: Backport of pathlib-compatible object wrapper for zip files
Home-page: https://github.com/jaraco/zipp
Author: Jason R. Coombs
Author-email: jaraco@jaraco.com
License: UNKNOWN
Description: .. image:: https://img.shields.io/pypi/v/zipp.svg
:target: https://pypi.org/project/zipp
.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
:target: https://travis-ci.org/jaraco/zipp
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/ambv/black
:alt: Code style: Black
.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
:target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
A pathlib-compatible Zipfile object wrapper. A backport of the
`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Requires-Python: >=2.7
Provides-Extra: testing
Provides-Extra: docs

@ -0,0 +1,24 @@
.flake8
.pre-commit-config.yaml
.readthedocs.yml
.travis.yml
CHANGES.rst
LICENSE
README.rst
appveyor.yml
conftest.py
pyproject.toml
setup.cfg
setup.py
skeleton.md
test_zipp.py
tox.ini
zipp.py
docs/conf.py
docs/history.rst
docs/index.rst
zipp.egg-info/PKG-INFO
zipp.egg-info/SOURCES.txt
zipp.egg-info/dependency_links.txt
zipp.egg-info/requires.txt
zipp.egg-info/top_level.txt

@ -0,0 +1,14 @@
[:python_version < "3.4"]
contextlib2
[docs]
sphinx
jaraco.packaging>=3.2
rst.linker>=1.9
[testing]
pathlib2
unittest2
jaraco.itertools
func-timeout

@ -0,0 +1,286 @@
# coding: utf-8
from __future__ import division
import io
import sys
import posixpath
import zipfile
import functools
import itertools
from collections import OrderedDict
try:
from contextlib import suppress
except ImportError:
from contextlib2 import suppress
__metaclass__ = type
def _parents(path):
"""
Given a path with elements separated by
posixpath.sep, generate all parents of that path.
>>> list(_parents('b/d'))
['b']
>>> list(_parents('/b/d/'))
['/b']
>>> list(_parents('b/d/f/'))
['b/d', 'b']
>>> list(_parents('b'))
[]
>>> list(_parents(''))
[]
"""
return itertools.islice(_ancestry(path), 1, None)
def _ancestry(path):
"""
Given a path with elements separated by
posixpath.sep, generate all elements of that path
>>> list(_ancestry('b/d'))
['b/d', 'b']
>>> list(_ancestry('/b/d/'))
['/b/d', '/b']
>>> list(_ancestry('b/d/f/'))
['b/d/f', 'b/d', 'b']
>>> list(_ancestry('b'))
['b']
>>> list(_ancestry(''))
[]
"""
path = path.rstrip(posixpath.sep)
while path and path != posixpath.sep:
yield path
path, tail = posixpath.split(path)
class CompleteDirs(zipfile.ZipFile):
    """
    A ZipFile subclass that ensures that implied directories
    are always included in the namelist.
    """
    @staticmethod
    def _implied_dirs(names):
        """Return directory names (with trailing slash) implied by
        ``names`` but not explicitly listed, in first-seen order."""
        parents = itertools.chain.from_iterable(map(_parents, names))
        # Cast names to a set for O(1) lookups
        existing = set(names)
        # Deduplicate entries in original order
        implied_dirs = OrderedDict.fromkeys(
            p + posixpath.sep for p in parents
            if p + posixpath.sep not in existing
        )
        return implied_dirs
    def namelist(self):
        # Explicit entries first, then any implied directory entries.
        names = super(CompleteDirs, self).namelist()
        return names + list(self._implied_dirs(names))
    def _name_set(self):
        # Set view of namelist for O(1) membership tests (see Path.exists).
        return set(self.namelist())
    def resolve_dir(self, name):
        """
        If the name represents a directory, return that name
        as a directory (with the trailing slash).
        """
        names = self._name_set()
        dirname = name + '/'
        dir_match = name not in names and dirname in names
        return dirname if dir_match else name
    @classmethod
    def make(cls, source):
        """
        Given a source (filename or zipfile), return an
        appropriate CompleteDirs subclass.
        """
        if isinstance(source, CompleteDirs):
            return source
        if not isinstance(source, zipfile.ZipFile):
            return cls(_pathlib_compat(source))
        # Only allow for FastPath when supplied zipfile is read-only
        if 'r' not in source.mode:
            cls = CompleteDirs
        # Rebless the existing ZipFile in place: allocate an
        # uninitialized instance and copy the source's state across,
        # rather than re-reading the archive.
        res = cls.__new__(cls)
        vars(res).update(vars(source))
        return res
class FastLookup(CompleteDirs):
    """
    ZipFile subclass to ensure implicit
    dirs exist and are resolved rapidly.

    NOTE: the caching below assumes the archive no longer changes;
    CompleteDirs.make only selects this class when the supplied
    zipfile's mode contains 'r'.
    """
    def namelist(self):
        # First call falls through (attribute missing), computes and
        # caches; subsequent calls return the cached list.
        with suppress(AttributeError):
            return self.__names
        self.__names = super(FastLookup, self).namelist()
        return self.__names
    def _name_set(self):
        # Same memoization pattern for the set used by membership tests.
        with suppress(AttributeError):
            return self.__lookup
        self.__lookup = super(FastLookup, self)._name_set()
        return self.__lookup
def _pathlib_compat(path):
"""
For path-like objects, convert to a filename for compatibility
on Python 3.6.1 and earlier.
"""
try:
return path.__fspath__()
except AttributeError:
return str(path)
class Path:
    """
    A pathlib-compatible interface for zip files.

    Consider a zip file with this structure::

        .
        ├── a.txt
        └── b
            ├── c.txt
            └── d
                └── e.txt

    >>> data = io.BytesIO()
    >>> zf = zipfile.ZipFile(data, 'w')
    >>> zf.writestr('a.txt', 'content of a')
    >>> zf.writestr('b/c.txt', 'content of c')
    >>> zf.writestr('b/d/e.txt', 'content of e')
    >>> zf.filename = 'abcde.zip'

    Path accepts the zipfile object itself or a filename

    >>> root = Path(zf)

    From there, several path operations are available.

    Directory iteration (including the zip file itself):

    >>> a, b = root.iterdir()
    >>> a
    Path('abcde.zip', 'a.txt')
    >>> b
    Path('abcde.zip', 'b/')

    name property:

    >>> b.name
    'b'

    join with divide operator:

    >>> c = b / 'c.txt'
    >>> c
    Path('abcde.zip', 'b/c.txt')
    >>> c.name
    'c.txt'

    Read text:

    >>> c.read_text()
    'content of c'

    existence:

    >>> c.exists()
    True
    >>> (b / 'missing.txt').exists()
    False

    Coercion to string:

    >>> str(c)
    'abcde.zip/b/c.txt'
    """
    # Format template for __repr__ (name-mangled to keep it private).
    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
    def __init__(self, root, at=""):
        # root may be a ZipFile, CompleteDirs, or a path/filename;
        # FastLookup.make normalizes it to a (caching where safe)
        # CompleteDirs instance.  `at` is the entry name within it.
        self.root = FastLookup.make(root)
        self.at = at
    def open(self, mode='r', *args, **kwargs):
        """
        Open this entry as text or binary following the semantics
        of ``pathlib.Path.open()`` by passing arguments through
        to io.TextIOWrapper().
        """
        # pwd is a ZipFile.open argument, not a TextIOWrapper one.
        pwd = kwargs.pop('pwd', None)
        zip_mode = mode[0]
        stream = self.root.open(self.at, zip_mode, pwd=pwd)
        if 'b' in mode:
            if args or kwargs:
                raise ValueError("encoding args invalid for binary operation")
            return stream
        return io.TextIOWrapper(stream, *args, **kwargs)
    @property
    def name(self):
        # Final path component, ignoring any trailing slash on dirs.
        return posixpath.basename(self.at.rstrip("/"))
    def read_text(self, *args, **kwargs):
        with self.open('r', *args, **kwargs) as strm:
            return strm.read()
    def read_bytes(self):
        with self.open('rb') as strm:
            return strm.read()
    def _is_child(self, path):
        # True when `path` is an immediate child of this directory.
        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
    def _next(self, at):
        # New Path sharing this object's (already-normalized) root.
        return Path(self.root, at)
    def is_dir(self):
        # The root ('') and any entry ending in '/' are directories.
        return not self.at or self.at.endswith("/")
    def is_file(self):
        return not self.is_dir()
    def exists(self):
        return self.at in self.root._name_set()
    def iterdir(self):
        if not self.is_dir():
            raise ValueError("Can't listdir a file")
        subs = map(self._next, self.root.namelist())
        return filter(self._is_child, subs)
    def __str__(self):
        return posixpath.join(self.root.filename, self.at)
    def __repr__(self):
        return self.__repr.format(self=self)
    def joinpath(self, add):
        # resolve_dir appends a trailing slash when the target is a dir.
        next = posixpath.join(self.at, _pathlib_compat(add))
        return self._next(self.root.resolve_dir(next))
    __truediv__ = joinpath
    @property
    def parent(self):
        parent_at = posixpath.dirname(self.at.rstrip('/'))
        if parent_at:
            parent_at += '/'
        return self._next(parent_at)
    if sys.version_info < (3,):
        # Python 2 classic division maps `/` to __div__.
        __div__ = __truediv__

@ -4,7 +4,7 @@ mozlog==7.1.0
mozdebug==0.2
# Pillow 7 requires Python 3
pillow==6.2.2; python_version <= '2.7' # pyup: <7.0
pillow==8.0.1; python_version >= '3.0'
pillow==8.1.0; python_version >= '3.0'
urllib3[secure]==1.26.2
requests==2.25.1
six==1.15.0

@ -139,15 +139,18 @@ def executor_kwargs(logger, test_type, server_config, cache_manager, run_info_da
options["prefs"].update({pref: Preferences.cast(value)})
capabilities["moz:firefoxOptions"] = options
environ = get_environ(logger,
kwargs["binary"],
kwargs["debug_info"],
kwargs["stylo_threads"],
kwargs["headless"],
kwargs["enable_webrender"],
kwargs["chaos_mode_flags"])
# This gets reused for firefox_android, but the environment setup
# isn't required in that case
if kwargs["binary"]:
environ = get_environ(logger,
kwargs["binary"],
kwargs["debug_info"],
kwargs["stylo_threads"],
kwargs["headless"],
kwargs["enable_webrender"],
kwargs["chaos_mode_flags"])
executor_kwargs["environ"] = environ
executor_kwargs["environ"] = environ
if kwargs["certutil_binary"] is None:
capabilities["acceptInsecureCerts"] = True
if capabilities:

@ -39,7 +39,8 @@ def check_args(**kwargs):
def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
return {"package_name": kwargs["package_name"],
return {"adb_binary": kwargs["adb_binary"],
"package_name": kwargs["package_name"],
"device_serial": kwargs["device_serial"],
"prefs_root": kwargs["prefs_root"],
"extra_prefs": kwargs["extra_prefs"],
@ -128,7 +129,8 @@ class FirefoxAndroidBrowser(Browser):
ca_certificate_path=None, e10s=False, enable_webrender=False, stackfix_dir=None,
binary_args=None, timeout_multiplier=None, leak_check=False, asan=False,
stylo_threads=1, chaos_mode_flags=None, config=None, browser_channel="nightly",
install_fonts=False, tests_root=None, specialpowers_path=None, **kwargs):
install_fonts=False, tests_root=None, specialpowers_path=None, adb_binary=None,
**kwargs):
super(FirefoxAndroidBrowser, self).__init__(logger)
self.prefs_root = prefs_root
@ -154,6 +156,7 @@ class FirefoxAndroidBrowser(Browser):
self.install_fonts = install_fonts
self.tests_root = tests_root
self.specialpowers_path = specialpowers_path
self.adb_binary = adb_binary
self.profile_creator = ProfileCreator(logger,
prefs_root,
@ -222,7 +225,9 @@ class FirefoxAndroidBrowser(Browser):
symbols_path=self.symbols_path,
serial=self.device_serial,
# TODO - choose appropriate log dir
logdir=os.getcwd())
logdir=os.getcwd(),
adb_path=self.adb_binary,
explicit_cleanup=True)
self.logger.debug("Starting %s" % self.package_name)
# connect to a running emulator
@ -254,7 +259,7 @@ class FirefoxAndroidBrowser(Browser):
self.logger.warning("Failed to remove forwarded or reversed ports: %s" % e)
# We assume that stopping the runner prompts the
# browser to shut down.
self.runner.stop()
self.runner.cleanup()
self.logger.debug("stopped")
def pid(self):

@ -1,6 +1,6 @@
import json
import os
import multiprocessing
import signal
import socket
import sys
@ -9,6 +9,7 @@ from six import iteritems
from mozlog import get_default_logger, handlers, proxy
from . import mpcontext
from .wptlogging import LogLevelRewriter
here = os.path.dirname(__file__)
@ -66,8 +67,9 @@ class TestEnvironment(object):
self.debug_info = debug_info
self.options = options if options is not None else {}
self.cache_manager = multiprocessing.Manager()
self.stash = serve.stash.StashServer()
mp_context = mpcontext.get_context()
self.cache_manager = mp_context.Manager()
self.stash = serve.stash.StashServer(mp_context=mp_context)
self.env_extras = env_extras
self.env_extras_cms = None
self.ssl_config = ssl_config
@ -95,7 +97,8 @@ class TestEnvironment(object):
self.env_extras_cms.append(cm)
self.servers = serve.start(self.config,
self.get_routes())
self.get_routes(),
mp_context=mpcontext.get_context())
if self.options.get("supports_debugger") and self.debug_info and self.debug_info.interactive:
self.ignore_interrupts()
@ -168,7 +171,8 @@ class TestEnvironment(object):
log_filter = LogLevelRewriter(log_filter, ["error"], "warning")
server_logger.component_filter = log_filter
server_logger = proxy.QueuedProxyLogger(server_logger)
server_logger = proxy.QueuedProxyLogger(server_logger,
mpcontext.get_context())
try:
# Set as the default logger for wptserve

@ -12,13 +12,13 @@ from six import text_type
from six.moves.http_client import HTTPConnection
from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit
from ..testrunner import Stop
from .actions import actions
from .protocol import Protocol, BaseProtocolPart
here = os.path.dirname(__file__)
def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
**kwargs):
timeout_multiplier = kwargs["timeout_multiplier"]
@ -180,11 +180,11 @@ class TimedRunner(object):
self.result_flag = threading.Event()
def run(self):
if self.set_timeout() is Stop:
return Stop
if self.before_run() is Stop:
return Stop
for setup_fn in [self.set_timeout, self.before_run]:
err = setup_fn()
if err:
self.result = (False, err)
return self.result
executor = threading.Thread(target=self.run_func)
executor.start()
@ -296,9 +296,6 @@ class TestExecutor(object):
self.logger.warning(exception_string)
result = self.result_from_exception(test, e, exception_string)
if result is Stop:
return result
# log result of parent test
if result[0].status == "ERROR":
self.logger.debug(result[0].message)

@ -41,7 +41,6 @@ from .protocol import (ActionSequenceProtocolPart,
VirtualAuthenticatorProtocolPart,
SetPermissionProtocolPart,
PrintProtocolPart)
from ..testrunner import Stop
from ..webdriver_server import GeckoDriverServer
@ -727,8 +726,9 @@ class ExecuteAsyncScriptRun(TimedRunner):
# timeout is set too high. This works at least.
self.protocol.base.set_timeout(2**28 - 1)
except IOError:
self.logger.error("Lost marionette connection before starting test")
return Stop
msg = "Lost marionette connection before starting test"
self.logger.error(msg)
return ("INTERNAL-ERROR", msg)
def before_run(self):
index = self.url.rfind("/storage/")

@ -23,7 +23,6 @@ from .protocol import (BaseProtocolPart,
SendKeysProtocolPart,
ActionSequenceProtocolPart,
TestDriverProtocolPart)
from ..testrunner import Stop
here = os.path.dirname(__file__)
@ -278,8 +277,9 @@ class SeleniumRun(TimedRunner):
try:
self.protocol.base.set_timeout(timeout + self.extra_timeout)
except exceptions.ErrorInResponseException:
self.logger.error("Lost WebDriver connection")
return Stop
msg = "Lost WebDriver connection"
self.logger.error(msg)
return ("INTERNAL-ERROR", msg)
def run_func(self):
try:

@ -10,7 +10,6 @@ from .base import (Protocol,
TestharnessExecutor,
TimedRunner,
strip_server)
from ..testrunner import Stop
from ..webdriver_server import wait_for_service
webdriver = None
@ -184,8 +183,9 @@ class ServoWebDriverTestharnessExecutor(TestharnessExecutor):
self.protocol.session.timeouts.script = timeout
self.timeout = timeout
except IOError:
self.logger.error("Lost webdriver connection")
return Stop
msg = "Lost WebDriver connection"
self.logger.error(msg)
return ("INTERNAL-ERROR", msg)
success, data = ServoWebDriverRun(self.logger,
self.do_testharness,
@ -276,8 +276,9 @@ class ServoWebDriverRefTestExecutor(RefTestExecutor):
self.protocol.session.timeouts.script = timeout
self.timeout = timeout
except IOError:
self.logger.error("Lost webdriver connection")
return Stop
msg = "Lost webdriver connection"
self.logger.error(msg)
return ("INTERNAL-ERROR", msg)
return ServoWebDriverRun(self.logger,
self._screenshot,

@ -27,7 +27,6 @@ from .protocol import (BaseProtocolPart,
GenerateTestReportProtocolPart,
SetPermissionProtocolPart,
VirtualAuthenticatorProtocolPart)
from ..testrunner import Stop
import webdriver as client
from webdriver import error
@ -355,8 +354,9 @@ class WebDriverRun(TimedRunner):
try:
self.protocol.base.set_timeout(self.timeout + self.extra_timeout)
except client.UnknownErrorException:
self.logger.error("Lost WebDriver connection")
return Stop
msg = "Lost WebDriver connection"
self.logger.error(msg)
return ("INTERNAL-ERROR", msg)
def run_func(self):
try:

@ -0,0 +1,21 @@
import multiprocessing
import six
_context = None
class MpContext(object):
    """Python 2 stand-in for a multiprocessing context: delegates every
    attribute lookup (Queue, Process, Event, ...) straight to the
    multiprocessing module."""
    def __getattr__(self, name):
        return getattr(multiprocessing, name)
def get_context():
    """Return the process-wide multiprocessing context, creating it on
    first use.  Python 3 uses the "spawn" start method; Python 2 gets a
    thin wrapper around the multiprocessing module itself."""
    global _context
    if _context is None:
        if six.PY2:
            _context = MpContext()
        else:
            _context = multiprocessing.get_context("spawn")
    return _context

@ -5,12 +5,12 @@ from six.moves.urllib.parse import urlsplit
from abc import ABCMeta, abstractmethod
from six.moves.queue import Empty
from collections import defaultdict, deque
from multiprocessing import Queue
from six import ensure_binary, iteritems
from six.moves import range
from . import manifestinclude
from . import manifestexpected
from . import mpcontext
from . import wpttest
from mozlog import structured
@ -390,7 +390,8 @@ class GroupedSource(TestSource):
@classmethod
def make_queue(cls, tests, **kwargs):
test_queue = Queue()
mp = mpcontext.get_context()
test_queue = mp.Queue()
groups = []
state = {}
@ -423,7 +424,8 @@ class GroupedSource(TestSource):
class SingleTestSource(TestSource):
@classmethod
def make_queue(cls, tests, **kwargs):
test_queue = Queue()
mp = mpcontext.get_context()
test_queue = mp.Queue()
processes = kwargs["processes"]
queues = [deque([]) for _ in range(processes)]
metadatas = [cls.group_metadata(None) for _ in range(processes)]
@ -467,7 +469,8 @@ class GroupFileTestSource(TestSource):
ids_to_tests = {test.id: test for test in tests}
test_queue = Queue()
mp = mpcontext.get_context()
test_queue = mp.Queue()
for group_name, test_ids in iteritems(tests_by_group):
group_metadata = {"scope": group_name}

@ -1,14 +1,14 @@
from __future__ import unicode_literals
import multiprocessing
import threading
import traceback
from six.moves.queue import Empty
from collections import namedtuple
from multiprocessing import Process, current_process, Queue
from mozlog import structuredlog, capture
from . import mpcontext
# Special value used as a sentinal in various commands
Stop = object()
@ -68,7 +68,7 @@ class TestRunner(object):
self.result_queue = result_queue
self.executor = executor
self.name = current_process().name
self.name = mpcontext.get_context().current_process().name
self.logger = logger
def __enter__(self):
@ -311,9 +311,11 @@ class TestRunnerManager(threading.Thread):
self.executor_cls = executor_cls
self.executor_kwargs = executor_kwargs
mp = mpcontext.get_context()
# Flags used to shut down this thread if we get a sigint
self.parent_stop_flag = stop_flag
self.child_stop_flag = multiprocessing.Event()
self.child_stop_flag = mp.Event()
self.rerun = rerun
self.run_count = 0
@ -326,8 +328,8 @@ class TestRunnerManager(threading.Thread):
assert recording is not None
self.recording = recording
self.command_queue = Queue()
self.remote_queue = Queue()
self.command_queue = mp.Queue()
self.remote_queue = mp.Queue()
self.test_runner_proc = None
@ -519,9 +521,11 @@ class TestRunnerManager(threading.Thread):
executor_browser_kwargs,
self.capture_stdio,
self.child_stop_flag)
self.test_runner_proc = Process(target=start_runner,
args=args,
name="TestRunner-%i" % self.manager_number)
mp = mpcontext.get_context()
self.test_runner_proc = mp.Process(target=start_runner,
args=args,
name="TestRunner-%i" % self.manager_number)
self.test_runner_proc.start()
self.logger.debug("Test runner started")
# Now we wait for either an init_succeeded event or an init_failed event
@ -766,6 +770,7 @@ class TestRunnerManager(threading.Thread):
self.logger.debug("waiting for runner process to end")
self.test_runner_proc.join(10)
self.logger.debug("After join")
mp = mpcontext.get_context()
if self.test_runner_proc.is_alive():
# This might leak a file handle from the queue
self.logger.warning("Forcibly terminating runner process")
@ -777,9 +782,9 @@ class TestRunnerManager(threading.Thread):
# (subsequent attempts to retrieve items may block indefinitely).
# Discard the potentially-corrupted queue and create a new one.
self.command_queue.close()
self.command_queue = Queue()
self.command_queue = mp.Queue()
self.remote_queue.close()
self.remote_queue = Queue()
self.remote_queue = mp.Queue()
else:
self.logger.debug("Runner process exited with code %i" % self.test_runner_proc.exitcode)

@ -196,6 +196,8 @@ scheme host and port.""")
config_group.add_argument('--webdriver-arg',
default=[], action="append", dest="webdriver_args",
help="Extra argument for the WebDriver binary")
config_group.add_argument("--adb-binary", action="store",
help="Path to adb binary to use")
config_group.add_argument("--package-name", action="store",
help="Android package name to run tests against")
config_group.add_argument("--device-serial", action="store",

@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
import json
import os
import sys
from six import iteritems, itervalues
import wptserve
@ -10,6 +11,7 @@ from wptserve import sslutils
from . import environment as env
from . import instruments
from . import mpcontext
from . import products
from . import testloader
from . import wptcommandline
@ -151,11 +153,14 @@ def get_pause_after_test(test_loader, **kwargs):
def run_tests(config, test_paths, product, **kwargs):
"""Set up the test environment, load the list of tests to be executed, and
invoke the remainder of the code to execute tests"""
mp = mpcontext.get_context()
if kwargs["instrument_to_file"] is None:
recorder = instruments.NullInstrument()
else:
recorder = instruments.Instrument(kwargs["instrument_to_file"])
with recorder as recording, capture.CaptureIO(logger, not kwargs["no_capture_stdio"]):
with recorder as recording, capture.CaptureIO(logger,
not kwargs["no_capture_stdio"],
mp_context=mp):
recording.set(["startup"])
env.do_delayed_imports(logger, test_paths)

@ -1,8 +1,10 @@
import base64
import json
import os
import uuid
import six
import threading
import uuid
from multiprocessing.managers import AcquirerProxy, BaseManager, DictProxy
from six import text_type, binary_type
@ -32,13 +34,16 @@ ClientDictManager.register("Lock")
class StashServer(object):
def __init__(self, address=None, authkey=None):
def __init__(self, address=None, authkey=None, mp_context=None):
self.address = address
self.authkey = authkey
self.manager = None
self.mp_context = mp_context
def __enter__(self):
self.manager, self.address, self.authkey = start_server(self.address, self.authkey)
self.manager, self.address, self.authkey = start_server(self.address,
self.authkey,
self.mp_context)
store_env_config(self.address, self.authkey)
def __exit__(self, *args, **kwargs):
@ -61,10 +66,13 @@ def store_env_config(address, authkey):
os.environ["WPT_STASH_CONFIG"] = json.dumps((address, authkey.decode("ascii")))
def start_server(address=None, authkey=None):
def start_server(address=None, authkey=None, mp_context=None):
if isinstance(authkey, text_type):
authkey = authkey.encode("ascii")
manager = ServerDictManager(address, authkey)
kwargs = {}
if six.PY3 and mp_context is not None:
kwargs["ctx"] = mp_context
manager = ServerDictManager(address, authkey, **kwargs)
manager.start()
return (manager, manager._address, manager._authkey)