Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

signingscript: remove debsign support #1111

Draft
wants to merge 1 commit into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 0 additions & 18 deletions signingscript/docker.d/passwords.yml
Original file line number Diff line number Diff line change
Expand Up @@ -223,12 +223,6 @@ in:
["stage_autograph_authenticode_202404", "stage_autograph_authenticode_202404_stub"],
"authenticode_dep_sha256",
]
- ["https://stage.autograph.nonprod.webservices.mozgcp.net",
{"$eval": "AUTOGRAPH_STAGE_MOZILLAVPN_DEBSIGN_USERNAME"},
{"$eval": "AUTOGRAPH_STAGE_MOZILLAVPN_DEBSIGN_PASSWORD"},
["stage_autograph_debsign"],
"dummy_gpg2_pgpsubkey_debsign",
]
- ["https://stage.autograph.nonprod.webservices.mozgcp.net",
{"$eval": "AUTOGRAPH_STAGE_MOZILLAVPN_ADDONS_USERNAME"},
{"$eval": "AUTOGRAPH_STAGE_MOZILLAVPN_ADDONS_PASSWORD"},
Expand All @@ -245,12 +239,6 @@ in:
{"$eval": "AUTOGRAPH_AUTHENTICODE_SHA2_PASSWORD"},
["autograph_authenticode_202404", "autograph_authenticode_202404_stub"]
]
- ["https://autograph-external.prod.autograph.services.mozaws.net",
{"$eval": "AUTOGRAPH_MOZILLAVPN_DEBSIGN_USERNAME"},
{"$eval": "AUTOGRAPH_MOZILLAVPN_DEBSIGN_PASSWORD"},
["autograph_debsign"],
"release_at_mozilla_debsign_dep"
]
- ["https://autograph-external.prod.autograph.services.mozaws.net",
{"$eval": "AUTOGRAPH_MOZILLAVPN_ADDONS_USERNAME"},
{"$eval": "AUTOGRAPH_MOZILLAVPN_ADDONS_PASSWORD"},
Expand Down Expand Up @@ -485,12 +473,6 @@ in:
{"$eval": "AUTOGRAPH_MOZILLAVPN_PASSWORD"},
["autograph_apk"]
]
- ["https://autograph-external.prod.autograph.services.mozaws.net",
{"$eval": "AUTOGRAPH_MOZILLAVPN_DEBSIGN_USERNAME"},
{"$eval": "AUTOGRAPH_MOZILLAVPN_DEBSIGN_PASSWORD"},
["autograph_debsign"],
"release_at_mozilla_debsign_rel_2021"
]
- ["https://autograph-external.prod.autograph.services.mozaws.net",
{"$eval": "AUTOGRAPH_MOZILLAVPN_ADDONS_USERNAME"},
{"$eval": "AUTOGRAPH_MOZILLAVPN_ADDONS_PASSWORD"},
Expand Down
107 changes: 3 additions & 104 deletions signingscript/src/signingscript/sign.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import tempfile
import time
import zipfile
from contextlib import ExitStack
from functools import wraps
from io import BytesIO

Expand Down Expand Up @@ -784,7 +783,7 @@ async def _create_tarfile(context, to, files, compression, tmp_dir=None):
raise SigningScriptError(e)


def _encode_single_file(fp, signing_req):
def write_signing_req_to_disk(fp, signing_req):
"""Write signing_req to fp.

Does proper base64 and json encoding.
Expand Down Expand Up @@ -813,63 +812,6 @@ def _encode_single_file(fp, signing_req):
fp.write(b"}]")


def _encode_multiple_files(fp, signing_req):
"""Write signing_req to fp.

Builds a JSON byte string from the signing_req.
Does a proper base64 encoding of the binary content in the request's file blobs.
Doesn't hold onto a lot of memory by chunking the file blobs.
Writes the request to the binary-stream fp.
"""
_signing_req = signing_req.copy()
input_files = _signing_req.pop("files")
encoded_signing_req_bytes_io = BytesIO()
encoded_signing_req_bytes_io.write(b"[{")
for k, v in _signing_req.items():
encoded_signing_req_bytes_io.write(json.dumps(k).encode("utf8"))
encoded_signing_req_bytes_io.write(b":")
encoded_signing_req_bytes_io.write(json.dumps(v).encode("utf8"))
encoded_signing_req_bytes_io.write(b",")
encoded_signing_req_bytes_io.write(json.dumps("files").encode("utf8"))
encoded_signing_req_bytes_io.write(b":")
encoded_signing_req_bytes_io.write(b"[")
for input_file in input_files:
encoded_signing_req_bytes_io.write(b"{")
encoded_signing_req_bytes_io.write(json.dumps("name").encode("utf8"))
encoded_signing_req_bytes_io.write(b":")
encoded_signing_req_bytes_io.write(json.dumps(os.path.basename(input_file["name"])).encode("utf8"))
encoded_signing_req_bytes_io.write(b",")
encoded_signing_req_bytes_io.write(json.dumps("content").encode("utf8"))
encoded_signing_req_bytes_io.write(b":")
input_file["content"].seek(0)
encoded_signing_req_bytes_io.write(b'"')
while True:
block = input_file["content"].read(1020)
if not block:
break
encoded_block = b64encode(block).encode("utf8")
encoded_signing_req_bytes_io.write(encoded_block)
encoded_signing_req_bytes_io.write(b'"')
encoded_signing_req_bytes_io.write(b"},")
encoded_signing_req_bytes_io.seek(-2, 1)
encoded_signing_req_bytes_io.write(b"}]}]")
encoded_signing_req_bytes_io.seek(0)
encoded_signing_req_bytes = encoded_signing_req_bytes_io.read()
fp.write(encoded_signing_req_bytes)


def write_signing_req_to_disk(fp, signing_req):
"""Write signing_req to fp.

Does proper base64 and json encoding.
Tries not to hold onto a lot of memory.
"""
if "files" in signing_req:
_encode_multiple_files(fp, signing_req)
else:
_encode_single_file(fp, signing_req)


def get_hawk_content_hash(request_body, content_type):
"""Generate the content hash of the given request."""
h = hashlib.new("sha256")
Expand Down Expand Up @@ -943,12 +885,7 @@ def _is_xpi_format(fmt):
@time_function
def make_signing_req(input_file, fmt, keyid=None, extension_id=None):
"""Make a signing request object to pass to autograph."""
if isinstance(input_file, list):
sign_req = {"files": []}
for f in input_file:
sign_req["files"].append({"name": f.name, "content": f})
else:
sign_req = {"input": input_file}
sign_req = {"input": input_file}

if keyid:
sign_req["keyid"] = keyid
Expand Down Expand Up @@ -1011,7 +948,7 @@ async def sign_with_autograph(session, server, input_file, fmt, autograph_method
bytes: the signed data

"""
if autograph_method not in {"file", "hash", "data", "files"}:
if autograph_method not in {"file", "hash", "data"}:
raise SigningScriptError(f"Unsupported autograph method: {autograph_method}")

keyid = keyid or server.key_id
Expand All @@ -1026,8 +963,6 @@ async def sign_with_autograph(session, server, input_file, fmt, autograph_method

if autograph_method == "file":
return sign_resp[0]["signed_file"]
elif autograph_method == "files":
return sign_resp[0]["signed_files"]
else:
return sign_resp[0]["signature"]

Expand Down Expand Up @@ -1516,42 +1451,6 @@ async def sign_authenticode(context, orig_path, fmt, *, authenticode_comment=Non
return orig_path


@time_async_function
async def sign_debian_pkg(context, path, fmt, *args, **kwargs):
"""
Sign a debian package using autograph's debsign support.

Unpacks a tarball and signs the .dsc .buildinfo .changes files using the autograph /sign/files end-point.
Then, it re-compresses the tarball and uploads the new tarball with the signed files as an artifact.
"""
cert_type = task.task_cert_type(context)
autograph_config = get_autograph_config(context.autograph_configs, cert_type, [fmt], raise_on_empty=True)
cert_type = task.task_cert_type(context)
_, compression = os.path.splitext(path)
# Find *.dsc *.buildinfo *.changes. These are the files in the debian package we need to sign.
extensions = (".dsc", ".buildinfo", ".changes")
tmp_dir = os.path.join(context.config["work_dir"], "untarred")
all_file_names = await _extract_tarfile(context, path, compression, tmp_dir=tmp_dir)
input_file_names = [input_file_name for input_file_name in all_file_names if input_file_name.endswith(extensions)]
basename_to_file_name = {os.path.basename(input_file_name): input_file_name for input_file_name in input_file_names}
with ExitStack() as stack:
input_files = [stack.enter_context(open(input_file_name, "rb")) for input_file_name in input_file_names]
signed_files = await sign_with_autograph(
context.session,
autograph_config,
input_files,
fmt,
"files",
)
# go from base64 back to bytes before writing the files to disk
signed_files = [{"name": basename_to_file_name[signed_file["name"]], "content": base64.b64decode(signed_file["content"])} for signed_file in signed_files]
for signed_file in signed_files:
with open(signed_file["name"], "wb") as output_file:
output_file.write(signed_file["content"])
await _create_tarfile(context, path, all_file_names, compression, tmp_dir=tmp_dir)
return path


def _can_notarize(filename, supported_extensions):
"""
Check if file can be notarized based on extension
Expand Down
2 changes: 0 additions & 2 deletions signingscript/src/signingscript/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
apple_notarize_geckodriver,
apple_notarize_stacked, # noqa: F401
sign_authenticode,
sign_debian_pkg,
sign_file,
sign_file_detached,
sign_gpg_with_autograph,
Expand All @@ -40,7 +39,6 @@
"autograph_hash_only_mar384": sign_mar384_with_autograph_hash,
"autograph_stage_mar384": sign_mar384_with_autograph_hash,
"autograph_gpg": sign_gpg_with_autograph,
"autograph_debsign": sign_debian_pkg,
"autograph_widevine": sign_widevine,
"autograph_omnija": sign_omnija,
"autograph_langpack": sign_xpi,
Expand Down
49 changes: 1 addition & 48 deletions signingscript/tests/test_sign.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,8 @@ def is_tarfile(archive):


class MockedSession:
def __init__(self, signed_file=None, signed_files=None, signature=None, exception=None):
def __init__(self, signed_file=None, signature=None, exception=None):
self.signed_file = signed_file
self.signed_files = signed_files
self.exception = exception
self.signature = signature
self.post = mock.MagicMock(wraps=self.post)
Expand All @@ -76,8 +75,6 @@ async def post(self, *args, **kwargs):
resp.json.return_value = asyncio.Future()
if self.signed_file:
resp.json.return_value.set_result([{"signed_file": self.signed_file}])
if self.signed_files:
resp.json.return_value.set_result([{"signed_files": self.signed_files}])
if self.signature:
resp.json.return_value.set_result([{"signature": self.signature}])
if self.exception:
Expand Down Expand Up @@ -1319,50 +1316,6 @@ async def mocked_winsign(infile, outfile, digest_algo, certs, signer, cafile, **
assert os.path.exists(result)


@pytest.mark.asyncio
async def test_sign_debian_pkg(tmpdir, mocker, context):
context.autograph_configs = {TEST_CERT_TYPE: [utils.Autograph(*["https://autograph.dev.mozaws.net", "user", "pass", ["autograph_debsign"]])]}
tmp_dir = os.path.join(context.config["work_dir"], "test_untarred")
path = os.path.join(context.config["work_dir"], "test_tar", "target.tar.gz")
os.makedirs(os.path.dirname(path), exist_ok=True)
shutil.copy2(os.path.join(TEST_DATA_DIR, "target.tar.gz"), path)
extensions = (".dsc", ".buildinfo", ".changes")
_, compression = os.path.splitext(path)
all_file_names = await sign._extract_tarfile(context, path, compression, tmp_dir=tmp_dir)
input_file_names = [input_file_name for input_file_name in all_file_names if input_file_name.endswith(extensions)]
signed_files = [{"name": os.path.basename(input_file_name), "content": sign.b64encode(b"<DATA>")} for input_file_name in input_file_names]
mocked_session = MockedSession(signed_files=signed_files)
mocker.patch.object(context, "session", new=mocked_session)
result = await sign.sign_debian_pkg(context, path, "autograph_debsign")
assert result == path
assert os.path.exists(result)


def test_encode_multiple_files(tmpdir, mocker, context):
output_file = tempfile.TemporaryFile("w+b")
input_files = [
{"name": "uawum.txt", "content": BufferedRandom(BytesIO(b"RmUOX3AesiyzSlh"))},
{"name": "mweut.txt", "content": BufferedRandom(BytesIO(b"dhD52zxxKoKjKls"))},
{"name": "xbtnd.txt", "content": BufferedRandom(BytesIO(b"I0XvQRZh7CcYme6"))},
]
signing_req = {"keyid": "rvkgu", "options": {"zip": "passthrough"}, "files": input_files}
sign.write_signing_req_to_disk(output_file, signing_req)
output_file.seek(0)
result = json.loads(output_file.read().decode())
expected = [
{
"keyid": "rvkgu",
"options": {"zip": "passthrough"},
"files": [
{"name": "uawum.txt", "content": "Um1VT1gzQWVzaXl6U2xo"},
{"name": "mweut.txt", "content": "ZGhENTJ6eHhLb0tqS2xz"},
{"name": "xbtnd.txt", "content": "STBYdlFSWmg3Q2NZbWU2"},
],
}
]
assert result == expected


def test_encode_single_file(tmpdir, mocker, context):
output_file = tempfile.TemporaryFile("w+b")
signing_req = {"keyid": "rvkgu", "options": {"zip": "passthrough"}, "input": BufferedRandom(BytesIO(b"RmUOX3AesiyzSlh"))}
Expand Down