#!/usr/bin/env python3

import contextlib
import hashlib
import os
import platform
import re
import shutil
import subprocess
import sys
import tarfile
import urllib.request
import zipfile
from dataclasses import dataclass
from pathlib import Path
from typing import Optional, Union

import click
import cryptography.fernet
import parver


@contextlib.contextmanager
def chdir(path: Path):  # pragma: no cover
    old_dir = os.getcwd()
    os.chdir(path)
    yield
    os.chdir(old_dir)


class BuildError(Exception):
    pass


def bool_from_env(envvar: str) -> bool:
    val = os.environ.get(envvar, "")
    if not val or val.lower() in ("0", "false"):
        return False
    else:
        return True


class ZipFile2(zipfile.ZipFile):
    # ZipFile and tarfile have slightly different APIs. Let's fix that.
    def add(self, name: str, arcname: str) -> None:
        return self.write(name, arcname)

    def __enter__(self) -> "ZipFile2":
        return self


@dataclass(frozen=True, repr=False)
class BuildEnviron:
    PLATFORM_TAGS = {
        "Darwin": "osx",
        "Windows": "windows",
        "Linux": "linux",
    }

    system: str
    root_dir: Path

    branch: Optional[str] = None
    tag: Optional[str] = None
    is_pull_request: bool = True

    should_build_wheel: bool = False
    should_build_docker: bool = False
    should_build_pyinstaller: bool = False
    should_build_wininstaller: bool = False

    has_aws_creds: bool = False
    has_twine_creds: bool = False

    docker_username: Optional[str] = None
    docker_password: Optional[str] = None

    build_key: Optional[str] = None

    @classmethod
    def from_env(cls) -> "BuildEnviron":
        branch = None
        tag = None

        if ref := os.environ.get("GITHUB_REF", ""):
            if ref.startswith("refs/heads/"):
                branch = ref.replace("refs/heads/", "")
            if ref.startswith("refs/pull/"):
                branch = "pr-" + ref.split("/")[2]
            if ref.startswith("refs/tags/"):
                tag = ref.replace("refs/tags/", "")

        is_pull_request = os.environ.get("GITHUB_EVENT_NAME", "pull_request") == "pull_request"

        return cls(
            system=platform.system(),
            root_dir=Path(__file__).parent.parent,
            branch=branch,
            tag=tag,
            is_pull_request=is_pull_request,
            should_build_wheel=bool_from_env("CI_BUILD_WHEEL"),
            should_build_pyinstaller=bool_from_env("CI_BUILD_PYINSTALLER"),
            should_build_wininstaller=bool_from_env("CI_BUILD_WININSTALLER"),
            should_build_docker=bool_from_env("CI_BUILD_DOCKER"),
            has_aws_creds=bool_from_env("AWS_ACCESS_KEY_ID"),
            has_twine_creds=bool_from_env("TWINE_USERNAME") and bool_from_env("TWINE_PASSWORD"),
            docker_username=os.environ.get("DOCKER_USERNAME", None),
            docker_password=os.environ.get("DOCKER_PASSWORD", None),
            build_key=os.environ.get("CI_BUILD_KEY", None),
        )

    def archive(self, path: Path) -> Union[tarfile.TarFile, ZipFile2]:
        if self.system == "Windows":
            return ZipFile2(path, "w")
        else:
            return tarfile.open(path, "w:gz")

    @property
    def archive_path(self) -> Path:
        if self.system == "Windows":
            ext = "zip"
        else:
            ext = "tar.gz"
        return self.dist_dir / f"mitmproxy-{self.version}-{self.platform_tag}.{ext}"

    @property
    def build_dir(self) -> Path:
        return self.release_dir / "build"

    @property
    def dist_dir(self) -> Path:
        return self.release_dir / "dist"

    @property
    def docker_tag(self) -> str:
        if self.branch == "main":
            t = "dev"
        else:
            t = self.version
        return f"mitmproxy/mitmproxy:{t}"

    def dump_info(self, fp=sys.stdout) -> None:
        lst = [
            "version",
            "tag",
            "branch",
            "platform_tag",
            "root_dir",
            "release_dir",
            "build_dir",
            "dist_dir",
            "upload_dir",
            "should_build_wheel",
            "should_build_pyinstaller",
            "should_build_wininstaller",
            "should_build_docker",
            "should_upload_aws",
            "should_upload_docker",
            "should_upload_pypi",
        ]
        for attr in lst:
            print(f"cibuild.{attr}={getattr(self, attr)}", file=fp)

    def check_version(self) -> None:
        """
        Check that version numbers match our conventions.
        Raises a ValueError if there is a mismatch.
        """
        contents = (self.root_dir / "mitmproxy" / "version.py").read_text("utf8")
        match = re.search(r'^VERSION = "(.+?)"', contents, re.M)
        assert match
        version = match.group(1)

        if self.is_prod_release:
            # For production releases, we require strict version equality
            if self.version != version:
                raise ValueError(f"Tag is {self.tag}, but mitmproxy/version.py is {version}.")
        elif not self.is_maintenance_branch:
            # Commits on maintenance branches don't need the dev suffix. This
            # allows us to incorporate and test commits between tagged releases.
            # For snapshots, we only ensure that mitmproxy/version.py contains a
            # dev release.
            version_info = parver.Version.parse(version)
            if not version_info.is_devrelease:
                raise ValueError(f"Non-production releases must have dev suffix: {version}")

    @property
    def is_maintenance_branch(self) -> bool:
        """
        Is this an untagged commit on a maintenance branch?
        """
        if not self.tag and self.branch and re.match(r"v\d+\.x", self.branch):
            return True
        return False

    @property
    def has_docker_creds(self) -> bool:
        return bool(self.docker_username and self.docker_password)

    @property
    def is_prod_release(self) -> bool:
        if not self.tag or not self.tag.startswith("v"):
            return False
        try:
            v = parver.Version.parse(self.version, strict=True)
        except (parver.ParseError, BuildError):
            return False
        return not v.is_prerelease

    @property
    def platform_tag(self) -> str:
        if self.system in self.PLATFORM_TAGS:
            return self.PLATFORM_TAGS[self.system]
        raise BuildError(f"Unsupported platform: {self.system}")

    @property
    def release_dir(self) -> Path:
        return self.root_dir / "release"

    @property
    def should_upload_docker(self) -> bool:
        return all([
            (self.is_prod_release or self.branch == "main"),
            self.should_build_docker,
            self.has_docker_creds,
        ])

    @property
    def should_upload_aws(self) -> bool:
        return all([
            self.has_aws_creds,
            (self.should_build_wheel or self.should_build_pyinstaller or self.should_build_wininstaller),
        ])

    @property
    def should_upload_pypi(self) -> bool:
        return all([
            self.is_prod_release,
            self.should_build_wheel,
            self.has_twine_creds,
        ])

    @property
    def upload_dir(self) -> str:
        if self.tag:
            return self.version
        else:
            return f"branches/{self.version}"

    @property
    def version(self) -> str:
        if self.tag:
            if self.tag.startswith("v"):
                try:
                    parver.Version.parse(self.tag[1:], strict=True)
                except parver.ParseError:
                    return self.tag
                return self.tag[1:]
            return self.tag
        elif self.branch:
            return self.branch
        else:
            raise BuildError("We're on neither a tag nor a branch - could not establish version")


def build_wheel(be: BuildEnviron) -> None:  # pragma: no cover
    click.echo("Building wheel...")
    subprocess.check_call([
        "python",
        "setup.py",
        "-q",
        "bdist_wheel",
        "--dist-dir", be.dist_dir,
    ])
    whl, = be.dist_dir.glob('mitmproxy-*-py3-none-any.whl')
    click.echo(f"Found wheel package: {whl}")
    subprocess.check_call(["tox", "-e", "wheeltest", "--", whl])


def build_docker_image(be: BuildEnviron) -> None:  # pragma: no cover
    click.echo("Building Docker images...")

    whl, = be.dist_dir.glob('mitmproxy-*-py3-none-any.whl')
    docker_build_dir = be.release_dir / "docker"
    shutil.copy(whl, docker_build_dir / whl.name)
    subprocess.check_call(
        [
            "docker",
            "build",
            "--tag", be.docker_tag,
            "--build-arg", f"MITMPROXY_WHEEL={whl.name}",
            ".",
        ],
        cwd=docker_build_dir,
    )
    # smoke-test the newly built docker image
    r = subprocess.run([
        "docker",
        "run",
        "--rm",
        be.docker_tag,
        "mitmdump",
        "--version",
    ], check=True, capture_output=True)
    print(r.stdout.decode())
    assert "Mitmproxy: " in r.stdout.decode()


def build_pyinstaller(be: BuildEnviron) -> None:  # pragma: no cover
    click.echo("Building pyinstaller package...")

    PYINSTALLER_SPEC = be.release_dir / "specs"
    PYINSTALLER_HOOKS = be.release_dir / "hooks"
    PYINSTALLER_TEMP = be.build_dir / "pyinstaller"
    PYINSTALLER_DIST = be.build_dir / "binaries" / be.platform_tag

    if PYINSTALLER_TEMP.exists():
        shutil.rmtree(PYINSTALLER_TEMP)
    if PYINSTALLER_DIST.exists():
        shutil.rmtree(PYINSTALLER_DIST)

    if be.platform_tag == "windows":
        with chdir(PYINSTALLER_SPEC):
            click.echo("Building PyInstaller binaries in directory mode...")
            subprocess.check_call(
                [
                    "pyinstaller",
                    "--clean",
                    "--workpath", PYINSTALLER_TEMP,
                    "--distpath", PYINSTALLER_DIST,
                    "./windows-dir.spec"
                ]
            )
            for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
                click.echo(f"> {tool} --version")
                executable = (PYINSTALLER_DIST / "onedir" / tool).with_suffix(".exe")
                click.echo(subprocess.check_output([executable, "--version"]).decode())

    with be.archive(be.archive_path) as archive:
        for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
            # We can't have a folder and a file with the same name.
            if tool == "mitmproxy":
                tool = "mitmproxy_main"
            # Make sure that we are in the spec folder.
            with chdir(PYINSTALLER_SPEC):
                click.echo(f"Building PyInstaller {tool} binary...")
                excludes = []
                if tool != "mitmweb":
                    excludes.append("mitmproxy.tools.web")
                if tool != "mitmproxy_main":
                    excludes.append("mitmproxy.tools.console")

                subprocess.check_call(
                    [  # type: ignore
                        "pyinstaller",
                        "--clean",
                        "--workpath", PYINSTALLER_TEMP,
                        "--distpath", PYINSTALLER_DIST,
                        "--additional-hooks-dir", PYINSTALLER_HOOKS,
                        "--onefile",
                        "--console",
                        "--icon", "icon.ico",
                    ]
                    + [x for e in excludes for x in ["--exclude-module", e]]
                    + [tool]
                )
                # Delete the spec file - we're good without.
                os.remove(f"{tool}.spec")

            executable = PYINSTALLER_DIST / tool
            if be.platform_tag == "windows":
                executable = executable.with_suffix(".exe")

            # Remove _main suffix from mitmproxy executable
            if "_main" in executable.name:
                executable = executable.rename(
                    executable.with_name(executable.name.replace("_main", ""))
                )

            # Test if it works at all O:-)
            click.echo(f"> {executable} --version")
            click.echo(subprocess.check_output([executable, "--version"]).decode())

            archive.add(str(executable), str(executable.name))
    click.echo("Packed {}.".format(be.archive_path.name))


def build_wininstaller(be: BuildEnviron) -> None:  # pragma: no cover
    click.echo("Building wininstaller package...")

    IB_VERSION = "20.12.0"
    IB_SETUP_SHA256 = "657f4785c7d70f140468435b99e79ced813e7e051106e7525e0c819efffb40d3"
    IB_DIR = be.release_dir / "installbuilder"
    IB_SETUP = IB_DIR / "setup" / f"{IB_VERSION}-installer.exe"
    IB_CLI = Path(fr"C:\Program Files\VMware InstallBuilder Enterprise {IB_VERSION}\bin\builder-cli.exe")
    IB_LICENSE = IB_DIR / "license.xml"

    if not IB_LICENSE.exists() and not be.build_key:
        click.echo("Cannot build windows installer without secret key.")
        return

    if not IB_CLI.exists():
        if not IB_SETUP.exists():
            click.echo("Downloading InstallBuilder...")

            def report(block, blocksize, total):
                done = block * blocksize
                if round(100 * done / total) != round(100 * (done - blocksize) / total):
                    click.secho(f"Downloading... {round(100 * done / total)}%")

            tmp = IB_SETUP.with_suffix(".tmp")
            urllib.request.urlretrieve(
                f"https://clients.bitrock.com/installbuilder/installbuilder-enterprise-{IB_VERSION}-windows-x64-installer.exe",
                tmp,
                reporthook=report
            )
            tmp.rename(IB_SETUP)

        ib_setup_hash = hashlib.sha256()
        with IB_SETUP.open("rb") as fp:
            while True:
                data = fp.read(65_536)
                if not data:
                    break
                ib_setup_hash.update(data)
        if ib_setup_hash.hexdigest() != IB_SETUP_SHA256:  # pragma: no cover
            raise RuntimeError("InstallBuilder hashes don't match.")

        click.echo("Install InstallBuilder...")
        subprocess.run([IB_SETUP, "--mode", "unattended", "--unattendedmodeui", "none"], check=True)
        assert IB_CLI.is_file()

    if not IB_LICENSE.exists():
        assert be.build_key
        click.echo("Decrypt InstallBuilder license...")
        f = cryptography.fernet.Fernet(be.build_key.encode())
        with open(IB_LICENSE.with_suffix(".xml.enc"), "rb") as infile, \
                open(IB_LICENSE, "wb") as outfile:
            outfile.write(f.decrypt(infile.read()))

    click.echo("Run InstallBuilder...")
    subprocess.run([
        IB_CLI,
        "build",
        str(IB_DIR / "mitmproxy.xml"),
        "windows",
        "--license", str(IB_LICENSE),
        "--setvars", f"project.version={be.version}",
        "--verbose"
    ], check=True)
    assert (be.dist_dir / f"mitmproxy-{be.version}-windows-installer.exe").exists()


@click.group(chain=True)
def cli():  # pragma: no cover
    """
    mitmproxy build tool
    """
    pass


@cli.command("build")
def build():  # pragma: no cover
    """
    Build a binary distribution
    """
    be = BuildEnviron.from_env()
    be.dump_info()

    be.check_version()
    os.makedirs(be.dist_dir, exist_ok=True)

    if be.should_build_wheel:
        build_wheel(be)
    if be.should_build_docker:
        build_docker_image(be)
    if be.should_build_pyinstaller:
        build_pyinstaller(be)
    if be.should_build_wininstaller:
        build_wininstaller(be)


@cli.command("upload")
def upload():  # pragma: no cover
    """
    Upload build artifacts

    Uploads the wheel package to PyPI.
    Uploads the PyInstaller and wheel packages to the snapshot server.
    Pushes the Docker image to Docker Hub.
    """
    be = BuildEnviron.from_env()
    be.dump_info()

    if be.is_pull_request:
        click.echo("Refusing to upload artifacts from a pull request!")
        return

    if be.should_upload_aws:
        num_files = len([name for name in be.dist_dir.iterdir() if name.is_file()])
        click.echo(f"Uploading {num_files} files to AWS dir {be.upload_dir}...")
        subprocess.check_call([
            "aws", "s3", "cp",
            "--acl", "public-read",
            f"{be.dist_dir}/",
            f"s3://snapshots.mitmproxy.org/{be.upload_dir}/",
            "--recursive",
        ])

    if be.should_upload_pypi:
        whl, = be.dist_dir.glob('mitmproxy-*-py3-none-any.whl')
        click.echo(f"Uploading {whl} to PyPI...")
        subprocess.check_call(["twine", "upload", whl])

    if be.should_upload_docker:
        click.echo(f"Uploading Docker image to tag={be.docker_tag}...")
        subprocess.check_call([
            "docker",
            "login",
            "-u", be.docker_username,
            "-p", be.docker_password,
        ])
        subprocess.check_call(["docker", "push", be.docker_tag])

        if be.is_prod_release:
            subprocess.check_call(["docker", "tag", be.docker_tag, "mitmproxy/mitmproxy:latest"])
            subprocess.check_call(["docker", "push", "mitmproxy/mitmproxy:latest"])


if __name__ == "__main__":  # pragma: no cover
    cli()