fix: charles review of install #21

Merged · 13 commits · Sep 12, 2024
1 change: 1 addition & 0 deletions .gitignore
@@ -21,3 +21,4 @@ zkrich1
default
secrets/
secret/
.DS_Store
161 changes: 94 additions & 67 deletions gaboon/commands/install.py
@@ -1,4 +1,5 @@
from argparse import Namespace
from dataclasses import dataclass
from pathlib import Path
import shutil
import subprocess
@@ -13,11 +14,7 @@
import traceback
from tqdm import tqdm
import zipfile
from io import BytesIO
from gaboon.constants.vars import (
REQUEST_HEADERS,
PACKAGE_VERSION_FILE,
)
from gaboon.constants.vars import REQUEST_HEADERS, PACKAGE_VERSION_FILE
import tomllib
import tomli_w
from enum import Enum
@@ -59,8 +56,8 @@ def classify_dependency(dependency: str) -> DependencyType:

if re.match(github_pattern, dependency):
return DependencyType.GITHUB
else:
return DependencyType.PIP

return DependencyType.PIP


# Much of this code thanks to brownie
@@ -79,7 +76,7 @@ def _github_installs(
org, repo = path.split("/")
except ValueError:
raise ValueError(
"Invalid package ID. Must be given as ORG/REPO@[VERSION]"
"Invalid package ID. Must be given as ORG/REPO[@VERSION]"
"\ne.g. 'pcaversaccio/[email protected]'"
) from None

@@ -167,29 +164,29 @@ def _stream_download(
download_url: str, target_path: str, headers: dict[str, str] = REQUEST_HEADERS
) -> None:
response = requests.get(download_url, stream=True, headers=headers)
response.raise_for_status()
total_size = int(response.headers.get("content-length", 0))

if response.status_code == 404:
raise ConnectionError(
f"404 error when attempting to download from {download_url} - "
"are you sure this is a valid mix? https://github.com/brownie-mix"
)
if response.status_code != 200:
raise ConnectionError(
f"Received status code {response.status_code} when attempting "
f"to download from {download_url}"
)
temp_file = os.path.join(target_path, "temp_download.zip")

total_size = int(response.headers.get("content-length", 0))
progress_bar = tqdm(total=total_size, unit="iB", unit_scale=True)
content = bytes()
with (
open(temp_file, "wb") as f,
tqdm(
desc="Downloading",
total=total_size,
unit="iB",
unit_scale=True,
unit_divisor=1024,
) as progress_bar,
):
for data in response.iter_content(chunk_size=None):
size = f.write(data)
progress_bar.update(size)

for data in response.iter_content(1024, decode_unicode=True):
progress_bar.update(len(data))
content += data
progress_bar.close()
with zipfile.ZipFile(temp_file, "r") as zip_ref:
zip_ref.extractall(target_path)

with zipfile.ZipFile(BytesIO(content)) as zf:
zf.extractall(target_path)
os.remove(temp_file)
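
> Review note: the revised download path streams the archive straight to a temporary zip on disk instead of buffering it in memory. A minimal standalone sketch of the same pattern, assuming a plain requests/tqdm setup and a hypothetical URL and target directory:

```python
import os
import zipfile

import requests
from tqdm import tqdm


def stream_zip_to_dir(download_url: str, target_path: str) -> None:
    # Stream the response so large archives never sit fully in memory.
    response = requests.get(download_url, stream=True)
    response.raise_for_status()
    total_size = int(response.headers.get("content-length", 0))

    temp_file = os.path.join(target_path, "temp_download.zip")
    with (
        open(temp_file, "wb") as f,
        tqdm(desc="Downloading", total=total_size, unit="iB", unit_scale=True, unit_divisor=1024) as bar,
    ):
        for chunk in response.iter_content(chunk_size=None):
            bar.update(f.write(chunk))

    # Extract alongside the temp file, then clean up.
    with zipfile.ZipFile(temp_file, "r") as zip_ref:
        zip_ref.extractall(target_path)
    os.remove(temp_file)


# stream_zip_to_dir("https://api.github.com/repos/ORG/REPO/zipball/v0.1.0", "/tmp/pkg")  # hypothetical call
```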


def _maybe_retrieve_github_auth() -> dict[str, str]:
@@ -198,7 +195,7 @@ def _maybe_retrieve_github_auth() -> dict[str, str]:
Otherwise returns an empty dict if no auth token is present.
"""
token = os.getenv("GITHUB_TOKEN")
if token:
if token is not None:
auth = b64encode(token.encode()).decode()
return {"Authorization": f"Basic {auth}"}
return {}
@@ -208,30 +205,24 @@ def _get_download_url_from_tag(org: str, repo: str, version: str, headers: dict)
response = requests.get(
f"https://api.github.com/repos/{org}/{repo}/tags?per_page=100", headers=headers
)
if response.status_code != 200:
msg = "Status {} when getting package versions from Github: '{}'".format(
response.status_code, response.json()["message"]
)
if response.status_code in (403, 404):
msg += (
"\n\nMissing or forbidden.\n"
"If this issue persists, generate a Github API token and store"
" it as the environment variable `GITHUB_TOKEN`:\n"
"https://github.blog/2013-05-16-personal-api-tokens/"
)
raise ConnectionError(msg)
response.raise_for_status()

data = response.json()
if not data:
raise ValueError("Github repository has no tags set")
org, repo = data[0]["zipball_url"].split("/")[3:5]
tags = [i["name"].lstrip("v") for i in data]
if version not in tags:
raise ValueError(
"Invalid version for this package. Available versions are:\n"
+ ", ".join(tags)
) from None
return next(i["zipball_url"] for i in data if i["name"].lstrip("v") == version)

available_versions = []
for tag in data:
tag_version = tag["name"].lstrip("v")
available_versions.append(tag_version)
if tag_version == version:
return tag["zipball_url"]

# If we've gone through all tags without finding a match, raise an error
raise ValueError(
f"Invalid version '{version}' for this package. Available versions are:\n"
+ ", ".join(available_versions)
)
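
> Review note: for illustration, the loop above can be exercised against a mocked tags payload (the data below is invented and only mirrors the shape of the GitHub /tags response):

```python
def resolve_zipball(data: list[dict], version: str) -> str:
    # Same matching logic as above: strip a leading "v" and compare.
    available_versions = []
    for tag in data:
        tag_version = tag["name"].lstrip("v")
        available_versions.append(tag_version)
        if tag_version == version:
            return tag["zipball_url"]
    raise ValueError(
        f"Invalid version '{version}' for this package. Available versions are:\n"
        + ", ".join(available_versions)
    )


mock_tags = [
    {"name": "v0.2.0", "zipball_url": "https://api.github.com/repos/org/repo/zipball/refs/tags/v0.2.0"},
    {"name": "v0.1.0", "zipball_url": "https://api.github.com/repos/org/repo/zipball/refs/tags/v0.1.0"},
]
assert resolve_zipball(mock_tags, "0.1.0").endswith("v0.1.0")
```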


def _pip_installs(
@@ -269,33 +260,69 @@ def _write_dependencies(new_package_ids: list[str], dependency_type: DependencyT
dep for dep in dependencies if classify_dependency(dep) == dependency_type
]

# Write to dependencies file
to_delete = set()
updated_packages = set()

if dependency_type == DependencyType.PIP:
for package in new_package_ids:
package_req = Requirement(package)
for dep in typed_dependencies:
if Requirement(dep).name == Requirement(package).name:
dep_req = Requirement(dep)
if dep_req.name == package_req.name:
to_delete.add(dep)
if package not in to_delete:
logger.info(f"Installed {package}")
else:
updated_packages.add(package_req.name)

if package_req.name not in updated_packages:
logger.info(f"Installed new package: {package}")
else:
logger.info(f"Updated package: {package}")
else: # GIT dependencies
for package in new_package_ids:
package_dep = GitHubDependency.from_string(package)
for dep in typed_dependencies:
package_path, _ = (
package.split("@", 1) if "@" in package else (package, None)
)
package_org, package_repo = str(package_path).split("/")

dep_path, _ = dep.split("@", 1) if "@" in dep else (dep, None)
dep_org, dep_repo = str(dep_path).split("/")
if dep_org == package_org and dep_repo == package_repo:
dep_gh = GitHubDependency.from_string(dep)
if dep_gh.org == package_dep.org and dep_gh.repo == package_dep.repo:
to_delete.add(dep)
if package not in to_delete:
updated_packages.add(package_dep.format_no_version())

if f"{package_dep.org}/{package_dep.repo}" not in updated_packages:
logger.info(f"Installed {package}")
else:
logger.info(f"Updated {package}")

# Remove old versions of updated packages
dependencies = [dep for dep in dependencies if dep not in to_delete]
# TODO: keep original order of dependencies
# e.g. if gaboon.toml has snekmate==0.1.0 and user installs
# snekmate==0.2.0, we should keep the original order in the toml file.
dependencies.extend(new_package_ids)
config.write_dependencies(dependencies)

# Add new packages while preserving order
new_deps = []
for dep in dependencies + new_package_ids:
if dep not in new_deps:
new_deps.append(dep)

if len(new_deps) > 0:
config.write_dependencies(new_deps)
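
> Review note: to make the dedup-and-append behavior concrete, a tiny sketch with invented package strings (this mirrors the merge above; the TODO about keeping an updated pin in its original slot still applies):

```python
dependencies = ["snekmate==0.1.0", "gaboon-cli"]   # hypothetical existing entries
new_package_ids = ["snekmate==0.2.0"]              # freshly installed pin
to_delete = {"snekmate==0.1.0"}                    # older pin of the updated package

kept = [dep for dep in dependencies if dep not in to_delete]
new_deps = []
for dep in kept + new_package_ids:
    if dep not in new_deps:
        new_deps.append(dep)

print(new_deps)  # ['gaboon-cli', 'snekmate==0.2.0'] -- the updated pin moves to the end
```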


@dataclass
class GitHubDependency:
org: str
repo: str
version: str | None = None

@classmethod
def from_string(cls, dep_string: str) -> "GitHubDependency":
if "@" in dep_string:
path, version = dep_string.split("@")
else:
path, version = dep_string, None

org, repo = str(path).split("/")
return cls(org, repo, version)

def format_no_version(self) -> str:
return f"{self.org}/{self.repo}"

def __str__(self) -> str:
if self.version:
return f"{self.org}/{self.repo}@{self.version}"
return self.format_no_version()
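
> Review note: for reference, a quick round-trip through the new dataclass (assuming it is importable from gaboon.commands.install; the package identifier is the same example used in the error message above):

```python
from gaboon.commands.install import GitHubDependency

dep = GitHubDependency.from_string("pcaversaccio/[email protected]")
print(dep.org, dep.repo, dep.version)  # pcaversaccio snekmate 0.1.0
print(dep.format_no_version())         # pcaversaccio/snekmate
print(str(dep))                        # pcaversaccio/[email protected]

unpinned = GitHubDependency.from_string("pcaversaccio/snekmate")
print(str(unpinned))                   # pcaversaccio/snekmate (no version suffix)
```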
29 changes: 20 additions & 9 deletions gaboon/config.py
@@ -15,12 +15,12 @@
import tomllib
from dotenv import load_dotenv
import os
import tomli_w
import shutil
import tempfile
import boa
from boa.environment import Env
from gaboon.logging import logger
import tomlkit

if TYPE_CHECKING:
from boa.network import NetworkEnv
@@ -176,14 +176,25 @@ def _load_env_file(self):
load_dotenv(dotenv_path=self.project_root.joinpath(DOT_ENV_FILE))

def read_gaboon_config(self, config_path: Path = None) -> dict:
config_path = self._validate_config_path(config_path)
with open(config_path, "rb") as f:
return tomllib.load(f)

def read_gaboon_config_preserve_comments(
self, config_path: Path = None
) -> tomlkit.TOMLDocument:
config_path = self._validate_config_path(config_path)
with open(config_path, "rb") as f:
return tomlkit.load(f)

def _validate_config_path(self, config_path: Path = None) -> Path:
if not config_path:
config_path = self._project_root
if not str(config_path).endswith(f"/{CONFIG_NAME}"):
config_path = config_path.joinpath(CONFIG_NAME)
if not config_path.exists():
raise FileNotFoundError(f"Config file not found: {config_path}")
with open(config_path, "rb") as f:
return tomllib.load(f)
return config_path
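
> Review note: a small standalone sketch of why tomlkit is used for the comment-preserving read: it round-trips a TOML document without dropping comments (the snippet uses an in-memory string rather than a real gaboon.toml):

```python
import tomlkit

source = """[project]
dependencies = ["snekmate", "gaboon-cli"]

# PRESERVE COMMENTS
"""

doc = tomlkit.parse(source)
doc["project"]["dependencies"] = ["snekmate==0.2.0", "gaboon-cli"]

print(tomlkit.dumps(doc))  # the "# PRESERVE COMMENTS" line survives the rewrite
```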

def expand_env_vars(self, value):
if isinstance(value, str):
@@ -201,10 +212,10 @@ def get_active_network(self):
def get_dependencies(self) -> list[str]:
return self.dependencies

def write_dependencies(self, new_dependencies: list):
def write_dependencies(self, dependencies: list):
target_path = self._project_root / CONFIG_NAME
toml_data = self.read_gaboon_config(target_path)
toml_data["project"]["dependencies"] = new_dependencies
toml_data = self.read_gaboon_config_preserve_comments(target_path)
toml_data["project"]["dependencies"] = dependencies # type: ignore

# Create a temporary file in the same directory as the target file
temp_file = tempfile.NamedTemporaryFile(
@@ -215,21 +226,21 @@ def write_dependencies(self, new_dependencies: list):
suffix=".toml",
)
try:
temp_file.write(tomli_w.dumps(toml_data))
temp_file.write(tomlkit.dumps(toml_data))
temp_file.close()
shutil.move(temp_file.name, target_path)
except Exception as e:
os.unlink(temp_file.name)
raise e

self.dependencies = new_dependencies
self.dependencies = dependencies
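
> Review note: the temp-file-then-move sequence above is the usual way to make the config write effectively atomic on the same filesystem; a generic sketch of the pattern with an illustrative helper name and path:

```python
import os
import shutil
import tempfile
from pathlib import Path


def atomic_write_text(target: Path, content: str) -> None:
    # Write to a sibling temp file first so a crash mid-write never leaves
    # a half-written target behind, then move the finished file into place.
    tmp = tempfile.NamedTemporaryFile(
        mode="w", delete=False, dir=target.parent, suffix=".toml"
    )
    try:
        tmp.write(content)
        tmp.close()
        shutil.move(tmp.name, target)
    except Exception:
        os.unlink(tmp.name)
        raise


# atomic_write_text(Path("gaboon.toml"), 'dependencies = []\n')  # hypothetical call
```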

def get_base_dependencies_install_path(self) -> Path:
project_root = self._project_root
base_install_path = project_root / self.project.get(
DEPENDENCIES_FOLDER, DEPENDENCIES_FOLDER
)
base_install_path.mkdir(exist_ok=True)
base_install_path.mkdir(exist_ok=True, parents=True)
return base_install_path

def get_root(self) -> Path:
2 changes: 1 addition & 1 deletion justfile
@@ -18,7 +18,7 @@ format-check:
test:
uv run pytest -x -s --ignore=tests/data/

# Run tests, fail on first test failure
# Run tests, fail on first test failure, enter debugger on failure
test-pdb:
uv run pytest -x -s --ignore=tests/data/ --pdb

6 changes: 5 additions & 1 deletion pyproject.toml
@@ -9,9 +9,10 @@ authors = [
dependencies = [
"titanoboa>=0.2.2",
"python-dotenv>=1.0.1",
"tomli-w>=1.0.0",
"titanoboa-zksync>=v0.2.2",
"tqdm>=4.66.5",
"tomlkit>=0.13.2", # For preserving comments when writing to toml
"tomli-w>=1.0.0",
]
readme = "README.md"
requires-python = ">= 3.11, < 3.13"
@@ -64,3 +65,6 @@ exclude = [

[tool.pytest.ini_options]
addopts = "--ignore=tests/data/"

[tool.ruff.format]
skip-magic-trailing-comma = true
2 changes: 2 additions & 0 deletions tests/cli/test_cli_install.py
@@ -11,6 +11,7 @@
org_name = "pcaversaccio"
github_package_name = f"{org_name}/{pip_package_name}"
version = "0.1.0"
comment_content = "PRESERVE COMMENTS"


def test_run_help(gab_path, installation_cleanup_dependencies):
@@ -105,6 +106,7 @@ def test_write_to_config_after_install(installation_cleanup_dependencies, gab_pa
assert github_package_name in config.dependencies
for dep in starting_dependencies:
assert dep in config.dependencies
assert comment_content in config.read_gaboon_config_preserve_comments().as_string()


def test_can_install_with_version(installation_cleanup_dependencies, gab_path):
2 changes: 2 additions & 0 deletions tests/conftest.py
@@ -40,6 +40,8 @@
INSTALLATION_STARTING_TOML = """[project]
dependencies = ["snekmate", "gaboon-cli"]

# PRESERVE COMMENTS

[networks.sepolia]
url = "https://ethereum-sepolia-rpc.publicnode.com"
chain_id = 11155111
2 changes: 2 additions & 0 deletions tests/data/installation_project/gaboon.toml
@@ -1,6 +1,8 @@
[project]
dependencies = ["snekmate", "gaboon-cli"]

# PRESERVE COMMENTS

[networks.sepolia]
url = "https://ethereum-sepolia-rpc.publicnode.com"
chain_id = 11155111