Skip to content

Commit

Permalink
feat: release changes for version 0.4.9
Browse files Browse the repository at this point in the history
  • Loading branch information
DeXtroTip committed Dec 2, 2024
1 parent 23c1b92 commit 1c60be9
Show file tree
Hide file tree
Showing 10 changed files with 111 additions and 79 deletions.
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from types import TracebackType
from typing import Any
from galaxy.core.utils import make_request
from galaxy.utils.requests import ClientSession, RequestError, create_session

from galaxy.utils.requests import ClientSession, create_session

__all__ = ["{{ cookiecutter.integration_name_pascalcase }}Client"]

Expand All @@ -13,6 +12,7 @@ def __init__(self, config, logger):

# (If needed) Client session
self._session: ClientSession | None = None
self._headers = {}

# (If needed) Implement logic for client context manager
async def __aenter__(self) -> "{{ cookiecutter.integration_name_pascalcase }}Client":
Expand All @@ -26,33 +26,12 @@ async def close(self) -> None:
if self._session is not None:
await self._session.close()

# (If needed) Session request utilities
async def _make_request(
self,
method: str,
url: str,
*,
retry: bool = True,
raise_on_error: bool = False,
none_on_404: bool = True,
**kwargs: Any,
) -> Any:
try:
return await make_request(
self._session,
method,
url,
**kwargs,
logger=self.logger,
retry_policy=self._retry_policy,
retry=retry,
none_on_404=none_on_404,
)
except RequestError as e:
if raise_on_error:
raise
self.logger.error(f"Error while making request, defaulting to empty response. ({e})")
return None
@property
def session(self) -> ClientSession:
    """Return the active HTTP client session.

    Raises:
        ValueError: If no session exists yet — presumably the client must
            first be entered as an async context manager, which creates
            ``self._session``. TODO(review): confirm against ``__aenter__``.
    """
    if self._session is None:
        raise ValueError("client session has not been created")

    return self._session


# Implement the logic to make the API requests in this class
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,10 @@ rely:
token: "{{ "{{" }} env('RELY_API_TOKEN') {{ "}}" }}"
url: "{{ "{{" }} env('RELY_API_URL') {{ "}}" }}"
integration:
# The identifier of this integration instance.
id: "{{ "{{" }} env('RELY_INTEGRATION_ID') | default('00000', true) {{ "}}" }}"
# The type of the integration.
id: "{{ "{{" }} env('RELY_INTEGRATION_ID') {{ "}}" }}"
type: "{{ cookiecutter.integration_name }}"
# The execution type of the integration.
executionType: "{{ "{{" }} env('RELY_INTEGRATION_EXECUTION_TYPE') | default('cronjob', true) {{ "}}" }}"
# The configuration for the integration.

scheduledInterval: "{{ "{{" }} env('RELY_INTEGRATION_DAEMON_INTERVAL') | default(60, true) | int {{ "}}" }}"
defaultModelMappings: {}
properties:
Expand Down
24 changes: 17 additions & 7 deletions galaxy/cli/cookiecutter/{{cookiecutter.integration_name}}/main.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,27 @@
from types import TracebackType
from typing import Any

from galaxy.core.galaxy import register, Integration
from galaxy.core.models import Config
from galaxy.integrations.{{cookiecutter.integration_name}}.client import {{cookiecutter.integration_name_pascalcase}}Client
from galaxy.integrations.{{ cookiecutter.integration_name }}.client import {{ cookiecutter.integration_name_pascalcase }}Client


class {{cookiecutter.integration_name_pascalcase}}(Integration):
class {{ cookiecutter.integration_name_pascalcase }}(Integration):
_methods = []

def __init__(self, config: Config):
super().__init__(config)
self.client = {{cookiecutter.integration_name_pascalcase}}Client(self.config, self.logger)
self.client = {{ cookiecutter.integration_name_pascalcase }}Client(self.config, self.logger)

async def __aenter__(self) -> "{{ cookiecutter.integration_name_pascalcase }}":
    # Delegate async-context entry to the underlying API client so any
    # resources it manages are set up alongside the integration.
    await self.client.__aenter__()
    return self

async def __aexit__(self, exc_type: type, exc: Exception, tb: TracebackType) -> None:
    # Propagate context exit (including any in-flight exception info) to the
    # underlying API client so it can release its resources.
    await self.client.__aexit__(exc_type, exc, tb)

@register(_methods, group=1)
async def entities(self) -> list[dict]:
self.entities = await self.client.api_request()
entities = await self.mapper.process("entities", self.entities)
return entities
async def entities(self) -> tuple[Any]:
    """Fetch raw entities from the API and run them through the mapper.

    Returns the output of ``self.mapper.process`` for the "entities"
    resource. NOTE(review): the ``tuple[Any]`` annotation reflects the
    mapper's return type — confirm against ``Mapper.process``.
    """
    entities = await self.client.api_request()
    mapped_entities = await self.mapper.process("entities", entities)
    return mapped_entities
17 changes: 11 additions & 6 deletions galaxy/core/mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,18 +57,23 @@ def _replace_non_matching_characters(self, input_string: str, regex_pattern: str

def _sanitize(self, entity: dict) -> dict:
if entity.get("id"):
entity["id"] = str(entity["id"]).lower()
entity["id"] = self._replace_non_matching_characters(entity["id"], self.id_allowed_chars).lower()
entity["id"] = self._replace_non_matching_characters(str(entity["id"]).lower(), self.id_allowed_chars)

for relation in entity.get("relations", {}).values():
if not relation.get("value"):
continue

if isinstance(relation["value"], list):
relation["value"] = [
self._replace_non_matching_characters(value, self.id_allowed_chars).lower()
value
if not value
else self._replace_non_matching_characters(str(value).lower(), self.id_allowed_chars)
for value in relation["value"]
]
else:
if relation["value"]:
relation["value"] = relation["value"].lower()
relation["value"] = self._replace_non_matching_characters(relation["value"], self.id_allowed_chars)
relation["value"] = self._replace_non_matching_characters(
str(relation["value"]).lower(), self.id_allowed_chars
)

return entity

Expand Down
4 changes: 2 additions & 2 deletions galaxy/integrations/github/.rely/mappings.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,8 @@ resources:
| sort
| first
// null
additions: .additions
deletions: .deletions
additions: .additions
deletions: .deletions
relations:
repository:
value: .context.repositoryId | tostring
Expand Down
60 changes: 41 additions & 19 deletions galaxy/integrations/github/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@


class GithubClient:
DEFAULT_DAYS_OF_HISTORY: int = 30

@staticmethod
def get_access_token(
app_id: str, app_private_key: str, app_installation_id: str = "", app_auth: bool = False
Expand Down Expand Up @@ -43,8 +45,6 @@ def get_access_token(
def __init__(self, config, logger, token):
self.config = config
self.logger = logger
self.history_limit_timestamp = datetime.now() - timedelta(days=self.days_of_history)
self.repo_activity_limit_timestamp = datetime.now() - timedelta(days=self.days_of_history * 3)

self._headers = {
"Authorization": f"Bearer {token}",
Expand All @@ -54,6 +54,11 @@ def __init__(self, config, logger, token):
self._session: ClientSession | None = None
self._retry_policy = RetryPolicy(logger=self.logger)

self._days_of_history: int | None = None

self.history_limit_timestamp = datetime.now() - timedelta(days=self.days_of_history)
self.repo_activity_limit_timestamp = datetime.now() - timedelta(days=self.days_of_history * 3)

async def __aenter__(self) -> "GithubClient":
self._session = create_session(timeout=self.timeout, headers=self._headers)
return self
Expand All @@ -67,12 +72,19 @@ async def close(self) -> None:

@property
def days_of_history(self) -> int:
days = int(self.config.integration.properties["daysOfHistory"])
if days < 1:
self.logger.warning("Invalid days of history, using default value 30")
return 30
if self._days_of_history is None:
try:
days = int(self.config.integration.properties["daysOfHistory"])
if days < 1:
self.logger.warning("Invalid days of history, using default value %d", self.DEFAULT_DAYS_OF_HISTORY)
days = self.DEFAULT_DAYS_OF_HISTORY
except (ValueError, TypeError):
self.logger.warning("Missing days of history, using default value %d", self.DEFAULT_DAYS_OF_HISTORY)
days = self.DEFAULT_DAYS_OF_HISTORY

self._days_of_history = days

return days
return self._days_of_history

@property
def base_url(self) -> str:
Expand Down Expand Up @@ -134,10 +146,16 @@ async def _make_request(
self.logger.error(f"Error while making request, defaulting to empty response. ({e})")
return None

async def _make_graphql_request(self, query: dict[str, Any]) -> Any:
async def _make_graphql_request(self, query: dict[str, Any], *, ignore_file_not_found_errors: bool = True) -> Any:
response = await self._make_request("POST", f"{self.base_url}/graphql", json=query, raise_on_error=True)
if response.get("errors"):
self.logger.warning("GraphQL error: %r", response["errors"])
if not ignore_file_not_found_errors:
errors = response["errors"]
else:
errors = [e for e in response["errors"] if isinstance(e, dict) and e.get("type") != "NOT_FOUND"]

if errors:
self.logger.warning("GraphQL error: %r", errors)
return response

def _parse_datetime(self, datetime_str: str) -> datetime:
Expand Down Expand Up @@ -170,12 +188,12 @@ async def get_repos(

return repos_list

async def get_repo(self, organization: str, repo: str) -> dict[str, str | int]:
async def get_repo(self, organization: str, repo: str) -> dict[str, Any]:
query = build_graphql_query(query_type=QueryType.REPOSITORY, repo_id=self.build_repo_id(organization, repo))
response = await self._make_graphql_request(query)
return response["data"]["repository"]

async def get_pull_requests(self, organization: str, repo: str, states: list[str]) -> list[dict[str, str | int]]:
async def get_pull_requests(self, organization: str, repo: str, states: list[str]) -> list[dict[str, Any]]:
all_pull_requests = []

cursor = None
Expand Down Expand Up @@ -205,7 +223,7 @@ async def get_pull_requests(self, organization: str, repo: str, states: list[str

return all_pull_requests

async def get_issues(self, organization: str, repo: str, state: str) -> list[dict[str, str | int]]:
async def get_issues(self, organization: str, repo: str, state: str) -> list[dict[str, Any]]:
all_issues = []

cursor = None
Expand Down Expand Up @@ -234,7 +252,7 @@ async def get_issues(self, organization: str, repo: str, state: str) -> list[dic

return all_issues

async def get_workflows(self, organization: str, repo: str) -> list[dict[str, str | int]]:
async def get_workflows(self, organization: str, repo: str) -> list[dict[str, Any]]:
all_workflows = []

url = f"{self.base_url}/repos/{organization}/{repo}/actions/workflows"
Expand All @@ -250,7 +268,7 @@ async def get_workflows(self, organization: str, repo: str) -> list[dict[str, st

return all_workflows

async def get_workflow_runs(self, organization: str, repo: str, workflow_id: str) -> list[dict[str, str | int]]:
async def get_workflow_runs(self, organization: str, repo: str, workflow_id: str) -> list[dict[str, Any]]:
all_workflow_runs = []

url = f"{self.base_url}/repos/{organization}/{repo}/actions/workflows/{workflow_id}/runs"
Expand Down Expand Up @@ -290,7 +308,7 @@ async def get_workflow_run_jobs(self, organization: str, repo: str, run_id: str)

return all_jobs

async def get_members(self, organization: str) -> list[dict[str, str | int]]:
async def get_members(self, organization: str) -> list[dict[str, Any]]:
all_members = []
cursor = None
while True:
Expand Down Expand Up @@ -391,7 +409,7 @@ async def get_team_repositories(self, organization: str, team_id: str) -> list[d

return all_repositories

async def get_environments(self, organization: str, repo: str) -> list[dict[str, str | int]]:
async def get_environments(self, organization: str, repo: str) -> list[dict[str, Any]]:
all_environments = []

url = f"{self.config.integration.properties['url']}/repos/{organization}/{repo}/environments"
Expand All @@ -409,7 +427,7 @@ async def get_environments(self, organization: str, repo: str) -> list[dict[str,

async def get_deployments(
self, organization: str, repo: str, environments: Iterable[str] | None = None
) -> list[dict[str, str | int]]:
) -> list[dict[str, Any]]:
all_deployments = []
cursor = None
while True:
Expand All @@ -435,8 +453,12 @@ async def get_deployments(
return all_deployments

async def get_commits(
self, organization: str, repo: str, branch: str, *, exclude_merge_commits: bool = True
) -> list[dict[str, str | int]]:
self, organization: str, repo: str, branch: str | None, *, exclude_merge_commits: bool = True
) -> list[dict[str, Any]]:
if branch is None or not branch:
self.logger.warning("Unable to fetch commits: branch not specified")
return []

all_commits = []

query_builder = partial(
Expand Down
18 changes: 11 additions & 7 deletions galaxy/integrations/github/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@
__all__ = ["Github"]


DEFAULT_BRANCH_NAME: str = "main"


class Github(Integration):
_methods = []

Expand Down Expand Up @@ -91,7 +94,7 @@ async def repository(self) -> None:
"owner": metadata["owner"]["login"],
"link": metadata["html_url"],
"metadata": metadata,
"default_branch": content.get("defaultBranchRef", {}).get("name"),
"default_branch": (content.get("defaultBranchRef") or {}).get("name") or DEFAULT_BRANCH_NAME,
"content": content,
}

Expand Down Expand Up @@ -140,6 +143,7 @@ async def pull_request(self) -> list[dict]:
self.repository_to_pull_requests[repo_id] = await self.client.get_pull_requests(
repo["owner"], repo["slug"], self.PULL_REQUEST_STATUS_TO_FETCH
)

prs_mapped.extend(
(
await self.mapper.process(
Expand All @@ -157,7 +161,7 @@ async def pull_request(self) -> list[dict]:
self.logger.info(f"Found {len(prs_mapped)} pull requests from the last {self.client.days_of_history} days")
new_entities = await self.register_inactive_users(inactive_usernames)
if new_entities:
self.logger.info(f"Found {len(new_entities) - 1} inactive members associated to deployments")
self.logger.info(f"Found {len(new_entities) - 1} inactive members associated to pull requests")

return new_entities + prs_mapped

Expand Down Expand Up @@ -206,7 +210,7 @@ async def workflow_run(self) -> list[dict]:
workflows_runs_mapped = []
inactive_usernames = set()
for repo_id, repo in self.repositories.items():
for workflow in self.repository_to_workflows[repo_id]:
for workflow in self.repository_to_workflows.get(repo_id) or []:
self.workflows_to_runs[repo_id] = await self.client.get_workflow_runs(
repo["owner"], repo["slug"], workflow["id"]
)
Expand All @@ -221,14 +225,14 @@ async def workflow_run(self) -> list[dict]:
)

inactive_usernames.update(
get_inactive_usernames_from_workflow_runs(self.repository_to_pull_requests[repo_id], self.users)
get_inactive_usernames_from_workflow_runs(self.workflows_to_runs[repo_id], self.users)
)
self.logger.info(
f"Found {len(workflows_runs_mapped)} workflow runs from the last {self.client.days_of_history} days"
"Found %d workflow runs from the last %d days", len(workflows_runs_mapped), self.client.days_of_history
)
new_entities = await self.register_inactive_users(inactive_usernames)
if new_entities:
self.logger.info(f"Found {len(new_entities) - 1} inactive members associated to deployments")
self.logger.info(f"Found {len(new_entities) - 1} inactive members associated to workflow runs")

return workflows_runs_mapped

Expand All @@ -255,7 +259,7 @@ async def deployments(self) -> list[dict]:
inactive_usernames = set()
for repo_id, repo in self.repositories.items():
self.repository_to_deployments[repo_id] = []
for environment in self.repository_to_environments[repo_id]:
for environment in self.repository_to_environments.get(repo_id) or []:
repo_env_deployments = await self.client.get_deployments(
repo["owner"], repo["slug"], [environment["name"]]
)
Expand Down
Loading

0 comments on commit 1c60be9

Please sign in to comment.