diff --git a/custom_components/hacs/repositories/base.py b/custom_components/hacs/repositories/base.py
index 02538840dd1..bd322a09c3a 100644
--- a/custom_components/hacs/repositories/base.py
+++ b/custom_components/hacs/repositories/base.py
@@ -114,6 +114,7 @@
     ("last_version", None),
     ("manifest_name", None),
     ("open_issues", 0),
+    ("prerelease", None),
     ("stargazers_count", 0),
     ("topics", []),
 )
@@ -165,6 +166,7 @@ class RepositoryData:
     manifest_name: str = None
     new: bool = True
     open_issues: int = 0
+    prerelease: str = None
     published_tags: list[str] = []
     releases: bool = False
     selected_tag: str = None
@@ -569,9 +571,11 @@ async def download_zip_files(self, validate: Validate) -> None:
                 ),
                 validate,
             )
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        except BaseException:
             validate.errors.append(
-                f"Download of {self.repository_manifest.filename} was not completed"
+                f"Download of {
+                    self.repository_manifest.filename} was not completed"
             )

     async def async_download_zip_file(
@@ -610,7 +614,8 @@ def cleanup_temp_dir():
                 return

             validate.errors.append(f"[{content['name']}] was not downloaded")
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        except BaseException:
             validate.errors.append("Download was not completed")

     async def download_content(self, version: string | None = None) -> None:
@@ -719,7 +724,8 @@ async def async_get_hacs_json(self, ref: str = None) -> dict[str, Any] | None:
             )
             if response:
                 return json_loads(decode_content(response.data.content))
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        except BaseException:
             pass

     async def async_get_info_file_contents(self, *, version: str | None = None, **kwargs) -> str:
@@ -820,7 +826,8 @@ async def remove_local_directory(self) -> None:
             )

         except (
-            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            BaseException
         ) as exception:
             self.logger.debug("%s Removing %s failed with %s", self.string, local_path, exception)
             return False
@@ -945,7 +952,8 @@ async def async_install_repository(self, *, version: str | None = None, **_) ->
         ):
             persistent_directory = Backup(
                 hacs=self.hacs,
-                local_path=f"{self.content.path.local}/{self.repository_manifest.persistent_directory}",
+                local_path=f"{
+                    self.content.path.local}/{self.repository_manifest.persistent_directory}",
                 backup_path=tempfile.gettempdir() + "/hacs_persistent_directory/",
             )
             await self.hacs.hass.async_add_executor_job(persistent_directory.create)
@@ -1272,7 +1280,8 @@ async def dowload_repository_content(self, content: FileInformation) -> None:
             self.validate.errors.append(f"[{content.name}] was not downloaded.")

         except (
-            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            BaseException
         ) as exception:
             self.validate.errors.append(f"Download was not completed [{exception}]")

@@ -1332,7 +1341,8 @@ async def get_documentation(
             return None

         result = await self.hacs.async_download_file(
-            f"https://raw.githubusercontent.com/{self.data.full_name}/{target_version}/{filename}",
+            f"https://raw.githubusercontent.com/{
+                self.data.full_name}/{target_version}/{filename}",
             nolog=True,
         )

@@ -1349,7 +1359,8 @@ async def get_hacs_json(self, *, version: str, **kwargs) -> HacsManifest | None:
         self.logger.debug("%s Getting hacs.json for version=%s", self.string, version)
         try:
             result = await self.hacs.async_download_file(
-                f"https://raw.githubusercontent.com/{self.data.full_name}/{version}/hacs.json",
+                f"https://raw.githubusercontent.com/{
+                    self.data.full_name}/{version}/hacs.json",
                 nolog=True,
             )
             if result is None:
diff --git a/custom_components/hacs/utils/data.py b/custom_components/hacs/utils/data.py
index 80a4f2cc929..7b548d9c378 100644
--- a/custom_components/hacs/utils/data.py
+++ b/custom_components/hacs/utils/data.py
@@ -47,6 +47,7 @@
     ("last_version", None),
     ("manifest_name", None),
     ("open_issues", 0),
+    ("prerelease", None),
     ("published_tags", []),
     ("releases", False),
     ("selected_tag", None),
@@ -289,6 +290,7 @@ def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
         repository.data.selected_tag = repository_data.get("selected_tag")
         repository.data.show_beta = repository_data.get("show_beta", False)
         repository.data.last_version = repository_data.get("last_version")
+        repository.data.prerelease = repository_data.get("prerelease")
         repository.data.last_commit = repository_data.get("last_commit")
         repository.data.installed_version = repository_data.get("version_installed")
         repository.data.installed_commit = repository_data.get("installed_commit")
@@ -301,6 +303,9 @@ def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
             repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
         )

+        if repository.data.prerelease == repository.data.last_version:
+            repository.data.prerelease = None
+
         if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
             # Set local path
             repository.content.path.local = repository.localpath
diff --git a/custom_components/hacs/utils/validate.py b/custom_components/hacs/utils/validate.py
index 554bea055e5..07b7c1214be 100644
--- a/custom_components/hacs/utils/validate.py
+++ b/custom_components/hacs/utils/validate.py
@@ -118,6 +118,7 @@ def validate_version(data: Any) -> Any:
         vol.Required("last_fetched"): vol.Any(int, float),
         vol.Required("last_updated"): str,
         vol.Optional("last_version"): str,
+        vol.Optional("prerelease"): str,
         vol.Required("manifest"): {
             vol.Optional("country"): vol.Any([str], False),
             vol.Optional("name"): str,
diff --git a/scripts/data/generate_category_data.py b/scripts/data/generate_category_data.py
index 1762195c4dd..911e3fcb486 100644
--- a/scripts/data/generate_category_data.py
+++ b/scripts/data/generate_category_data.py
@@ -57,6 +57,34 @@

 OUTPUT_DIR = os.path.join(os.getcwd(), "outputdata")

+GQL_RELEASES = """
+query ($owner: String!, $repo: String!) {
+  repository(owner: $owner, name: $repo) {
+    latestRelease {
+      tagName
+      releaseAssets(first: 5) {
+        nodes {
+          name
+          downloadCount
+        }
+      }
+    }
+    releases(last: 1, orderBy: {field: CREATED_AT, direction: ASC}) {
+      nodes {
+        tagName
+        isPrerelease
+        releaseAssets(first: 5) {
+          nodes {
+            name
+            downloadCount
+          }
+        }
+      }
+    }
+  }
+}
+"""
+

 def jsonprint(data: any):
     print(
@@ -130,9 +158,7 @@ def async_store_repository_data(self, repository: HacsRepository) -> dict:
         """Store the repository data."""
         data = {"manifest": {}}
         for key, default in HACS_MANIFEST_KEYS_TO_EXPORT:
-            if (
-                value := getattr(repository.repository_manifest, key, default)
-            ) != default:
+            if (value := getattr(repository.repository_manifest, key, default)) != default:
                 data["manifest"][key] = value

         for key, default in REPOSITORY_KEYS_TO_EXPORT:
@@ -166,8 +192,7 @@ def __init__(self, session: ClientSession, *, token: str | None = None):
         self.core.config_path = None
         self.configuration.token = token
         self.data = AdjustedHacsData(hacs=self)
-        self.data_client = HacsDataClient(
-            session=session, client_name="HACS/Generator")
+        self.data_client = HacsDataClient(session=session, client_name="HACS/Generator")

         self.github = GitHub(
             token,
@@ -210,41 +235,61 @@ async def concurrent_update_repository(self, repository: HacsRepository) -> None
                 "%s Fetching repository releases",
                 repository.string,
             )
-            response = await self.githubapi.generic(
-                endpoint=f"/repos/{repository.data.full_name}/releases/latest",
-                etag=repository.data.etag_releases,
-            )
-            response.data = (
-                GitHubReleaseModel(
-                    response.data) if response.data else None
+
+            repoowner, reponame = repository.data.full_name.split("/")
+            response = await self.githubapi.graphql(
+                query=GQL_RELEASES,
+                variables={"owner": repoowner, "repo": reponame},
             )
-            repository.data.etag_releases = response.etag
-            if (releases := response.data) is not None:
+
+            if (data := response.data["data"]["repository"]) is not None and (
+                last_release_nodes := data.get("releases", {}).get("nodes", [])
+            ):
                 repository.data.releases = True
-                repository.releases.objects = [releases]
-                repository.data.published_tags = [
-                    x.tag_name for x in repository.releases.objects
-                ]
+
+                latest_release = data.get("latestRelease", {})
+                last_release = last_release_nodes[0]
+
                 if (
-                    next_version := next(iter(repository.data.published_tags), None)
-                ) != repository.data.last_version:
-                    repository.data.last_version = next_version
+                    repository.data.prerelease
+                    and repository.data.prerelease != last_release["tagName"]
+                ) or (
+                    repository.data.last_version
+                    and repository.data.last_version != latest_release["tagName"]
+                ):
                     repository.data.etag_repository = None

-        except GitHubNotModifiedException:
-            repository.data.releases = True
-            repository.logger.info(
-                "%s Release data is up to date",
-                repository.string,
-            )
+                release_assets = latest_release.get("releaseAssets", {}).get("nodes", [])
+                repository.data.downloads = (
+                    release_assets[0]["downloadCount"] if release_assets else 0
+                )
+                repository.data.published_tags = [repository.data.last_version]
+                repository.releases.objects = [
+                    GitHubReleaseModel(
+                        {
+                            "tag_name": repository.data.last_version,
+                            "assets": [
+                                {
+                                    "name": a["name"],
+                                    "download_count": a["downloadCount"],
+                                }
+                                for a in release_assets
+                            ],
+                        }
+                    )
+                ]
+
+                repository.data.prerelease = last_release.get("tagName")
+
+                if repository.data.prerelease == repository.data.last_version:
+                    repository.data.prerelease = None
+
         except GitHubNotFoundException:
             repository.data.releases = False
-            repository.logger.info(
-                "%s No releases found", repository.string)
+            repository.logger.info("%s No releases found", repository.string)
         except GitHubException as exception:
             repository.data.releases = False
-            repository.logger.warning(
-                "%s %s", repository.string, exception)
+            repository.logger.warning("%s %s", repository.string, exception)

         await repository.common_update(
             force=repository.data.etag_repository is None,
@@ -330,8 +375,7 @@ async def get_category_repositories(
                 continue
             repository = self.repositories.get_by_full_name(repo)
             if repository is not None:
-                self.queue.add(self.concurrent_update_repository(
-                    repository=repository))
+                self.queue.add(self.concurrent_update_repository(repository=repository))
                 continue

             self.queue.add(
@@ -409,8 +453,7 @@ async def async_github_get_hacs_default_file(self, filename: str) -> list:
 async def generate_category_data(category: str, repository_name: str = None):
     """Generate data."""
     async with ClientSession() as session:
-        hacs = AdjustedHacs(
-            session=session, token=os.getenv("DATA_GENERATOR_TOKEN"))
+        hacs = AdjustedHacs(session=session, token=os.getenv("DATA_GENERATOR_TOKEN"))
         os.makedirs(os.path.join(OUTPUT_DIR, category), exist_ok=True)
         os.makedirs(os.path.join(OUTPUT_DIR, "diff"), exist_ok=True)
         force = os.environ.get("FORCE_REPOSITORY_UPDATE") == "True"
@@ -450,11 +493,7 @@ async def generate_category_data(category: str, repository_name: str = None):
         )

         did_raise = False
-        if (
-            not updated_data
-            or len(updated_data) == 0
-            or not isinstance(updated_data, dict)
-        ):
+        if not updated_data or len(updated_data) == 0 or not isinstance(updated_data, dict):
             print_error_and_exit(f"Updated data is empty", category)
             did_raise = True

@@ -471,8 +510,7 @@ async def generate_category_data(category: str, repository_name: str = None):
             print_error_and_exit(f"Invalid data: {errors}", category)

         if did_raise:
-            print_error_and_exit(
-                "Validation did raise but did not exit!", category)
+            print_error_and_exit("Validation did raise but did not exit!", category)
             sys.exit(1)  # Fallback, should not be reached

         with open(
diff --git a/tests/output/proxy_calls.json b/tests/output/proxy_calls.json
index bcfed001b95..49ebe6f1e14 100644
--- a/tests/output/proxy_calls.json
+++ b/tests/output/proxy_calls.json
@@ -1252,6 +1252,23 @@
     "https://api.github.com/repos/hacs-test-org/theme-basic/releases": 1,
     "https://data-v2.hacs.xyz/theme/data.json": 1
   },
+  "tests/scripts/data/test_generate_category_data.py::test_generate_category_data_with_prior_content[category_test_data0]": {
+    "https://api.github.com/graphql": 1,
+    "https://api.github.com/rate_limit": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic-custom": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic-custom/contents/custom_components/example/manifest.json": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic-custom/contents/hacs.json": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic-custom/git/trees/1.0.0": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic-custom/releases": 1,
+    "https://api.github.com/repos/hacs-test-org/integration-basic/git/trees/1.0.0": 1,
+    "https://api.github.com/repos/hacs/default/contents/integration": 1,
+    "https://api.github.com/repos/hacs/integration": 1,
+    "https://api.github.com/repos/hacs/integration/branches/main": 1,
+    "https://api.github.com/repos/hacs/integration/releases": 1,
+    "https://data-v2.hacs.xyz/integration/data.json": 1,
+    "https://data-v2.hacs.xyz/removed/repositories.json": 1
+  },
   "tests/test_config_flow.py::test_flow_with_activation_failure": {
     "https://github.com/login/device/code": 1,
     "https://github.com/login/oauth/access_token": 2
diff --git a/tests/scripts/data/test_generate_category_data.py b/tests/scripts/data/test_generate_category_data.py
index 8d8e56cf5c7..015b8947a07 100644
--- a/tests/scripts/data/test_generate_category_data.py
+++ b/tests/scripts/data/test_generate_category_data.py
@@ -1,4 +1,5 @@
 """Test generate category data."""
+
 import json

 from homeassistant.core import HomeAssistant
@@ -56,9 +57,7 @@ async def test_generate_category_data_single_repository(
                 category_test_data['repository']}/repositories.json",
         )

-    with open(
-        f"{OUTPUT_DIR}/summary.json", encoding="utf-8"
-    ) as file:
+    with open(f"{OUTPUT_DIR}/summary.json", encoding="utf-8") as file:
         snapshots.assert_match(
             safe_json_dumps(json.loads(file.read())),
             f"scripts/data/generate_category_data/single/{category_test_data['category']}/{
@@ -97,11 +96,92 @@ async def test_generate_category_data(
                 category_test_data['category']}/repositories.json",
         )

+    with open(f"{OUTPUT_DIR}/summary.json", encoding="utf-8") as file:
+        snapshots.assert_match(
+            safe_json_dumps(recursive_remove_key(json.loads(file.read()), ())),
+            f"scripts/data/generate_category_data/{
+                category_test_data['category']}/summary.json",
+        )
+
+
+@pytest.mark.parametrize("category_test_data", [{"category": "integration"}])
+async def test_generate_category_data_with_prior_content(
+    hass: HomeAssistant,
+    response_mocker: ResponseMocker,
+    snapshots: SnapshotFixture,
+    category_test_data: CategoryTestData,
+):
+    """Test behaviour with prior content."""
+    response_mocker.add(
+        f"https://data-v2.hacs.xyz/{category_test_data['category']}/data.json",
+        MockedResponse(
+            content={
+                "1296269": {
+                    "description": "This your first repo!",
+                    "domain": "example",
+                    "downloads": 42,
+                    "etag_repository": "321",
+                    "full_name": "hacs-test-org/integration-basic",
+                    "last_updated": "2011-01-26T19:06:43Z",
+                    "last_version": "1.0.0",
+                    "manifest": {"name": "Proxy manifest"},
+                    "manifest_name": "Proxy manifest",
+                    "stargazers_count": 80,
+                    "topics": ["api", "atom", "electron", "octocat"],
+                }
+            }
+        ),
+    )
+    response_mocker.add(
+        "https://api.github.com/graphql",
+        MockedResponse(
+            content={
+                "data": {
+                    "repository": {
+                        "latestRelease": {
+                            "tagName": "1.0.0",
+                            "releaseAssets": {
+                                "nodes": [{"name": "basic.zip", "downloadCount": 4321}]
+                            },
+                        },
+                        "releases": {
+                            "nodes": [
+                                {
+                                    "tagName": "2.0.0b0",
+                                    "isPrerelease": True,
+                                    "releaseAssets": {
+                                        "nodes": [{"name": "basic.zip", "downloadCount": 1234}]
+                                    },
+                                }
+                            ]
+                        },
+                    }
+                }
+            }
+        ),
+    )
+    await generate_category_data(category_test_data["category"])
+
+    with open(f"{OUTPUT_DIR}/{category_test_data['category']}/data.json", encoding="utf-8") as file:
+        snapshots.assert_match(
+            safe_json_dumps(recursive_remove_key(
+                json.loads(file.read()), ("last_fetched",))),
+            f"scripts/data/generate_category_data_with_prior_content/{
+                category_test_data['category']}/data.json",
+        )
+
     with open(
-        f"{OUTPUT_DIR}/summary.json", encoding="utf-8"
+        f"{OUTPUT_DIR}/{category_test_data['category']}/repositories.json", encoding="utf-8"
     ) as file:
         snapshots.assert_match(
             safe_json_dumps(recursive_remove_key(json.loads(file.read()), ())),
-            f"scripts/data/generate_category_data/{
+            f"scripts/data/generate_category_data_with_prior_content/{
+                category_test_data['category']}/repositories.json",
+        )
+
+    with open(f"{OUTPUT_DIR}/summary.json", encoding="utf-8") as file:
+        snapshots.assert_match(
+            safe_json_dumps(recursive_remove_key(json.loads(file.read()), ())),
+            f"scripts/data/generate_category_data_with_prior_content/{
                 category_test_data['category']}/summary.json",
         )
diff --git a/tests/snapshots/diagnostics/base.json b/tests/snapshots/diagnostics/base.json
index 9a19b1dec8a..3b3d27dc6b6 100644
--- a/tests/snapshots/diagnostics/base.json
+++ b/tests/snapshots/diagnostics/base.json
@@ -131,6 +131,7 @@
       "manifest_name": "HACS",
       "new": false,
       "open_issues": 2,
+      "prerelease": null,
       "published_tags": [],
       "releases": true,
       "selected_tag": null,
diff --git a/tests/snapshots/diagnostics/exception.json b/tests/snapshots/diagnostics/exception.json
index 80c5cb4a96a..4981c48c7d1 100644
--- a/tests/snapshots/diagnostics/exception.json
+++ b/tests/snapshots/diagnostics/exception.json
@@ -74,6 +74,7 @@
       "manifest_name": "HACS",
       "new": false,
       "open_issues": 2,
+      "prerelease": null,
       "published_tags": [],
       "releases": true,
       "selected_tag": null,
diff --git a/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/data.json b/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/data.json
new file mode 100644
index 00000000000..3e6cff05771
--- /dev/null
+++ b/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/data.json
@@ -0,0 +1,43 @@
+{
+  "1296269": {
+    "description": "This your first repo!",
+    "domain": "example",
+    "downloads": 4321,
+    "etag_repository": "321",
+    "full_name": "hacs-test-org/integration-basic",
+    "last_updated": "2011-01-26T19:06:43Z",
+    "last_version": "1.0.0",
+    "manifest": {
+      "name": "Proxy manifest"
+    },
+    "manifest_name": "Proxy manifest",
+    "prerelease": "2.0.0b0",
+    "stargazers_count": 80,
+    "topics": [
+      "api",
+      "atom",
+      "electron",
+      "octocat"
+    ]
+  },
+  "91296269": {
+    "description": "This your first repo!",
+    "domain": "example",
+    "downloads": 42,
+    "etag_repository": "321",
+    "full_name": "hacs-test-org/integration-basic-custom",
+    "last_updated": "2011-01-26T19:06:43Z",
+    "last_version": "1.0.0",
+    "manifest": {
+      "name": "Proxy manifest"
+    },
+    "manifest_name": "Proxy manifest",
+    "stargazers_count": 80,
+    "topics": [
+      "api",
+      "atom",
+      "electron",
+      "octocat"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/repositories.json b/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/repositories.json
new file mode 100644
index 00000000000..d81d7b4cd73
--- /dev/null
+++ b/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/repositories.json
@@ -0,0 +1,4 @@
+[
+  "hacs-test-org/integration-basic",
+  "hacs-test-org/integration-basic-custom"
+]
\ No newline at end of file
diff --git a/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/summary.json b/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/summary.json
new file mode 100644
index 00000000000..1d490c3707d
--- /dev/null
+++ b/tests/snapshots/scripts/data/generate_category_data_with_prior_content/integration/summary.json
@@ -0,0 +1,19 @@
+{
+  "changed": 2,
+  "changed_pct": 100,
+  "current_count": 1,
+  "diff": 1,
+  "new_count": 2,
+  "rate_limit": {
+    "core": {
+      "limit": 5000,
+      "reset": 1691591363,
+      "used": 1
+    },
+    "graphql": {
+      "limit": 5000,
+      "reset": 1691593228,
+      "used": 7
+    }
+  }
+}
\ No newline at end of file
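
For reference, a minimal sketch (not part of the patch) of how the GQL_RELEASES payload above maps onto the exported fields: latestRelease supplies the stable tag and asset download count, while the most recently created release supplies the prerelease tag, which is dropped when it matches the stable tag. The helper name extract_release_info and its standalone shape are illustrative only; the script itself does this inline in AdjustedHacs.concurrent_update_repository, mutating repository.data.

from __future__ import annotations

from typing import Any


def extract_release_info(repository: dict[str, Any]) -> tuple[str | None, str | None, int]:
    """Return (last_version, prerelease, downloads) from a GQL_RELEASES payload.

    The payload shape matches the GraphQL query above: ``latestRelease`` holds the
    latest stable release, ``releases.nodes`` the most recently created release,
    which may be a prerelease.
    """
    latest_release = repository.get("latestRelease") or {}
    release_nodes = (repository.get("releases") or {}).get("nodes") or []

    last_version = latest_release.get("tagName")
    assets = (latest_release.get("releaseAssets") or {}).get("nodes") or []
    downloads = assets[0]["downloadCount"] if assets else 0

    prerelease = release_nodes[0].get("tagName") if release_nodes else None
    # A "prerelease" equal to the latest stable tag carries no extra information.
    if prerelease == last_version:
        prerelease = None

    return last_version, prerelease, downloads


# Example using the mocked GraphQL response from the test above:
payload = {
    "latestRelease": {
        "tagName": "1.0.0",
        "releaseAssets": {"nodes": [{"name": "basic.zip", "downloadCount": 4321}]},
    },
    "releases": {
        "nodes": [
            {
                "tagName": "2.0.0b0",
                "isPrerelease": True,
                "releaseAssets": {"nodes": [{"name": "basic.zip", "downloadCount": 1234}]},
            }
        ]
    },
}
assert extract_release_info(payload) == ("1.0.0", "2.0.0b0", 4321)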