Mirror of https://github.com/CCOSTAN/Home-AssistantConfig.git (synced 2025-08-20 12:10:28 +00:00)
Updated HACS and also fixed Garadget #727
124 config/custom_components/hacs/helpers/download.py (Executable file → Normal file)
@@ -16,9 +16,13 @@ class FileInformation:

def should_try_releases(repository):
    """Return a boolean indicating whether to download releases or not."""
    if repository.ref == repository.information.default_branch:
    if repository.data.zip_release:
        if repository.data.filename.endswith(".zip"):
            if repository.ref != repository.data.default_branch:
                return True
    if repository.ref == repository.data.default_branch:
        return False
    if repository.information.category not in ["plugin", "theme"]:
    if repository.data.category not in ["plugin", "theme"]:
        return False
    if not repository.releases.releases:
        return False
@@ -31,7 +35,7 @@ def gather_files_to_download(repository):
    tree = repository.tree
    ref = f"{repository.ref}".replace("tags/", "")
    releaseobjects = repository.releases.objects
    category = repository.information.category
    category = repository.data.category
    remotelocation = repository.content.path.remote

    if should_try_releases(repository):
@@ -44,7 +48,7 @@ def gather_files_to_download(repository):

    if repository.content.single:
        for treefile in tree:
            if treefile.filename == repository.information.file_name:
            if treefile.filename == repository.data.file_name:
                files.append(
                    FileInformation(
                        treefile.download_url, treefile.full_path, treefile.filename
@@ -55,28 +59,29 @@ def gather_files_to_download(repository):
    if category == "plugin":
        for treefile in tree:
            if treefile.path in ["", "dist"]:
                if not remotelocation:
                    if treefile.filename != repository.information.file_name:
                        continue
                if remotelocation == "dist" and not treefile.filename.startswith(
                    "dist"
                ):
                    continue
                if treefile.is_directory:
                    continue
                files.append(
                    FileInformation(
                        treefile.download_url, treefile.full_path, treefile.filename
                if not remotelocation:
                    if not treefile.filename.endswith(".js"):
                        continue
                    if treefile.path != "":
                        continue
                if not treefile.is_directory:
                    files.append(
                        FileInformation(
                            treefile.download_url, treefile.full_path, treefile.filename
                        )
                    )
                )
        if files:
            return files

    if repository.repository_manifest.content_in_root:
        if repository.repository_manifest.filename is None:
    if repository.data.content_in_root:
        if not repository.data.filename:
            if category == "theme":
                tree = filter_content_return_one_of_type(
                    repository.tree, "themes", "yaml", "full_path"
                    repository.tree, "", "yaml", "full_path"
                )

    for path in tree:
@@ -104,21 +109,17 @@ async def download_zip(repository, validate):
        return validate

    for content in contents:
        filecontent = await async_download_file(
            repository.hass, content.download_url
        )
        filecontent = await async_download_file(content.download_url)

        if filecontent is None:
            validate.errors.append(f"[{content.name}] was not downloaded.")
            continue

        result = await async_save_file(
            f"{tempfile.gettempdir()}/{repository.repository_manifest.filename}",
            filecontent,
            f"{tempfile.gettempdir()}/{repository.data.filename}", filecontent
        )
        with zipfile.ZipFile(
            f"{tempfile.gettempdir()}/{repository.repository_manifest.filename}",
            "r",
            f"{tempfile.gettempdir()}/{repository.data.filename}", "r"
        ) as zip_file:
            zip_file.extractall(repository.content.path.local)

@@ -132,54 +133,49 @@ async def download_zip(repository, validate):
        return validate


async def download_content(repository, validate, local_directory):
async def download_content(repository):
    """Download the content of a directory."""
    contents = gather_files_to_download(repository)
    try:
    if not contents:
        raise HacsException("No content to download")
        repository.logger.debug(repository.data.filename)
        if not contents:
            raise HacsException("No content to download")

    for content in contents:
        if repository.repository_manifest.content_in_root:
            if repository.repository_manifest.filename is not None:
                if content.name != repository.repository_manifest.filename:
                    continue
        repository.logger.debug(f"Downloading {content.name}")

        filecontent = await async_download_file(
            repository.hass, content.download_url
        )

        if filecontent is None:
            validate.errors.append(f"[{content.name}] was not downloaded.")
        for content in contents:
            if repository.data.content_in_root and repository.data.filename:
                if content.name != repository.data.filename:
                    continue
            repository.logger.debug(f"Downloading {content.name}")

        # Save the content of the file.
        if repository.content.single or content.path is None:
            local_directory = repository.content.path.local
            filecontent = await async_download_file(content.download_url)

        else:
            _content_path = content.path
            if not repository.repository_manifest.content_in_root:
                _content_path = _content_path.replace(
                    f"{repository.content.path.remote}", ""
                )
            if filecontent is None:
                repository.validate.errors.append(f"[{content.name}] was not downloaded.")
                continue

            local_directory = f"{repository.content.path.local}/{_content_path}"
            local_directory = local_directory.split("/")
            del local_directory[-1]
            local_directory = "/".join(local_directory)
            # Save the content of the file.
            if repository.content.single or content.path is None:
                local_directory = repository.content.path.local

        # Check local directory
        pathlib.Path(local_directory).mkdir(parents=True, exist_ok=True)
            else:
                _content_path = content.path
                if not repository.data.content_in_root:
                    _content_path = _content_path.replace(
                        f"{repository.content.path.remote}", ""
                    )

        local_file_path = f"{local_directory}/{content.name}"
        result = await async_save_file(local_file_path, filecontent)
        if result:
            repository.logger.info(f"download of {content.name} complete")
            continue
        validate.errors.append(f"[{content.name}] was not downloaded.")
                local_directory = f"{repository.content.path.local}/{_content_path}"
                local_directory = local_directory.split("/")
                del local_directory[-1]
                local_directory = "/".join(local_directory)

            # Check local directory
            pathlib.Path(local_directory).mkdir(parents=True, exist_ok=True)

            local_file_path = (f"{local_directory}/{content.name}").replace("//", "/")

            result = await async_save_file(local_file_path, filecontent)
            if result:
                repository.logger.info(f"download of {content.name} complete")
                continue
            repository.validate.errors.append(f"[{content.name}] was not downloaded.")

    except Exception as exception:  # pylint: disable=broad-except
        validate.errors.append(f"Download was not complete [{exception}]")
    return validate
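Note on the new release logic above: the sketch below is not part of the commit. It is a minimal standalone rendering of the rewritten should_try_releases() rule, using types.SimpleNamespace as a stand-in for the HACS repository object; the trailing return True sits outside the hunk shown here and is assumed.

    from types import SimpleNamespace


    def should_try_releases(repository):
        """Zip releases win; the default branch is never downloaded as a release."""
        if repository.data.zip_release:
            if repository.data.filename.endswith(".zip"):
                if repository.ref != repository.data.default_branch:
                    return True
        if repository.ref == repository.data.default_branch:
            return False
        if repository.data.category not in ["plugin", "theme"]:
            return False
        if not repository.releases.releases:
            return False
        return True  # assumed: this line falls outside the hunk shown above


    repo = SimpleNamespace(
        ref="tags/1.2.3",
        data=SimpleNamespace(
            zip_release=True, filename="card.zip", default_branch="master", category="plugin"
        ),
        releases=SimpleNamespace(releases=True),
    )
    print(should_try_releases(repo))  # True: a tagged zip release gets downloaded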
11 config/custom_components/hacs/helpers/filters.py (Executable file → Normal file)
@@ -42,3 +42,14 @@ def find_first_of_filetype(content, filterfiltype, attr="name"):
            filename = getattr(_filename, attr)
            break
    return filename


def get_first_directory_in_directory(content, dirname):
    """Return the first directory in dirname or None."""
    directory = None
    for path in content:
        if path.full_path.startswith(dirname) and path.full_path != dirname:
            if path.is_directory:
                directory = path.filename
                break
    return directory
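For reference (not part of the commit): the new get_first_directory_in_directory() helper only needs objects with full_path, filename and is_directory attributes, so it can be exercised with plain stand-ins. The sample tree below is invented.

    from types import SimpleNamespace


    def get_first_directory_in_directory(content, dirname):
        """Return the first directory in dirname or None."""
        directory = None
        for path in content:
            if path.full_path.startswith(dirname) and path.full_path != dirname:
                if path.is_directory:
                    directory = path.filename
                    break
        return directory


    tree = [
        SimpleNamespace(full_path="apps", filename="apps", is_directory=True),
        SimpleNamespace(full_path="apps/my_app", filename="my_app", is_directory=True),
        SimpleNamespace(full_path="apps/my_app/app.py", filename="app.py", is_directory=False),
    ]
    print(get_first_directory_in_directory(tree, "apps"))  # my_app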
5 config/custom_components/hacs/helpers/get_defaults.py (Executable file → Normal file)
@@ -2,6 +2,7 @@
import json
from aiogithubapi import AIOGitHub, AIOGitHubException
from integrationhelper import Logger
from custom_components.hacs.helpers.information import get_repository


async def get_default_repos_orgs(github: type(AIOGitHub), category: str) -> dict:
@@ -27,13 +28,13 @@ async def get_default_repos_orgs(github: type(AIOGitHub), category: str) -> dict
    return repositories


async def get_default_repos_lists(github: type(AIOGitHub), default: str) -> dict:
async def get_default_repos_lists(session, token, default: str) -> dict:
    """Gets repositories from default list."""
    repositories = []
    logger = Logger("hacs")

    try:
        repo = await github.get_repo("hacs/default")
        repo = await get_repository(session, token, "hacs/default")
        content = await repo.get_contents(default)
        repositories = json.loads(content.content)
184 config/custom_components/hacs/helpers/information.py (Normal file)
@@ -0,0 +1,184 @@
"""Return repository information if any."""
import json
from aiogithubapi import AIOGitHubException, AIOGitHub
from custom_components.hacs.handler.template import render_template
from custom_components.hacs.hacsbase.exceptions import HacsException


def info_file(repository):
    """get info filename."""
    if repository.data.render_readme:
        for filename in ["readme", "readme.md", "README", "README.md", "README.MD"]:
            if filename in repository.treefiles:
                return filename
        return ""
    for filename in ["info", "info.md", "INFO", "INFO.md", "INFO.MD"]:
        if filename in repository.treefiles:
            return filename
    return ""


async def get_info_md_content(repository):
    """Get the content of info.md"""
    filename = info_file(repository)
    if not filename:
        return ""
    try:
        info = await repository.repository_object.get_contents(filename, repository.ref)
        if info is None:
            return ""
        info = info.content.replace("<svg", "<disabled").replace("</svg", "</disabled")
        return render_template(info, repository)
    except (AIOGitHubException, Exception):  # pylint: disable=broad-except
        return ""


async def get_repository(session, token, repository_full_name):
    """Return a repository object or None."""
    try:
        github = AIOGitHub(token, session)
        repository = await github.get_repo(repository_full_name)
        return repository
    except AIOGitHubException as exception:
        raise HacsException(exception)


async def get_tree(repository, ref):
    """Return the repository tree."""
    try:
        tree = await repository.get_tree(ref)
        return tree
    except AIOGitHubException as exception:
        raise HacsException(exception)


async def get_releases(repository, prerelease=False, returnlimit=5):
    """Return the repository releases."""
    try:
        releases = await repository.get_releases(prerelease, returnlimit)
        return releases
    except AIOGitHubException as exception:
        raise HacsException(exception)


async def get_integration_manifest(repository):
    """Return the integration manifest."""
    if repository.data.content_in_root:
        manifest_path = "manifest.json"
    else:
        manifest_path = f"{repository.content.path.remote}/manifest.json"
    if not manifest_path in [x.full_path for x in repository.tree]:
        raise HacsException(f"No file found '{manifest_path}'")
    try:
        manifest = await repository.repository_object.get_contents(
            manifest_path, repository.ref
        )
        manifest = json.loads(manifest.content)
    except Exception as exception:  # pylint: disable=broad-except
        raise HacsException(f"Could not read manifest.json [{exception}]")

    try:
        repository.integration_manifest = manifest
        repository.data.authors = manifest["codeowners"]
        repository.data.domain = manifest["domain"]
        repository.data.manifest_name = manifest["name"]
        repository.data.homeassistant = manifest.get("homeassistant")

        # Set local path
        repository.content.path.local = repository.localpath

    except KeyError as exception:
        raise HacsException(f"Missing expected key {exception} in 'manifest.json'")


def find_file_name(repository):
    """Get the filename to target."""
    if repository.data.category == "plugin":
        get_file_name_plugin(repository)
    elif repository.data.category == "integration":
        get_file_name_integration(repository)
    elif repository.data.category == "theme":
        get_file_name_theme(repository)
    elif repository.data.category == "appdaemon":
        get_file_name_appdaemon(repository)
    elif repository.data.category == "python_script":
        get_file_name_python_script(repository)


def get_file_name_plugin(repository):
    """Get the filename to target."""
    tree = repository.tree
    releases = repository.releases.objects

    if repository.data.content_in_root:
        possible_locations = [""]
    else:
        possible_locations = ["release", "dist", ""]

    # Handler for plug requirement 3
    if repository.data.filename:
        valid_filenames = [repository.data.filename]
    else:
        valid_filenames = [
            f"{repository.data.name.replace('lovelace-', '')}.js",
            f"{repository.data.name}.js",
            f"{repository.data.name}.umd.js",
            f"{repository.data.name}-bundle.js",
        ]

    for location in possible_locations:
        if location == "release":
            if not releases:
                continue
            release = releases[0]
            if not release.assets:
                continue
            asset = release.assets[0]
            for filename in valid_filenames:
                if filename == asset.name:
                    repository.data.file_name = filename
                    repository.content.path.remote = "release"
                    break

        else:
            for filename in valid_filenames:
                if f"{location+'/' if location else ''}{filename}" in [
                    x.full_path for x in tree
                ]:
                    repository.data.file_name = filename.split("/")[-1]
                    repository.content.path.remote = location
                    break


def get_file_name_integration(repository):
    """Get the filename to target."""
    tree = repository.tree
    releases = repository.releases.objects


def get_file_name_theme(repository):
    """Get the filename to target."""
    tree = repository.tree

    for treefile in tree:
        if treefile.full_path.startswith(
            repository.content.path.remote
        ) and treefile.full_path.endswith(".yaml"):
            repository.data.file_name = treefile.filename


def get_file_name_appdaemon(repository):
    """Get the filename to target."""
    tree = repository.tree
    releases = repository.releases.objects


def get_file_name_python_script(repository):
    """Get the filename to target."""
    tree = repository.tree

    for treefile in tree:
        if treefile.full_path.startswith(
            repository.content.path.remote
        ) and treefile.full_path.endswith(".py"):
            repository.data.file_name = treefile.filename
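Not part of the commit: a rough standalone sketch of how get_file_name_plugin() above derives its candidate filenames and matches them against the tree. SimpleNamespace stands in for the repository and tree objects, the release-asset branch is left out, and the sample repository data is invented.

    from types import SimpleNamespace


    def resolve_plugin_file(repository):
        """Return (file_name, remote_path) the way the new helper assigns them."""
        if repository.data.content_in_root:
            possible_locations = [""]
        else:
            possible_locations = ["dist", ""]  # "release" handling omitted in this sketch

        if repository.data.filename:
            valid_filenames = [repository.data.filename]
        else:
            valid_filenames = [
                f"{repository.data.name.replace('lovelace-', '')}.js",
                f"{repository.data.name}.js",
                f"{repository.data.name}.umd.js",
                f"{repository.data.name}-bundle.js",
            ]

        tree_paths = [x.full_path for x in repository.tree]
        for location in possible_locations:
            for filename in valid_filenames:
                if f"{location + '/' if location else ''}{filename}" in tree_paths:
                    return filename.split("/")[-1], location
        return None, None


    repo = SimpleNamespace(
        data=SimpleNamespace(content_in_root=False, filename=None, name="lovelace-card-mod"),
        tree=[SimpleNamespace(full_path="dist/card-mod.js")],
    )
    print(resolve_plugin_file(repo))  # ('card-mod.js', 'dist')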
92 config/custom_components/hacs/helpers/install.py (Executable file → Normal file)
@@ -1,14 +1,17 @@
"""Install helper for repositories."""
import os
import tempfile
from custom_components.hacs.globals import get_hacs
from custom_components.hacs.hacsbase.exceptions import HacsException
from custom_components.hacs.hacsbase.backup import Backup
from custom_components.hacs.hacsbase.backup import Backup, BackupNetDaemon
from custom_components.hacs.helpers.download import download_content


async def install_repository(repository):
    """Common installation steps of the repository."""
    persistent_directory = None
    await repository.update_repository()
    repository.validate.errors = []

    if not repository.can_install:
        raise HacsException(
@@ -16,41 +19,36 @@ async def install_repository(repository):
        )

    version = version_to_install(repository)
    if version == repository.information.default_branch:
    if version == repository.data.default_branch:
        repository.ref = version
    else:
        repository.ref = f"tags/{version}"

    if repository.repository_manifest:
        if repository.repository_manifest.persistent_directory:
            if os.path.exists(
                f"{repository.content.path.local}/{repository.repository_manifest.persistent_directory}"
            ):
                persistent_directory = Backup(
                    f"{repository.content.path.local}/{repository.repository_manifest.persistent_directory}",
                    tempfile.gettempdir() + "/hacs_persistent_directory/",
                )
                persistent_directory.create()
    if repository.status.installed and repository.data.category == "netdaemon":
        persistent_directory = BackupNetDaemon(repository)
        persistent_directory.create()

    elif repository.data.persistent_directory:
        if os.path.exists(
            f"{repository.content.path.local}/{repository.data.persistent_directory}"
        ):
            persistent_directory = Backup(
                f"{repository.content.path.local}/{repository.data.persistent_directory}",
                tempfile.gettempdir() + "/hacs_persistent_directory/",
            )
            persistent_directory.create()

    if repository.status.installed and not repository.content.single:
        backup = Backup(repository.content.path.local)
        backup.create()

    if (
        repository.repository_manifest.zip_release
        and version != repository.information.default_branch
    ):
        validate = await repository.download_zip(repository.validate)
    if repository.data.zip_release and version != repository.data.default_branch:
        await repository.download_zip(repository)
    else:
        validate = await repository.download_content(
            repository.validate,
            repository.content.path.remote,
            repository.content.path.local,
            repository.ref,
        )
        await download_content(repository)

    if validate.errors:
        for error in validate.errors:
    if repository.validate.errors:
        for error in repository.validate.errors:
            repository.logger.error(error)
        if repository.status.installed and not repository.content.single:
            backup.restore()
@@ -62,14 +60,14 @@ async def install_repository(repository):
            persistent_directory.restore()
            persistent_directory.cleanup()

    if validate.success:
        if repository.information.full_name not in repository.common.installed:
            if repository.information.full_name == "hacs/integration":
                repository.common.installed.append(repository.information.full_name)
    if repository.validate.success:
        if repository.data.full_name not in repository.hacs.common.installed:
            if repository.data.full_name == "hacs/integration":
                repository.hacs.common.installed.append(repository.data.full_name)
        repository.status.installed = True
        repository.versions.installed_commit = repository.versions.available_commit

        if version == repository.information.default_branch:
        if version == repository.data.default_branch:
            repository.versions.installed = None
        else:
            repository.versions.installed = version
@@ -80,28 +78,34 @@ async def install_repository(repository):

async def reload_after_install(repository):
    """Reload action after installation success."""
    if repository.information.category == "integration":
    if repository.data.category == "integration":
        if repository.config_flow:
            if repository.information.full_name != "hacs/integration":
            if repository.data.full_name != "hacs/integration":
                await repository.reload_custom_components()
        repository.pending_restart = True

    elif repository.information.category == "theme":
    elif repository.data.category == "theme":
        try:
            await repository.hass.services.async_call("frontend", "reload_themes", {})
            await repository.hacs.hass.services.async_call(
                "frontend", "reload_themes", {}
            )
        except Exception:  # pylint: disable=broad-except
            pass
    elif repository.data.category == "netdaemon":
        try:
            await repository.hacs.hass.services.async_call(
                "hassio", "addon_restart", {"addon": "e466aeb3_netdaemon"}
            )
        except Exception:  # pylint: disable=broad-except
            pass


def installation_complete(repository):
    """Action to run when the installation is complete."""
    repository.hass.bus.async_fire(
    hacs = get_hacs()
    hacs.hass.bus.async_fire(
        "hacs/repository",
        {
            "id": 1337,
            "action": "install",
            "repository": repository.information.full_name,
        },
        {"id": 1337, "action": "install", "repository": repository.data.full_name},
    )


@@ -115,10 +119,10 @@ def version_to_install(repository):
            return repository.status.selected_tag
        return repository.versions.available
    if repository.status.selected_tag is not None:
        if repository.status.selected_tag == repository.information.default_branch:
            return repository.information.default_branch
        if repository.status.selected_tag == repository.data.default_branch:
            return repository.data.default_branch
        if repository.status.selected_tag in repository.releases.published_tags:
            return repository.status.selected_tag
    if repository.information.default_branch is None:
    if repository.data.default_branch is None:
        return "master"
    return repository.information.default_branch
    return repository.data.default_branch
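Not part of the commit: a small sketch of just the fallback chain at the end of version_to_install() as changed above (selected tag, then repository.data.default_branch, then "master"). The earlier branches of the real function are omitted and the objects are SimpleNamespace stand-ins.

    from types import SimpleNamespace


    def pick_ref(repository):
        """Reduced fallback chain modelled on the hunk above; not the full function."""
        if repository.status.selected_tag is not None:
            if repository.status.selected_tag == repository.data.default_branch:
                return repository.data.default_branch
            if repository.status.selected_tag in repository.releases.published_tags:
                return repository.status.selected_tag
        if repository.data.default_branch is None:
            return "master"
        return repository.data.default_branch


    repo = SimpleNamespace(
        status=SimpleNamespace(selected_tag="1.4.0"),
        data=SimpleNamespace(default_branch="main"),
        releases=SimpleNamespace(published_tags=["1.3.0", "1.4.0"]),
    )
    print(pick_ref(repo))  # 1.4.0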
23 config/custom_components/hacs/helpers/misc.py (Executable file → Normal file)
@@ -2,20 +2,23 @@
import semantic_version


def get_repository_name(
    hacs_manifest, repository_name: str, category: str = None, manifest: dict = None
) -> str:
def get_repository_name(repository) -> str:
    """Return the name of the repository for use in the frontend."""

    if hacs_manifest.name is not None:
        return hacs_manifest.name
    if repository.repository_manifest.name is not None:
        return repository.repository_manifest.name

    if category == "integration":
        if manifest:
            if "name" in manifest:
                return manifest["name"]
    if repository.data.category == "integration":
        if repository.integration_manifest:
            if "name" in repository.integration_manifest:
                return repository.integration_manifest["name"]

    return repository_name.replace("-", " ").replace("_", " ").title()
    return (
        repository.data.full_name.split("/")[-1]
        .replace("-", " ")
        .replace("_", " ")
        .title()
    )


def version_left_higher_then_right(new: str, old: str) -> bool:
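Not part of the commit: the rewritten fallback in get_repository_name() now derives the display name from repository.data.full_name instead of taking the name as an argument. The one-liner below reproduces only that expression; the input is an arbitrary example.

    def fallback_name(full_name: str) -> str:
        # Last path segment of owner/repo, dashes and underscores as spaces, title-cased.
        return full_name.split("/")[-1].replace("-", " ").replace("_", " ").title()


    print(fallback_name("CCOSTAN/Home-AssistantConfig"))  # Home Assistantconfig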
8 config/custom_components/hacs/helpers/network.py (Normal file)
@@ -0,0 +1,8 @@
"""Verify network."""
from socket import gaierror
from integrationhelper import Logger


def internet_connectivity_check(host="api.github.com"):
    """Verify network connectivity."""
    return True
49 config/custom_components/hacs/helpers/register_repository.py (Normal file)
@@ -0,0 +1,49 @@
"""Register a repository."""
from aiogithubapi import AIOGitHubException
from custom_components.hacs.globals import get_hacs
from custom_components.hacs.hacsbase.exceptions import (
    HacsException,
    HacsExpectedException,
)


async def register_repository(full_name, category, check=True):
    """Register a repository."""
    hacs = get_hacs()
    from custom_components.hacs.repositories import (
        RERPOSITORY_CLASSES,
    )  # To hanle import error

    if full_name in hacs.common.skip:
        if full_name != "hacs/integration":
            raise HacsExpectedException(f"Skipping {full_name}")

    if category not in RERPOSITORY_CLASSES:
        raise HacsException(f"{category} is not a valid repository category.")

    repository = RERPOSITORY_CLASSES[category](full_name)
    if check:
        try:
            await repository.registration()
            if hacs.system.status.new:
                repository.status.new = False
            if repository.validate.errors:
                hacs.common.skip.append(repository.data.full_name)
                if not hacs.system.status.startup:
                    hacs.logger.error(f"Validation for {full_name} failed.")
                return repository.validate.errors
            repository.logger.info("Registration complete")
        except AIOGitHubException as exception:
            hacs.common.skip.append(repository.data.full_name)
            raise HacsException(f"Validation for {full_name} failed with {exception}.")

    hacs.hass.bus.async_fire(
        "hacs/repository",
        {
            "id": 1337,
            "action": "registration",
            "repository": repository.data.full_name,
            "repository_id": repository.information.uid,
        },
    )
    hacs.repositories.append(repository)
90 config/custom_components/hacs/helpers/validate_repository.py (Normal file)
@@ -0,0 +1,90 @@
"""Helper to do common validation for repositories."""
from aiogithubapi import AIOGitHubException
from custom_components.hacs.globals import get_hacs, is_removed
from custom_components.hacs.hacsbase.exceptions import HacsException
from custom_components.hacs.helpers.install import version_to_install
from custom_components.hacs.helpers.information import (
    get_repository,
    get_tree,
    get_releases,
)


async def common_validate(repository):
    """Common validation steps of the repository."""
    repository.validate.errors = []

    # Make sure the repository exist.
    repository.logger.debug("Checking repository.")
    await common_update_data(repository)

    # Step 6: Get the content of hacs.json
    await repository.get_repository_manifest_content()


async def common_update_data(repository):
    """Common update data."""
    hacs = get_hacs()
    try:
        repository_object = await get_repository(
            hacs.session, hacs.configuration.token, repository.data.full_name
        )
        repository.repository_object = repository_object
        repository.data.update_data(repository_object.attributes)
    except (AIOGitHubException, HacsException) as exception:
        if not hacs.system.status.startup:
            repository.logger.error(exception)
        repository.validate.errors.append("Repository does not exist.")
        raise HacsException(exception)

    # Make sure the repository is not archived.
    if repository.data.archived:
        repository.validate.errors.append("Repository is archived.")
        raise HacsException("Repository is archived.")

    # Make sure the repository is not in the blacklist.
    if is_removed(repository.data.full_name):
        repository.validate.errors.append("Repository is in the blacklist.")
        raise HacsException("Repository is in the blacklist.")

    # Get releases.
    try:
        releases = await get_releases(
            repository.repository_object,
            repository.status.show_beta,
            hacs.configuration.release_limit,
        )
        if releases:
            repository.releases.releases = True
            repository.releases.objects = releases
            repository.releases.published_tags = [
                x.tag_name for x in releases if not x.draft
            ]
            repository.versions.available = next(iter(releases)).tag_name
            for release in releases:
                if release.tag_name == repository.ref:
                    assets = release.assets
                    if assets:
                        downloads = next(iter(assets)).attributes.get("download_count")
                        repository.releases.downloads = downloads

    except (AIOGitHubException, HacsException):
        repository.releases.releases = False

    repository.ref = version_to_install(repository)

    repository.logger.debug(
        f"Running checks against {repository.ref.replace('tags/', '')}"
    )

    try:
        repository.tree = await get_tree(repository.repository_object, repository.ref)
        if not repository.tree:
            raise HacsException("No files in tree")
        repository.treefiles = []
        for treefile in repository.tree:
            repository.treefiles.append(treefile.full_path)
    except (AIOGitHubException, HacsException) as exception:
        if not hacs.system.status.startup:
            repository.logger.error(exception)
        raise HacsException(exception)
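Not part of the commit: a runnable sketch of the release bookkeeping in common_update_data() above (published tags collected from non-draft releases, the first release taken as the available version). SimpleNamespace replaces the aiogithubapi release objects; the sample tags are invented.

    from types import SimpleNamespace

    releases = [
        SimpleNamespace(tag_name="2.0.0", draft=False),
        SimpleNamespace(tag_name="2.0.0b1", draft=True),
        SimpleNamespace(tag_name="1.9.0", draft=False),
    ]

    # Same expressions as in the helper above, applied to the stand-in objects.
    published_tags = [x.tag_name for x in releases if not x.draft]
    available = next(iter(releases)).tag_name

    print(published_tags)  # ['2.0.0', '1.9.0']
    print(available)       # 2.0.0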