Skip to content

Commit

Permalink
both backups working
Browse files Browse the repository at this point in the history
  • Loading branch information
amyasnikov committed Jan 10, 2025
1 parent 6a5e0fe commit e8e6177
Show file tree
Hide file tree
Showing 41 changed files with 687 additions and 355 deletions.
10 changes: 5 additions & 5 deletions validity/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,16 @@ class NetBoxValidityConfig(PluginConfig):
# custom field
netbox_version = NetboxVersion(VERSION)

def _setup_queue(self):
queue_name = di["validity_settings"].runtests_queue
def _setup_queues(self):
django_settings = di["django_settings"]
if queue_name not in django_settings.RQ_QUEUES:
django_settings.RQ_QUEUES[queue_name] = django_settings.RQ_PARAMS
for _, queue_name in di["validity_settings"].custom_queues:
if queue_name not in django_settings.RQ_QUEUES:
django_settings.RQ_QUEUES[queue_name] = django_settings.RQ_PARAMS

def ready(self):
from validity import dependencies, signals

self._setup_queue()
self._setup_queues()
return super().ready()


Expand Down
13 changes: 13 additions & 0 deletions validity/api/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from django.core.exceptions import ValidationError
from django.db.models import ManyToManyField
from netbox.api.serializers import WritableNestedSerializer
from rest_framework.permissions import BasePermission
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.serializers import HyperlinkedIdentityField, JSONField, ModelSerializer

Expand Down Expand Up @@ -52,6 +53,18 @@ def proxy_factory(
return type(serializer_class.__name__, (serializer_class,), {"url": url, "Meta": meta})


def model_perms(*permissions: str) -> type[BasePermission]:
    """
    Build a DRF permission class that requires specific Django model permissions.

    Args:
        *permissions: permission codenames, e.g. "validity.run_compliancetest".

    Returns:
        A BasePermission subclass that grants access only to authenticated
        users holding ALL of the given permissions.
    """

    class Permission(BasePermission):
        def has_permission(self, request, view):
            # Fix: pass the codename tuple itself. The previous
            # `has_perms([permissions])` wrapped the tuple in a list, making
            # Django check a single (tuple) "permission" that can never exist,
            # so access was always denied.
            return request.user.is_authenticated and request.user.has_perms(permissions)

    return Permission


class EncryptedDictField(JSONField):
def __init__(self, **kwargs):
self.do_not_encrypt = kwargs.pop("do_not_encrypt", ())
Expand Down
2 changes: 1 addition & 1 deletion validity/api/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -391,7 +391,7 @@ class Meta:
"display",
"name",
"data_source",
"enabled",
"backup_after_sync",
"method",
"upload_url",
"ignore_rules",
Expand Down
17 changes: 15 additions & 2 deletions validity/api/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@

from validity import di, filtersets, models
from validity.choices import SeverityChoices
from validity.scripts import Launcher, RunTestsParams, ScriptParams
from validity.scripts import BackUpParams, Launcher, RunTestsParams, ScriptParams
from . import serializers
from .helpers import model_perms


class RunMixin:
Expand Down Expand Up @@ -66,7 +67,7 @@ def __init__(self, launcher: Annotated[Launcher, "runtests_launcher"], **kwargs:
self.launcher = launcher
super().__init__(**kwargs)

@action(detail=False, methods=["post"], url_path="run")
@action(detail=False, methods=["post"], permission_classes=[model_perms("validity.run_compliancetest")])
def run(self, request):
return super().run(request)

Expand Down Expand Up @@ -106,6 +107,18 @@ class BackupPointViewSet(NetBoxModelViewSet):
serializer_class = serializers.BackupPointSerializer
filterset_class = filtersets.BackupPointFilterSet

@di.inject
def __init__(self, launcher: Annotated[Launcher, "backup_launcher"], **kwargs: Any) -> None:
    # The Launcher registered under "backup_launcher" is supplied by the DI
    # container (see validity.dependencies) and stored for the backup action.
    self.launcher = launcher
    super().__init__(**kwargs)

@action(detail=True, methods=["post"], permission_classes=[model_perms("validity.backup_backuppoint")])
def backup(self, request, pk):
    """
    Enqueue a backup job for this BackupPoint via the injected launcher and
    return the serialized job info. Requires the
    "validity.backup_backuppoint" model permission.
    """
    params = BackUpParams(request=request, backuppoint_id=pk)
    job = self.launcher(params)
    # Wrap the job in the common script-result payload shape used by the API.
    serializer = serializers.ScriptResultSerializer({"result": job}, context={"request": request})
    return Response(serializer.data)


class CommandViewSet(NetBoxModelViewSet):
queryset = models.Command.objects.select_related("serializer").prefetch_related("tags")
Expand Down
2 changes: 1 addition & 1 deletion validity/data_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class PollingBackend(DataBackend):
.annotate_datasource_id()
.order_by("poller_id")
)
backup_qs = BackupPoint.objects.filter(enabled=True)
backup_qs = BackupPoint.objects.filter(backup_after_sync=True)
metainfo_file = Path("polling_info.yaml")

@property
Expand Down
8 changes: 6 additions & 2 deletions validity/data_backup/backend.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from contextlib import contextmanager
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING

Expand All @@ -15,12 +16,15 @@ def __init__(self, backupers: dict[str, "Backuper"]):
@contextmanager
def _datasource_in_filesytem(self, backup_point: "BackupPoint"):
with TemporaryDirectory() as datasource_dir:
datasource_dir = Path(datasource_dir)
for file in backup_point.data_source.datafiles.all():
if not backup_point.ignore_file(file.path):
file.write_to_disk(datasource_dir, overwrite=True)
filepath = datasource_dir / file.path
filepath.parent.mkdir(exist_ok=True, parents=True)
file.write_to_disk(datasource_dir / file.path)
yield datasource_dir

def __call__(self, backup_point: "BackupPoint") -> None:
backuper = self.backupers[backup_point.method]
with self._datasource_in_filesytem(backup_point) as datasource_dir:
backuper(backup_point.url, backup_point.parameters, datasource_dir)
backuper(backup_point.url, backup_point.parameters.decrypted, datasource_dir)
37 changes: 23 additions & 14 deletions validity/data_backup/backupers.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,16 @@

from validity.integrations.git import GitClient
from validity.integrations.s3 import S3Client
from validity.utils.filesystem import merge_directories
from validity.utils.logger import Logger
from .entities import RemoteGitRepo
from .parameters import GitParams, S3Params


@dataclass
class Backuper(ABC):
parameters_cls: type[BaseModel]
logger: Logger

parameters_cls: ClassVar[type[BaseModel]]

def __call__(self, url: str, parameters: dict[str, Any], datasource_dir: Path) -> None:
validated_params = self.parameters_cls.model_validate(parameters)
Expand All @@ -34,20 +37,25 @@ class GitBackuper(Backuper):

parameters_cls: ClassVar[type[BaseModel]] = GitParams

def _get_repo(self, url: str, parameters: GitParams, datasource_dir: Path) -> RemoteGitRepo:
    """Build a RemoteGitRepo handle rooted directly at the data source directory."""
    repo_kwargs = {
        "local_path": datasource_dir,
        "remote_url": url,
        "active_branch": parameters.branch,
        "username": parameters.username,
        "password": parameters.password,
        "client": self.git_client,
    }
    return RemoteGitRepo(**repo_kwargs)

def _do_backup(self, url: str, parameters: GitParams, datasource_dir: Path) -> None:
with TemporaryDirectory() as repo_dir:
repo = RemoteGitRepo(
local_path=repo_dir,
remote_url=url,
active_branch=parameters.branch,
username=parameters.username,
password=parameters.password,
client=self.git_client,
)
repo.download()
merge_directories(datasource_dir, repo.local_path)
repo = self._get_repo(url, parameters, datasource_dir)
repo.download(dotgit_only=True)
if repo.has_changes:
repo.save_changes(self.author_username, self.author_email, message=self.message)
repo.upload()
self.logger.info(f"Data successfully git-pushed to `{url}`")
else:
self.logger.info(f"No diff found for `{url}`, skipping git push")


@dataclass
Expand All @@ -59,7 +67,7 @@ class S3Backuper(Backuper):
def _backup_archive(self, url: str, parameters: S3Params, datasource_dir: Path) -> None:
with TemporaryDirectory() as backup_dir:
archive = Path(backup_dir) / "a.zip"
shutil.make_archive(archive, "zip", datasource_dir)
shutil.make_archive(archive.with_suffix(""), "zip", datasource_dir)
self.s3_client.upload_file(archive, url, parameters.aws_access_key_id, parameters.aws_secret_access_key)

def _backup_dir(self, url: str, parameters: S3Params, datasource_dir: Path) -> None:
Expand All @@ -72,3 +80,4 @@ def _do_backup(self, url: str, parameters: S3Params, datasource_dir: Path):
self._backup_archive(url, parameters, datasource_dir)
else:
self._backup_dir(url, parameters, datasource_dir)
self.logger.info(f"Data uploaded to S3 storage: `{url}`")
22 changes: 18 additions & 4 deletions validity/data_backup/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,26 @@ class RemoteGitRepo:
password: str = ""
client: GitClient

def save_changes(self, author_username: str, author_email: str, message: str = ""):
def save_changes(self, author_username: str, author_email: str, message: str = "") -> None:
    """Stage all local changes and commit them with the given author identity."""
    self.client.stage_all(self.local_path)
    self.client.commit(self.local_path, author_username, author_email, message)

def download(self):
self.client.clone(self.local_path, self.remote_url, self.active_branch, self.username, self.password, depth=1)
def download(self, dotgit_only: bool = False) -> None:
    """
    Clone the remote repository into `local_path`.

    When `dotgit_only` is True the clone skips checkout (only git metadata is
    fetched) and the index is then unstaged.
    """
    self.client.clone(
        self.local_path,
        self.remote_url,
        self.active_branch,
        self.username,
        self.password,
        depth=1,  # shallow clone: only the tip of the branch is needed
        checkout=not dotgit_only,
    )
    if dotgit_only:
        # NOTE(review): presumably resets the index so files already present
        # in local_path show up as changes — confirm against GitClient.
        self.client.unstage_all(self.local_path)

def upload(self):
def upload(self) -> None:
    """Push the active branch to the remote using the stored credentials."""
    self.client.push(self.local_path, self.remote_url, self.active_branch, self.username, self.password)

@property
def has_changes(self) -> bool:
    # True when the repo status at local_path reports pending changes.
    return self.client.status(self.local_path).has_changes
81 changes: 34 additions & 47 deletions validity/dependencies.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,7 @@
from typing import Annotated

from dimi.scopes import Singleton
from dimi.scopes import Context, Singleton
from django.conf import LazySettings, settings
from django_rq.queues import DjangoRQ, get_redis_connection
from redis import Redis
from rq import Queue, Worker
from rq.job import Job

from validity import di
from validity.compliance.serialization import (
Expand All @@ -21,6 +17,7 @@
from validity.integrations.s3 import BotoS3Client
from validity.pollers import NetmikoPoller, RequestsPoller, ScrapliNetconfPoller
from validity.settings import PollerInfo, ValiditySettings
from validity.utils.logger import Logger
from validity.utils.misc import null_request


Expand All @@ -34,17 +31,23 @@ def validity_settings(django_settings: Annotated[LazySettings, django_settings])
return ValiditySettings.model_validate(django_settings.PLUGINS_CONFIG.get("validity", {}))


@di.dependency(scope=Context, add_return_alias=True)
def scripts_logger() -> Logger:
    # Context scope: a fresh Logger instance per DI context rather than a
    # process-wide singleton.
    return Logger()


@di.dependency(scope=Singleton, add_return_alias=True)
def backup_backend(vsettings: Annotated[ValiditySettings, ...]) -> BackupBackend:
def backup_backend(vsettings: Annotated[ValiditySettings, ...], logger: Annotated[Logger, ...]) -> BackupBackend:
return BackupBackend(
backupers={
"git": GitBackuper(
message="",
author_username=vsettings.integrations.git.author,
author_email=vsettings.integrations.git.email,
git_client=DulwichGitClient(),
logger=logger,
),
"S3": S3Backuper(s3_client=BotoS3Client(max_threads=vsettings.integrations.s3.threads)),
"S3": S3Backuper(s3_client=BotoS3Client(max_threads=vsettings.integrations.s3.threads), logger=logger),
}
)

Expand Down Expand Up @@ -80,43 +83,12 @@ def pollers_info(custom_pollers: Annotated[list[PollerInfo], "validity_settings.


import validity.pollers.factory # noqa
from validity.scripts import ApplyWorker, CombineWorker, Launcher, SplitWorker, Task # noqa


@di.dependency
def runtests_queue_config(
settings: Annotated[LazySettings, django_settings], vsettings: Annotated[ValiditySettings, validity_settings]
) -> dict:
return settings.RQ_QUEUES.get(vsettings.runtests_queue, settings.RQ_PARAMS)


@di.dependency
def runtests_redis_connection(queue_config: Annotated[dict, runtests_queue_config]) -> Redis:
return get_redis_connection(queue_config)


@di.dependency
def runtests_queue(
vsettings: Annotated[ValiditySettings, validity_settings],
config: Annotated[dict, runtests_queue_config],
connection: Annotated[Redis, runtests_redis_connection],
) -> Queue:
is_async = config.get("ASYNC", True)
default_timeout = config.get("DEFAULT_TIMEOUT")
return DjangoRQ(
vsettings.runtests_queue,
default_timeout=default_timeout,
connection=connection,
is_async=is_async,
job_class=Job,
)
from validity.scripts import ApplyWorker, CombineWorker, Launcher, SplitWorker, Task, LauncherFactory, perform_backup # noqa


@di.dependency
def runtests_worker_count(
connection: Annotated[Redis, runtests_redis_connection], queue: Annotated[Queue, runtests_queue]
) -> int:
return Worker.count(connection=connection, queue=queue)
def launcher_factory(settings: Annotated[LazySettings, django_settings]) -> LauncherFactory:
    """Create the LauncherFactory using the project-wide RQ connection params."""
    return LauncherFactory(settings.RQ_PARAMS)


@di.dependency(scope=Singleton)
Expand All @@ -125,14 +97,14 @@ def runtests_launcher(
split_worker: Annotated[SplitWorker, ...],
apply_worker: Annotated[ApplyWorker, ...],
combine_worker: Annotated[CombineWorker, ...],
queue: Annotated[Queue, runtests_queue],
):
factory: Annotated[LauncherFactory, launcher_factory],
) -> Launcher:
from validity.models import ComplianceReport

return Launcher(
job_name="RunTests",
job_object_factory=null_request()(ComplianceReport.objects.create),
rq_queue=queue,
return factory.get_launcher(
"RunTests",
job_object_factory=lambda _: null_request()(ComplianceReport.objects.create)(),
queue_name=vsettings.custom_queues.runtests,
tasks=[
Task(split_worker, job_timeout=vsettings.script_timeouts.runtests_split),
Task(
Expand All @@ -143,3 +115,18 @@ def runtests_launcher(
Task(combine_worker, job_timeout=vsettings.script_timeouts.runtests_combine),
],
)


@di.dependency(scope=Singleton)
def backup_launcher(
    vsettings: Annotated[ValiditySettings, validity_settings],
    factory: Annotated[LauncherFactory, launcher_factory],
) -> Launcher:
    """
    Singleton Launcher for the "DataSourceBackup" script: a single
    perform_backup task on the configured backup queue, with the BackupPoint
    referenced by the launch params as the job object.
    """
    # Local import — presumably to avoid touching Django models at module
    # import time; confirm before moving it to the top of the file.
    from validity.models import BackupPoint

    return factory.get_launcher(
        "DataSourceBackup",
        job_object_factory=lambda params: BackupPoint.objects.get(pk=params.backuppoint_id),
        queue_name=vsettings.custom_queues.backup,
        tasks=[Task(perform_backup, job_timeout=vsettings.script_timeouts.backup)],
    )
2 changes: 1 addition & 1 deletion validity/filtersets.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,5 +145,5 @@ class BackupPointFilterSet(SearchMixin, NetBoxModelFilterSet):

class Meta:
model = models.BackupPoint
fields = ("id", "name", "method", "data_source_id", "enabled", "last_uploaded", "last_status")
fields = ("id", "name", "method", "data_source_id", "backup_after_sync", "last_uploaded", "last_status")
search_fields = ("name",)
2 changes: 1 addition & 1 deletion validity/forms/bulk_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,4 +222,4 @@ class BackupPointImportForm(SubFormMixin, NetBoxModelImportForm):

class Meta:
model = models.BackupPoint
fields = ("name", "data_source", "enabled", "url", "method", "ignore_rules", "parameters")
fields = ("name", "data_source", "backup_after_sync", "url", "method", "ignore_rules", "parameters")
4 changes: 3 additions & 1 deletion validity/forms/filterset.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,9 @@ class BackupPointFilterForm(NetBoxModelFilterSetForm):
data_source_id = DynamicModelMultipleChoiceField(
label=_("Data Source"), queryset=DataSource.objects.all(), required=False
)
enabled = NullBooleanField(label=_("Enabled"), required=False, widget=Select(choices=BOOLEAN_WITH_BLANK_CHOICES))
backup_after_sync = NullBooleanField(
label=_("Back Up After Sync"), required=False, widget=Select(choices=BOOLEAN_WITH_BLANK_CHOICES)
)
method = PlaceholderChoiceField(required=False, label=_("Backup Method"), choices=BackupMethodChoices.choices)
last_status = PlaceholderChoiceField(required=False, label=_("Last Status"), choices=BackupStatusChoices.choices)
last_uploaded__lte = DateTimeField(required=False, widget=DateTimePicker(), label=_("Last Uploaded Before"))
Expand Down
Loading

0 comments on commit e8e6177

Please sign in to comment.