
Periodic data sync

Bram Wiepjes 2025-01-20 12:28:14 +00:00
parent 6ceee4e155
commit 25378704a5
30 changed files with 2090 additions and 71 deletions
backend/src/baserow/contrib/database/data_sync
changelog/entries/unreleased/feature
docker-compose.yml
docs/installation
enterprise
web-frontend/modules

View file

@@ -50,6 +50,7 @@ class DataSyncHandler:
Returns the data sync matching the provided ID.
:param data_sync_id: The data sync ID to fetch.
:param base_queryset: Optionally change the default queryset.
:return: The fetched data sync object.
"""

View file

@@ -0,0 +1,7 @@
{
"type": "feature",
"message": "Periodic data sync",
"issue_number": 3071,
"bullet_points": [],
"created_at": "2025-01-15"
}

View file

@@ -196,6 +196,8 @@ x-backend-variables: &backend-variables
BASEROW_ASGI_HTTP_MAX_CONCURRENCY: ${BASEROW_ASGI_HTTP_MAX_CONCURRENCY:-}
BASEROW_MAX_WEBHOOK_CALLS_IN_QUEUE_PER_WEBHOOK:
BASEROW_MAX_HEALTHY_CELERY_QUEUE_SIZE:
BASEROW_ENTERPRISE_PERIODIC_DATA_SYNC_CHECK_INTERVAL_MINUTES:
BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS:
services:

View file

@@ -64,26 +64,26 @@ The installation methods referred to in the variable descriptions are:
| BASEROW\_IMPORT\_EXPORT\_RESOURCE\_REMOVAL\_AFTER\_DAYS | Specifies the number of days after which an import/export resource will be automatically deleted. | 5 |
### Backend Database Configuration
| Name | Description | Defaults |
|--------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| DATABASE\_HOST | The hostname of the postgres database Baserow will use to store its data in. | Defaults to db in the standalone and compose installs. If not provided in the \`baserow/baserow\` install then the embedded Postgres will be set up and used. |
| DATABASE\_USER | The username of the database user Baserow will use to connect to the database at DATABASE\_HOST | baserow |
| | | |
| DATABASE\_PORT | The port Baserow will use when trying to connect to the postgres database at DATABASE\_HOST | 5432 |
| DATABASE\_NAME | The database name Baserow will use to store data in. | baserow |
| DATABASE\_PASSWORD | The password of DATABASE\_USER on the postgres server at DATABASE\_HOST | Required to be set by you in the docker-compose and standalone installs. Automatically generated by the baserow/baserow image if not provided and stored in /baserow/data/.pgpass. |
| DATABASE\_PASSWORD\_FILE | **Only supported by the `baserow/baserow` image** If set Baserow will attempt to read the above DATABASE\_PASSWORD from this file location instead. | |
| DATABASE\_OPTIONS | Optional extra options as a JSON formatted string to use when connecting to the database, see [this documentation](https://docs.djangoproject.com/en/3.2/ref/settings/#std-setting-OPTIONS) for more details. | |
| DATABASE\_URL | As an alternative to setting the individual DATABASE\_ parameters above, you can provide one standard postgres connection string in the format: postgresql://\[user\[:password\]@\]\[netloc\]\[:port\]\[/dbname\]\[?param1=value1&…\]. Please note this will completely override all other DATABASE_* settings and ignore them. | |
| | | |
| MIGRATE\_ON\_STARTUP | If set to “true” when the Baserow backend service starts up it will automatically apply database migrations. Set to any other value to disable. If you disable this then you must remember to manually apply the database migrations when upgrading Baserow to a new version. | true |
| BASEROW\_TRIGGER\_SYNC\_TEMPLATES\_AFTER\_MIGRATION | If set to “true”, after a migration Baserow will automatically sync all builtin Baserow templates in the background. If you are using a postgres database which is constrained to fewer than 10000 rows then we recommend you disable this, as the Baserow templates will go over that row limit. To disable this, set any other value than “true”. | true |
| BASEROW\_SYNC\_TEMPLATES\_TIME\_LIMIT | The number of seconds before the background sync templates job will timeout if not yet completed. | 1800 |
| SYNC\_TEMPLATES\_ON\_STARTUP | **Deprecated, please use BASEROW\_TRIGGER\_SYNC\_TEMPLATES\_AFTER\_MIGRATION.** If provided, it has the same effect as BASEROW\_TRIGGER\_SYNC\_TEMPLATES\_AFTER\_MIGRATION for backwards compatibility reasons. If BASEROW\_TRIGGER\_SYNC\_TEMPLATES\_AFTER\_MIGRATION is set it will override this value. | true |
| DONT\_UPDATE\_FORMULAS\_AFTER\_MIGRATION | Baserow's formulas have an internal version number. When upgrading Baserow, if the formula language has also changed, then after the database migration has run Baserow will automatically recalculate all formulas if they have a different version. Set this to any non-empty value to disable this automatic update if you would prefer to run the update\_formulas management command manually yourself. Formulas might break after an upgrade of Baserow if you forget to do so, and so it is recommended to leave this empty. | |
| POSTGRES\_STARTUP\_CHECK\_ATTEMPTS | When Baserow's Backend service starts up it first checks to see if the postgres database is available. It checks 5 times by default, after which if it still has not connected it will crash. | 5 |
| BASEROW\_PREVENT\_POSTGRESQL\_DATA\_SYNC\_CONNECTION\_TO\_DATABASE | If true, then it's impossible to connect to the Baserow PostgreSQL database using the PostgreSQL data sync. | true |
| BASEROW\_POSTGRESQL\_DATA\_SYNC\_BLACKLIST | Optionally provide a comma separated list of hostnames that the Baserow PostgreSQL data sync can't connect to. (e.g. "localhost,baserow.io") | |
### Redis Configuration
| Name | Description | Defaults |
@@ -139,35 +139,36 @@ The installation methods referred to in the variable descriptions are:
| BASEROW\_OLLAMA\_MODELS | Provide a comma separated list of Ollama models (https://ollama.com/library) that you would like to enable in the instance (e.g. `llama2`). Note that this only works if an Ollama host is set. If this variable is not provided, the user won't be able to choose a model. | |
### Backend Misc Configuration
| Name | Description | Defaults |
|---------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------|
| BASEROW\_ENABLE\_SECURE\_PROXY\_SSL\_HEADER | Set to any non-empty value to ensure Baserow generates https:// next links provided by paginated API endpoints. Baserow will still work correctly if not enabled, this is purely for giving the correct https url for clients of the API. If you have setup Baserow to use Caddy's auto HTTPS or you have put Baserow behind<br>a reverse proxy which:<br>* Handles HTTPS<br>* Strips the X-Forwarded-Proto header from all incoming requests.<br>* Sets the X-Forwarded-Proto header and sends it to Baserow.<br>Then you can safely set BASEROW\_ENABLE\_SECURE\_PROXY\_SSL\_HEADER=yes to ensure Baserow<br>generates https links for pagination correctly.<br> | |
| ADDITIONAL\_APPS | A comma separated list of additional django applications to add to the INSTALLED\_APPS django setting | |
| HOURS\_UNTIL\_TRASH\_PERMANENTLY\_DELETED | Items from the trash will be permanently deleted after this number of hours. | |
| DISABLE\_ANONYMOUS\_PUBLIC\_VIEW\_WS\_CONNECTIONS | When sharing views publicly a websocket connection is opened to provide realtime updates to viewers of the public link. To disable this set any non empty value. When disabled publicly shared links will need to be refreshed to see any updates to the view. | |
| BASEROW\_WAIT\_INSTEAD\_OF\_409\_CONFLICT\_ERROR | When updating or creating various resources in Baserow if another concurrent operation is ongoing (like a snapshot, duplication, import etc) which would be affected by your modification a 409 HTTP error will be returned. If you instead would prefer Baserow to not return a 409 and just block waiting until the operation finishes and then to perform the requested operation set this flag to any non-empty value. | |
| BASEROW\_JOB\_CLEANUP\_INTERVAL\_MINUTES | How often the job cleanup task will run. | 5 |
| BASEROW\_JOB\_EXPIRATION\_TIME\_LIMIT | How long a Baserow job will be kept before being cleaned up. | 30 * 24 * 60 (30 days) |
| BASEROW\_JOB\_SOFT\_TIME\_LIMIT | The number of seconds a Baserow job can run before being terminated. | 1800 |
| BASEROW\_MAX\_FILE\_IMPORT\_ERROR\_COUNT | The max number of per-row errors that can occur in a file import before an overall failure is declared. | 30 |
| MINUTES\_UNTIL\_ACTION\_CLEANED\_UP | How long before actions are cleaned up. Actions are used to let you undo/redo, so this is effectively the max length of time you can undo/redo an action. | 120 |
| BASEROW\_DISABLE\_MODEL\_CACHE | When set to any non-empty value the model cache used to speed up Baserow will be disabled. Useful to enable when debugging Baserow errors if they are possibly caused by the model cache itself. | |
| BASEROW\_STORAGE\_USAGE\_JOB\_CRONTAB | The crontab controlling when the file usage job runs when enabled in the settings page | 0 0 * * * |
| BASEROW\_ROW\_COUNT\_JOB\_CRONTAB | The crontab controlling when the row counting job runs when enabled in the settings page | 0 3 * * * |
| | | |
| DJANGO\_SETTINGS\_MODULE | **INTERNAL** The settings python module to load when starting up the Backend django server. You shouldn't need to set this yourself unless you are customizing the settings manually. | |
| | | |
| BASEROW\_BACKEND\_BIND\_ADDRESS | **INTERNAL** The address that Baserow's backend service will bind to. | |
| BASEROW\_BACKEND\_PORT | **INTERNAL** Controls which port the Baserow backend service binds to. | |
| BASEROW\_WEBFRONTEND\_BIND\_ADDRESS | **INTERNAL** The address that Baserow's web-frontend service will bind to. | |
| BASEROW\_INITIAL\_CREATE\_SYNC\_TABLE\_DATA\_LIMIT | The maximum number of rows you can import in a synchronous way | 5000 |
| BASEROW\_MAX\_ROW\_REPORT\_ERROR\_COUNT | The maximum row error count tolerated before a file import fails. Below this threshold the import continues and the non-failing rows are imported; once exceeded, no rows are imported at all. | 30 |
| BASEROW\_ROW\_HISTORY\_CLEANUP\_INTERVAL\_MINUTES | Sets the interval for periodic clean up check of the row edit history in minutes. | 30 |
| BASEROW\_ROW\_HISTORY\_RETENTION\_DAYS | The number of days that the row edit history will be kept. | 180 |
| BASEROW\_ICAL\_VIEW\_MAX\_EVENTS | The maximum number of events returned from the ical feed endpoint. An empty value means no limit. | |
| BASEROW\_ENTERPRISE\_AUDIT\_LOG\_CLEANUP\_INTERVAL_MINUTES | Sets the interval for periodic clean up check of the enterprise audit log in minutes. | 30 |
| BASEROW\_ENTERPRISE\_AUDIT\_LOG\_RETENTION\_DAYS | The number of days that the enterprise audit log will be kept. | 365 |
| BASEROW\_ENTERPRISE\_PERIODIC\_DATA\_SYNC\_CHECK\_INTERVAL\_MINUTES | How often, in minutes, an async task runs to check whether there are periodic data syncs that must run. It's safe to run this task frequently because it works in a non-blocking way. | 1 |
| BASEROW\_ENTERPRISE\_MAX\_PERIODIC\_DATA\_SYNC\_CONSECUTIVE\_ERRORS | The maximum number of consecutive periodic data sync errors before the periodic sync is automatically deactivated. | 4 |
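Taken together, these two settings control how often the due-check task runs and when a repeatedly failing periodic sync gets switched off. A minimal sketch of the deactivation rule only (an illustration; the actual logic lives in `EnterpriseDataSyncHandler.sync_periodic_data_sync` later in this commit):

# Illustration of the consecutive-error cut-off; the threshold comes from
# BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS (default 4).
MAX_CONSECUTIVE_ERRORS = 4

def record_sync_result(periodic_data_sync, failed: bool) -> None:
    if failed:
        periodic_data_sync.consecutive_failed_count += 1
        if periodic_data_sync.consecutive_failed_count >= MAX_CONSECUTIVE_ERRORS:
            # Stop retrying a sync that keeps failing every interval.
            periodic_data_sync.automatically_deactivated = True
    elif periodic_data_sync.consecutive_failed_count > 0:
        # A successful run proves the sync works again, so reset the counter.
        periodic_data_sync.consecutive_failed_count = 0
    periodic_data_sync.save()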
### Backend Application Builder Configuration

View file

@@ -0,0 +1,14 @@
from rest_framework import serializers
from baserow_enterprise.data_sync.models import PeriodicDataSyncInterval
class PeriodicDataSyncIntervalSerializer(serializers.ModelSerializer):
class Meta:
model = PeriodicDataSyncInterval
fields = (
"interval",
"when",
"automatically_deactivated",
)
extra_kwargs = {"automatically_deactivated": {"read_only": True}}

View file

@@ -0,0 +1,13 @@
from django.urls import re_path
from .views import PeriodicDataSyncIntervalView
app_name = "baserow_enterprise.api.data_sync"
urlpatterns = [
re_path(
r"(?P<data_sync_id>[0-9]+)/periodic-interval/$",
PeriodicDataSyncIntervalView.as_view(),
name="periodic_interval",
),
]

View file

@@ -0,0 +1,142 @@
from django.db import transaction
from baserow_premium.license.handler import LicenseHandler
from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from baserow.api.decorators import map_exceptions, validate_body
from baserow.api.errors import ERROR_USER_NOT_IN_GROUP
from baserow.api.schemas import get_error_schema
from baserow.contrib.database.api.data_sync.errors import ERROR_DATA_SYNC_DOES_NOT_EXIST
from baserow.contrib.database.data_sync.exceptions import DataSyncDoesNotExist
from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.data_sync.models import DataSync
from baserow.contrib.database.data_sync.operations import (
GetIncludingPublicValuesOperationType,
)
from baserow.core.action.registries import action_type_registry
from baserow.core.exceptions import UserNotInWorkspace
from baserow.core.handler import CoreHandler
from baserow_enterprise.data_sync.actions import (
UpdatePeriodicDataSyncIntervalActionType,
)
from baserow_enterprise.data_sync.models import DATA_SYNC_INTERVAL_MANUAL
from ...features import DATA_SYNC
from .serializers import PeriodicDataSyncIntervalSerializer
class PeriodicDataSyncIntervalView(APIView):
permission_classes = (IsAuthenticated,)
@extend_schema(
parameters=[
OpenApiParameter(
name="data_sync_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description="The data sync where to fetch the periodic settings for.",
),
],
tags=["Database tables"],
operation_id="get_periodic_data_sync_interval",
description=(
"Responds with the periodic data sync interval data, if the user has the "
"right permissions."
"\nThis is an **enterprise** feature."
),
responses={
200: PeriodicDataSyncIntervalSerializer,
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
404: get_error_schema(["ERROR_DATA_SYNC_DOES_NOT_EXIST"]),
},
)
@map_exceptions(
{
DataSyncDoesNotExist: ERROR_DATA_SYNC_DOES_NOT_EXIST,
UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
}
)
def get(self, request, data_sync_id):
"""Responds with the periodic data sync interval."""
data_sync = DataSyncHandler().get_data_sync(
data_sync_id,
base_queryset=DataSync.objects.select_related(
"periodic_interval", "table__database__workspace"
),
)
LicenseHandler.raise_if_workspace_doesnt_have_feature(
DATA_SYNC, data_sync.table.database.workspace
)
CoreHandler().check_permissions(
request.user,
GetIncludingPublicValuesOperationType.type,
workspace=data_sync.table.database.workspace,
context=data_sync.table,
)
if not hasattr(data_sync, "periodic_interval"):
periodic_interval = {
"interval": DATA_SYNC_INTERVAL_MANUAL,
"when": None,
"automatically_deactivated": False,
}
else:
periodic_interval = data_sync.periodic_interval
serializer = PeriodicDataSyncIntervalSerializer(periodic_interval)
return Response(serializer.data)
@extend_schema(
parameters=[
OpenApiParameter(
name="data_sync_id",
location=OpenApiParameter.PATH,
type=OpenApiTypes.INT,
description="Updates the data sync related to the provided value.",
),
],
tags=["Database tables"],
operation_id="update_periodic_data_sync_interval",
description=(
"Updates the periodic data sync interval, if the user has "
"the right permissions."
"\nThis is an **enterprise** feature."
),
request=PeriodicDataSyncIntervalSerializer,
responses={
200: PeriodicDataSyncIntervalSerializer,
400: get_error_schema(["ERROR_USER_NOT_IN_GROUP"]),
404: get_error_schema(["ERROR_DATA_SYNC_DOES_NOT_EXIST"]),
},
)
@transaction.atomic
@map_exceptions(
{
DataSyncDoesNotExist: ERROR_DATA_SYNC_DOES_NOT_EXIST,
UserNotInWorkspace: ERROR_USER_NOT_IN_GROUP,
}
)
@validate_body(PeriodicDataSyncIntervalSerializer, return_validated=True)
def patch(self, request, data_sync_id, data):
"""Updates the periodic data sync interval."""
data_sync = DataSyncHandler().get_data_sync(
data_sync_id,
base_queryset=DataSync.objects.select_for_update(
of=("self",)
).select_related("table__database__workspace"),
)
periodic_interval = action_type_registry.get_by_type(
UpdatePeriodicDataSyncIntervalActionType
).do(user=request.user, data_sync=data_sync, **data)
serializer = PeriodicDataSyncIntervalSerializer(periodic_interval)
return Response(serializer.data)
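For reference, a sketch of how a client might exercise these endpoints over HTTP. The /api/data-sync/ prefix is an assumption based on the enterprise url registration below; the host, token, and data sync ID are placeholders:

import requests

BASE = "https://baserow.example.com/api/data-sync"  # assumed mount point
HEADERS = {"Authorization": "JWT <token>"}  # placeholder JWT token

# Read the periodic interval settings of data sync 42.
response = requests.get(f"{BASE}/42/periodic-interval/", headers=HEADERS)
print(response.json())
# e.g. {"interval": "MANUAL", "when": None, "automatically_deactivated": False}

# Switch it to a daily sync at 12:10.
response = requests.patch(
    f"{BASE}/42/periodic-interval/",
    json={"interval": "DAILY", "when": "12:10:00"},
    headers=HEADERS,
)
print(response.json())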

View file

@@ -2,6 +2,7 @@ from django.urls import include, path
from .admin import urls as admin_urls
from .audit_log import urls as audit_log_urls
from .data_sync import urls as data_sync_urls
from .role import urls as role_urls
from .secure_file_serve import urls as secure_file_serve_urls
from .teams import urls as teams_urls
@@ -14,4 +15,5 @@ urlpatterns = [
path("admin/", include(admin_urls, namespace="admin")),
path("audit-log/", include(audit_log_urls, namespace="audit_log")),
path("files/", include(secure_file_serve_urls, namespace="files")),
path("data-sync/", include(data_sync_urls, namespace="data_sync")),
]

View file

@@ -204,6 +204,12 @@ class BaserowEnterpriseConfig(AppConfig):
data_sync_type_registry.register(GitLabIssuesDataSyncType())
data_sync_type_registry.register(HubspotContactsDataSyncType())
from baserow_enterprise.data_sync.actions import (
UpdatePeriodicDataSyncIntervalActionType,
)
action_type_registry.register(UpdatePeriodicDataSyncIntervalActionType())
# Create default roles
post_migrate.connect(sync_default_roles_after_migrate, sender=self)

View file

@@ -66,3 +66,12 @@ def setup(settings):
] = "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
settings.BASEROW_SERVE_FILES_THROUGH_BACKEND = serve_files_through_backend
settings.BASEROW_ENTERPRISE_PERIODIC_DATA_SYNC_CHECK_INTERVAL_MINUTES = int(
os.getenv("BASEROW_ENTERPRISE_PERIODIC_DATA_SYNC_CHECK_INTERVAL_MINUTES", "")
or 1
)
settings.BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS = int(
os.getenv("BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS", "")
or 4
)

View file

@@ -0,0 +1,80 @@
import dataclasses
from datetime import time
from django.contrib.auth.models import AbstractUser
from django.utils.translation import gettext_lazy as _
from baserow.contrib.database.action.scopes import DATABASE_ACTION_CONTEXT
from baserow.contrib.database.data_sync.models import DataSync
from baserow.core.action.registries import (
ActionScopeStr,
ActionType,
ActionTypeDescription,
)
from baserow.core.action.scopes import ApplicationActionScopeType
from baserow_enterprise.data_sync.handler import EnterpriseDataSyncHandler
class UpdatePeriodicDataSyncIntervalActionType(ActionType):
type = "update_periodic_data_sync_interval"
description = ActionTypeDescription(
_("Update periodic data sync interval"),
_('Data sync table "%(table_name)s" (%(table_id)s) updated'),
DATABASE_ACTION_CONTEXT,
)
analytics_params = [
"database_id",
"table_id",
"data_sync_id",
"interval",
"when",
]
@dataclasses.dataclass
class Params:
database_id: int
database_name: str
table_id: int
table_name: str
data_sync_id: int
interval: str
when: str
@classmethod
def do(
cls,
user: AbstractUser,
data_sync: DataSync,
interval: str,
when: time,
) -> DataSync:
data_sync = data_sync.specific
periodic_interval = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval=interval,
when=when,
)
)
table = data_sync.table
database = table.database
workspace = database.workspace
params = cls.Params(
database.id,
database.name,
table.id,
table.name,
data_sync.id,
interval,
when.strftime("%H:%M:%S"),
)
cls.register_action(user, params, cls.scope(database.id), workspace=workspace)
return periodic_interval
@classmethod
def scope(cls, database_id) -> ActionScopeStr:
return ApplicationActionScopeType.value(database_id)

View file

@@ -0,0 +1,226 @@
from datetime import datetime, time
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.core.cache import cache
from django.db import transaction
from django.db.models import Q
from django.utils import timezone
from baserow_premium.license.handler import LicenseHandler
from loguru import logger
from baserow.contrib.database.data_sync.exceptions import (
SyncDataSyncTableAlreadyRunning,
)
from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.data_sync.models import DataSync
from baserow.contrib.database.data_sync.operations import SyncTableOperationType
from baserow.core.handler import CoreHandler
from baserow_enterprise.data_sync.models import (
DATA_SYNC_INTERVAL_DAILY,
DATA_SYNC_INTERVAL_HOURLY,
PeriodicDataSyncInterval,
)
from baserow_enterprise.features import DATA_SYNC
from .tasks import sync_periodic_data_sync
class EnterpriseDataSyncHandler:
@classmethod
def update_periodic_data_sync_interval(
cls,
user: AbstractUser,
data_sync: DataSync,
interval: str,
when: time,
) -> PeriodicDataSyncInterval:
"""
Updates the periodic configuration of a data sync.
:param user: The user on whose behalf the periodic configuration is updated.
This user is saved on the object, and is used when syncing the data sync.
:param data_sync: The data sync for which the periodic configuration must be
updated.
:param interval: Accepts either `DATA_SYNC_INTERVAL_DAILY` or
`DATA_SYNC_INTERVAL_HOURLY` indicating how frequently the data sync must be
updated.
:param when: Indicates when the data sync must periodically be synced.
:return: The created or updated periodic data sync object.
"""
LicenseHandler.raise_if_workspace_doesnt_have_feature(
DATA_SYNC, data_sync.table.database.workspace
)
CoreHandler().check_permissions(
user,
SyncTableOperationType.type,
workspace=data_sync.table.database.workspace,
context=data_sync.table,
)
periodic_data_sync, _ = PeriodicDataSyncInterval.objects.update_or_create(
data_sync=data_sync,
defaults={
"interval": interval,
"when": when,
"authorized_user": user,
"automatically_deactivated": False,
},
)
return periodic_data_sync
@classmethod
def call_periodic_data_sync_syncs_that_are_due(cls):
"""
This method is typically called by an async task. It loops over all daily and
hourly periodic data syncs that are due to be synced, and fires a task for each
to sync it.
"""
now = timezone.now()
now_time = time(
now.hour, now.minute, now.second, now.microsecond, tzinfo=now.tzinfo
)
beginning_of_day = datetime(
now.year, now.month, now.day, 0, 0, 0, 0, tzinfo=now.tzinfo
)
beginning_of_hour = datetime(
now.year, now.month, now.day, now.hour, 0, 0, 0, tzinfo=now.tzinfo
)
is_null = Q(last_periodic_sync__isnull=True)
all_to_trigger = (
PeriodicDataSyncInterval.objects.filter(
Q(
# If the interval is daily, the last periodic sync timestamp must be
# yesterday or earlier, or None meaning it hasn't been executed yet.
is_null | Q(last_periodic_sync__lt=beginning_of_day),
interval=DATA_SYNC_INTERVAL_DAILY,
)
| Q(
# If the interval is hourly, the last periodic data sync timestamp
# must be before the start of the current hour, or None meaning it hasn't been
# executed yet.
is_null | Q(last_periodic_sync__lt=beginning_of_hour),
interval=DATA_SYNC_INTERVAL_HOURLY,
),
# Skip deactivated periodic data sync because they're not working
# anymore.
automatically_deactivated=False,
# The configured `when` time must be lower than or equal to the current
# time because the data sync must be triggered at the time of day the
# user desires.
when__lte=now_time,
).select_related("data_sync__table__database__workspace")
# Take a lock on the periodic data sync because the `last_periodic_sync`
# must be updated immediately. This will make sure that if this method is
# called frequently, it doesn't trigger the same sync twice. If self or `data_sync` is
# locked, then we can skip the sync for now because the data sync is already
# being updated. It doesn't matter if we skip it because it will then be
# picked up the next time this method is called.
.select_for_update(
of=(
"self",
"data_sync",
),
skip_locked=True,
)
)
updated_periodic_data_sync = []
for periodic_data_sync in all_to_trigger:
workspace_has_feature = LicenseHandler.workspace_has_feature(
DATA_SYNC, periodic_data_sync.data_sync.table.database.workspace
)
if workspace_has_feature:
lock_key = DataSyncHandler().get_table_sync_lock_key(
periodic_data_sync.data_sync_id
)
sync_is_running = cache.get(lock_key) is not None
periodic_data_sync.last_periodic_sync = now
updated_periodic_data_sync.append(periodic_data_sync)
# If the sync is already running because the lock exists,
# then nothing should happen because the sync has already happened
# within the correct periodic timeframe. We do want to update the
# `last_periodic_sync`, so that it doesn't try again on the next run.
if sync_is_running:
logger.info(
f"Skipping periodic data sync of data sync "
f"{periodic_data_sync.data_sync_id} because the sync already "
f"running."
)
else:
transaction.on_commit(
    # Bind the ID as a default argument so each callback keeps its own
    # value; a bare closure would late-bind to the last loop iteration.
    lambda pds_id=periodic_data_sync.id: sync_periodic_data_sync.delay(pds_id)
)
# Update the last periodic sync so the periodic sync won't be triggered the next
# time this method is called.
if len(updated_periodic_data_sync) > 0:
PeriodicDataSyncInterval.objects.bulk_update(
updated_periodic_data_sync, fields=["last_periodic_sync"]
)
@classmethod
def sync_periodic_data_sync(cls, periodic_data_sync_id):
"""
Syncs the data sync of a periodic data sync. This is typically executed by the
async task `sync_periodic_data_sync`.
:param periodic_data_sync_id: The ID of the periodic data sync object that must
be synced. Note that this is not equal to the data sync ID.
:return: True if the data sync ran, even if it wasn't successful. False if it
never ran.
"""
try:
periodic_data_sync = (
PeriodicDataSyncInterval.objects.select_related("data_sync")
.select_for_update(of=("self",))
.get(id=periodic_data_sync_id, automatically_deactivated=False)
)
except PeriodicDataSyncInterval.DoesNotExist:
logger.info(
f"Skipping periodic data sync {periodic_data_sync_id} because it "
f"doesn't exist or has been deactivated."
)
return False
try:
data_sync = DataSyncHandler().sync_data_sync_table(
periodic_data_sync.authorized_user,
periodic_data_sync.data_sync.specific,
)
except SyncDataSyncTableAlreadyRunning:
# If the sync has started in the meantime, then we don't want to do
# anything because the sync already ran.
logger.info(
f"Skipping periodic data sync of data sync "
f"{periodic_data_sync.data_sync_id} because the sync is running."
)
return False
if data_sync.last_error:
# If the data sync has an error, then something went wrong during execution,
# and we need to increase the consecutive count so that when the max errors
# is reached, we can deactivate it. This is to protect the system from
# periodically syncing a data sync that doesn't work anyway.
periodic_data_sync.consecutive_failed_count += 1
if (
periodic_data_sync.consecutive_failed_count
>= settings.BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS
):
periodic_data_sync.automatically_deactivated = True
periodic_data_sync.save()
elif periodic_data_sync.consecutive_failed_count > 0:
# Once it runs successfully, the consecutive count can be reset because we
# now know it actually works, and it doesn't have to be deactivated anymore.
periodic_data_sync.consecutive_failed_count = 0
periodic_data_sync.save()
return True
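The queryset filter in call_periodic_data_sync_syncs_that_are_due encodes a fairly simple rule. The same predicate as a standalone sketch (a pure-Python illustration; the real implementation keeps it in SQL so the rows can be locked with select_for_update):

from datetime import datetime, time
from typing import Optional

def is_due(
    interval: str,
    when: time,
    last_periodic_sync: Optional[datetime],
    now: datetime,
) -> bool:
    # Due when the configured time of day has passed and the last run
    # happened before the current day (DAILY) or hour (HOURLY) started.
    if now.time() < when:
        return False
    if interval == "DAILY":
        window_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    elif interval == "HOURLY":
        window_start = now.replace(minute=0, second=0, microsecond=0)
    else:
        return False  # MANUAL syncs are never triggered periodically.
    return last_periodic_sync is None or last_periodic_sync < window_start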

View file

@@ -8,6 +8,48 @@ from baserow.contrib.database.views.models import View
User = get_user_model()
DATA_SYNC_INTERVAL_MANUAL = "MANUAL"
DATA_SYNC_INTERVAL_DAILY = "DAILY"
DATA_SYNC_INTERVAL_HOURLY = "HOURLY"
class PeriodicDataSyncInterval(models.Model):
data_sync = models.OneToOneField(
DataSync,
on_delete=models.CASCADE,
help_text="The periodic data sync.",
related_name="periodic_interval",
)
last_periodic_sync = models.DateTimeField(
null=True, help_text="Timestamp when the table was last periodically synced."
)
interval = models.CharField(
choices=(
(DATA_SYNC_INTERVAL_MANUAL, DATA_SYNC_INTERVAL_MANUAL),
(DATA_SYNC_INTERVAL_DAILY, DATA_SYNC_INTERVAL_DAILY),
(DATA_SYNC_INTERVAL_HOURLY, DATA_SYNC_INTERVAL_HOURLY),
),
default=DATA_SYNC_INTERVAL_MANUAL,
)
when = models.TimeField()
automatically_deactivated = models.BooleanField(
default=False,
help_text="Indicates whether the periodic data sync has been deactivated.",
)
consecutive_failed_count = models.SmallIntegerField(
default=0,
help_text="The number of failed sync data sync operations that have failed. "
"This is used to deactivate the periodic sync if it keeps failing.",
)
authorized_user = models.ForeignKey(
User,
on_delete=models.SET_NULL,
null=True,
help_text="The user on whose behalf the data is periodically synced."
"Automatically set when the interval changes.",
)
class LocalBaserowTableDataSync(DataSync):
source_table = models.ForeignKey(
Table,

View file

@@ -0,0 +1,31 @@
from datetime import timedelta
from django.conf import settings
from django.db import transaction
from baserow.config.celery import app
@app.task(bind=True, queue="export")
def call_periodic_data_sync_syncs_that_are_due(self):
from baserow_enterprise.data_sync.handler import EnterpriseDataSyncHandler
with transaction.atomic():
EnterpriseDataSyncHandler().call_periodic_data_sync_syncs_that_are_due()
@app.on_after_finalize.connect
def setup_periodic_enterprise_data_sync_tasks(sender, **kwargs):
every = timedelta(
minutes=settings.BASEROW_ENTERPRISE_PERIODIC_DATA_SYNC_CHECK_INTERVAL_MINUTES
)
sender.add_periodic_task(every, call_periodic_data_sync_syncs_that_are_due.s())
@app.task(bind=True, queue="export")
def sync_periodic_data_sync(self, periodic_data_sync_id):
from baserow_enterprise.data_sync.handler import EnterpriseDataSyncHandler
with transaction.atomic():
EnterpriseDataSyncHandler().sync_periodic_data_sync(periodic_data_sync_id)
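Both tasks run on the export queue. For debugging, the due-check can also be kicked off by hand from a Django shell (a sketch; assumes a configured Celery app and broker):

from baserow_enterprise.data_sync.tasks import (
    call_periodic_data_sync_syncs_that_are_due,
)

# Queue the due-check on the export queue...
call_periodic_data_sync_syncs_that_are_due.delay()
# ...or run it eagerly in the current process.
call_periodic_data_sync_syncs_that_are_due.apply()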

View file

@@ -0,0 +1,81 @@
# Generated by Django 5.0.9 on 2025-01-14 20:35
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("baserow_enterprise", "0036_localbaserowtabledatasync_source_table_view_id"),
("database", "0175_formviewfieldoptions_include_all_select_options_and_more"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="PeriodicDataSyncInterval",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"last_periodic_sync",
models.DateTimeField(
help_text="Timestamp when the table was last periodically synced.",
null=True,
),
),
(
"interval",
models.CharField(
choices=[
("MANUAL", "MANUAL"),
("DAILY", "DAILY"),
("HOURLY", "HOURLY"),
],
default="MANUAL",
),
),
("when", models.TimeField()),
(
"automatically_deactivated",
models.BooleanField(
default=False,
help_text="Indicates whether the periodic data sync has been deactivated.",
),
),
(
"consecutive_failed_count",
models.SmallIntegerField(
default=0,
help_text="The number of failed sync data sync operations that have failed. This is used to deactivate the periodic sync if it keeps failing.",
),
),
(
"authorized_user",
models.ForeignKey(
help_text="The user on whose behalf the data is periodically synced.Automatically set when the interval changes.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
(
"data_sync",
models.OneToOneField(
help_text="The periodic data sync.",
on_delete=django.db.models.deletion.CASCADE,
related_name="periodic_interval",
to="database.datasync",
),
),
],
),
]

View file

@@ -11,6 +11,10 @@ from baserow_enterprise.audit_log.tasks import (
clean_up_audit_log_entries,
setup_periodic_audit_log_tasks,
)
from baserow_enterprise.data_sync.tasks import (
call_periodic_data_sync_syncs_that_are_due,
sync_periodic_data_sync,
)
@app.task(bind=True, queue="export")
@@ -64,4 +68,9 @@ def unsubscribe_subject_from_tables_currently_subscribed_to_task(
)
__all__ = ["clean_up_audit_log_entries", "setup_periodic_audit_log_tasks"]
__all__ = [
"clean_up_audit_log_entries",
"setup_periodic_audit_log_tasks",
"sync_periodic_data_sync",
"call_periodic_data_sync_syncs_that_are_due",
]

View file

@@ -0,0 +1,198 @@
from datetime import time
from django.shortcuts import reverse
from django.test.utils import override_settings
import pytest
from rest_framework.status import HTTP_200_OK, HTTP_402_PAYMENT_REQUIRED
from baserow_enterprise.audit_log.models import AuditLogEntry
from baserow_enterprise.data_sync.handler import EnterpriseDataSyncHandler
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_get_existing_periodic_data_sync_interval(api_client, enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
response = api_client.get(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
assert response.json() == {
"interval": "DAILY",
"when": "12:10:01.000001",
"automatically_deactivated": False,
}
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_get_not_existing_periodic_data_sync_interval(
api_client, enterprise_data_fixture
):
enterprise_data_fixture.enable_enterprise()
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
response = api_client.get(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
assert response.json() == {
"interval": "MANUAL",
"when": None,
"automatically_deactivated": False,
}
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_get_periodic_data_sync_interval_without_license(
api_client, enterprise_data_fixture
):
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
response = api_client.get(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_402_PAYMENT_REQUIRED
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync(api_client, enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
response = api_client.patch(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
{"interval": "HOURLY", "when": "12:10:01.000001"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
assert response.json() == {
"interval": "HOURLY",
"when": "12:10:01.000001",
"automatically_deactivated": False,
}
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_without_license(api_client, enterprise_data_fixture):
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
response = api_client.patch(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
{"interval": "HOURLY", "when": "12:10:01.000001"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_402_PAYMENT_REQUIRED
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_automatically_deactivated_false(
api_client, enterprise_data_fixture
):
enterprise_data_fixture.enable_enterprise()
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
periodic_data_sync.automatically_deactivated = True
periodic_data_sync.save()
response = api_client.patch(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
{
"interval": "HOURLY",
"when": "12:10:01.000001",
},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
assert response.json()["automatically_deactivated"] is False
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_audit_log_created(
api_client, enterprise_data_fixture
):
enterprise_data_fixture.enable_enterprise()
user, token = enterprise_data_fixture.create_user_and_token()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
response = api_client.patch(
reverse(
f"api:enterprise:data_sync:periodic_interval",
kwargs={"data_sync_id": data_sync.id},
),
{"interval": "HOURLY", "when": "12:10:01.000001"},
format="json",
HTTP_AUTHORIZATION=f"JWT {token}",
)
assert response.status_code == HTTP_200_OK
audit_log_entry = AuditLogEntry.objects.get(
action_type="update_periodic_data_sync_interval"
)
assert audit_log_entry.action_params == {
"when": "12:10:01",
"interval": "HOURLY",
"table_id": data_sync.table_id,
"table_name": data_sync.table.name,
"database_id": data_sync.table.database_id,
"data_sync_id": data_sync.id,
"database_name": data_sync.table.database.name,
}

View file

@@ -0,0 +1,669 @@
from datetime import datetime, time, timezone
from unittest.mock import patch
from django.core.cache import cache
from django.db import transaction
from django.test.utils import override_settings
from django.utils import timezone as django_timezone
import pytest
import responses
from baserow_premium.license.exceptions import FeaturesNotAvailableError
from baserow_premium.license.models import License
from freezegun.api import freeze_time
from baserow.contrib.database.data_sync.handler import DataSyncHandler
from baserow.contrib.database.data_sync.models import DataSync
from baserow.core.exceptions import UserNotInWorkspace
from baserow_enterprise.data_sync.handler import EnterpriseDataSyncHandler
from baserow_enterprise.data_sync.models import PeriodicDataSyncInterval
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_interval_licence_check(enterprise_data_fixture):
user = enterprise_data_fixture.create_user()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
with pytest.raises(FeaturesNotAvailableError):
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="MANUAL",
when=time(hour=12, minute=10),
)
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_interval_check_permissions(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
data_sync = enterprise_data_fixture.create_ical_data_sync()
with pytest.raises(UserNotInWorkspace):
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="MANUAL",
when=time(hour=12, minute=10),
)
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_interval_create(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
periodic_data_sync_interval = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
)
fetched_periodic_data_sync_interval = PeriodicDataSyncInterval.objects.all().first()
assert periodic_data_sync_interval.id == fetched_periodic_data_sync_interval.id
assert (
    periodic_data_sync_interval.data_sync_id
    == fetched_periodic_data_sync_interval.data_sync_id
    == data_sync.id
)
assert (
    periodic_data_sync_interval.interval
    == fetched_periodic_data_sync_interval.interval
    == "DAILY"
)
assert (
    periodic_data_sync_interval.when
    == fetched_periodic_data_sync_interval.when
    == time(hour=12, minute=10, second=1, microsecond=1)
)
assert periodic_data_sync_interval.authorized_user_id == user.id
assert periodic_data_sync_interval.automatically_deactivated is False
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_interval_update(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
periodic_data_sync_interval = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="HOURLY",
when=time(hour=14, minute=12, second=1, microsecond=1),
)
)
fetched_periodic_data_sync_interval = PeriodicDataSyncInterval.objects.all().first()
assert periodic_data_sync_interval.id == fetched_periodic_data_sync_interval.id
assert (
    periodic_data_sync_interval.data_sync_id
    == fetched_periodic_data_sync_interval.data_sync_id
    == data_sync.id
)
assert (
    periodic_data_sync_interval.interval
    == fetched_periodic_data_sync_interval.interval
    == "HOURLY"
)
assert (
    periodic_data_sync_interval.when
    == fetched_periodic_data_sync_interval.when
    == time(hour=14, minute=12, second=1, microsecond=1)
)
assert periodic_data_sync_interval.authorized_user_id == user.id
assert periodic_data_sync_interval.automatically_deactivated is False
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_update_periodic_data_sync_interval_update_automatically_disabled(
enterprise_data_fixture,
):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
data_sync = enterprise_data_fixture.create_ical_data_sync(user=user)
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
periodic_data_sync.automatically_deactivated = True
periodic_data_sync.save()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=data_sync,
interval="HOURLY",
when=time(hour=14, minute=12, second=1, microsecond=1),
)
assert periodic_data_sync.automatically_deactivated is False
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_call_daily_periodic_data_sync_syncs(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
not_yet_executed_1 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
not_yet_executed_1.refresh_from_db()
not_yet_executed_2 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=30, second=1, microsecond=1),
)
already_executed_today_1 = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
)
already_executed_today_1.last_periodic_sync = datetime(
2024, 10, 10, 11, 0, 1, 1, tzinfo=timezone.utc
)
already_executed_today_1.save()
already_executed_yesterday_1 = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
)
already_executed_yesterday_1.last_periodic_sync = datetime(
2024, 10, 9, 11, 0, 1, 1, tzinfo=timezone.utc
)
already_executed_yesterday_1.save()
with freeze_time("2024-10-10T12:15:00.00Z") as frozen:
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
frozen_datetime = django_timezone.now()
not_yet_executed_1.refresh_from_db()
# executed because not yet executed before and due.
assert not_yet_executed_1.last_periodic_sync == frozen_datetime
not_yet_executed_2.refresh_from_db()
# skipped because not yet due
assert not_yet_executed_2.last_periodic_sync != frozen_datetime
already_executed_today_1.refresh_from_db()
# skipped because already executed
assert already_executed_today_1.last_periodic_sync != frozen_datetime
already_executed_yesterday_1.refresh_from_db()
# executed because it was last executed yesterday.
assert already_executed_yesterday_1.last_periodic_sync == frozen_datetime
with freeze_time("2024-10-10T12:31:00.00Z") as frozen:
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
frozen_datetime = django_timezone.now()
not_yet_executed_1.refresh_from_db()
# not executed because not yet due.
assert not_yet_executed_1.last_periodic_sync != frozen_datetime
not_yet_executed_2.refresh_from_db()
# executed because not yet executed before and due.
assert not_yet_executed_2.last_periodic_sync == frozen_datetime
already_executed_today_1.refresh_from_db()
# not executed because not yet due.
assert already_executed_today_1.last_periodic_sync != frozen_datetime
already_executed_yesterday_1.refresh_from_db()
# not executed because not yet due.
assert already_executed_yesterday_1.last_periodic_sync != frozen_datetime
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_call_hourly_periodic_data_sync_syncs(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
not_yet_executed_1 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="HOURLY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
not_yet_executed_1.refresh_from_db()
not_yet_executed_2 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="HOURLY",
when=time(hour=12, minute=30, second=1, microsecond=1),
)
already_executed_this_hour_1 = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="HOURLY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
)
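# Mark as having already synced during the current hour.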
already_executed_this_hour_1.last_periodic_sync = datetime(
2024, 10, 10, 12, 10, 1, 1, tzinfo=timezone.utc
)
already_executed_this_hour_1.save()
already_executed_last_hour_1 = (
EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="HOURLY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
)
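# Mark as last synced during the previous hour, so it is due again.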
already_executed_last_hour_1.last_periodic_sync = datetime(
2024, 10, 10, 11, 20, 1, 1, tzinfo=timezone.utc
)
already_executed_last_hour_1.save()
with freeze_time("2024-10-10T12:15:00.00Z") as frozen:
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
frozen_datetime = django_timezone.now()
not_yet_executed_1.refresh_from_db()
# executed because not yet executed before and due.
assert not_yet_executed_1.last_periodic_sync == frozen_datetime
not_yet_executed_2.refresh_from_db()
# skipped because not yet due
assert not_yet_executed_2.last_periodic_sync != frozen_datetime
already_executed_this_hour_1.refresh_from_db()
# skipped because already executed
assert already_executed_this_hour_1.last_periodic_sync != frozen_datetime
already_executed_last_hour_1.refresh_from_db()
# executed because it was last executed during the previous hour.
assert already_executed_last_hour_1.last_periodic_sync == frozen_datetime
with freeze_time("2024-10-10T12:35:00.00Z") as frozen:
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
frozen_datetime = django_timezone.now()
not_yet_executed_1.refresh_from_db()
# not executed because not yet due.
assert not_yet_executed_1.last_periodic_sync != frozen_datetime
not_yet_executed_2.refresh_from_db()
# executed because not yet executed before and due.
assert not_yet_executed_2.last_periodic_sync == frozen_datetime
already_executed_this_hour_1.refresh_from_db()
# not executed because not yet due.
assert already_executed_this_hour_1.last_periodic_sync != frozen_datetime
already_executed_last_hour_1.refresh_from_db()
# not executed because not yet due.
assert already_executed_last_hour_1.last_periodic_sync != frozen_datetime
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow_enterprise.data_sync.handler.sync_periodic_data_sync")
def test_call_periodic_data_sync_syncs_starts_task(
mock_sync_periodic_data_sync, enterprise_data_fixture
):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
not_yet_executed_1 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
not_yet_executed_1.refresh_from_db()
with freeze_time("2024-10-10T12:15:00.00Z"):
with transaction.atomic():
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
mock_sync_periodic_data_sync.delay.assert_called_once()
args = mock_sync_periodic_data_sync.delay.call_args
assert args[0][0] == not_yet_executed_1.id
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_skip_automatically_deactivated_periodic_data_syncs(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
not_yet_executed_1 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
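# Delete all licenses so the enterprise feature is no longer active.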
License.objects.all().delete()
with freeze_time("2024-10-10T12:15:00.00Z"):
with transaction.atomic():
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
not_yet_executed_1.refresh_from_db()
# Should not be triggered because there was no license.
assert not_yet_executed_1.last_periodic_sync is None
@pytest.mark.django_db(transaction=True, databases=["default", "default-copy"])
@override_settings(DEBUG=True)
def test_skip_locked_data_syncs(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
not_yet_executed_1 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
not_yet_executed_2 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
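# Hold row locks from a second database connection to simulate a concurrently
# running periodic sync.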
with transaction.atomic(using="default-copy"):
PeriodicDataSyncInterval.objects.using("default-copy").filter(
id=not_yet_executed_1.id
).select_for_update().get()
DataSync.objects.using("default-copy").filter(
id=not_yet_executed_2.data_sync_id
).select_for_update().get()
with freeze_time("2024-10-10T12:15:00.00Z"):
with transaction.atomic():
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
not_yet_executed_1.refresh_from_db()
# Should not be triggered because the periodic data sync object was locked.
assert not_yet_executed_1.last_periodic_sync is None
not_yet_executed_2.refresh_from_db()
# Should not be triggered because the data sync itself was locked.
assert not_yet_executed_2.last_periodic_sync is None
@pytest.mark.django_db(transaction=True)
@override_settings(DEBUG=True)
@patch("baserow_enterprise.data_sync.handler.sync_periodic_data_sync")
def test_skip_syncing_data_syncs(mock_sync_periodic_data_sync, enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
not_yet_executed_1 = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
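# Simulate a sync that is already in progress by claiming the table sync lock.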
lock_key = DataSyncHandler().get_table_sync_lock_key(
not_yet_executed_1.data_sync_id
)
cache.add(lock_key, "locked", timeout=2)
with freeze_time("2024-10-10T12:15:00.00Z"):
with transaction.atomic():
EnterpriseDataSyncHandler.call_periodic_data_sync_syncs_that_are_due()
not_yet_executed_1.refresh_from_db()
# last_periodic_sync should still be updated even when the data sync is already running.
assert not_yet_executed_1.last_periodic_sync is not None
# The sync task should not be started when the data sync is already running.
mock_sync_periodic_data_sync.delay.assert_not_called()
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_sync_periodic_data_sync_deactivated(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
periodic_data_sync.automatically_deactivated = True
periodic_data_sync.save()
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id)
is False
)
periodic_data_sync.data_sync.refresh_from_db()
assert periodic_data_sync.data_sync.last_sync is None
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_sync_periodic_data_sync_already_syncing(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
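# Claim the table sync lock so the handler sees the data sync as already syncing.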
lock_key = DataSyncHandler().get_table_sync_lock_key(
periodic_data_sync.data_sync_id
)
cache.add(lock_key, "locked", timeout=2)
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id)
is False
)
periodic_data_sync.data_sync.refresh_from_db()
assert periodic_data_sync.data_sync.last_sync is None
@pytest.mark.django_db
@override_settings(DEBUG=True)
@responses.activate
def test_sync_periodic_data_sync_consecutive_failed_count_increases(
enterprise_data_fixture,
):
responses.add(
responses.GET,
"https://baserow.io/ical.ics",
status=404,
body="",
)
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id) is True
)
periodic_data_sync.refresh_from_db()
assert periodic_data_sync.consecutive_failed_count == 1
@pytest.mark.django_db
@override_settings(
DEBUG=True, BASEROW_ENTERPRISE_MAX_PERIODIC_DATA_SYNC_CONSECUTIVE_ERRORS=2
)
@responses.activate
def test_sync_periodic_data_sync_consecutive_failed_count_reset(
enterprise_data_fixture,
):
responses.add(
responses.GET,
"https://baserow.io/ical.ics",
status=200,
body="""BEGIN:VCALENDAR
VERSION:2.0
END:VCALENDAR""",
)
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(
user=user, ical_url="https://baserow.io/ical.ics"
),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
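# The counter starts at one recorded failure; a successful sync should reset it to zero.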
periodic_data_sync.consecutive_failed_count = 1
periodic_data_sync.save()
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id) is True
)
periodic_data_sync.refresh_from_db()
assert periodic_data_sync.consecutive_failed_count == 0
@pytest.mark.django_db
@override_settings(DEBUG=True)
@responses.activate
def test_sync_periodic_data_sync_deactivated_max_failure(enterprise_data_fixture):
responses.add(
responses.GET,
"https://baserow.io/ical.ics",
status=404,
body="",
)
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
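# Already at three consecutive failures; one more failed sync should push the
# count past the limit and automatically deactivate the periodic sync.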
periodic_data_sync.consecutive_failed_count = 3
periodic_data_sync.save()
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id) is True
)
periodic_data_sync.refresh_from_db()
assert periodic_data_sync.consecutive_failed_count == 4
assert periodic_data_sync.automatically_deactivated is True
@pytest.mark.django_db
@override_settings(DEBUG=True)
def test_sync_periodic_data_sync_authorized_user_is_none(enterprise_data_fixture):
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(user=user),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
periodic_data_sync.authorized_user = None
periodic_data_sync.save()
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id) is True
)
periodic_data_sync.refresh_from_db()
assert periodic_data_sync.consecutive_failed_count == 1
@pytest.mark.django_db
@override_settings(DEBUG=True)
@responses.activate
def test_sync_periodic_data_sync(enterprise_data_fixture):
responses.add(
responses.GET,
"https://baserow.io/ical.ics",
status=200,
body="""BEGIN:VCALENDAR
VERSION:2.0
END:VCALENDAR""",
)
enterprise_data_fixture.enable_enterprise()
user = enterprise_data_fixture.create_user()
periodic_data_sync = EnterpriseDataSyncHandler.update_periodic_data_sync_interval(
user=user,
data_sync=enterprise_data_fixture.create_ical_data_sync(
user=user, ical_url="https://baserow.io/ical.ics"
),
interval="DAILY",
when=time(hour=12, minute=10, second=1, microsecond=1),
)
assert (
EnterpriseDataSyncHandler.sync_periodic_data_sync(periodic_data_sync.id) is True
)
periodic_data_sync.data_sync.refresh_from_db()
assert periodic_data_sync.data_sync.last_sync is not None
assert periodic_data_sync.data_sync.last_error is None

View file

@ -0,0 +1,175 @@
<template>
<div>
<h2 class="box__title">
{{ $t('configureDataSyncPeriodicInterval.title') }}
</h2>
<div v-if="hasPermissions">
<div v-if="fetchLoading">
<div class="loading"></div>
</div>
<div v-if="!fetchLoaded">
<Error :error="error"></Error>
</div>
<div v-else-if="fetchLoaded">
<Error :error="error"></Error>
<Alert
v-if="periodicInterval.automatically_deactivated"
type="info-primary"
>
<template #title>{{
$t('configureDataSyncPeriodicInterval.deactivatedTitle')
}}</template>
<p>{{ $t('configureDataSyncPeriodicInterval.deactivatedText') }}</p>
<template #actions>
<Button
type="primary"
size="small"
:loading="saveLoading"
@click="activate"
>{{ $t('configureDataSyncPeriodicInterval.activate') }}</Button
>
</template>
</Alert>
<DataSyncPeriodicIntervalForm
v-if="!periodicInterval.automatically_deactivated"
:default-values="periodicInterval"
:disabled="saveLoading"
@submitted="submitted"
@values-changed="saved = false"
>
<div class="flex align-items-center justify-content-end">
<Button
v-if="!saved"
type="primary"
size="large"
:loading="saveLoading"
:disabled="saveLoading"
>
{{ $t('action.save') }}
</Button>
<template v-if="saved">
<strong class="color-success">{{
$t('configureDataSyncPeriodicInterval.saved')
}}</strong>
<Button type="secondary" size="large" @click="$emit('hide')">
{{ $t('action.hide') }}
</Button>
</template>
</div>
</DataSyncPeriodicIntervalForm>
</div>
</div>
<div v-else>
<div class="placeholder">
<div class="placeholder__icon">
<i class="iconoir-timer"></i>
</div>
<p class="placeholder__content">
{{ $t('configureDataSyncPeriodicInterval.enterprise') }}
</p>
<div class="placeholder__action">
<Button
type="primary"
icon="iconoir-no-lock"
@click="$refs.enterpriseModal.show()"
>
{{ $t('configureDataSyncPeriodicInterval.more') }}
</Button>
</div>
</div>
<EnterpriseModal
ref="enterpriseModal"
:name="$t('configureDataSyncPeriodicInterval.title')"
:workspace="database.workspace"
></EnterpriseModal>
</div>
</div>
</template>
<script>
import EnterpriseDataSyncService from '@baserow_enterprise/services/dataSync'
import error from '@baserow/modules/core/mixins/error'
import DataSyncPeriodicIntervalForm from '@baserow_enterprise/components/dataSync/DataSyncPeriodicIntervalForm'
import EnterpriseFeatures from '@baserow_enterprise/features'
import EnterpriseModal from '@baserow_enterprise/components/EnterpriseModal'
import { clone } from '@baserow/modules/core/utils/object'
export default {
name: 'ConfigureDataSyncPeriodicInterval',
components: { EnterpriseModal, DataSyncPeriodicIntervalForm },
mixins: [error],
props: {
database: {
type: Object,
required: true,
},
table: {
type: Object,
required: true,
},
},
data() {
return {
fetchLoading: false,
fetchLoaded: false,
periodicInterval: {},
saveLoading: false,
saved: false,
}
},
computed: {
hasPermissions() {
return this.$hasFeature(
EnterpriseFeatures.DATA_SYNC,
this.database.workspace.id
)
},
},
mounted() {
this.hideError()
this.fetchPeriodicInterval(this.table)
},
methods: {
async fetchPeriodicInterval(table) {
this.fetchLoading = true
try {
const { data } = await EnterpriseDataSyncService(
this.$client
).getPeriodicInterval(table.data_sync.id)
this.periodicInterval = data
this.fetchLoaded = true
} catch (error) {
this.handleError(error)
} finally {
this.fetchLoading = false
}
},
async activate() {
const values = clone(this.periodicInterval)
values.automatically_deactivated = false
// Updating the periodic interval resets automatically_deactivated to false.
await this.submitted(values)
this.periodicInterval = values
this.saved = false
},
async submitted(values) {
this.hideError()
this.saveLoading = true
try {
await EnterpriseDataSyncService(this.$client).updatePeriodicInterval(
this.table.data_sync.id,
values.interval,
values.when
)
this.saved = true
} catch (error) {
this.handleError(error)
} finally {
this.saveLoading = false
}
},
},
}
</script>

View file

@ -0,0 +1,176 @@
<template>
<form @submit.prevent="submit">
<FormGroup
:error="fieldHasErrors('interval')"
:label="$t('dataSyncPeriodicIntervalForm.intervalLabel')"
:helper-text="$t('dataSyncPeriodicIntervalForm.intervalHelper')"
required
small-label
class="margin-bottom-2"
>
<Dropdown v-model="values.interval" :disabled="disabled" size="large">
<DropdownItem
:name="$t('dataSyncPeriodicIntervalForm.manual')"
value="MANUAL"
></DropdownItem>
<DropdownItem
:name="$t('dataSyncPeriodicIntervalForm.daily')"
value="DAILY"
></DropdownItem>
<DropdownItem
:name="$t('dataSyncPeriodicIntervalForm.hourly')"
value="HOURLY"
></DropdownItem>
</Dropdown>
</FormGroup>
<template v-if="values.interval !== 'MANUAL'">
<div class="flex align-items-end">
<FormGroup
v-if="values.interval === 'DAILY'"
small-label
:label="$t('dataSyncPeriodicIntervalForm.hour')"
:error="$v.hour.$dirty && $v.hour.$error"
required
>
<FormInput
v-model="hour"
:disabled="disabled"
size="large"
type="number"
:min="0"
:max="23"
@blur="$v.hour.$touch()"
@input="updateWhen"
/>
</FormGroup>
<FormGroup
small-label
:label="$t('dataSyncPeriodicIntervalForm.minute')"
:error="$v.minute.$dirty && $v.minute.$error"
required
>
<FormInput
v-model="minute"
:disabled="disabled"
size="large"
type="number"
:min="0"
:max="59"
@blur="$v.minute.$touch()"
@input="updateWhen"
/>
</FormGroup>
<FormGroup
small-label
:label="$t('dataSyncPeriodicIntervalForm.second')"
:error="$v.second.$dirty && $v.second.$error"
required
>
<FormInput
v-model="second"
:disabled="disabled"
size="large"
type="number"
:min="0"
:max="59"
@blur="$v.second.$touch()"
@input="updateWhen"
/>
</FormGroup>
<div class="color-neutral">
{{ timezone }}
</div>
</div>
<p class="control__helper-text">
{{ $t('dataSyncPeriodicIntervalForm.whenHelper') }}
</p>
</template>
<slot></slot>
</form>
</template>
<script>
import moment from '@baserow/modules/core/moment'
import { required, numeric, minValue, maxValue } from 'vuelidate/lib/validators'
import form from '@baserow/modules/core/mixins/form'
export default {
name: 'DataSyncPeriodicIntervalForm',
mixins: [form],
props: {
disabled: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
allowedValues: ['interval', 'when'],
values: {
interval: 'MANUAL',
when: '',
},
hour: '',
minute: '',
second: '',
}
},
mounted() {
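// The stored "when" time is in UTC; convert it to local time for the
// hour/minute/second inputs.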
if (this.values.when) {
const localTime = moment
.utc(this.values.when, 'HH:mm:ss')
.local()
.format('HH:mm:ss')
const splitted = localTime.split(':')
this.hour = parseInt(splitted[0], 10) || 0
this.minute = parseInt(splitted[1], 10) || 0
this.second = parseInt(splitted[2], 10) || 0
} else {
this.setDefaultTime()
}
this.updateWhen()
},
methods: {
setDefaultTime() {
const localTime = moment().format('HH:mm:ss')
const splitted = localTime.split(':')
this.hour = splitted[0]
this.minute = splitted[1]
this.second = splitted[2]
},
updateWhen() {
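// Convert the locally entered time back to UTC before storing it in values.when.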
const timeInLocal = `${this.hour}:${this.minute}:${this.second}`
const timeInUTC = moment(timeInLocal, 'HH:mm:ss').utc().format('HH:mm:ss')
this.values.when = timeInUTC
},
},
validations() {
return {
values: {
interval: { required },
when: { required },
},
hour: {
required,
numeric,
minValue: minValue(0),
maxValue: maxValue(23),
},
minute: {
required,
numeric,
minValue: minValue(0),
maxValue: maxValue(59),
},
second: {
required,
numeric,
minValue: minValue(0),
maxValue: maxValue(59),
},
}
},
}
</script>

View file

@ -0,0 +1,20 @@
import { ConfigureDataSyncType } from '@baserow/modules/database/configureDataSyncTypes'
import ConfigureDataSyncPeriodicInterval from '@baserow_enterprise/components/dataSync/ConfigureDataSyncPeriodicInterval'
export class PeriodicIntervalFieldsConfigureDataSyncType extends ConfigureDataSyncType {
static getType() {
return 'periodic-interval'
}
get name() {
return this.app.i18n.t('configureDataSyncModal.periodicInterval')
}
get iconClass() {
return 'iconoir-timer'
}
get component() {
return ConfigureDataSyncPeriodicInterval
}
}

View file

@ -409,5 +409,28 @@
"placeholderWithSaml": "{login} with SAML",
"provideEmail": "Provide your SAML account email",
"emailPlaceholder": "Enter your email..."
},
"configureDataSyncModal": {
"periodicInterval": "Periodic settings"
},
"configureDataSyncPeriodicInterval": {
"title": "Periodic settings",
"saved": "Saved",
"deactivatedTitle": "Periodic data sync deactivated",
"deactivatedText": "The periodic data sync was deactivated because it failed to many consecutive times. Please try to sync manually, and if it works as expected, then click on the activate button.",
"activate": "Activate periodic sync",
"enterprise": "Periodic data sync is an advanced/enterprise feature.",
"more": "More information"
},
"dataSyncPeriodicIntervalForm": {
"intervalLabel": "Interval",
"intervalHelper": "How frequently should the the data sync automatically sync.",
"manual": "Manual",
"daily": "Daily",
"hourly": "Hourly",
"whenHelper": "Provide the exact hour, minute, and second when the data sync should sync.",
"hour": "Hour",
"minute": "Minute",
"second": "Second"
}
}

View file

@ -49,6 +49,7 @@ import {
GitLabIssuesDataSyncType,
HubspotContactsDataSyncType,
} from '@baserow_enterprise/dataSyncTypes'
import { PeriodicIntervalFieldsConfigureDataSyncType } from '@baserow_enterprise/configureDataSyncTypes'
import { FF_AB_SSO } from '@baserow/modules/core/plugins/featureFlags'
@ -144,4 +145,9 @@ export default (context) => {
app.$registry.register('dataSync', new GitHubIssuesDataSyncType(context))
app.$registry.register('dataSync', new GitLabIssuesDataSyncType(context))
app.$registry.register('dataSync', new HubspotContactsDataSyncType(context))
app.$registry.register(
'configureDataSync',
new PeriodicIntervalFieldsConfigureDataSyncType(context)
)
}

View file

@ -0,0 +1,13 @@
export default (client) => {
return {
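// Fetches the periodic interval configuration of the given data sync.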
getPeriodicInterval(dataSyncId) {
return client.get(`/data-sync/${dataSyncId}/periodic-interval/`)
},
updatePeriodicInterval(dataSyncId, interval, when) {
return client.patch(`/data-sync/${dataSyncId}/periodic-interval/`, {
interval,
when,
})
},
}
}

View file

@ -210,10 +210,18 @@
align-items: center;
}
.align-items-end {
align-items: end;
}
.justify-content-space-between {
justify-content: space-between;
}
.justify-content-end {
justify-content: end;
}
.position-relative {
position: relative;
}

View file

@ -7,11 +7,11 @@
</div>
</div>
<ul class="modal-sidebar__nav">
<li v-for="page in pages" :key="page.type">
<li v-for="page in pages" :key="page.getType()">
<a
class="modal-sidebar__nav-link"
:class="{ active: selectedPage === page.type }"
@click="setPage(page.type)"
:class="{ active: selectedPage === page.getType() }"
@click="setPage(page.getType())"
>
<i class="modal-sidebar__nav-icon" :class="page.iconClass"></i>
{{ page.name }}
@ -52,26 +52,15 @@ export default {
},
data() {
return {
pages: [
{
type: 'visible-fields',
name: this.$t('configureDataSyncModal.syncedFields'),
iconClass: 'iconoir-switch-on',
component: ConfigureDataSyncVisibleFields,
},
{
type: 'settings',
name: this.$t('configureDataSyncModal.syncSettings'),
iconClass: 'iconoir-settings',
component: ConfigureDataSyncSettings,
},
],
selectedPage: 'visible-fields',
selectedPage: 'synced-fields',
}
},
computed: {
pages() {
return Object.values(this.$registry.getAll('configureDataSync'))
},
selectedPageObject() {
return this.pages.find((page) => page.type === this.selectedPage)
return this.pages.find((page) => page.getType() === this.selectedPage)
},
},
methods: {

View file

@ -333,8 +333,12 @@ export default {
}
},
openConfigureDataSyncModal() {
this.$refs.context.hide()
this.$refs.configureDataSyncModal.show()
if (this.dataSyncDeactivated) {
this.$refs.deactivatedDataSyncClickModal.show()
} else {
this.$refs.context.hide()
this.$refs.configureDataSyncModal.show()
}
},
enableRename() {
this.$refs.context.hide()

View file

@ -0,0 +1,59 @@
import { Registerable } from '@baserow/modules/core/registry'
import ConfigureDataSyncVisibleFields from '@baserow/modules/database/components/dataSync/ConfigureDataSyncVisibleFields'
import ConfigureDataSyncSettings from '@baserow/modules/database/components/dataSync/ConfigureDataSyncSettings'
export class ConfigureDataSyncType extends Registerable {
get name() {
throw new Error(
'name getter must be implemented in the ConfigureDataSyncType.'
)
}
get iconClass() {
throw new Error(
'iconClass getter must be implemented in the ConfigureDataSyncType.'
)
}
get component() {
throw new Error(
'component getter must be implemented in the ConfigureDataSyncType.'
)
}
}
export class SyncedFieldsConfigureDataSyncType extends ConfigureDataSyncType {
static getType() {
return 'synced-fields'
}
get name() {
return this.app.i18n.t('configureDataSyncModal.syncedFields')
}
get iconClass() {
return 'iconoir-switch-on'
}
get component() {
return ConfigureDataSyncVisibleFields
}
}
export class SettingsConfigureDataSyncType extends ConfigureDataSyncType {
static getType() {
return 'settings'
}
get name() {
return this.app.i18n.t('configureDataSyncModal.syncSettings')
}
get iconClass() {
return 'iconoir-settings'
}
get component() {
return ConfigureDataSyncSettings
}
}

View file

@ -331,6 +331,10 @@ import {
DatabaseScratchTrackTaskFieldsOnboardingType,
DatabaseScratchTrackTeamFieldsOnboardingType,
} from '@baserow/modules/database/databaseScratchTrackFieldsStepType'
import {
SyncedFieldsConfigureDataSyncType,
SettingsConfigureDataSyncType,
} from '@baserow/modules/database/configureDataSyncTypes'
export default (context) => {
const { store, app, isDev } = context
@ -1000,5 +1004,14 @@ export default (context) => {
new DatabaseScratchTrackCustomFieldsOnboardingType(context)
)
app.$registry.register(
'configureDataSync',
new SyncedFieldsConfigureDataSyncType(context)
)
app.$registry.register(
'configureDataSync',
new SettingsConfigureDataSyncType(context)
)
registerRealtimeEvents(app.$realtime)
}