Merged
23 commits
34e7f2a
Add new v2 push endpoint to create a project version
varmar05 Aug 5, 2025
e10729a
Added new endpoint for pushing chunks (#487)
MarcelGeo Aug 5, 2025
18abd45
Merge branch 'dev-r84-concurrent-push' into v2-project-push
varmar05 Aug 5, 2025
5e8c5a9
Use new errors structure in v2 chunks endpoint
varmar05 Aug 5, 2025
fb378ad
Add integration test for full v2 push
varmar05 Aug 5, 2025
b8afe5f
Address comments
varmar05 Aug 6, 2025
27647c8
Cron job to remove outdated uploaded chunks (#489)
varmar05 Aug 6, 2025
1897f3a
Merge pull request #488 from MerginMaps/v2-project-push
MarcelGeo Aug 11, 2025
bdb3da5
Fix create version with only removed files
varmar05 Aug 13, 2025
cd2fc80
Merge pull request #490 from MerginMaps/fix_push_only_delete
MarcelGeo Aug 13, 2025
ec80700
Resolve unhandled description
MarcelGeo Aug 13, 2025
548ec3b
Remove upload chunks only if push was successful
varmar05 Aug 15, 2025
ea4d51d
Fix integrity error handling in push
varmar05 Aug 15, 2025
c5a1008
Merge pull request #495 from MerginMaps/fix_remove_chunks
MarcelGeo Aug 15, 2025
38765c6
Merge branch 'dev-r84-concurrent-push' into fix-error-integrity-handling
MarcelGeo Aug 18, 2025
8765c8f
Merge pull request #496 from MerginMaps/fix-error-integrity-handling
MarcelGeo Sep 1, 2025
84ba9dd
Return whole project info dump from project versions :(
MarcelGeo Sep 3, 2025
9fa0a4b
Merge pull request #506 from MerginMaps/return-project-info-v2
MarcelGeo Sep 8, 2025
b06423e
Do not validate diff files against mime type
MarcelGeo Sep 22, 2025
640f578
Merge pull request #513 from MerginMaps/backport-mimetype-validation
MarcelGeo Sep 22, 2025
de2f3e7
Merge remote-tracking branch 'origin/develop' into dev-r84-concurrent…
MarcelGeo Oct 1, 2025
d11228a
fix import for timezone
MarcelGeo Oct 30, 2025
1f8ed4a
Merge remote-tracking branch 'origin/develop' into dev-r84-concurrent…
MarcelGeo Oct 30, 2025
1 change: 1 addition & 0 deletions .gitignore
@@ -2,6 +2,7 @@
projects*/
data/
mergin_db
diagnostic_logs

logs
*.log
Empty file modified deployment/common/set_permissions.sh
100644 → 100755
Empty file.
2 changes: 0 additions & 2 deletions development.md
@@ -71,8 +71,6 @@ cd deployment/community/
# Create .prod.env file from .env.template
cp .env.template .prod.env

# Run the docker composition with the current Dockerfiles
cp .env.template .prod.env
docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d

# Give ownership of the ./projects folder to user that is running the gunicorn container
7 changes: 7 additions & 0 deletions server/application.py
@@ -27,6 +27,7 @@
remove_projects_archives,
remove_temp_files,
remove_projects_backups,
remove_unused_chunks,
)
from mergin.celery import celery, configure_celery
from mergin.stats.config import Configuration
@@ -47,6 +48,7 @@
"GLOBAL_WRITE",
"ENABLE_SUPERADMIN_ASSIGNMENT",
"DIAGNOSTIC_LOGS_URL",
"V2_PUSH_ENABLED",
]
)
register_stats(application)
@@ -85,4 +87,9 @@ def setup_periodic_tasks(sender, **kwargs):
crontab(hour=3, minute=0),
remove_projects_archives,
name="remove old project archives",
),
sender.add_periodic_task(
crontab(hour="*/4", minute=0),
remove_unused_chunks,
name="clean up of outdated chunks",
)
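
For orientation, here is a minimal sketch of a cleanup task with the shape registered above. It assumes chunks are stored as plain files under Configuration.UPLOAD_CHUNKS_DIR and become stale once older than Configuration.UPLOAD_CHUNKS_EXPIRATION seconds; the actual remove_unused_chunks task added by this PR may differ.

```python
# Hedged sketch only -- not the implementation added by this PR.
import os
import time

from mergin.celery import celery                 # Celery app, as imported in application.py
from mergin.sync.config import Configuration     # new chunk settings (see config.py below)


@celery.task
def remove_unused_chunks_sketch():
    """Remove chunk files that were not modified within the expiration window."""
    cutoff = time.time() - Configuration.UPLOAD_CHUNKS_EXPIRATION
    for root, _dirs, files in os.walk(Configuration.UPLOAD_CHUNKS_DIR):
        for name in files:
            path = os.path.join(root, name)
            try:
                if os.path.getmtime(path) < cutoff:
                    os.remove(path)
            except OSError:
                # the chunk may already be gone, e.g. consumed by a successful push
                pass
```

The crontab(hour="*/4", minute=0) schedule fires at minute 0 of every fourth hour, so a stale chunk lingers for at most the expiration window plus four hours.
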
30 changes: 28 additions & 2 deletions server/mergin/app.py
@@ -12,7 +12,17 @@
from sqlalchemy.schema import MetaData
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from flask import json, jsonify, request, abort, current_app, Flask, Request, Response
from flask import (
json,
jsonify,
make_response,
request,
abort,
current_app,
Flask,
Request,
Response,
)
from flask_login import current_user, LoginManager
from flask_wtf.csrf import generate_csrf, CSRFProtect
from flask_migrate import Migrate
@@ -25,7 +35,7 @@
import time
import traceback
from werkzeug.exceptions import HTTPException
from typing import List, Dict, Optional
from typing import List, Dict, Optional, Tuple

from .sync.utils import get_blacklisted_dirs, get_blacklisted_files
from .config import Configuration
@@ -347,6 +357,16 @@ def ping(): # pylint: disable=W0612
)
return status, 200

# reading raw input stream not supported in connexion so far
# https://github.com/zalando/connexion/issues/592
# and as workaround we use custom Flask endpoint in create_app function
@app.route("/v2/projects/<id>/chunks", methods=["POST"])
@auth_required
def upload_chunk_v2(id: str):
from .sync import public_api_v2_controller

return public_api_v2_controller.upload_chunk(id)

# reading raw input stream not supported in connexion so far
# https://github.com/zalando/connexion/issues/592
# and as workaround we use custom Flask endpoint in create_app function
@@ -485,6 +505,12 @@ class ResponseError:
def to_dict(self) -> Dict:
return dict(code=self.code, detail=self.detail + f" ({self.code})")

def response(self, status_code: int) -> Tuple[Response, int]:
"""Returns a custom error response with the given code."""
response = make_response(jsonify(self.to_dict()), status_code)
response.headers["Content-Type"] = "application/problem+json"
return response, status_code


def whitespace_filter(obj):
return obj.strip() if isinstance(obj, str) else obj
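
Since connexion cannot hand over the raw request stream, the chunk endpoint is wired up as a plain Flask route. Below is a minimal client-side sketch of calling it; the server URL, project id, auth header and content type are illustrative assumptions, not part of this PR.

```python
# Hedged client-side sketch; all concrete values below are placeholders.
import requests

SERVER = "https://example.merginmaps.com"               # hypothetical server URL
PROJECT_ID = "00000000-0000-0000-0000-000000000000"     # hypothetical project id
HEADERS = {
    "Content-Type": "application/octet-stream",
    "Authorization": "Bearer <token>",                  # whatever auth_required accepts
}

with open("survey.gpkg", "rb") as f:
    chunk = f.read(10 * 1024 * 1024)  # a single chunk of the file, e.g. up to 10 MB

resp = requests.post(
    f"{SERVER}/v2/projects/{PROJECT_ID}/chunks",
    data=chunk,  # raw body rather than multipart -- the reason for the Flask workaround
    headers=HEADERS,
)
resp.raise_for_status()
print(resp.json())
```
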
5 changes: 2 additions & 3 deletions server/mergin/sync/commands.py
@@ -11,7 +11,6 @@
from flask import Flask, current_app
from sqlalchemy import func

from .files import UploadChanges
from ..app import db
from .models import Project, ProjectVersion
from .utils import split_project_path
@@ -55,8 +54,8 @@ def create(name, namespace, username): # pylint: disable=W0612
p = Project(**project_params)
p.updated = datetime.utcnow()
db.session.add(p)
changes = UploadChanges(added=[], updated=[], removed=[])
pv = ProjectVersion(p, 0, user.id, changes, "127.0.0.1")
pv = ProjectVersion(p, 0, user.id, [], "127.0.0.1")
pv.project = p
db.session.add(pv)
db.session.commit()
os.makedirs(p.storage.project_dir, exist_ok=True)
11 changes: 11 additions & 0 deletions server/mergin/sync/config.py
@@ -64,3 +64,14 @@ class Configuration(object):
)
# in seconds, older unfinished zips are moved to temp
PARTIAL_ZIP_EXPIRATION = config("PARTIAL_ZIP_EXPIRATION", default=600, cast=int)
# whether new push is allowed
V2_PUSH_ENABLED = config("V2_PUSH_ENABLED", default=True, cast=bool)
# directory for file chunks
UPLOAD_CHUNKS_DIR = config(
"UPLOAD_CHUNKS_DIR",
default=os.path.join(LOCAL_PROJECTS, "chunks"),
)
# time in seconds after chunks are permanently deleted (1 day)
UPLOAD_CHUNKS_EXPIRATION = config(
"UPLOAD_CHUNKS_EXPIRATION", default=86400, cast=int
)
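
Like the existing options in this file, the new settings are read through the config() helper and can therefore be overridden via environment variables. A small sketch of the lookup behaviour, assuming the helper is python-decouple's config:

```python
# Hedged sketch, assuming python-decouple semantics for config().
import os

os.environ["V2_PUSH_ENABLED"] = "false"           # turn the new v2 push flow off
os.environ["UPLOAD_CHUNKS_EXPIRATION"] = "43200"  # expire chunks after 12 hours instead of 24

from decouple import config

v2_push_enabled = config("V2_PUSH_ENABLED", default=True, cast=bool)
chunks_expiration = config("UPLOAD_CHUNKS_EXPIRATION", default=86400, cast=int)

print(v2_push_enabled, chunks_expiration)  # False 43200
```
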
10 changes: 10 additions & 0 deletions server/mergin/sync/db_events.py
@@ -6,6 +6,8 @@
from flask import current_app, abort
from sqlalchemy import event

from .models import ProjectVersion
from .tasks import optimize_storage
from ..app import db


@@ -14,9 +16,17 @@ def check(session):
abort(503, "Service unavailable due to maintenance, please try later")


def optimize_gpgk_storage(mapper, connection, project_version):
# do not optimize on every version, every 10th is just fine
if not project_version.name % 10:
optimize_storage.delay(project_version.project_id)


def register_events():
event.listen(db.session, "before_commit", check)
event.listen(ProjectVersion, "after_insert", optimize_gpgk_storage)


def remove_events():
event.remove(db.session, "before_commit", check)
event.listen(ProjectVersion, "after_insert", optimize_gpgk_storage)
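
For reference, a self-contained sketch of the mapper-level after_insert hook pattern used here, with a stand-in model instead of ProjectVersion (illustrative only; the real listener hands the work to the optimize_storage Celery task):

```python
# Hedged, standalone illustration of the SQLAlchemy event pattern above.
from sqlalchemy import Column, Integer, create_engine, event
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Version(Base):
    __tablename__ = "version"
    id = Column(Integer, primary_key=True)
    name = Column(Integer, nullable=False)


def on_insert(mapper, connection, target):
    # mirror the every-10th-version check in optimize_gpgk_storage
    if not target.name % 10:
        print(f"would schedule storage optimization for version {target.name}")


event.listen(Version, "after_insert", on_insert)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Version(name=n) for n in (9, 10, 11)])
    session.commit()  # prints only for name == 10
```
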
56 changes: 56 additions & 0 deletions server/mergin/sync/errors.py
@@ -3,8 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-MerginMaps-Commercial

from typing import List, Dict

from .config import Configuration
from ..app import ResponseError

MAX_CHUNK_SIZE = Configuration.MAX_CHUNK_SIZE / 1024 / 1024


class UpdateProjectAccessError(ResponseError):
code = "UpdateProjectAccessError"
@@ -39,3 +43,55 @@ def to_dict(self) -> Dict:
class ProjectLocked(ResponseError):
code = "ProjectLocked"
detail = "The project is currently locked and you cannot make changes to it"


class DataSyncError(ResponseError):
code = "DataSyncError"
detail = "There are either corrupted files or it is not possible to create version with provided geopackage data"

def __init__(self, failed_files: Dict):
self.failed_files = failed_files

def to_dict(self) -> Dict:
data = super().to_dict()
data["failed_files"] = self.failed_files
return data


class ProjectVersionExists(ResponseError):
code = "ProjectVersionExists"
detail = "Project version mismatch"

def __init__(self, client_version: int, server_version: int):
self.client_version = client_version
self.server_version = server_version

def to_dict(self) -> Dict:
data = super().to_dict()
data["client_version"] = f"v{self.client_version}"
data["server_version"] = f"v{self.server_version}"
return data


class AnotherUploadRunning(ResponseError):
code = "AnotherUploadRunning"
detail = "Another process is running"


class UploadError(ResponseError):
code = "UploadError"
detail = "Project version could not be created"

def __init__(self, error: str = None):
self.error = error

def to_dict(self) -> Dict:
data = super().to_dict()
if self.error is not None:
data["detail"] = self.error + f" ({self.code})"
return data


class BigChunkError(ResponseError):
code = "BigChunkError"
detail = f"Chunk size exceeds maximum allowed size {MAX_CHUNK_SIZE} MB"
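
These classes pair with the ResponseError.response() helper added in server/mergin/app.py above. The following hypothetical endpoint sketch shows how a controller could return one of them; the route and version numbers are illustrative and this is not the actual v2 controller code.

```python
# Hedged sketch; not the actual public_api_v2_controller implementation.
from flask import Flask

from mergin.sync.errors import ProjectVersionExists

app = Flask(__name__)


@app.route("/v2/projects/<id>/versions", methods=["POST"])
def create_version(id: str):
    client_version, server_version = 41, 42  # placeholder values
    if client_version != server_version:
        # 409 Conflict with an application/problem+json body, e.g.
        # {"code": "ProjectVersionExists",
        #  "detail": "Project version mismatch (ProjectVersionExists)",
        #  "client_version": "v41", "server_version": "v42"}
        return ProjectVersionExists(client_version, server_version).response(409)
    return {"status": "ok"}
```
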