Processed files are now stored instead of being deleted

Updated .gitignore
[README] Updated README file
[backend] processed files are now stored in files/processed instead of being deleted
[backend] removed the uploads folder and centralised everything in the files folder, including the bulk_import folder
João Vitória Silva
2024-10-15 17:12:32 +01:00
parent d376bd7c2a
commit 36e4dd5f02
8 changed files with 63 additions and 16 deletions
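
For context, a minimal sketch (not part of the commit) of the directory layout the backend now converges on: uploads land directly in files/, bulk imports are read from files/bulk_import/, and parsed originals are kept in files/processed/. The `ensure_file_dirs` helper below is illustrative only.

```python
import os

# Illustrative constants mirroring the paths used in the diffs below.
FILES_DIR = "files"
BULK_IMPORT_DIR = os.path.join(FILES_DIR, "bulk_import")
PROCESSED_DIR = os.path.join(FILES_DIR, "processed")


def ensure_file_dirs() -> None:
    """Create the upload, bulk import and processed directories if missing."""
    for directory in (FILES_DIR, BULK_IMPORT_DIR, PROCESSED_DIR):
        os.makedirs(directory, exist_ok=True)
```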

.gitignore

@@ -17,10 +17,12 @@ backend/app/user_images/*.png
backend/app/user_images/*.jpg
# gpx and fit files
backend/app/uploads/*.gpx
backend/app/uploads/*.fit
backend/app/bulk_import/*.gpx
backend/app/bulk_import/*.fit
backend/app/files/*.gpx
backend/app/files/*.fit
backend/app/files/bulk_import/*.gpx
backend/app/files/bulk_import/*.fit
backend/app/files/processed/*.gpx
backend/app/files/processed/*.fit
# Frontend
frontend/app/img/users_img/*.*

README.md

@@ -28,7 +28,7 @@
- [Frontend Environment Variables](#frontend-environment-variables)
- [Backend Environment Variables](#backend-environment-variables)
- [Volumes](#volumes)
- [Bulk import](#bulk-import)
- [Bulk import and file upload](#bulk-import-and-file-upload)
- [Strava Integration](#strava-integration)
- [Sponsors](#sponsors)
- [Contributing](#contributing)
@@ -69,11 +69,15 @@ Endurain currently supports:
- Multi-language support (currently English only)
- Dark/light theme switcher
- Third-party app support
- Weight logging
## Planned Features
Upcoming features (in no particular order):
- Garmin Connect integration
- Simplified Docker images
- Live tracking
- Default gear for activity types
- Gear component tracking (e.g., track when components like bike chains need replacing)
- Activity comments and likes
@@ -162,19 +166,19 @@ To check Python backend dependencies used, use poetry file (pyproject.toml)
## Volumes
It is recommended to configure the following volumes for code and data persistence:
It is recommended to configure the following volumes for data persistence:
| Volume | Path | Notes |
| --- | --- | --- |
| /app | <local_path>/endurain/backend/app:/app | Configure volume if you want to edit the code locally by cloning the repo and comment next line |
| /app/files/bulk_import | <local_path>/endurain/backend/app/files/bulk_import:/app/files/bulk_import | Necessary to enable bulk import of activities. Place your activity files here |
| /app/files/processed | <local_path>/endurain/backend/app/files/processed:/app/files/processed | Necessary for processed original files persistence on container image updates |
| /app/user_images | <local_path>/endurain/backend/app/user_images:/app/user_images | Necessary for user image persistence on container image updates |
| /app/bulk_import | <local_path>/endurain/backend/bulk_import:/app/bulk_import | Necessary to enable bulk import of activities. Place here your activities files |
| /app/logs | <local_path>/endurain/backend/app.log:/app/logs | Log files for the backend |
## Bulk import
## Bulk import and file upload
Some notes for bulk import:
- After the files are processed, the files are deleted from the filesystem. If needed please keep a backup of your files.
Some notes:
- After the files are processed, they are moved to the files/processed folder.
- GEOCODES API has a limit of 1 Request/Second on the free plan, so if you have a large number of files, it might not be possible to import all in the same action.
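
Since the free GEOCODES plan allows roughly one request per second, a large bulk import needs to pace its geocoding calls. Below is a minimal sketch of such pacing; the `reverse_geocode` callable is a placeholder, not an API from this project.

```python
import time


def geocode_with_throttle(coordinates, reverse_geocode, min_interval=1.0):
    """Call reverse_geocode for each (lat, lon) pair, waiting at least
    min_interval seconds between calls to stay under a 1 req/s limit."""
    results = []
    last_call = 0.0
    for lat, lon in coordinates:
        wait = min_interval - (time.monotonic() - last_call)
        if wait > 0:
            time.sleep(wait)
        last_call = time.monotonic()
        results.append(reverse_geocode(lat, lon))
    return results
```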
## Strava Integration


@@ -455,7 +455,7 @@ async def create_activity_with_bulk_import(
):
try:
# Ensure the 'bulk_import' directory exists
bulk_import_dir = "bulk_import"
bulk_import_dir = "files/bulk_import"
os.makedirs(bulk_import_dir, exist_ok=True)
# Iterate over each file in the 'bulk_import' directory
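
The hunk above only shows the path change; a minimal sketch of what iterating over the bulk import directory can look like (the `process_file` callback is hypothetical, and the real endpoint delegates to the activity utils shown further down):

```python
import os


def iterate_bulk_import(process_file, bulk_import_dir="files/bulk_import"):
    """Run process_file on every .gpx/.fit file in the bulk import directory."""
    os.makedirs(bulk_import_dir, exist_ok=True)
    for name in os.listdir(bulk_import_dir):
        path = os.path.join(bulk_import_dir, name)
        if os.path.isfile(path) and name.lower().endswith((".gpx", ".fit")):
            process_file(path)
```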


@@ -1,5 +1,6 @@
import logging
import os
import shutil
import requests
import math
@@ -40,6 +41,15 @@ async def parse_and_store_activity_from_file(
# Store the activity in the database
created_activity = store_activity(parsed_info, db)
# Define the directory where the processed files will be stored
processed_dir = "files/processed"
# Define new file path with activity ID as filename
new_file_name = f"{created_activity.id}{file_extension}"
# Move the file to the processed directory
move_file(processed_dir, new_file_name, file_path)
# Return the created activity
return created_activity
else:
@@ -61,14 +71,14 @@ def parse_and_store_activity_from_uploaded_file(
_, file_extension = os.path.splitext(file.filename)
try:
# Ensure the 'uploads' directory exists
upload_dir = "uploads"
# Ensure the 'files' directory exists
upload_dir = "files"
os.makedirs(upload_dir, exist_ok=True)
# Build the full path where the file will be saved
file_path = os.path.join(upload_dir, file.filename)
# Save the uploaded file in the 'uploads' directory
# Save the uploaded file in the 'files' directory
with open(file_path, "wb") as save_file:
save_file.write(file.file.read())
@@ -79,6 +89,15 @@ def parse_and_store_activity_from_uploaded_file(
# Store the activity in the database
created_activity = store_activity(parsed_info, db)
# Define the directory where the processed files will be stored
processed_dir = "files/processed"
# Define new file path with activity ID as filename
new_file_name = f"{created_activity.id}{file_extension}"
# Move the file to the processed directory
move_file(processed_dir, new_file_name, file_path)
# Return the created activity
return created_activity
else:
@@ -93,6 +112,26 @@ def parse_and_store_activity_from_uploaded_file(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Internal Server Error: {str(err)}",
) from err
def move_file(new_dir: str, new_filename: str, file_path: str):
try:
# Ensure the new directory exists
os.makedirs(new_dir, exist_ok=True)
# Define the new file path
new_file_path = os.path.join(new_dir, new_filename)
# Move the file
shutil.move(file_path, new_file_path)
except Exception as err:
# Log the exception
logger.error(f"Error in move_file - {str(err)}", exc_info=True)
# Raise an HTTPException with a 500 Internal Server Error status code
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Internal Server Error: {str(err)}",
) from err
def parse_file(token_user_id: int, file_extension: str, filename: str) -> dict:
@@ -114,7 +153,7 @@ def parse_file(token_user_id: int, file_extension: str, filename: str) -> dict:
)
# Remove the file after processing
os.remove(filename)
#os.remove(filename)
return parsed_info
else:
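
Putting the pieces of this file together: after an activity is stored, the original file is renamed to the activity id and moved under files/processed instead of being removed. A condensed, self-contained sketch of that post-processing step, mirroring what move_file in the hunk above does (archive_processed_file itself is illustrative, not part of the diff):

```python
import os
import shutil


def archive_processed_file(file_path: str, activity_id: int) -> None:
    """Move a parsed activity file to files/processed/<activity_id><extension>."""
    processed_dir = "files/processed"
    os.makedirs(processed_dir, exist_ok=True)
    _, file_extension = os.path.splitext(file_path)
    new_file_name = f"{activity_id}{file_extension}"
    # Equivalent to move_file(processed_dir, new_file_name, file_path) in the diff above.
    shutil.move(file_path, os.path.join(processed_dir, new_file_name))
```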

@@ -304,6 +304,8 @@ def parse_fit_file(file: str, user_id: int) -> dict:
"prev_latitude": prev_latitude,
"prev_longitude": prev_longitude,
}
except HTTPException as http_err:
raise http_err
except Exception as err:
# Log the exception
logger.error(f"Error in parse_fit_file: {err}", exc_info=True)
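
The added except HTTPException clause ensures that deliberate HTTP errors raised while parsing keep their original status code instead of being wrapped into a generic 500 by the catch-all handler. A standalone sketch of the pattern, assuming FastAPI's HTTPException and a module-level logger:

```python
import logging

from fastapi import HTTPException, status

logger = logging.getLogger(__name__)


def run_parser(parse):
    """Run a parser callable, preserving HTTPExceptions and wrapping anything else."""
    try:
        return parse()
    except HTTPException as http_err:
        # Re-raise deliberate HTTP errors (e.g. a 422 for a malformed file) untouched.
        raise http_err
    except Exception as err:
        logger.error(f"Unexpected error while parsing: {err}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Internal Server Error: {str(err)}",
        ) from err
```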