Added bulk import functionality

[backend] added new route for handling bulk import requests
[backend] added new folder for bulk import actions
[backend] adjusted code for better reusability between bulk import and single file upload
[docker-compose] added new volume mapping in docker compose example file
[README] added new volumes information to backend section of README file
João Vitória Silva
2024-08-27 17:09:20 +01:00
parent 4b52977bea
commit f418d79238
8 changed files with 237 additions and 80 deletions
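
For context, a minimal sketch of how a client could call the new bulk import route once files are in place. The base URL, route prefix, and bearer-token header are illustrative assumptions, not part of this commit:

import requests

# Assumed deployment values -- adjust to your setup
BASE_URL = "http://localhost:98"  # API port used in the docker compose example
TOKEN = "<access_token>"          # token carrying the activities:write scope

# Files already copied into the mounted bulk_import folder are discovered
# server-side; the request body is empty, the call only starts processing
response = requests.post(
    f"{BASE_URL}/activities/create/bulkimport",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
print(response.status_code, response.json())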

.gitignore

@@ -16,6 +16,12 @@ backend/app/user_images/*.jpeg
backend/app/user_images/*.png
backend/app/user_images/*.jpg
# gpx and fit files
backend/app/uploads/*.gpx
backend/app/uploads/*.fit
backend/app/bulk_import/*.gpx
backend/app/bulk_import/*.fit
# Frontend
frontend/app/img/users_img/*.*
# Logs

README.md

@@ -30,6 +30,7 @@ Currently the service supports:
- Create/edit/delete users
- Basic admin and regular user profiles that adapt the interface
- Import activities using .gpx files
- Bulk import for activity files
- Connect with Strava and retrieve activities and gear from Strava
- Feed with user activities, current user week stats and month stats
- Feed with followers activities
@@ -47,7 +48,6 @@ Currently the service supports:
To do features (not by order):
- Support import of .fit files
- Bulk import for .gpx and .fit files
- Default gear for activity type
- Gear components logic for component usage tracking
- Comments and likes logic for activities
@@ -109,6 +109,14 @@ MYSQL_DATABASE | endurain | `No` | N/A
MYSQL_USER | endurain | `No` | N/A
MYSQL_PASSWORD | changeme | `No` | N/A
Table below shows the volumes available to be configured. Not obligatory, but recommended:
Volume | Path | Notes
--- | --- | ---
/app | <local_path>/endurain/backend/app:/app | Configure this volume if you want to edit the code locally by cloning the repo; if so, comment out the next volume
/app/user_images | <local_path>/endurain/backend/app/user_images:/app/user_images | Necessary for user image persistence on container image updates
/app/bulk_import | <local_path>/endurain/backend/bulkimport:/app/bulk_import | Necessary to enable bulk import of activities. Place your activity files here
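To confirm the mapping works, a quick probe from a Python shell inside the backend container (a sketch; assumes the volume above is mounted):

import os

# With the volume mapped correctly, your activity files are visible here
print(os.listdir("/app/bulk_import"))  # e.g. ['ride1.gpx', 'run2.fit']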
To check Python backend dependencies used, use poetry file (pyproject.toml)
# Strava integration


@@ -4,7 +4,15 @@ import calendar
from typing import Annotated, Callable
from fastapi import APIRouter, Depends, HTTPException, status, UploadFile, Security
from fastapi import (
APIRouter,
Depends,
HTTPException,
status,
UploadFile,
Security,
BackgroundTasks,
)
from sqlalchemy.orm import Session
from datetime import datetime, timedelta, timezone
@@ -20,12 +28,6 @@ import gears.dependencies as gears_dependencies
import users.dependencies as users_dependencies
import activity_streams.crud as activity_streams_crud
import gpx.utils as gpx_utils
import fit.utils as fit_utils
import database
import dependencies_global
@@ -105,9 +107,7 @@ async def read_activities_useractivities_thisweek_distances(
):
# Calculate the start of the current week
today = datetime.now(timezone.utc)
start_of_week = today - timedelta(
days=today.weekday()
)
start_of_week = today - timedelta(days=today.weekday())
end_of_week = start_of_week + timedelta(days=6)
if user_id == token_user_id:
@@ -122,8 +122,8 @@ async def read_activities_useractivities_thisweek_distances(
)
# Check if activities is None
#if activities is None:
# Return None if activities is None
# if activities is None:
# Return None if activities is None
# return None
# Return the activities distances for this week
@@ -167,8 +167,8 @@ async def read_activities_useractivities_thismonth_distances(
user_id, start_of_month, end_of_month, db
)
#if activities is None:
# Return None if activities is None
# if activities is None:
# Return None if activities is None
# return None
# Return the activities distances for this month
@@ -187,9 +187,7 @@ async def read_activities_useractivities_thismonth_number(
],
token_user_id: Annotated[
Callable,
Depends(
session_security.get_user_id_from_access_token
),
Depends(session_security.get_user_id_from_access_token),
],
db: Annotated[
Session,
@@ -234,9 +232,7 @@ async def read_activities_gearactivities(
],
token_user_id: Annotated[
Callable,
Depends(
session_security.get_user_id_from_access_token
),
Depends(session_security.get_user_id_from_access_token),
],
db: Annotated[
Session,
@@ -433,51 +429,8 @@ async def create_activity_with_uploaded_file(
upload_dir = "uploads"
os.makedirs(upload_dir, exist_ok=True)
# Get file extension
_, file_extension = os.path.splitext(file.filename)
# Save the uploaded file in the 'uploads' directory
with open(file.filename, "wb") as save_file:
save_file.write(file.file.read())
# Choose the appropriate parser based on file extension
if file_extension.lower() == ".gpx":
# Parse the GPX file
parsed_info = gpx_utils.parse_gpx_file(file.filename, token_user_id)
elif file_extension.lower() == ".fit":
# Parse the FIT file
parsed_info = fit_utils.parse_fit_file(file.filename, token_user_id)
else:
# file extension not supported raise an HTTPException with a 406 Not Acceptable status code
raise HTTPException(
status_code=status.HTTP_406_NOT_ACCEPTABLE,
detail="File extension not supported. Supported file extensions are .gpx and .fit",
)
# create the activity in the database
created_activity = activities_crud.create_activity(parsed_info["activity"], db)
# Check if created_activity is None
if created_activity is None:
# raise an HTTPException with a 500 Internal Server Error status code
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error creating activity",
)
# Parse the activity streams from the parsed info
activity_streams = gpx_utils.parse_activity_streams_from_gpx_file(
parsed_info, created_activity.id
)
# Create activity streams in the database
activity_streams_crud.create_activity_streams(activity_streams, db)
# Remove the file after processing
os.remove(file.filename)
# Return activity ID
return created_activity
# Return activity
return activities_utils.parse_and_store_activity_from_uploaded_file(token_user_id, file, db)
except Exception as err:
# Log the exception
logger.error(
@@ -490,8 +443,55 @@ async def create_activity_with_uploaded_file(
) from err
@router.post(
"/create/bulkimport",
)
async def create_activity_with_bulk_import(
token_user_id: Annotated[
int,
Depends(session_security.get_user_id_from_access_token),
],
check_scopes: Annotated[
Callable, Security(session_security.check_scopes, scopes=["activities:write"])
],
db: Annotated[
Session,
Depends(database.get_db),
],
background_tasks: BackgroundTasks,
):
try:
# Ensure the 'bulk_import' directory exists
bulk_import_dir = "bulk_import"
os.makedirs(bulk_import_dir, exist_ok=True)
# Iterate over each file in the 'bulk_import' directory
for filename in os.listdir(bulk_import_dir):
file_path = os.path.join(bulk_import_dir, filename)
if os.path.isfile(file_path):
# Parse and store the activity
background_tasks.add_task(
activities_utils.parse_and_store_activity_from_file,
token_user_id,
file_path,
db,
)
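# Note: add_task only queues the work; FastAPI runs these tasks
# after the HTTP response below has been sent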
# Return a success message
return {"Bulk import initiated. Processing files in the background."}
except Exception as err:
# Log the exception
logger.error(f"Error in create_activity_with_bulk_import: {err}", exc_info=True)
# Raise an HTTPException with a 500 Internal Server Error status code
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Internal Server Error",
) from err
@router.put(
"/edit",
"/edit",
)
async def edit_activity(
token_user_id: Annotated[
@@ -529,9 +529,7 @@ async def activity_add_gear(
],
token_user_id: Annotated[
int,
Depends(
session_security.get_user_id_from_access_token
),
Depends(session_security.get_user_id_from_access_token),
],
db: Annotated[
Session,
@@ -580,9 +578,7 @@ async def delete_activity_gear(
],
token_user_id: Annotated[
int,
Depends(
session_security.get_user_id_from_access_token
),
Depends(session_security.get_user_id_from_access_token),
],
db: Annotated[
Session,
@@ -621,9 +617,7 @@ async def delete_activity(
],
token_user_id: Annotated[
int,
Depends(
session_security.get_user_id_from_access_token
),
Depends(session_security.get_user_id_from_access_token),
],
db: Annotated[
Session,

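The bulk import endpoint relies on FastAPI's BackgroundTasks, so the request returns before any file is parsed. Below is a self-contained sketch of that pattern; the names are illustrative, not the project's:

from fastapi import BackgroundTasks, FastAPI

app = FastAPI()

def process_file(path: str) -> None:
    # Stand-in for parse_and_store_activity_from_file
    print(f"processing {path}")

@app.post("/demo/bulk")
async def demo_bulk(background_tasks: BackgroundTasks):
    # Tasks queued here run only after the response has been sent,
    # so the client gets an immediate acknowledgement
    for path in ["a.gpx", "b.fit"]:
        background_tasks.add_task(process_file, path)
    return {"detail": "processing in the background"}

One caveat: since FastAPI 0.106, dependencies with yield (such as a typical get_db) are closed before background tasks execute, so passing the request-scoped Session into add_task can hand the task an already-closed session; opening a fresh session inside the task is the safer pattern.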

@@ -3,16 +3,131 @@ import os
import requests
import math
from fastapi import HTTPException, status, UploadFile
from typing import Union
from datetime import datetime
from urllib.parse import urlencode
from statistics import mean
from sqlalchemy.orm import Session
import activities.schema as activities_schema
import activities.crud as activities_crud
import activity_streams.crud as activity_streams_crud
import gpx.utils as gpx_utils
import fit.utils as fit_utils
# Get the logger created in main.py
logger = logging.getLogger("myLogger")
async def parse_and_store_activity_from_file(
token_user_id: int, file_path: str, db: Session
):
try:
# Get file extension
_, file_extension = os.path.splitext(file_path)
# Open the file and process it
with open(file_path, "rb") as file:
# Parse the file
parsed_info = parse_file(token_user_id, file_extension, file_path)
# Store the activity in the database
created_activity = store_activity(parsed_info, db)
# Return the created activity
return created_activity
except HTTPException:
# Let intentional HTTP errors (e.g. the 406 from parse_file) pass through unchanged
raise
except Exception as err:
# Log the exception
logger.error(
f"Error in parse_and_store_activity_from_file: {err}", exc_info=True
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Internal Server Error",
) from err
def parse_and_store_activity_from_uploaded_file(
token_user_id: int, file: UploadFile, db: Session
):
# Get file extension
_, file_extension = os.path.splitext(file.filename)
# Save the uploaded file in the 'uploads' directory
with open(file.filename, "wb") as save_file:
save_file.write(file.file.read())
# Parse the file
parsed_info = parse_file(token_user_id, file_extension, file.filename)
# Store the activity in the database
created_activity = store_activity(parsed_info, db)
# Return the created activity
return created_activity
def parse_file(token_user_id: int, file_extension: str, filename: str) -> dict:
try:
# Choose the appropriate parser based on file extension
if file_extension.lower() == ".gpx":
# Parse the GPX file
parsed_info = gpx_utils.parse_gpx_file(filename, token_user_id)
elif file_extension.lower() == ".fit":
# Parse the FIT file
parsed_info = fit_utils.parse_fit_file(filename, token_user_id)
else:
# File extension not supported; raise an HTTPException with a 406 Not Acceptable status code
raise HTTPException(
status_code=status.HTTP_406_NOT_ACCEPTABLE,
detail="File extension not supported. Supported file extensions are .gpx and .fit",
)
# Return the parsed information
return parsed_info
except HTTPException:
# Let the deliberate 406 above propagate instead of masking it as a 500
raise
except Exception as err:
# Log the exception
logger.error(f"Error in parse_file: {err}", exc_info=True)
# Raise an HTTPException with a 500 Internal Server Error status code
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Internal Server Error",
) from err
finally:
# Remove the file after processing
os.remove(filename)
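# Because the cleanup above runs in a finally block, the source file is
# deleted on success and on failure alike; callers must not rely on it persisting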
def store_activity(parsed_info: dict, db: Session):
# create the activity in the database
created_activity = activities_crud.create_activity(parsed_info["activity"], db)
# Check if created_activity is None
if created_activity is None:
# raise an HTTPException with a 500 Internal Server Error status code
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error creating activity",
)
# Parse the activity streams from the parsed info
activity_streams = gpx_utils.parse_activity_streams_from_gpx_file(
parsed_info, created_activity.id
)
# Create activity streams in the database
activity_streams_crud.create_activity_streams(activity_streams, db)
# Return the created activity
return created_activity
def calculate_activity_distances(activities: list[activities_schema.Activity]):
"""Calculate the distances of the activities for each type of activity (run, bike, swim)"""
# Initialize the distances

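Worth a quick illustration: parse_file keys its dispatch off os.path.splitext plus lower(), which makes the extension check case-insensitive:

import os

for name in ("ride.gpx", "run.FIT", "notes.txt"):
    _, ext = os.path.splitext(name)
    print(name, "->", ext.lower())
# ride.gpx -> .gpx
# run.FIT -> .fit
# notes.txt -> .txt  (rejected with 406 Not Acceptable)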

@@ -30,8 +30,9 @@ services:
ports:
- "98:80" # API port, change per your needs
volumes:
# - <local_path>/endurain/backend/app:/app # Configure volume if you want to edit the code locally by cloning the repo and comment next line
# - <local_path>/endurain/backend/app:/app # Configure volume if you want to edit the code locally by cloning the repo
- <local_path>/endurain/backend/user_images:/app/user_images # necessary for user image persistence on container image updates
- <local_path>/endurain/backend/bulkimport:/app/bulk_import # necessary to enable bulk import of activities. Place here your activities files
depends_on:
- mariadb
- jaeger # optional


@@ -1,6 +1,7 @@
<template>
<div class="col">
<div class="row row-gap-3 row-cols-sm-3 align-items-center">
<!-- strava integration zone -->
<div class="col">
<div class="card text-center">
<img src="/src/assets/strava/api_logo_cptblWith_strava_stack_light.png" alt="Compatible with Strava image" class="card-img-top">
@@ -16,6 +17,16 @@
</div>
</div>
</div>
<!-- bulk import zone -->
<div class="col">
<div class="card text-center">
<div class="card-body">
<h4 class="card-title">{{ $t("settingsIntegrationsZone.bulkImportIntegrationTitle") }}</h4>
<p class="card-text">{{ $t("settingsIntegrationsZone.bulkImportIntegrationBody") }}</p>
<a href="#" class="btn btn-primary" @click="submitBulkImport">{{ $t("settingsIntegrationsZone.buttonBulkImport") }}</a>
</div>
</div>
</div>
</div>
</div>
</template>
@@ -28,6 +39,7 @@ import { addToast } from '@/utils/toastUtils';
import { useAuthStore } from '@/stores/authStore';
// Importing the services
import { strava } from '@/services/stravaService';
import { activities } from '@/services/activitiesService';
export default {
components: {
@@ -76,12 +88,25 @@ export default {
}
}
async function submitBulkImport() {
try {
await activities.bulkImportActivities();
// Set the loading message and show the loading alert.
addToast(t('settingsIntegrationsZone.loadingMessageBulkImport'), 'loading', true);
} catch(error) {
// If there is an error, set the error message and show the error alert.
addToast(t('settingsIntegrationsZone.errorMessageUnableToImportActivities') + " - " + error, 'danger', true);
}
}
return {
authStore,
t,
submitConnectStrava,
submitRetrieveStravaLastWeekActivities,
submitRetrieveStravaGear,
submitBulkImport,
};
},
};


@@ -10,5 +10,10 @@
"errorMessageUnableToGetStravaActivities": "Unable to get Strava activities",
"errorMessageUnableToGetStravaGear": "Unable to get Strava gear",
"loadingMessageRetrievingStravaActivities": "Retrieving Strava activities",
"loadingMessageRetrievingStravaGear": "Retrieving Strava gear"
"loadingMessageRetrievingStravaGear": "Retrieving Strava gear",
"bulkImportIntegrationTitle": "Bulk import",
"bulkImportIntegrationBody": "Bulk import activities from files stored in the bulk_import folder",
"buttonBulkImport": "Import",
"errorMessageUnableToImportActivities": "An error occurred while importing activities",
"loadingMessageBulkImport": "Importing activities"
}


@@ -1,4 +1,4 @@
import { fetchGetRequest, fetchPostFileRequest, fetchDeleteRequest, fetchPutRequest } from '@/utils/serviceUtils';
import { fetchGetRequest, fetchPostFileRequest, fetchDeleteRequest, fetchPutRequest, fetchPostRequest } from '@/utils/serviceUtils';
export const activities = {
getUserWeekActivities(user_id, week_number) {
@@ -34,6 +34,9 @@ export const activities = {
uploadActivityFile(formData) {
return fetchPostFileRequest('activities/create/upload', formData);
},
bulkImportActivities() {
return fetchPostRequest('activities/create/bulkimport');
},
addGearToActivity(activityId, gearId) {
return fetchPutRequest(`activities/${activityId}/addgear/${gearId}`);
},
@@ -41,7 +44,7 @@ export const activities = {
return fetchPutRequest(`activities/${activityId}/deletegear`);
},
editActivity(activityId, data) {
return fetchPutRequest(`activities/edit`, data);
return fetchPutRequest('activities/edit', data);
},
deleteActivity(activityId) {
return fetchDeleteRequest(`activities/${activityId}/delete`);