mirror of
https://github.com/Freika/dawarich.git
synced 2026-01-09 06:28:06 -05:00
Rework track generation system
This commit is contained in:
@@ -5,6 +5,8 @@ class AppVersionCheckingJob < ApplicationJob
|
||||
sidekiq_options retry: false
|
||||
|
||||
def perform
|
||||
return unless DawarichSettings.self_hosted?
|
||||
|
||||
Rails.cache.delete(CheckAppVersion::VERSION_CACHE_KEY)
|
||||
|
||||
CheckAppVersion.new.call
|
||||
|
||||
76
app/jobs/track_processing_job.rb
Normal file
76
app/jobs/track_processing_job.rb
Normal file
@@ -0,0 +1,76 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Unified background job for all track processing operations.
|
||||
#
|
||||
# This job replaces the previous complex system of multiple job types with a single,
|
||||
# configurable job that handles both bulk and incremental track processing.
|
||||
#
|
||||
# Modes:
|
||||
# - bulk: Process all unassigned points for a user (typically for initial setup)
|
||||
# - incremental: Process recent points for real-time track updates
|
||||
#
|
||||
# Features:
|
||||
# - Configurable processing modes
|
||||
# - Point-specific processing for incremental updates
|
||||
# - Automatic error handling and reporting
|
||||
# - Smart batching via unique_for to prevent job queue overflow
|
||||
#
|
||||
# Usage:
|
||||
# # Bulk processing
|
||||
# TrackProcessingJob.perform_later(user.id, 'bulk', cleanup_tracks: true)
|
||||
#
|
||||
# # Incremental processing
|
||||
# TrackProcessingJob.perform_later(user.id, 'incremental', point_id: point.id)
|
||||
#
|
||||
class TrackProcessingJob < ApplicationJob
  queue_as :tracks
  sidekiq_options retry: 3

  # Enable unique jobs to prevent duplicate processing: at most one job per
  # (user_id, mode) pair within the 30-second window.
  sidekiq_options unique_for: 30.seconds,
                  unique_args: ->(args) { [args[0], args[1]] } # user_id and mode

  # Processes tracks for a single user.
  #
  # @param user_id [Integer] id of the user whose points are processed
  # @param mode [String] 'bulk' or 'incremental'
  # @param options [Hash] forwarded to TrackService after compaction:
  #   :cleanup_tracks, :point_id, :time_threshold_minutes,
  #   :distance_threshold_meters
  def perform(user_id, mode, **options)
    user = User.find(user_id)

    service_options = {
      mode: mode.to_sym,
      cleanup_tracks: options[:cleanup_tracks] || false,
      point_id: options[:point_id],
      time_threshold_minutes: options[:time_threshold_minutes],
      distance_threshold_meters: options[:distance_threshold_meters]
    }.compact

    # Additional validation for incremental mode: bail out silently when the
    # triggering point is missing or stale.
    if mode == 'incremental' && options[:point_id]
      point = Point.find_by(id: options[:point_id])
      if point.nil?
        Rails.logger.warn "Point #{options[:point_id]} not found for track processing"
        return
      end

      # Skip processing old points to avoid processing imported data
      if point.created_at < 1.hour.ago
        Rails.logger.debug "Skipping track processing for old point #{point.id}"
        return
      end
    end

    tracks_created = TrackService.new(user, **service_options).call

    Rails.logger.info "Track processing completed for user #{user_id}: #{tracks_created} tracks created"
  rescue ActiveRecord::RecordNotFound => e
    # Missing user is not retryable; log and swallow.
    Rails.logger.error "User #{user_id} not found for track processing: #{e.message}"
  rescue StandardError => e
    Rails.logger.error "Track processing failed for user #{user_id}: #{e.message}"
    # BUGFIX: Exception#backtrace can be nil (e.g. manually constructed
    # exceptions); guard with &. so the error handler itself cannot crash.
    Rails.logger.error(e.backtrace&.join("\n"))

    ExceptionReporter.call(e, "Track processing failed", {
      user_id: user_id,
      mode: mode,
      options: options
    })

    # Re-raise so Sidekiq's retry (retry: 3) kicks in.
    raise
  end
end
|
||||
@@ -1,22 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# This job is being run on daily basis to create tracks for all users.
|
||||
# For each user, it starts from the end of their last track (or from their oldest point
|
||||
# if no tracks exist) and processes points until the specified end_at time.
|
||||
#
|
||||
# To manually run for a specific time range:
|
||||
# Tracks::BulkCreatingJob.perform_later(start_at: 1.week.ago, end_at: Time.current)
|
||||
#
|
||||
# To run for specific users only:
|
||||
# Tracks::BulkCreatingJob.perform_later(user_ids: [1, 2, 3])
|
||||
#
|
||||
# To let the job determine start times automatically (recommended):
|
||||
# Tracks::BulkCreatingJob.perform_later(end_at: Time.current)
|
||||
class Tracks::BulkCreatingJob < ApplicationJob
  queue_as :tracks
  sidekiq_options retry: false

  # Kicks off bulk track creation. With no arguments it covers everything up
  # to the end of yesterday for every active user; the creator works out the
  # per-user start time when none is supplied.
  def perform(start_at: nil, end_at: 1.day.ago.end_of_day, user_ids: [])
    creator = Tracks::BulkTrackCreator.new(start_at: start_at, end_at: end_at, user_ids: user_ids)
    creator.call
  end
end
|
||||
@@ -1,36 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class Tracks::CreateJob < ApplicationJob
  queue_as :default

  # Generates tracks from a user's points and notifies the user of the
  # outcome (success or failure).
  #
  # @param user_id [Integer]
  # @param start_at [Time, nil] lower bound of the processing window
  # @param end_at [Time, nil] upper bound of the processing window
  # @param cleaning_strategy [Symbol] :replace, :daily or :none — how
  #   pre-existing tracks in the window are handled
  def perform(user_id, start_at: nil, end_at: nil, cleaning_strategy: :replace)
    user = User.find(user_id)
    tracks_created = Tracks::CreateFromPoints.new(user, start_at:, end_at:, cleaning_strategy:).call

    create_success_notification(user, tracks_created)
  rescue StandardError => e
    ExceptionReporter.call(e, 'Failed to create tracks for user')

    # BUGFIX: if User.find itself raised, `user` is nil here; building a
    # notification for nil would raise a secondary error inside the rescue.
    create_error_notification(user, e) if user
  end

  private

  # Tells the user how many tracks were generated.
  def create_success_notification(user, tracks_created)
    Notifications::Create.new(
      user: user,
      kind: :info,
      title: 'Tracks Generated',
      content: "Created #{tracks_created} tracks from your location data. Check your tracks section to view them."
    ).call
  end

  # Surfaces the failure reason to the user.
  def create_error_notification(user, error)
    Notifications::Create.new(
      user: user,
      kind: :error,
      title: 'Track Generation Failed',
      content: "Failed to generate tracks from your location data: #{error.message}"
    ).call
  end
end
|
||||
@@ -1,30 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class Tracks::IncrementalGeneratorJob < ApplicationJob
  queue_as :default
  sidekiq_options retry: 3

  # Runs incremental track generation for a single user and day.
  # `day` accepts anything Date.parse understands; it defaults to today.
  def perform(user_id, day = nil, grace_period_minutes = 5)
    user = User.find(user_id)
    target_day = day.nil? ? Date.current : Date.parse(day.to_s)

    Rails.logger.info "Starting incremental track generation for user #{user.id}, day #{target_day}"

    generator(user, target_day, grace_period_minutes).call
  rescue StandardError => e
    ExceptionReporter.call(e, 'Incremental track generation failed')

    raise e
  end

  private

  # Builds a generator wired for incremental processing: points are loaded
  # per day, not-yet-final segments are buffered for later, and existing
  # tracks are left untouched.
  def generator(user, day, grace_period_minutes)
    @generator ||= Tracks::Generator.new(
      user,
      point_loader: Tracks::PointLoaders::IncrementalLoader.new(user, day),
      incomplete_segment_handler: Tracks::IncompleteSegmentHandlers::BufferHandler.new(user, day, grace_period_minutes),
      track_cleaner: Tracks::Cleaners::NoOpCleaner.new(user)
    )
  end
end
|
||||
@@ -33,7 +33,7 @@ class Point < ApplicationRecord
|
||||
after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? && !reverse_geocoded? }
|
||||
after_create :set_country
|
||||
after_create_commit :broadcast_coordinates
|
||||
after_create_commit :trigger_incremental_track_generation, if: -> { import_id.nil? }
|
||||
after_create_commit :trigger_track_processing, if: -> { import_id.nil? }
|
||||
after_commit :recalculate_track, on: :update
|
||||
|
||||
def self.without_raw_data
|
||||
@@ -104,10 +104,36 @@ class Point < ApplicationRecord
|
||||
track.recalculate_path_and_distance!
|
||||
end
|
||||
|
||||
def trigger_incremental_track_generation
|
||||
point_date = Time.zone.at(timestamp).to_date
|
||||
return if point_date < 1.day.ago.to_date
|
||||
|
||||
Tracks::IncrementalGeneratorJob.perform_later(user_id, point_date.to_s, 5)
|
||||
# Decides how urgently this point needs track processing. Obvious track
# boundaries (long time gap or large spatial jump from the previous point)
# are processed inline; continuous tracking is deferred to a background job
# to keep the queue load down.
def trigger_track_processing
  previous = user.points.where('timestamp < ?', timestamp)
                 .order(timestamp: :desc)
                 .first

  dispatch = should_trigger_immediate_processing?(previous) ? :perform_now : :perform_later
  TrackProcessingJob.public_send(dispatch, user_id, 'incremental', point_id: id)
end
|
||||
|
||||
# True when this point looks like the start of a new track relative to
# `previous_point`: no predecessor, a gap of more than 30 minutes, or a jump
# of more than 1 km. Such boundary points warrant immediate processing.
def should_trigger_immediate_processing?(previous_point)
  return true if previous_point.nil?
  return true if (timestamp - previous_point.timestamp) > 30.minutes

  jump_km = Geocoder::Calculations.distance_between(
    [previous_point.lat, previous_point.lon],
    [lat, lon],
    units: :km
  )
  jump_km > 1.0
end
|
||||
end
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class ExceptionReporter
|
||||
def self.call(exception, human_message = 'Exception reported')
|
||||
def self.call(exception, human_message = 'Exception reported', context = {})
|
||||
return unless DawarichSettings.self_hosted?
|
||||
|
||||
Rails.logger.error "#{human_message}: #{exception.message}"
|
||||
|
||||
219
app/services/track_service.rb
Normal file
219
app/services/track_service.rb
Normal file
@@ -0,0 +1,219 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Unified track processing service that handles all track operations.
|
||||
#
|
||||
# This service consolidates the previously complex track system into a single,
|
||||
# configurable service that can handle both bulk and incremental track processing.
|
||||
#
|
||||
# Features:
|
||||
# - Unified point loading and segmentation
|
||||
# - Configurable time and distance thresholds
|
||||
# - Real-time and batch processing modes
|
||||
# - Comprehensive track statistics calculation
|
||||
# - Automatic cleanup of overlapping tracks
|
||||
#
|
||||
# Usage:
|
||||
# # Bulk processing (initial setup or full regeneration)
|
||||
# TrackService.new(user, mode: :bulk, cleanup_tracks: true).call
|
||||
#
|
||||
# # Incremental processing (real-time updates)
|
||||
# TrackService.new(user, mode: :incremental).call
|
||||
#
|
||||
class TrackService
  include ActiveModel::Model

  # Fallback thresholds used when the user has no route-splitting settings.
  DEFAULT_TIME_THRESHOLD_MINUTES = 60
  DEFAULT_DISTANCE_THRESHOLD_METERS = 500

  attr_accessor :user, :mode, :cleanup_tracks, :time_threshold_minutes, :distance_threshold_meters, :point_id

  # @param user [User]
  # @param options [Hash] :mode (:bulk or :incremental, default :bulk),
  #   :cleanup_tracks, :point_id, :time_threshold_minutes,
  #   :distance_threshold_meters
  def initialize(user, **options)
    @user = user
    @mode = options[:mode] || :bulk
    @cleanup_tracks = options[:cleanup_tracks] || false
    @point_id = options[:point_id]
    # BUGFIX: the previous `to_i || default` fallback was dead code (`to_i`
    # never returns nil), so a missing user setting yielded a threshold of 0
    # and every consecutive point pair started a new segment. Non-positive
    # values now fall back to the defaults.
    @time_threshold_minutes = options[:time_threshold_minutes] ||
                              positive_or_default(user.safe_settings.minutes_between_routes,
                                                  DEFAULT_TIME_THRESHOLD_MINUTES)
    @distance_threshold_meters = options[:distance_threshold_meters] ||
                                 positive_or_default(user.safe_settings.meters_between_routes,
                                                     DEFAULT_DISTANCE_THRESHOLD_METERS)
  end

  # Runs the configured processing pipeline and returns the number of tracks
  # created.
  def call
    Rails.logger.info "Processing tracks for user #{user.id} in #{mode} mode"

    # Early exit for incremental mode when the triggering point is missing or
    # too old (e.g. imported historical data).
    if mode == :incremental && point_id
      point = Point.find_by(id: point_id)
      return 0 if point.nil? || point.created_at < 1.hour.ago
    end

    cleanup_existing_tracks if cleanup_tracks

    points = load_points
    return 0 if points.empty?

    segments = segment_points(points)
    tracks_created = create_tracks_from_segments(segments)

    Rails.logger.info "Created #{tracks_created} tracks for user #{user.id}"
    tracks_created
  end

  private

  # Coerces a setting to a positive integer, falling back to `default`.
  def positive_or_default(value, default)
    coerced = value.to_i
    coerced.positive? ? coerced : default
  end

  # Loads the points relevant to the current mode, ordered by timestamp.
  def load_points
    scope = user.points.where.not(lonlat: nil).where.not(timestamp: nil)

    case mode
    when :bulk
      scope.where(track_id: nil).order(:timestamp)
    when :incremental
      # Load recent unassigned points + points from active tracks that might
      # need reprocessing.
      cutoff_time = 2.hours.ago.to_i
      unassigned_point_ids = scope.where('timestamp > ? AND track_id IS NULL', cutoff_time).pluck(:id)
      active_track_point_ids = scope.joins(:track).where('tracks.end_at > ?', 2.hours.ago).pluck(:id)

      all_point_ids = (unassigned_point_ids + active_track_point_ids).uniq
      return scope.none if all_point_ids.empty?

      scope.where(id: all_point_ids).order(:timestamp)
    else
      # ROBUSTNESS: an unknown mode previously fell through to nil and the
      # caller crashed on `points.empty?`; return an empty relation instead.
      scope.none
    end
  end

  # Splits an ordered point list into segments at time/distance gaps.
  # Segments with fewer than 2 points are dropped.
  def segment_points(points)
    return [] if points.empty?

    segments = []
    current_segment = []

    points.each do |point|
      if should_start_new_segment?(point, current_segment.last)
        segments << current_segment if current_segment.size >= 2
        current_segment = [point]
      else
        current_segment << point
      end
    end

    segments << current_segment if current_segment.size >= 2
    segments
  end

  # True when the gap between consecutive points exceeds the time or
  # distance threshold.
  def should_start_new_segment?(current_point, previous_point)
    return false if previous_point.nil?

    time_diff = current_point.timestamp - previous_point.timestamp
    return true if time_diff > (time_threshold_minutes * 60)

    distance_km = Geocoder::Calculations.distance_between(
      [previous_point.lat, previous_point.lon],
      [current_point.lat, current_point.lon],
      units: :km
    )
    return true if (distance_km * 1000) > distance_threshold_meters

    false
  end

  # Persists one track per segment; returns how many were created.
  def create_tracks_from_segments(segments)
    tracks_created = 0

    segments.each do |segment_points|
      track = create_track_from_points(segment_points)
      tracks_created += 1 if track&.persisted?
    end

    tracks_created
  end

  # Builds a Track with calculated statistics and assigns the points to it.
  # Returns nil (after logging) on failure so one bad segment does not abort
  # the whole run.
  def create_track_from_points(points)
    return nil if points.size < 2

    track = Track.create!(
      user: user,
      start_at: Time.zone.at(points.first.timestamp),
      end_at: Time.zone.at(points.last.timestamp),
      original_path: build_linestring(points),
      distance: calculate_distance(points),
      duration: points.last.timestamp - points.first.timestamp,
      avg_speed: calculate_average_speed(points),
      elevation_gain: calculate_elevation_gain(points),
      elevation_loss: calculate_elevation_loss(points),
      elevation_max: points.map(&:altitude).compact.max || 0,
      elevation_min: points.map(&:altitude).compact.min || 0
    )

    Point.where(id: points.map(&:id)).update_all(track_id: track.id)
    track
  rescue StandardError => e
    Rails.logger.error "Failed to create track for user #{user.id}: #{e.message}"
    nil
  end

  # Removes tracks that would conflict with the ones about to be created.
  def cleanup_existing_tracks
    case mode
    when :bulk
      user.tracks.destroy_all
    when :incremental
      # Remove overlapping tracks in the processing window.
      cutoff_time = 2.hours.ago
      user.tracks.where('end_at > ?', cutoff_time).destroy_all
    end
  end

  # WKT LINESTRING in lon/lat order.
  def build_linestring(points)
    coordinates = points.map { |p| "#{p.lon} #{p.lat}" }.join(',')
    "LINESTRING(#{coordinates})"
  end

  # Sum of great-circle leg distances, returned in whole meters.
  def calculate_distance(points)
    total_distance = 0

    points.each_cons(2) do |point1, point2|
      total_distance += Geocoder::Calculations.distance_between(
        [point1.lat, point1.lon],
        [point2.lat, point2.lon],
        units: :km
      )
    end

    (total_distance * 1000).round # Convert to meters
  end

  # Average speed in km/h (0.0 for degenerate segments).
  def calculate_average_speed(points)
    return 0.0 if points.size < 2

    distance_meters = calculate_distance(points)
    duration_seconds = points.last.timestamp - points.first.timestamp

    return 0.0 if duration_seconds <= 0

    speed_mps = distance_meters.to_f / duration_seconds
    (speed_mps * 3.6).round(2) # Convert to km/h
  end

  # Total positive altitude change (meters, rounded).
  def calculate_elevation_gain(points)
    altitudes = points.map(&:altitude).compact
    return 0 if altitudes.size < 2

    gain = 0
    altitudes.each_cons(2) do |alt1, alt2|
      diff = alt2 - alt1
      gain += diff if diff > 0
    end

    gain.round
  end

  # Total negative altitude change (meters, rounded, reported as positive).
  def calculate_elevation_loss(points)
    altitudes = points.map(&:altitude).compact
    return 0 if altitudes.size < 2

    loss = 0
    altitudes.each_cons(2) do |alt1, alt2|
      diff = alt1 - alt2
      loss += diff if diff > 0
    end

    loss.round
  end
end
|
||||
@@ -1,21 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Builds an RGeo line string from an ordered collection of coordinate-bearing
# objects (anything responding to #lon and #lat).
class Tracks::BuildPath
  def initialize(coordinates)
    @coordinates = coordinates
  end

  # Returns an RGeo line string; lon/lat are rounded to 5 decimal places
  # (~1 m precision).
  def call
    rounded_points = coordinates.map do |coordinate|
      factory.point(coordinate.lon.to_f.round(5), coordinate.lat.to_f.round(5))
    end

    factory.line_string(rounded_points)
  end

  private

  attr_reader :coordinates

  # NOTE(review): srid 3857 on a spherical (geographic) factory looks odd —
  # 3857 is a projected CRS while 4326 is the usual lon/lat SRID. Preserved
  # as-is; confirm against how the geometry column is declared.
  def factory
    @factory ||= RGeo::Geographic.spherical_factory(srid: 3857)
  end
end
|
||||
@@ -1,47 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module Tracks
  # Fans out per-user Tracks::CreateJob runs covering each user's points
  # between a computed (or given) start time and the end time.
  class BulkTrackCreator
    def initialize(start_at: nil, end_at: 1.day.ago.end_of_day, user_ids: [])
      @start_at = start_at&.to_datetime
      @end_at = end_at&.to_datetime
      @user_ids = user_ids
    end

    # Enqueues a CreateJob for every eligible user that has points inside
    # their window; users without matching points are skipped.
    def call
      users.find_each do |user|
        next if user.tracked_points.empty?

        window_start = start_at || start_time(user)

        next unless user.tracked_points.where(timestamp: window_start.to_i..end_at.to_i).exists?

        Tracks::CreateJob.perform_later(
          user.id,
          start_at: window_start,
          end_at: end_at,
          cleaning_strategy: :daily
        )
      end
    end

    private

    attr_reader :start_at, :end_at, :user_ids

    # All active users, optionally narrowed to an explicit id list.
    def users
      return User.active if user_ids.empty?

      User.active.where(id: user_ids)
    end

    # Resumes from the end of the user's latest track; otherwise falls back
    # to their oldest point, or the start of yesterday if they have none.
    def start_time(user)
      last_track = user.tracks.order(end_at: :desc).first
      return last_track.end_at if last_track

      first_point = user.tracked_points.order(:timestamp).first
      first_point ? Time.zone.at(first_point.timestamp) : 1.day.ago.beginning_of_day
    end
  end
end
|
||||
@@ -1,116 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Track cleaning strategy for daily track processing.
|
||||
#
|
||||
# This cleaner handles tracks that overlap with the specified time window,
|
||||
# ensuring proper handling of cross-day tracks and preventing orphaned points.
|
||||
#
|
||||
# How it works:
|
||||
# 1. Finds tracks that overlap with the time window (not just those completely contained)
|
||||
# 2. For overlapping tracks, removes only points within the time window
|
||||
# 3. Deletes tracks that become empty after point removal
|
||||
# 4. Preserves tracks that extend beyond the time window with their remaining points
|
||||
#
|
||||
# Key differences from ReplaceCleaner:
|
||||
# - Handles tracks that span multiple days correctly
|
||||
# - Uses overlap logic instead of containment logic
|
||||
# - Preserves track portions outside the processing window
|
||||
# - Prevents orphaned points from cross-day tracks
|
||||
#
|
||||
# Used primarily for:
|
||||
# - Daily track processing that handles 24-hour windows
|
||||
# - Incremental processing that respects existing cross-day tracks
|
||||
# - Scenarios where tracks may span the processing boundary
|
||||
#
|
||||
# Example usage:
|
||||
# cleaner = Tracks::Cleaners::DailyCleaner.new(user, start_at: 1.day.ago.beginning_of_day, end_at: 1.day.ago.end_of_day)
|
||||
# cleaner.cleanup
|
||||
#
|
||||
module Tracks
  module Cleaners
    # Cleanup strategy for daily/windowed processing: only the points inside
    # the [start_at, end_at] window are detached from overlapping tracks, so
    # cross-day tracks keep their out-of-window portions. Tracks left empty
    # (or with a single point) afterwards are deleted; surviving tracks get
    # their start/end boundaries recomputed.
    class DailyCleaner
      attr_reader :user, :start_at, :end_at

      def initialize(user, start_at: nil, end_at: nil)
        @user = user
        @start_at = start_at
        @end_at = end_at
      end

      # No-op unless both window bounds are present.
      def cleanup
        return unless start_at.present? && end_at.present?

        overlapping_tracks = find_overlapping_tracks

        return if overlapping_tracks.empty?

        Rails.logger.info "Processing #{overlapping_tracks.count} overlapping tracks for user #{user.id} in time window #{start_at} to #{end_at}"

        overlapping_tracks.each do |track|
          process_overlapping_track(track)
        end
      end

      private

      # Tracks overlapping the window: track_start < window_end AND
      # track_end > window_start (overlap, not containment, so cross-day
      # tracks are included).
      def find_overlapping_tracks
        user.tracks.where(
          '(start_at < ? AND end_at > ?)',
          Time.zone.at(end_at),
          Time.zone.at(start_at)
        )
      end

      # Detaches the track's in-window points, then deletes or repairs the
      # track depending on how many points remain.
      def process_overlapping_track(track)
        points_in_window = track.points.where(
          'timestamp >= ? AND timestamp <= ?',
          start_at.to_i,
          end_at.to_i
        )

        if points_in_window.empty?
          Rails.logger.debug "Track #{track.id} has no points in time window, skipping"
          return
        end

        # BUGFIX: count BEFORE detaching — the relation is scoped on
        # track_id, so counting after update_all always reported 0.
        removed_count = points_in_window.count

        points_in_window.update_all(track_id: nil)

        Rails.logger.debug "Removed #{removed_count} points from track #{track.id}"

        remaining_points_count = track.points.count

        if remaining_points_count == 0
          # Track is now empty, delete it.
          Rails.logger.debug "Track #{track.id} is now empty, deleting"
          track.destroy!
        elsif remaining_points_count < 2
          # Too few points to form a valid track: orphan them and delete.
          Rails.logger.debug "Track #{track.id} has insufficient points (#{remaining_points_count}), deleting"
          track.points.update_all(track_id: nil)
          track.destroy!
        else
          # Track still has valid points outside our window; fix boundaries.
          Rails.logger.debug "Track #{track.id} still has #{remaining_points_count} points, updating boundaries"
          update_track_boundaries(track)
        end
      end

      # Recomputes start_at/end_at from the track's remaining points.
      def update_track_boundaries(track)
        remaining_points = track.points.order(:timestamp)

        return if remaining_points.empty?

        track.update!(
          start_at: Time.zone.at(remaining_points.first.timestamp),
          end_at: Time.zone.at(remaining_points.last.timestamp)
        )
      end
    end
  end
end
|
||||
@@ -1,16 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module Tracks
  module Cleaners
    # Cleanup strategy that intentionally does nothing.
    #
    # Used for incremental processing, where new tracks are only appended and
    # existing tracks must be left untouched.
    class NoOpCleaner
      def initialize(user)
        @user = user
      end

      # Deliberately a no-op; exists only to satisfy the cleaner interface.
      def cleanup; end
    end
  end
end
|
||||
@@ -1,69 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Track cleaning strategy for bulk track regeneration.
|
||||
#
|
||||
# This cleaner removes existing tracks before generating new ones,
|
||||
# ensuring a clean slate for bulk processing without duplicate tracks.
|
||||
#
|
||||
# How it works:
|
||||
# 1. Finds all existing tracks for the user within the specified time range
|
||||
# 2. Detaches all points from these tracks (sets track_id to nil)
|
||||
# 3. Destroys the existing track records
|
||||
# 4. Allows the generator to create fresh tracks from the same points
|
||||
#
|
||||
# Used primarily for:
|
||||
# - Bulk track regeneration after settings changes
|
||||
# - Reprocessing historical data with updated algorithms
|
||||
# - Ensuring consistency when tracks need to be rebuilt
|
||||
#
|
||||
# The cleaner respects optional time boundaries (start_at/end_at) to enable
|
||||
# partial regeneration of tracks within specific time windows.
|
||||
#
|
||||
# This strategy is essential for bulk operations but should not be used
|
||||
# for incremental processing where existing tracks should be preserved.
|
||||
#
|
||||
# Example usage:
|
||||
# cleaner = Tracks::Cleaners::ReplaceCleaner.new(user, start_at: 1.week.ago, end_at: Time.current)
|
||||
# cleaner.cleanup
|
||||
#
|
||||
module Tracks
  module Cleaners
    # Cleanup strategy for bulk regeneration: removes existing tracks
    # (optionally limited to a [start_at, end_at] window) after detaching
    # their points, giving the generator a clean slate to rebuild from.
    # Not suitable for incremental runs, where tracks must be preserved.
    class ReplaceCleaner
      attr_reader :user, :start_at, :end_at

      def initialize(user, start_at: nil, end_at: nil)
        @user = user
        @start_at = start_at
        @end_at = end_at
      end

      # Detaches points first so they can be re-segmented, then destroys the
      # doomed tracks.
      def cleanup
        doomed_tracks = find_tracks_to_remove
        return if doomed_tracks.none?

        Rails.logger.info "Removing #{doomed_tracks.count} existing tracks for user #{user.id}"

        Point.where(track_id: doomed_tracks.ids).update_all(track_id: nil)

        doomed_tracks.destroy_all
      end

      private

      # Tracks fully contained in the optional time window (each bound is
      # only applied when present).
      def find_tracks_to_remove
        scope = user.tracks
        scope = scope.where('start_at >= ?', Time.zone.at(start_at)) if start_at.present?
        scope = scope.where('end_at <= ?', Time.zone.at(end_at)) if end_at.present?
        scope
      end
    end
  end
end
|
||||
@@ -1,73 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Builds tracks from a user's points by configuring a Tracks::Generator for
# bulk processing: points are loaded in bulk, incomplete segments are
# ignored, and existing tracks are cleaned per the chosen strategy.
class Tracks::CreateFromPoints
  include Tracks::Segmentation
  include Tracks::TrackBuilder

  attr_reader :user, :start_at, :end_at, :cleaning_strategy

  # @param cleaning_strategy [Symbol] :replace (default), :daily or :none
  def initialize(user, start_at: nil, end_at: nil, cleaning_strategy: :replace)
    @user = user
    @start_at = start_at
    @end_at = end_at
    @cleaning_strategy = cleaning_strategy
  end

  # Runs the generator; returns the number of tracks created.
  def call
    generator = Tracks::Generator.new(
      user,
      point_loader: point_loader,
      incomplete_segment_handler: incomplete_segment_handler,
      track_cleaner: track_cleaner
    )

    generator.call
  end

  # Expose threshold properties for tests.
  # BUGFIX: the old `to_i || 500` fallback was dead code (`to_i` never
  # returns nil), so a missing setting produced a 0 m threshold; non-positive
  # values now fall back to the default.
  def distance_threshold_meters
    @distance_threshold_meters ||= begin
      configured = user.safe_settings.meters_between_routes.to_i
      configured.positive? ? configured : 500
    end
  end

  # Same fallback fix as distance_threshold_meters, default 60 minutes.
  def time_threshold_minutes
    @time_threshold_minutes ||= begin
      configured = user.safe_settings.minutes_between_routes.to_i
      configured.positive? ? configured : 60
    end
  end

  private

  def point_loader
    @point_loader ||=
      Tracks::PointLoaders::BulkLoader.new(
        user, start_at: start_at, end_at: end_at
      )
  end

  def incomplete_segment_handler
    @incomplete_segment_handler ||=
      Tracks::IncompleteSegmentHandlers::IgnoreHandler.new(user)
  end

  # Maps the strategy symbol to a cleaner instance.
  def track_cleaner
    @track_cleaner ||=
      case cleaning_strategy
      when :daily
        Tracks::Cleaners::DailyCleaner.new(user, start_at: start_at, end_at: end_at)
      when :none
        Tracks::Cleaners::NoOpCleaner.new(user)
      else # :replace (default)
        Tracks::Cleaners::ReplaceCleaner.new(user, start_at: start_at, end_at: end_at)
      end
  end

  # Legacy method for backward compatibility with tests.
  # Delegates to segmentation module logic.
  def should_start_new_track?(current_point, previous_point)
    should_start_new_segment?(current_point, previous_point)
  end

  # Legacy method for backward compatibility with tests.
  # Delegates to segmentation module logic.
  def calculate_distance_kilometers(point1, point2)
    calculate_distance_kilometers_between_points(point1, point2)
  end
end
|
||||
@@ -1,108 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# The core track generation engine that orchestrates the entire process of creating tracks from GPS points.
|
||||
#
|
||||
# This class uses a flexible strategy pattern to handle different track generation scenarios:
|
||||
# - Bulk processing: Generate all tracks at once from existing points
|
||||
# - Incremental processing: Generate tracks as new points arrive
|
||||
#
|
||||
# How it works:
|
||||
# 1. Uses a PointLoader strategy to load points from the database
|
||||
# 2. Applies segmentation logic to split points into track segments based on time/distance gaps
|
||||
# 3. Determines which segments should be finalized into tracks vs buffered for later
|
||||
# 4. Creates Track records from finalized segments with calculated statistics
|
||||
# 5. Manages cleanup of existing tracks based on the chosen strategy
|
||||
#
|
||||
# Strategy Components:
|
||||
# - point_loader: Loads points from database (BulkLoader, IncrementalLoader)
|
||||
# - incomplete_segment_handler: Handles segments that aren't ready to finalize (IgnoreHandler, BufferHandler)
|
||||
# - track_cleaner: Manages existing tracks when regenerating (ReplaceCleaner, NoOpCleaner)
|
||||
#
|
||||
# The class includes Tracks::Segmentation for splitting logic and Tracks::TrackBuilder for track creation.
|
||||
# Distance and time thresholds are configurable per user via their settings.
|
||||
#
|
||||
# Example usage:
|
||||
# generator = Tracks::Generator.new(
|
||||
# user,
|
||||
# point_loader: Tracks::PointLoaders::BulkLoader.new(user),
|
||||
# incomplete_segment_handler: Tracks::IncompleteSegmentHandlers::IgnoreHandler.new(user),
|
||||
# track_cleaner: Tracks::Cleaners::ReplaceCleaner.new(user)
|
||||
# )
|
||||
# tracks_created = generator.call
|
||||
#
|
||||
module Tracks
|
||||
class Generator
|
||||
include Tracks::Segmentation
|
||||
include Tracks::TrackBuilder
|
||||
|
||||
attr_reader :user, :point_loader, :incomplete_segment_handler, :track_cleaner
|
||||
|
||||
def initialize(user, point_loader:, incomplete_segment_handler:, track_cleaner:)
|
||||
@user = user
|
||||
@point_loader = point_loader
|
||||
@incomplete_segment_handler = incomplete_segment_handler
|
||||
@track_cleaner = track_cleaner
|
||||
end
|
||||
|
||||
def call
|
||||
Rails.logger.info "Starting track generation for user #{user.id}"
|
||||
|
||||
tracks_created = 0
|
||||
|
||||
Point.transaction do
|
||||
# Clean up existing tracks if needed
|
||||
track_cleaner.cleanup
|
||||
|
||||
# Load points using the configured strategy
|
||||
points = point_loader.load_points
|
||||
|
||||
if points.empty?
|
||||
Rails.logger.info "No points to process for user #{user.id}"
|
||||
return 0
|
||||
end
|
||||
|
||||
Rails.logger.info "Processing #{points.size} points for user #{user.id}"
|
||||
|
||||
# Apply segmentation logic
|
||||
segments = split_points_into_segments(points)
|
||||
|
||||
Rails.logger.info "Created #{segments.size} segments for user #{user.id}"
|
||||
|
||||
# Process each segment
|
||||
segments.each do |segment_points|
|
||||
next if segment_points.size < 2
|
||||
|
||||
if incomplete_segment_handler.should_finalize_segment?(segment_points)
|
||||
# Create track from finalized segment
|
||||
track = create_track_from_points(segment_points)
|
||||
if track&.persisted?
|
||||
tracks_created += 1
|
||||
Rails.logger.debug "Created track #{track.id} with #{segment_points.size} points"
|
||||
end
|
||||
else
|
||||
# Handle incomplete segment according to strategy
|
||||
incomplete_segment_handler.handle_incomplete_segment(segment_points)
|
||||
Rails.logger.debug "Stored #{segment_points.size} points as incomplete segment"
|
||||
end
|
||||
end
|
||||
|
||||
# Cleanup any processed buffered data
|
||||
incomplete_segment_handler.cleanup_processed_data
|
||||
end
|
||||
|
||||
Rails.logger.info "Completed track generation for user #{user.id}: #{tracks_created} tracks created"
|
||||
tracks_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Required by Tracks::Segmentation module
|
||||
# Distance gap (in meters) beyond which two consecutive points belong to
# different tracks. Falls back to 500 m when the user has not configured
# `meters_between_routes`.
#
# FIX: the previous `setting.to_i || 500` could never fall back — nil.to_i
# is 0 and 0 is truthy in Ruby, so an unset setting silently became 0.
def distance_threshold_meters
  @distance_threshold_meters ||= (user.safe_settings.meters_between_routes || 500).to_i
end
|
||||
|
||||
# Time gap (in minutes) beyond which two consecutive points belong to
# different tracks. Falls back to 60 minutes when the user has not
# configured `minutes_between_routes`.
#
# FIX: the previous `setting.to_i || 60` could never fall back — nil.to_i
# is 0 and 0 is truthy in Ruby, so an unset setting silently became 0.
def time_threshold_minutes
  @time_threshold_minutes ||= (user.safe_settings.minutes_between_routes || 60).to_i
end
|
||||
end
|
||||
end
|
||||
@@ -1,36 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module Tracks
  module IncompleteSegmentHandlers
    # Incomplete-segment strategy for incremental processing: segments whose
    # newest point is still inside the grace period are parked in a buffer
    # keyed by user/day so they can keep growing on the next run.
    class BufferHandler
      attr_reader :user, :day, :grace_period_minutes, :redis_buffer

      def initialize(user, day = nil, grace_period_minutes = 5)
        @user = user
        @day = day || Date.current
        @grace_period_minutes = grace_period_minutes
        @redis_buffer = Tracks::RedisBuffer.new(user.id, @day)
      end

      # A segment is final once its newest point is older than the grace
      # period; empty segments are never finalized.
      def should_finalize_segment?(segment_points)
        newest = segment_points.last
        return false if newest.nil?

        Time.zone.at(newest.timestamp) < grace_period_minutes.minutes.ago
      end

      # Park the segment in the buffer so a later run can extend it.
      def handle_incomplete_segment(segment_points)
        redis_buffer.store(segment_points)
        Rails.logger.debug "Stored #{segment_points.size} points in buffer for user #{user.id}, day #{day}"
      end

      # Drop buffered data once it has been folded into finalized tracks.
      def cleanup_processed_data
        redis_buffer.clear
        Rails.logger.debug "Cleared buffer for user #{user.id}, day #{day}"
      end
    end
  end
end
|
||||
@@ -1,48 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Incomplete segment handling strategy for bulk track generation.
|
||||
#
|
||||
# This handler always finalizes segments immediately without buffering,
|
||||
# making it suitable for bulk processing where all data is historical
|
||||
# and no segments are expected to grow with new incoming points.
|
||||
#
|
||||
# How it works:
|
||||
# 1. Always returns true for should_finalize_segment? - every segment becomes a track
|
||||
# 2. Ignores any incomplete segments (logs them but takes no action)
|
||||
# 3. Requires no cleanup since no data is buffered
|
||||
#
|
||||
# Used primarily for:
|
||||
# - Bulk track generation from historical data
|
||||
# - One-time processing where all points are already available
|
||||
# - Scenarios where you want to create tracks from every valid segment
|
||||
#
|
||||
# This strategy is efficient for bulk operations but not suitable for
|
||||
# real-time processing where segments may grow as new points arrive.
|
||||
#
|
||||
# Example usage:
|
||||
# handler = Tracks::IncompleteSegmentHandlers::IgnoreHandler.new(user)
|
||||
# should_create_track = handler.should_finalize_segment?(segment_points)
|
||||
#
|
||||
module Tracks
  module IncompleteSegmentHandlers
    # No-op incomplete-segment strategy used by bulk generation: every
    # segment is finalized immediately and nothing is ever buffered.
    class IgnoreHandler
      def initialize(user)
        @user = user
      end

      # Bulk runs process only historical data, so no segment can grow —
      # every segment is considered complete.
      def should_finalize_segment?(_segment_points) = true

      # Incomplete segments are simply logged and dropped.
      def handle_incomplete_segment(segment_points)
        Rails.logger.debug "Ignoring incomplete segment with #{segment_points.size} points"
      end

      # Nothing is buffered, so there is nothing to clean up.
      def cleanup_processed_data = nil
    end
  end
end
|
||||
@@ -1,54 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Point loading strategy for bulk track generation from existing GPS points.
|
||||
#
|
||||
# This loader retrieves all valid points for a user within an optional time range,
|
||||
# suitable for regenerating all tracks at once or processing historical data.
|
||||
#
|
||||
# How it works:
|
||||
# 1. Queries all points belonging to the user
|
||||
# 2. Filters out points without valid coordinates or timestamps
|
||||
# 3. Optionally filters by start_at/end_at time range if provided
|
||||
# 4. Returns points ordered by timestamp for sequential processing
|
||||
#
|
||||
# Used primarily for:
|
||||
# - Initial track generation when a user first enables tracks
|
||||
# - Bulk regeneration of all tracks after settings changes
|
||||
# - Processing historical data imports
|
||||
#
|
||||
# The loader is designed to be efficient for large datasets while ensuring
|
||||
# data integrity by filtering out invalid points upfront.
|
||||
#
|
||||
# Example usage:
|
||||
# loader = Tracks::PointLoaders::BulkLoader.new(user, start_at: 1.week.ago, end_at: Time.current)
|
||||
# points = loader.load_points
|
||||
#
|
||||
module Tracks
  module PointLoaders
    # Point-loading strategy for bulk runs: every valid point for the user,
    # optionally restricted to a [start_at, end_at] timestamp window, in
    # chronological order.
    class BulkLoader
      attr_reader :user, :start_at, :end_at

      def initialize(user, start_at: nil, end_at: nil)
        @user = user
        @start_at = start_at
        @end_at = end_at
      end

      # Returns an ordered relation of the user's points with coordinates
      # and timestamps present, narrowed by the optional time window.
      def load_points
        relation = Point.where(user: user)
                        .where.not(lonlat: nil)
                        .where.not(timestamp: nil)
        relation = relation.where('timestamp >= ?', start_at) if start_at.present?
        relation = relation.where('timestamp <= ?', end_at) if end_at.present?
        relation.order(:timestamp)
      end
    end
  end
end
|
||||
@@ -1,72 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module Tracks
  module PointLoaders
    # Point-loading strategy for incremental (near-real-time) runs: combines
    # points buffered by a previous run with any unassigned points recorded
    # since the day's last finalized track.
    class IncrementalLoader
      attr_reader :user, :day, :redis_buffer

      def initialize(user, day = nil)
        @user = user
        @day = day || Date.current
        @redis_buffer = Tracks::RedisBuffer.new(user.id, @day)
      end

      # Returns buffered + freshly recorded points, timestamp-ordered and
      # de-duplicated.
      def load_points
        buffered = redis_buffer.retrieve
        latest_track = Track.last_for_day(user, day)
        fresh = load_new_points_since_last_track(latest_track)

        merged = merge_points(buffered, fresh)
        Rails.logger.debug "Loaded #{buffered.size} buffered points and #{fresh.size} new points for user #{user.id}"
        merged
      end

      private

      # Valid, unassigned points: everything after the last track's end, or
      # the whole day when no track exists for it yet.
      def load_new_points_since_last_track(last_track)
        scope = user.points
                    .where.not(lonlat: nil)
                    .where.not(timestamp: nil)
                    .where(track_id: nil) # skip points already assigned to tracks

        scope = if last_track
                  scope.where('timestamp > ?', last_track.end_at.to_i)
                else
                  scope.where('timestamp >= ? AND timestamp <= ?',
                              day.beginning_of_day.to_i, day.end_of_day.to_i)
                end

        scope.order(:timestamp)
      end

      # Rehydrates buffered hashes into Point instances, merges them with
      # the fresh records, sorts by timestamp and drops duplicates (same
      # timestamp + coordinates).
      def merge_points(buffered_points, new_points)
        hydrated = buffered_points.map do |raw|
          raw.is_a?(Point) ? raw : Point.new(raw.except('id').symbolize_keys)
        end

        (hydrated + new_points.to_a)
          .sort_by(&:timestamp)
          .uniq { |point| [point.timestamp, point.lat, point.lon] }
      end
    end
  end
end
|
||||
@@ -1,72 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Per-user, per-day buffer for incomplete track segments.
#
# NOTE(review): despite the name, this goes through Rails.cache — it is only
# Redis-backed when the cache store is configured that way; confirm against
# the app's cache configuration.
class Tracks::RedisBuffer
  BUFFER_PREFIX = 'track_buffer'
  BUFFER_EXPIRY = 7.days

  attr_reader :user_id, :day

  # Accepts a Date or anything Date.parse can handle (e.g. a string).
  def initialize(user_id, day)
    @user_id = user_id
    @day = day.is_a?(Date) ? day : Date.parse(day.to_s)
  end

  # Persists a serialized snapshot of the points under the user/day key.
  # No-op for an empty list.
  def store(points)
    return if points.empty?

    Rails.cache.write(buffer_key, serialize_points(points), expires_in: BUFFER_EXPIRY)
    Rails.logger.debug "Stored #{points.size} points in buffer for user #{user_id}, day #{day}"
  end

  # Returns the buffered point hashes; [] when nothing is cached or the
  # read fails (failures are logged, never raised).
  def retrieve
    cached = Rails.cache.read(buffer_key)
    return [] unless cached

    deserialize_points(cached)
  rescue StandardError => e
    Rails.logger.error "Failed to retrieve buffered points for user #{user_id}, day #{day}: #{e.message}"
    []
  end

  # Clear the buffer for the user/day combination
  def clear
    Rails.cache.delete(buffer_key)
    Rails.logger.debug "Cleared buffer for user #{user_id}, day #{day}"
  end

  def exists?
    Rails.cache.exist?(buffer_key)
  end

  private

  def buffer_key
    "#{BUFFER_PREFIX}:#{user_id}:#{day.strftime('%Y-%m-%d')}"
  end

  # Reduces each point to a plain hash of just the attributes the track
  # generator needs, so the cache payload stays small and serializable.
  def serialize_points(points)
    points.map do |point|
      {
        id: point.id,
        lonlat: point.lonlat.to_s,
        timestamp: point.timestamp,
        lat: point.lat,
        lon: point.lon,
        altitude: point.altitude,
        velocity: point.velocity,
        battery: point.battery,
        user_id: point.user_id
      }
    end
  end

  # Cached data is already an array of hashes; guard against nil.
  def deserialize_points(points_data)
    points_data || []
  end
end
|
||||
@@ -1,140 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Track segmentation logic for splitting GPS points into meaningful track segments.
|
||||
#
|
||||
# This module provides the core algorithm for determining where one track ends
|
||||
# and another begins, based on time gaps and distance jumps between consecutive points.
|
||||
#
|
||||
# How it works:
|
||||
# 1. Analyzes consecutive GPS points to detect gaps that indicate separate journeys
|
||||
# 2. Uses configurable time and distance thresholds to identify segment boundaries
|
||||
# 3. Splits large arrays of points into smaller arrays representing individual tracks
|
||||
# 4. Provides utilities for handling both Point objects and hash representations
|
||||
#
|
||||
# Segmentation criteria:
|
||||
# - Time threshold: Gap longer than X minutes indicates a new track
|
||||
# - Distance threshold: Jump larger than X meters indicates a new track
|
||||
# - Minimum segment size: Segments must have at least 2 points to form a track
|
||||
#
|
||||
# The module is designed to be included in classes that need segmentation logic
|
||||
# and requires the including class to implement distance_threshold_meters and
|
||||
# time_threshold_minutes methods.
|
||||
#
|
||||
# Used by:
|
||||
# - Tracks::Generator for splitting points during track generation
|
||||
# - Tracks::CreateFromPoints for legacy compatibility
|
||||
#
|
||||
# Example usage:
|
||||
# class MyTrackProcessor
|
||||
# include Tracks::Segmentation
|
||||
#
|
||||
# def distance_threshold_meters; 500; end
|
||||
# def time_threshold_minutes; 60; end
|
||||
#
|
||||
# def process_points(points)
|
||||
# segments = split_points_into_segments(points)
|
||||
# # Process each segment...
|
||||
# end
|
||||
# end
|
||||
#
|
||||
module Tracks::Segmentation
  extend ActiveSupport::Concern

  private

  # Splits a timestamp-ordered list of points into segments, starting a new
  # segment whenever the configured time or distance gap is exceeded.
  # Segments with fewer than two points are dropped.
  def split_points_into_segments(points)
    return [] if points.empty?

    segments = []
    segment = []

    points.each do |point|
      if should_start_new_segment?(point, segment.last)
        segments << segment if segment.size >= 2
        segment = [point]
      else
        segment << point
      end
    end

    # The final segment is never followed by a boundary, so flush it here.
    segments << segment if segment.size >= 2

    segments
  end

  # A boundary exists when the time gap or the distance jump between two
  # consecutive points exceeds the including class's thresholds.
  def should_start_new_segment?(current_point, previous_point)
    return false if previous_point.nil?

    gap_seconds = point_timestamp(current_point) - point_timestamp(previous_point)
    return true if gap_seconds > time_threshold_minutes.to_i * 60

    # Distance is compared in meters to match the frontend logic.
    jump_km = calculate_distance_kilometers_between_points(previous_point, current_point)
    (jump_km * 1000) > distance_threshold_meters
  end

  def calculate_distance_kilometers_between_points(point1, point2)
    from = point_coordinates(point1)
    to = point_coordinates(point2)

    # Geocoder keeps the calculation consistent with the rest of the app.
    Geocoder::Calculations.distance_between(from, to, units: :km)
  end

  # True when the segment has at least two points and its newest point is
  # older than the grace period (i.e. the segment is unlikely to grow).
  def should_finalize_segment?(segment_points, grace_period_minutes = 5)
    return false if segment_points.size < 2

    age_seconds = Time.current.to_i - point_timestamp(segment_points.last)
    age_seconds > grace_period_minutes * 60
  end

  # Extracts an integer timestamp from either a Point-like object or a hash
  # (e.g. data rehydrated from the buffer).
  def point_timestamp(point)
    if point.respond_to?(:timestamp)
      # Point objects from the database carry integer timestamps already.
      point.timestamp
    elsif point.is_a?(Hash)
      (point[:timestamp] || point['timestamp']).to_i
    else
      raise ArgumentError, "Invalid point type: #{point.class}"
    end
  end

  # Extracts [lat, lon] from either a Point-like object or a hash.
  def point_coordinates(point)
    if point.respond_to?(:lat) && point.respond_to?(:lon)
      [point.lat, point.lon]
    elsif point.is_a?(Hash)
      [point[:lat] || point['lat'], point[:lon] || point['lon']]
    else
      raise ArgumentError, "Invalid point type: #{point.class}"
    end
  end

  # Hooks that the including class must provide.
  def distance_threshold_meters
    raise NotImplementedError, "Including class must implement distance_threshold_meters"
  end

  def time_threshold_minutes
    raise NotImplementedError, "Including class must implement time_threshold_minutes"
  end
end
|
||||
@@ -1,147 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# Track creation and statistics calculation module for building Track records from GPS points.
|
||||
#
|
||||
# This module provides the core functionality for converting arrays of GPS points into
|
||||
# Track database records with calculated statistics including distance, duration, speed,
|
||||
# and elevation metrics.
|
||||
#
|
||||
# How it works:
|
||||
# 1. Takes an array of Point objects representing a track segment
|
||||
# 2. Creates a Track record with basic temporal and spatial boundaries
|
||||
# 3. Calculates comprehensive statistics: distance, duration, average speed
|
||||
# 4. Computes elevation metrics: gain, loss, maximum, minimum
|
||||
# 5. Builds a LineString path representation for mapping
|
||||
# 6. Associates all points with the created track
|
||||
#
|
||||
# Statistics calculated:
|
||||
# - Distance: Always stored in meters as integers for consistency
|
||||
# - Duration: Total time in seconds between first and last point
|
||||
# - Average speed: In km/h regardless of user's distance unit preference
|
||||
# - Elevation gain/loss: Cumulative ascent and descent in meters
|
||||
# - Elevation max/min: Highest and lowest altitudes in the track
|
||||
#
|
||||
# Distance is converted to user's preferred unit only at display time, not storage time.
|
||||
# This ensures consistency when users change their distance unit preferences.
|
||||
#
|
||||
# Used by:
|
||||
# - Tracks::Generator for creating tracks during generation
|
||||
# - Any class that needs to convert point arrays to Track records
|
||||
#
|
||||
# Example usage:
|
||||
# class MyTrackProcessor
|
||||
# include Tracks::TrackBuilder
|
||||
#
|
||||
# def initialize(user)
|
||||
# @user = user
|
||||
# end
|
||||
#
|
||||
# def process_segment(points)
|
||||
# track = create_track_from_points(points)
|
||||
# # Track now exists with calculated statistics
|
||||
# end
|
||||
#
|
||||
# private
|
||||
#
|
||||
# attr_reader :user
|
||||
# end
|
||||
#
|
||||
module Tracks::TrackBuilder
  extend ActiveSupport::Concern

  # Builds, saves and returns a Track for the given ordered points, or nil
  # when the segment is too short or the record fails validation. On
  # success, every point is re-assigned to the new track via update_all.
  def create_track_from_points(points)
    return nil if points.size < 2

    distance = calculate_track_distance(points)
    duration = calculate_duration(points)
    elevation = calculate_elevation_stats(points)

    track = Track.new(
      user_id: user.id,
      start_at: Time.zone.at(points.first.timestamp),
      end_at: Time.zone.at(points.last.timestamp),
      original_path: build_path(points),
      distance: distance,
      duration: duration,
      avg_speed: calculate_average_speed(distance, duration),
      elevation_gain: elevation[:gain],
      elevation_loss: elevation[:loss],
      elevation_max: elevation[:max],
      elevation_min: elevation[:min]
    )

    unless track.save
      Rails.logger.error "Failed to create track for user #{user.id}: #{track.errors.full_messages.join(', ')}"
      return nil
    end

    Point.where(id: points.map(&:id)).update_all(track_id: track.id)
    track
  end

  # LineString path for mapping, built from the points' coordinates.
  def build_path(points)
    Tracks::BuildPath.new(points.map(&:lonlat)).call
  end

  # Distance is always stored in whole meters so unit preferences only
  # affect display, never storage.
  def calculate_track_distance(points)
    Point.total_distance(points, :m).round
  end

  # Elapsed seconds between first and last point.
  def calculate_duration(points)
    points.last.timestamp - points.first.timestamp
  end

  # Average speed in km/h; 0.0 when either input is non-positive.
  def calculate_average_speed(distance_in_meters, duration_seconds)
    return 0.0 if duration_seconds <= 0 || distance_in_meters <= 0

    # m/s -> km/h via the 3.6 factor.
    (distance_in_meters.to_f / duration_seconds * 3.6).round(2)
  end

  # Walks consecutive altitude readings (nils dropped) and accumulates
  # cumulative gain/loss plus the extremes.
  def calculate_elevation_stats(points)
    altitudes = points.map(&:altitude).compact
    return default_elevation_stats if altitudes.empty?

    gain = 0
    loss = 0
    altitudes.each_cons(2) do |previous, current|
      delta = current - previous
      if delta.positive?
        gain += delta
      else
        loss += delta.abs
      end
    end

    { gain: gain.round, loss: loss.round, max: altitudes.max, min: altitudes.min }
  end

  def default_elevation_stats
    { gain: 0, loss: 0, max: 0, min: 0 }
  end

  private

  # Hook that the including class must provide.
  def user
    raise NotImplementedError, "Including class must implement user method"
  end
end
|
||||
@@ -30,9 +30,10 @@ cache_preheating_job:
|
||||
class: "Cache::PreheatingJob"
|
||||
queue: default
|
||||
|
||||
tracks_bulk_creating_job:
|
||||
track_processing_job:
|
||||
cron: "10 0 * * *" # every day at 00:10
|
||||
class: "Tracks::BulkCreatingJob"
|
||||
class: "TrackProcessingJob"
|
||||
args: [null, "bulk", {"cleanup_tracks": true}]
|
||||
queue: tracks
|
||||
|
||||
place_name_fetching_job:
|
||||
|
||||
@@ -11,5 +11,17 @@ RSpec.describe AppVersionCheckingJob, type: :job do
|
||||
|
||||
job.perform
|
||||
end
|
||||
|
||||
# Mirrors the job's new `self_hosted?` guard: non-self-hosted installations
# must not hit the version-check service at all.
context 'when app is not self-hosted' do
  before { allow(DawarichSettings).to receive(:self_hosted?).and_return(false) }

  it 'does not call CheckAppVersion service' do
    expect(CheckAppVersion).not_to receive(:new)

    job.perform
  end
end
|
||||
end
|
||||
end
|
||||
|
||||
152
spec/jobs/track_processing_job_spec.rb
Normal file
152
spec/jobs/track_processing_job_spec.rb
Normal file
@@ -0,0 +1,152 @@
|
||||
require 'rails_helper'
|
||||
|
||||
# Spec for the unified TrackProcessingJob that replaces the old per-purpose
# track jobs. Covers both processing modes, the early-exit paths for stale
# or missing records, error reporting, and the sidekiq configuration.
RSpec.describe TrackProcessingJob, type: :job do
  let(:user) { create(:user) }
  let(:job) { described_class.new }

  describe '#perform' do
    # Bulk mode: process all unassigned points for the user.
    context 'with bulk mode' do
      it 'calls TrackService with bulk mode' do
        expect_any_instance_of(TrackService).to receive(:call).and_return(3)

        job.perform(user.id, 'bulk', cleanup_tracks: true)
      end

      it 'passes options to TrackService' do
        expect(TrackService).to receive(:new).with(
          user,
          mode: :bulk,
          cleanup_tracks: true
        ).and_call_original

        expect_any_instance_of(TrackService).to receive(:call)

        job.perform(user.id, 'bulk', cleanup_tracks: true)
      end
    end

    # Incremental mode: process around a single freshly created point.
    context 'with incremental mode' do
      let!(:point) { create(:point, user: user) }

      it 'calls TrackService with incremental mode' do
        expect_any_instance_of(TrackService).to receive(:call).and_return(1)

        job.perform(user.id, 'incremental', point_id: point.id)
      end

      it 'passes point_id to TrackService' do
        expect(TrackService).to receive(:new).with(
          user,
          mode: :incremental,
          point_id: point.id
        ).and_call_original

        expect_any_instance_of(TrackService).to receive(:call)

        job.perform(user.id, 'incremental', point_id: point.id)
      end
    end

    # Stale points (created long before the job runs) are skipped entirely.
    context 'with incremental mode and old point' do
      let!(:point) { create(:point, user: user, created_at: 2.hours.ago) }

      it 'skips processing for old points' do
        expect(TrackService).not_to receive(:new)

        job.perform(user.id, 'incremental', point_id: point.id)
      end

      it 'logs the skip' do
        expect(Rails.logger).to receive(:debug).with(/Skipping track processing for old point/)

        job.perform(user.id, 'incremental', point_id: point.id)
      end
    end

    # A missing point is a soft failure: warn and return, never raise.
    context 'with missing point' do
      it 'logs warning and returns early' do
        expect(Rails.logger).to receive(:warn).with(/Point 999 not found/)
        expect(TrackService).not_to receive(:new)

        job.perform(user.id, 'incremental', point_id: 999)
      end
    end

    # A missing user is a hard failure: log and re-raise so sidekiq retries.
    context 'with missing user' do
      it 'logs error and raises exception' do
        expect(Rails.logger).to receive(:error).with(/User 999 not found/)

        expect { job.perform(999, 'bulk') }.to raise_error(ActiveRecord::RecordNotFound)
      end
    end

    # Processing errors are reported to ExceptionReporter and re-raised.
    context 'with processing error' do
      before do
        allow_any_instance_of(TrackService).to receive(:call).and_raise(StandardError.new('Test error'))
      end

      it 'logs error and calls ExceptionReporter' do
        expect(Rails.logger).to receive(:error).with(/Track processing failed/)
        expect(Rails.logger).to receive(:error) # for backtrace
        expect(ExceptionReporter).to receive(:call).with(
          instance_of(StandardError),
          'Track processing failed',
          hash_including(user_id: user.id, mode: 'bulk')
        )

        expect { job.perform(user.id, 'bulk') }.to raise_error(StandardError)
      end
    end

    # Threshold overrides flow straight through to the service.
    context 'with custom thresholds' do
      it 'passes custom thresholds to TrackService' do
        expect(TrackService).to receive(:new).with(
          user,
          mode: :bulk,
          cleanup_tracks: false,
          time_threshold_minutes: 30,
          distance_threshold_meters: 1000
        ).and_call_original

        expect_any_instance_of(TrackService).to receive(:call)

        job.perform(user.id, 'bulk', time_threshold_minutes: 30, distance_threshold_meters: 1000)
      end
    end
  end

  # Static sidekiq/ActiveJob configuration declared on the job class.
  describe 'job configuration' do
    it 'has correct queue' do
      expect(described_class.queue_name).to eq('tracks')
    end

    it 'has retry configuration' do
      expect(described_class.sidekiq_options['retry']).to eq(3)
    end

    it 'has unique job configuration' do
      expect(described_class.sidekiq_options['unique_for']).to eq(30.seconds)
    end
  end

  # unique_args must collapse to [user_id, mode] so repeated enqueues for
  # the same user/mode within the window are deduplicated.
  describe 'job uniqueness' do
    it 'uses user_id and mode for uniqueness' do
      unique_args_proc = described_class.sidekiq_options['unique_args']
      result = unique_args_proc.call([user.id, 'bulk', { cleanup_tracks: true }])

      expect(result).to eq([user.id, 'bulk'])
    end
  end

  describe 'integration with job queue' do
    it 'can be enqueued' do
      expect { described_class.perform_later(user.id, 'bulk') }.to change(ActiveJob::Base.queue_adapter.enqueued_jobs, :size).by(1)
    end

    it 'can be performed now' do
      expect_any_instance_of(TrackService).to receive(:call).and_return(0)

      described_class.perform_now(user.id, 'incremental')
    end
  end
end
|
||||
@@ -1,19 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
# Thin delegation spec: the job forwards its keyword arguments verbatim to
# Tracks::BulkTrackCreator and invokes it.
RSpec.describe Tracks::BulkCreatingJob, type: :job do
  describe '#perform' do
    let(:creator) { instance_double(Tracks::BulkTrackCreator) }

    before do
      allow(Tracks::BulkTrackCreator)
        .to receive(:new)
        .with(start_at: 'foo', end_at: 'bar', user_ids: [1, 2])
        .and_return(creator)
    end

    it 'calls Tracks::BulkTrackCreator with the correct arguments' do
      expect(creator).to receive(:call)

      described_class.new.perform(start_at: 'foo', end_at: 'bar', user_ids: [1, 2])
    end
  end
end
|
||||
@@ -1,115 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
# Spec for the legacy Tracks::CreateJob: delegates to Tracks::CreateFromPoints
# and notifies the user of success or failure via Notifications::Create.
RSpec.describe Tracks::CreateJob, type: :job do
  let(:user) { create(:user) }

  describe '#perform' do
    let(:service_instance) { instance_double(Tracks::CreateFromPoints) }
    let(:notification_service) { instance_double(Notifications::Create) }

    before do
      allow(Tracks::CreateFromPoints).to receive(:new).with(user, start_at: nil, end_at: nil, cleaning_strategy: :replace).and_return(service_instance)
      allow(service_instance).to receive(:call).and_return(3)
      allow(Notifications::Create).to receive(:new).and_return(notification_service)
      allow(notification_service).to receive(:call)
    end

    # Happy path: defaults (no window, :replace strategy) plus an info
    # notification whose message embeds the created-track count.
    it 'calls the service and creates a notification' do
      described_class.new.perform(user.id)

      expect(Tracks::CreateFromPoints).to have_received(:new).with(user, start_at: nil, end_at: nil, cleaning_strategy: :replace)
      expect(service_instance).to have_received(:call)
      expect(Notifications::Create).to have_received(:new).with(
        user: user,
        kind: :info,
        title: 'Tracks Generated',
        content: 'Created 3 tracks from your location data. Check your tracks section to view them.'
      )
      expect(notification_service).to have_received(:call)
    end

    # Explicit window and strategy must be forwarded untouched.
    context 'with custom parameters' do
      let(:start_at) { 1.day.ago.beginning_of_day.to_i }
      let(:end_at) { 1.day.ago.end_of_day.to_i }
      let(:cleaning_strategy) { :daily }

      before do
        allow(Tracks::CreateFromPoints).to receive(:new).with(user, start_at: start_at, end_at: end_at, cleaning_strategy: cleaning_strategy).and_return(service_instance)
        allow(service_instance).to receive(:call).and_return(2)
        allow(Notifications::Create).to receive(:new).and_return(notification_service)
        allow(notification_service).to receive(:call)
      end

      it 'passes custom parameters to the service' do
        described_class.new.perform(user.id, start_at: start_at, end_at: end_at, cleaning_strategy: cleaning_strategy)

        expect(Tracks::CreateFromPoints).to have_received(:new).with(user, start_at: start_at, end_at: end_at, cleaning_strategy: cleaning_strategy)
        expect(service_instance).to have_received(:call)
        expect(Notifications::Create).to have_received(:new).with(
          user: user,
          kind: :info,
          title: 'Tracks Generated',
          content: 'Created 2 tracks from your location data. Check your tracks section to view them.'
        )
        expect(notification_service).to have_received(:call)
      end
    end

    # Failure path: the error is surfaced to the user as an :error
    # notification and forwarded to ExceptionReporter.
    context 'when service raises an error' do
      let(:error_message) { 'Something went wrong' }
      let(:service_instance) { instance_double(Tracks::CreateFromPoints) }
      let(:notification_service) { instance_double(Notifications::Create) }

      before do
        allow(Tracks::CreateFromPoints).to receive(:new).with(user, start_at: nil, end_at: nil, cleaning_strategy: :replace).and_return(service_instance)
        allow(service_instance).to receive(:call).and_raise(StandardError, error_message)
        allow(Notifications::Create).to receive(:new).and_return(notification_service)
        allow(notification_service).to receive(:call)
      end

      it 'creates an error notification' do
        described_class.new.perform(user.id)

        expect(Notifications::Create).to have_received(:new).with(
          user: user,
          kind: :error,
          title: 'Track Generation Failed',
          content: "Failed to generate tracks from your location data: #{error_message}"
        )
        expect(notification_service).to have_received(:call)
      end

      it 'reports the error using ExceptionReporter' do
        allow(ExceptionReporter).to receive(:call)

        described_class.new.perform(user.id)

        expect(ExceptionReporter).to have_received(:call).with(
          kind_of(StandardError),
          'Failed to create tracks for user'
        )
      end
    end

    # Even a lookup failure (unknown user id) must not escape the job.
    context 'when user does not exist' do
      it 'handles the error gracefully and creates error notification' do
        allow(User).to receive(:find).with(999).and_raise(ActiveRecord::RecordNotFound)
        allow(ExceptionReporter).to receive(:call)
        allow(Notifications::Create).to receive(:new).and_return(instance_double(Notifications::Create, call: nil))

        # Should not raise an error because it's caught by the rescue block
        expect { described_class.new.perform(999) }.not_to raise_error

        expect(ExceptionReporter).to have_received(:call)
      end
    end
  end

  describe 'queue' do
    it 'is queued on default queue' do
      expect(described_class.new.queue_name).to eq('default')
    end
  end
end
|
||||
@@ -40,6 +40,54 @@ RSpec.describe Point, type: :model do
|
||||
point.update(lonlat: 'POINT(-79.85581250721961 15.854775993302411)')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#trigger_track_processing' do
|
||||
let(:user) { create(:user) }
|
||||
let(:point) { build(:point, user: user) }
|
||||
|
||||
context 'when point is from import' do
|
||||
let(:import) { create(:import, user: user) }
|
||||
let(:point) { build(:point, user: user, import: import) }
|
||||
|
||||
it 'does not trigger track processing' do
|
||||
expect(TrackProcessingJob).not_to receive(:perform_now)
|
||||
expect(TrackProcessingJob).not_to receive(:perform_later)
|
||||
|
||||
point.save!
|
||||
end
|
||||
end
|
||||
|
||||
context 'when point is not from import' do
|
||||
context 'with no previous point' do
|
||||
it 'triggers immediate processing' do
|
||||
expect(TrackProcessingJob).to receive(:perform_now).with(user.id, 'incremental', point_id: point.id)
|
||||
|
||||
point.save!
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point triggering immediate processing' do
|
||||
let!(:previous_point) { create(:point, user: user, timestamp: 2.hours.ago.to_i) }
|
||||
|
||||
it 'triggers immediate processing' do
|
||||
expect(TrackProcessingJob).to receive(:perform_now).with(user.id, 'incremental', point_id: point.id)
|
||||
|
||||
point.save!
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point not triggering immediate processing' do
|
||||
let!(:previous_point) { create(:point, user: user, timestamp: 10.minutes.ago.to_i, lonlat: 'POINT(13.404954 52.520008)') }
|
||||
let(:point) { build(:point, user: user, timestamp: 5.minutes.ago.to_i, lonlat: 'POINT(13.405954 52.521008)') }
|
||||
|
||||
it 'triggers batched processing' do
|
||||
expect(TrackProcessingJob).to receive(:perform_later).with(user.id, 'incremental', point_id: point.id)
|
||||
|
||||
point.save!
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'scopes' do
|
||||
@@ -121,14 +169,65 @@ RSpec.describe Point, type: :model do
|
||||
end
|
||||
end
|
||||
|
||||
describe '#trigger_incremental_track_generation' do
|
||||
let(:point) do
|
||||
create(:point, track: track, import_id: nil, timestamp: 1.hour.ago.to_i, reverse_geocoded_at: 1.hour.ago)
|
||||
end
|
||||
let(:track) { create(:track) }
|
||||
describe '#should_trigger_immediate_processing?' do
|
||||
let(:user) { create(:user) }
|
||||
let(:point) { build(:point, user: user, timestamp: Time.current.to_i, lonlat: 'POINT(13.405954 52.521008)') }
|
||||
|
||||
it 'enqueues Tracks::IncrementalGeneratorJob' do
|
||||
expect { point.send(:trigger_incremental_track_generation) }.to have_enqueued_job(Tracks::IncrementalGeneratorJob).with(point.user_id, point.recorded_at.to_date.to_s, 5)
|
||||
context 'with no previous point' do
|
||||
it 'returns true' do
|
||||
result = point.send(:should_trigger_immediate_processing?, nil)
|
||||
expect(result).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point exceeding time threshold' do
|
||||
let(:previous_point) { create(:point, user: user, timestamp: 2.hours.ago.to_i, lonlat: 'POINT(13.404954 52.520008)') }
|
||||
|
||||
it 'returns true' do
|
||||
result = point.send(:should_trigger_immediate_processing?, previous_point)
|
||||
expect(result).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point exceeding distance threshold' do
|
||||
let(:previous_point) { create(:point, user: user, timestamp: 10.minutes.ago.to_i, lonlat: 'POINT(14.404954 53.520008)') }
|
||||
|
||||
it 'returns true' do
|
||||
result = point.send(:should_trigger_immediate_processing?, previous_point)
|
||||
expect(result).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point within both thresholds' do
|
||||
let(:previous_point) { create(:point, user: user, timestamp: 10.minutes.ago.to_i, lonlat: 'POINT(13.404954 52.520008)') }
|
||||
|
||||
it 'returns false' do
|
||||
result = point.send(:should_trigger_immediate_processing?, previous_point)
|
||||
expect(result).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point exactly at time threshold' do
|
||||
let(:previous_point) { create(:point, user: user, timestamp: 30.minutes.ago.to_i, lonlat: 'POINT(13.404954 52.520008)') }
|
||||
|
||||
it 'returns false' do
|
||||
result = point.send(:should_trigger_immediate_processing?, previous_point)
|
||||
expect(result).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with previous point exactly at distance threshold' do
|
||||
let(:previous_point) { create(:point, user: user, timestamp: 10.minutes.ago.to_i, lonlat: 'POINT(13.404954 52.520008)') }
|
||||
|
||||
before do
|
||||
# Mock distance calculation to return exactly 1.0 km
|
||||
allow(Geocoder::Calculations).to receive(:distance_between).and_return(1.0)
|
||||
end
|
||||
|
||||
it 'returns false' do
|
||||
result = point.send(:should_trigger_immediate_processing?, previous_point)
|
||||
expect(result).to eq(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
326
spec/services/track_service_spec.rb
Normal file
326
spec/services/track_service_spec.rb
Normal file
@@ -0,0 +1,326 @@
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe TrackService, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user, **options) }
|
||||
let(:options) { {} }
|
||||
|
||||
describe '#initialize' do
|
||||
it 'sets default values' do
|
||||
expect(service.user).to eq(user)
|
||||
expect(service.mode).to eq(:bulk)
|
||||
expect(service.cleanup_tracks).to eq(false)
|
||||
expect(service.time_threshold_minutes).to eq(60)
|
||||
expect(service.distance_threshold_meters).to eq(500)
|
||||
end
|
||||
|
||||
context 'with custom options' do
|
||||
let(:options) do
|
||||
{
|
||||
mode: :incremental,
|
||||
cleanup_tracks: true,
|
||||
time_threshold_minutes: 30,
|
||||
distance_threshold_meters: 1000
|
||||
}
|
||||
end
|
||||
|
||||
it 'uses provided options' do
|
||||
expect(service.mode).to eq(:incremental)
|
||||
expect(service.cleanup_tracks).to eq(true)
|
||||
expect(service.time_threshold_minutes).to eq(30)
|
||||
expect(service.distance_threshold_meters).to eq(1000)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'with no points' do
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i, lonlat: 'POINT(13.404954 52.520008)'),
|
||||
create(:point, user: user, timestamp: 30.minutes.ago.to_i, lonlat: 'POINT(13.405954 52.521008)'),
|
||||
create(:point, user: user, timestamp: Time.current.to_i, lonlat: 'POINT(13.406954 52.522008)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates tracks from points' do
|
||||
expect { service.call }.to change(Track, :count).by(1)
|
||||
end
|
||||
|
||||
it 'assigns points to created tracks' do
|
||||
service.call
|
||||
track = Track.last
|
||||
expect(points.map(&:reload).map(&:track)).to all(eq(track))
|
||||
end
|
||||
|
||||
it 'returns count of tracks created' do
|
||||
result = service.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with cleanup_tracks enabled' do
|
||||
let(:options) { { cleanup_tracks: true } }
|
||||
let!(:existing_track) { create(:track, user: user) }
|
||||
|
||||
it 'removes existing tracks in bulk mode' do
|
||||
expect { service.call }.to change(Track, :count).by(-1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with incremental mode and old point' do
|
||||
let(:options) { { mode: :incremental, point_id: point.id } }
|
||||
let!(:point) { create(:point, user: user, created_at: 2.hours.ago) }
|
||||
|
||||
it 'returns early for old points' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#load_points' do
|
||||
context 'in bulk mode' do
|
||||
let(:service) { described_class.new(user, mode: :bulk) }
|
||||
let!(:assigned_point) { create(:point, user: user, track: create(:track, user: user)) }
|
||||
let!(:unassigned_point) { create(:point, user: user) }
|
||||
|
||||
it 'loads only unassigned points' do
|
||||
points = service.send(:load_points)
|
||||
expect(points).to contain_exactly(unassigned_point)
|
||||
end
|
||||
end
|
||||
|
||||
context 'in incremental mode' do
|
||||
let(:service) { described_class.new(user, mode: :incremental) }
|
||||
let!(:old_point) { create(:point, user: user, timestamp: 3.hours.ago.to_i) }
|
||||
let!(:recent_unassigned_point) { create(:point, user: user, timestamp: 1.hour.ago.to_i) }
|
||||
let!(:active_track_point) { create(:point, user: user, track: recent_track, timestamp: 1.hour.ago.to_i) }
|
||||
let(:recent_track) { create(:track, user: user, end_at: 1.hour.ago) }
|
||||
|
||||
it 'loads recent unassigned points and active track points' do
|
||||
points = service.send(:load_points)
|
||||
expect(points).to contain_exactly(recent_unassigned_point, active_track_point)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#segment_points' do
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
context 'with points that should be segmented' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 2.hours.ago.to_i, lonlat: 'POINT(13.404954 52.520008)'),
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i, lonlat: 'POINT(13.405954 52.521008)'),
|
||||
create(:point, user: user, timestamp: Time.current.to_i, lonlat: 'POINT(13.406954 52.522008)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates segments based on time threshold' do
|
||||
segments = service.send(:segment_points, points)
|
||||
expect(segments.count).to eq(2) # Should split due to time gap
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points that should stay together' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 30.minutes.ago.to_i, lonlat: 'POINT(13.404954 52.520008)'),
|
||||
create(:point, user: user, timestamp: 20.minutes.ago.to_i, lonlat: 'POINT(13.405954 52.521008)'),
|
||||
create(:point, user: user, timestamp: 10.minutes.ago.to_i, lonlat: 'POINT(13.406954 52.522008)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'keeps points in single segment' do
|
||||
segments = service.send(:segment_points, points)
|
||||
expect(segments.count).to eq(1)
|
||||
expect(segments.first.count).to eq(3)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#should_start_new_segment?' do
|
||||
let(:service) { described_class.new(user) }
|
||||
let(:previous_point) { create(:point, user: user, timestamp: 2.hours.ago.to_i, lonlat: 'POINT(13.404954 52.520008)') }
|
||||
|
||||
context 'with no previous point' do
|
||||
it 'returns false' do
|
||||
current_point = create(:point, user: user, timestamp: 1.hour.ago.to_i)
|
||||
result = service.send(:should_start_new_segment?, current_point, nil)
|
||||
expect(result).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with time threshold exceeded' do
|
||||
it 'returns true' do
|
||||
current_point = create(:point, user: user, timestamp: Time.current.to_i)
|
||||
result = service.send(:should_start_new_segment?, current_point, previous_point)
|
||||
expect(result).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with distance threshold exceeded' do
|
||||
it 'returns true' do
|
||||
# Create a point very far away (should exceed 500m default threshold)
|
||||
current_point = create(:point, user: user, timestamp: 1.hour.ago.to_i, lonlat: 'POINT(14.404954 53.520008)')
|
||||
result = service.send(:should_start_new_segment?, current_point, previous_point)
|
||||
expect(result).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with neither threshold exceeded' do
|
||||
it 'returns false' do
|
||||
# Create a point nearby and within time threshold
|
||||
current_point = create(:point, user: user, timestamp: 1.hour.ago.to_i, lonlat: 'POINT(13.405954 52.521008)')
|
||||
result = service.send(:should_start_new_segment?, current_point, previous_point)
|
||||
expect(result).to eq(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#create_track_from_points' do
|
||||
let(:service) { described_class.new(user) }
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i, lonlat: 'POINT(13.404954 52.520008)', altitude: 100),
|
||||
create(:point, user: user, timestamp: 30.minutes.ago.to_i, lonlat: 'POINT(13.405954 52.521008)', altitude: 120),
|
||||
create(:point, user: user, timestamp: Time.current.to_i, lonlat: 'POINT(13.406954 52.522008)', altitude: 110)
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates a track with correct attributes' do
|
||||
track = service.send(:create_track_from_points, points)
|
||||
|
||||
expect(track).to be_persisted
|
||||
expect(track.user).to eq(user)
|
||||
expect(track.start_at).to be_within(1.second).of(Time.zone.at(points.first.timestamp))
|
||||
expect(track.end_at).to be_within(1.second).of(Time.zone.at(points.last.timestamp))
|
||||
expect(track.distance).to be > 0
|
||||
expect(track.duration).to be > 0
|
||||
expect(track.avg_speed).to be >= 0
|
||||
expect(track.elevation_gain).to be >= 0
|
||||
expect(track.elevation_loss).to be >= 0
|
||||
end
|
||||
|
||||
it 'assigns points to the track' do
|
||||
track = service.send(:create_track_from_points, points)
|
||||
expect(points.map(&:reload).map(&:track)).to all(eq(track))
|
||||
end
|
||||
|
||||
context 'with insufficient points' do
|
||||
let(:points) { [create(:point, user: user)] }
|
||||
|
||||
it 'returns nil' do
|
||||
result = service.send(:create_track_from_points, points)
|
||||
expect(result).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_distance' do
|
||||
let(:service) { described_class.new(user) }
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(13.404954 52.520008)'),
|
||||
create(:point, user: user, lonlat: 'POINT(13.405954 52.521008)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates distance between points' do
|
||||
distance = service.send(:calculate_distance, points)
|
||||
expect(distance).to be > 0
|
||||
expect(distance).to be_a(Integer) # Should be rounded to integer
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_average_speed' do
|
||||
let(:service) { described_class.new(user) }
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i),
|
||||
create(:point, user: user, timestamp: Time.current.to_i)
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates average speed' do
|
||||
speed = service.send(:calculate_average_speed, points)
|
||||
expect(speed).to be >= 0
|
||||
expect(speed).to be_a(Float)
|
||||
end
|
||||
|
||||
context 'with zero duration' do
|
||||
let(:points) do
|
||||
timestamp = Time.current.to_i
|
||||
[
|
||||
create(:point, user: user, timestamp: timestamp),
|
||||
create(:point, user: user, timestamp: timestamp)
|
||||
]
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
speed = service.send(:calculate_average_speed, points)
|
||||
expect(speed).to eq(0.0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_elevation_gain' do
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
context 'with ascending points' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, altitude: 100),
|
||||
create(:point, user: user, altitude: 120),
|
||||
create(:point, user: user, altitude: 110)
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates elevation gain' do
|
||||
gain = service.send(:calculate_elevation_gain, points)
|
||||
expect(gain).to eq(20) # 100 -> 120 = +20
|
||||
end
|
||||
end
|
||||
|
||||
context 'with no altitude data' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, altitude: nil),
|
||||
create(:point, user: user, altitude: nil)
|
||||
]
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
gain = service.send(:calculate_elevation_gain, points)
|
||||
expect(gain).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_elevation_loss' do
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
context 'with descending points' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, altitude: 120),
|
||||
create(:point, user: user, altitude: 100),
|
||||
create(:point, user: user, altitude: 110)
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates elevation loss' do
|
||||
loss = service.send(:calculate_elevation_loss, points)
|
||||
expect(loss).to eq(20) # 120 -> 100 = -20 (loss)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,35 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::BuildPath do
|
||||
describe '#call' do
|
||||
let(:coordinates) do
|
||||
[
|
||||
RGeo::Geographic.spherical_factory.point(-122.654321, 45.123456),
|
||||
RGeo::Geographic.spherical_factory.point(-122.765432, 45.234567),
|
||||
RGeo::Geographic.spherical_factory.point(-122.876543, 45.345678)
|
||||
]
|
||||
end
|
||||
|
||||
let(:service) { described_class.new(coordinates) }
|
||||
let(:result) { service.call }
|
||||
|
||||
it 'returns an RGeo::Geographic::SphericalLineString' do
|
||||
expect(result).to be_a(RGeo::Geographic::SphericalLineStringImpl)
|
||||
end
|
||||
|
||||
it 'creates a line string with the correct number of points' do
|
||||
expect(result.num_points).to eq(coordinates.length)
|
||||
end
|
||||
|
||||
it 'correctly converts coordinates to points with rounded values' do
|
||||
points = result.points
|
||||
|
||||
coordinates.each_with_index do |coordinate_pair, index|
|
||||
expect(points[index].x).to eq(coordinate_pair.lon.to_f.round(5))
|
||||
expect(points[index].y).to eq(coordinate_pair.lat.to_f.round(5))
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,176 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::BulkTrackCreator do
|
||||
describe '#call' do
|
||||
let!(:active_user) { create(:user) }
|
||||
let!(:inactive_user) { create(:user, :inactive) }
|
||||
let!(:user_without_points) { create(:user) }
|
||||
|
||||
let(:start_at) { 1.day.ago.beginning_of_day }
|
||||
let(:end_at) { 1.day.ago.end_of_day }
|
||||
|
||||
before do
|
||||
# Create points for active user in the target timeframe
|
||||
create(:point, user: active_user, timestamp: start_at.to_i + 1.hour.to_i)
|
||||
create(:point, user: active_user, timestamp: start_at.to_i + 2.hours.to_i)
|
||||
|
||||
# Create points for inactive user in the target timeframe
|
||||
create(:point, user: inactive_user, timestamp: start_at.to_i + 1.hour.to_i)
|
||||
end
|
||||
|
||||
context 'when explicit start_at is provided' do
|
||||
it 'schedules tracks creation jobs for active users with points in the timeframe' do
|
||||
expect {
|
||||
described_class.new(start_at:, end_at:).call
|
||||
}.to have_enqueued_job(Tracks::CreateJob).with(active_user.id, start_at:, end_at:, cleaning_strategy: :daily)
|
||||
end
|
||||
|
||||
it 'does not schedule jobs for users without tracked points' do
|
||||
expect {
|
||||
described_class.new(start_at:, end_at:).call
|
||||
}.not_to have_enqueued_job(Tracks::CreateJob).with(user_without_points.id, start_at:, end_at:, cleaning_strategy: :daily)
|
||||
end
|
||||
|
||||
it 'does not schedule jobs for users without points in the specified timeframe' do
|
||||
# Create a user with points outside the timeframe
|
||||
user_with_old_points = create(:user)
|
||||
create(:point, user: user_with_old_points, timestamp: 2.days.ago.to_i)
|
||||
|
||||
expect {
|
||||
described_class.new(start_at:, end_at:).call
|
||||
}.not_to have_enqueued_job(Tracks::CreateJob).with(user_with_old_points.id, start_at:, end_at:, cleaning_strategy: :daily)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when specific user_ids are provided' do
|
||||
it 'only processes the specified users' do
|
||||
expect {
|
||||
described_class.new(start_at:, end_at:, user_ids: [active_user.id]).call
|
||||
}.to have_enqueued_job(Tracks::CreateJob).with(active_user.id, start_at:, end_at:, cleaning_strategy: :daily)
|
||||
end
|
||||
|
||||
it 'does not process users not in the user_ids list' do
|
||||
expect {
|
||||
described_class.new(start_at:, end_at:, user_ids: [active_user.id]).call
|
||||
}.not_to have_enqueued_job(Tracks::CreateJob).with(inactive_user.id, start_at:, end_at:, cleaning_strategy: :daily)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with automatic start time determination' do
|
||||
let(:user_with_tracks) { create(:user) }
|
||||
let(:user_without_tracks) { create(:user) }
|
||||
let(:current_time) { Time.current }
|
||||
|
||||
before do
|
||||
# Create some historical points and tracks for user_with_tracks
|
||||
create(:point, user: user_with_tracks, timestamp: 3.days.ago.to_i)
|
||||
create(:point, user: user_with_tracks, timestamp: 2.days.ago.to_i)
|
||||
|
||||
# Create a track ending 1 day ago
|
||||
create(:track, user: user_with_tracks, end_at: 1.day.ago)
|
||||
|
||||
# Create newer points after the last track
|
||||
create(:point, user: user_with_tracks, timestamp: 12.hours.ago.to_i)
|
||||
create(:point, user: user_with_tracks, timestamp: 6.hours.ago.to_i)
|
||||
|
||||
# Create points for user without tracks
|
||||
create(:point, user: user_without_tracks, timestamp: 2.days.ago.to_i)
|
||||
create(:point, user: user_without_tracks, timestamp: 1.day.ago.to_i)
|
||||
end
|
||||
|
||||
it 'starts from the end of the last track for users with existing tracks' do
|
||||
track_end_time = user_with_tracks.tracks.order(end_at: :desc).first.end_at
|
||||
|
||||
expect {
|
||||
described_class.new(end_at: current_time, user_ids: [user_with_tracks.id]).call
|
||||
}.to have_enqueued_job(Tracks::CreateJob).with(
|
||||
user_with_tracks.id,
|
||||
start_at: track_end_time,
|
||||
end_at: current_time.to_datetime,
|
||||
cleaning_strategy: :daily
|
||||
)
|
||||
end
|
||||
|
||||
it 'starts from the oldest point for users without tracks' do
|
||||
oldest_point_time = Time.zone.at(user_without_tracks.tracked_points.order(:timestamp).first.timestamp)
|
||||
|
||||
expect {
|
||||
described_class.new(end_at: current_time, user_ids: [user_without_tracks.id]).call
|
||||
}.to have_enqueued_job(Tracks::CreateJob).with(
|
||||
user_without_tracks.id,
|
||||
start_at: oldest_point_time,
|
||||
end_at: current_time.to_datetime,
|
||||
cleaning_strategy: :daily
|
||||
)
|
||||
end
|
||||
|
||||
it 'falls back to 1 day ago for users with no points' do
|
||||
expect {
|
||||
described_class.new(end_at: current_time, user_ids: [user_without_points.id]).call
|
||||
}.not_to have_enqueued_job(Tracks::CreateJob).with(
|
||||
user_without_points.id,
|
||||
start_at: anything,
|
||||
end_at: anything,
|
||||
cleaning_strategy: :daily
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with default parameters' do
|
||||
let(:user_with_recent_points) { create(:user) }
|
||||
|
||||
before do
|
||||
# Create points within yesterday's timeframe
|
||||
create(:point, user: user_with_recent_points, timestamp: 1.day.ago.beginning_of_day.to_i + 2.hours.to_i)
|
||||
create(:point, user: user_with_recent_points, timestamp: 1.day.ago.beginning_of_day.to_i + 6.hours.to_i)
|
||||
end
|
||||
|
||||
it 'uses automatic start time determination with yesterday as end_at' do
|
||||
oldest_point_time = Time.zone.at(user_with_recent_points.tracked_points.order(:timestamp).first.timestamp)
|
||||
|
||||
expect {
|
||||
described_class.new(user_ids: [user_with_recent_points.id]).call
|
||||
}.to have_enqueued_job(Tracks::CreateJob).with(
|
||||
user_with_recent_points.id,
|
||||
start_at: oldest_point_time,
|
||||
end_at: 1.day.ago.end_of_day.to_datetime,
|
||||
cleaning_strategy: :daily
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#start_time' do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new }
|
||||
|
||||
context 'when user has tracks' do
|
||||
let!(:old_track) { create(:track, user: user, end_at: 3.days.ago) }
|
||||
let!(:recent_track) { create(:track, user: user, end_at: 1.day.ago) }
|
||||
|
||||
it 'returns the end time of the most recent track' do
|
||||
result = service.send(:start_time, user)
|
||||
expect(result).to eq(recent_track.end_at)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has no tracks but has points' do
|
||||
let!(:old_point) { create(:point, user: user, timestamp: 5.days.ago.to_i) }
|
||||
let!(:recent_point) { create(:point, user: user, timestamp: 2.days.ago.to_i) }
|
||||
|
||||
it 'returns the timestamp of the oldest point' do
|
||||
result = service.send(:start_time, user)
|
||||
expect(result).to eq(Time.zone.at(old_point.timestamp))
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has no tracks and no points' do
|
||||
it 'returns 1 day ago beginning of day' do
|
||||
result = service.send(:start_time, user)
|
||||
expect(result).to eq(1.day.ago.beginning_of_day)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,95 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::Cleaners::DailyCleaner do
|
||||
let(:user) { create(:user) }
|
||||
let(:start_at) { 1.day.ago.beginning_of_day }
|
||||
let(:end_at) { 1.day.ago.end_of_day }
|
||||
let(:cleaner) { described_class.new(user, start_at: start_at.to_i, end_at: end_at.to_i) }
|
||||
|
||||
describe '#cleanup' do
|
||||
context 'when there are no overlapping tracks' do
|
||||
before do
|
||||
# Create a track that ends before our window
|
||||
track = create(:track, user: user, start_at: 2.days.ago, end_at: 2.days.ago + 1.hour)
|
||||
create(:point, user: user, track: track, timestamp: 2.days.ago.to_i)
|
||||
end
|
||||
|
||||
it 'does not remove any tracks' do
|
||||
expect { cleaner.cleanup }.not_to change { user.tracks.count }
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a track is completely within the time window' do
|
||||
let!(:track) { create(:track, user: user, start_at: start_at + 1.hour, end_at: end_at - 1.hour) }
|
||||
let!(:point1) { create(:point, user: user, track: track, timestamp: (start_at + 1.hour).to_i) }
|
||||
let!(:point2) { create(:point, user: user, track: track, timestamp: (start_at + 2.hours).to_i) }
|
||||
|
||||
it 'removes all points from the track and deletes it' do
|
||||
expect { cleaner.cleanup }.to change { user.tracks.count }.by(-1)
|
||||
expect(point1.reload.track_id).to be_nil
|
||||
expect(point2.reload.track_id).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a track spans across the time window' do
|
||||
let!(:track) { create(:track, user: user, start_at: start_at - 1.hour, end_at: end_at + 1.hour) }
|
||||
let!(:point_before) { create(:point, user: user, track: track, timestamp: (start_at - 30.minutes).to_i) }
|
||||
let!(:point_during1) { create(:point, user: user, track: track, timestamp: (start_at + 1.hour).to_i) }
|
||||
let!(:point_during2) { create(:point, user: user, track: track, timestamp: (start_at + 2.hours).to_i) }
|
||||
let!(:point_after) { create(:point, user: user, track: track, timestamp: (end_at + 30.minutes).to_i) }
|
||||
|
||||
it 'removes only points within the window and updates track boundaries' do
|
||||
expect { cleaner.cleanup }.not_to change { user.tracks.count }
|
||||
|
||||
# Points outside window should remain attached
|
||||
expect(point_before.reload.track_id).to eq(track.id)
|
||||
expect(point_after.reload.track_id).to eq(track.id)
|
||||
|
||||
# Points inside window should be detached
|
||||
expect(point_during1.reload.track_id).to be_nil
|
||||
expect(point_during2.reload.track_id).to be_nil
|
||||
|
||||
# Track boundaries should be updated
|
||||
track.reload
|
||||
expect(track.start_at).to be_within(1.second).of(Time.zone.at(point_before.timestamp))
|
||||
expect(track.end_at).to be_within(1.second).of(Time.zone.at(point_after.timestamp))
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a track overlaps but has insufficient remaining points' do
|
||||
let!(:track) { create(:track, user: user, start_at: start_at - 1.hour, end_at: end_at + 1.hour) }
|
||||
let!(:point_before) { create(:point, user: user, track: track, timestamp: (start_at - 30.minutes).to_i) }
|
||||
let!(:point_during) { create(:point, user: user, track: track, timestamp: (start_at + 1.hour).to_i) }
|
||||
|
||||
it 'removes the track entirely and orphans remaining points' do
|
||||
expect { cleaner.cleanup }.to change { user.tracks.count }.by(-1)
|
||||
|
||||
expect(point_before.reload.track_id).to be_nil
|
||||
expect(point_during.reload.track_id).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when track has no points in the time window' do
|
||||
let!(:track) { create(:track, user: user, start_at: start_at - 2.hours, end_at: end_at + 2.hours) }
|
||||
let!(:point_before) { create(:point, user: user, track: track, timestamp: (start_at - 30.minutes).to_i) }
|
||||
let!(:point_after) { create(:point, user: user, track: track, timestamp: (end_at + 30.minutes).to_i) }
|
||||
|
||||
it 'does not modify the track' do
|
||||
expect { cleaner.cleanup }.not_to change { user.tracks.count }
|
||||
expect(track.reload.start_at).to be_within(1.second).of(track.start_at)
|
||||
expect(track.reload.end_at).to be_within(1.second).of(track.end_at)
|
||||
end
|
||||
end
|
||||
|
||||
context 'without start_at and end_at' do
|
||||
let(:cleaner) { described_class.new(user) }
|
||||
|
||||
it 'does not perform any cleanup' do
|
||||
create(:track, user: user)
|
||||
expect { cleaner.cleanup }.not_to change { user.tracks.count }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,357 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::CreateFromPoints do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
describe '#initialize' do
|
||||
it 'sets user and thresholds from user settings' do
|
||||
expect(service.user).to eq(user)
|
||||
expect(service.distance_threshold_meters).to eq(user.safe_settings.meters_between_routes.to_i)
|
||||
expect(service.time_threshold_minutes).to eq(user.safe_settings.minutes_between_routes.to_i)
|
||||
end
|
||||
|
||||
it 'defaults to replace cleaning strategy' do
|
||||
expect(service.cleaning_strategy).to eq(:replace)
|
||||
end
|
||||
|
||||
context 'with custom user settings' do
|
||||
before do
|
||||
user.update!(settings: user.settings.merge({
|
||||
'meters_between_routes' => 1000,
|
||||
'minutes_between_routes' => 60
|
||||
}))
|
||||
end
|
||||
|
||||
it 'uses custom settings' do
|
||||
service = described_class.new(user)
|
||||
expect(service.distance_threshold_meters).to eq(1000)
|
||||
expect(service.time_threshold_minutes).to eq(60)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with custom cleaning strategy' do
|
||||
it 'accepts daily cleaning strategy' do
|
||||
service = described_class.new(user, cleaning_strategy: :daily)
|
||||
expect(service.cleaning_strategy).to eq(:daily)
|
||||
end
|
||||
|
||||
it 'accepts none cleaning strategy' do
|
||||
service = described_class.new(user, cleaning_strategy: :none)
|
||||
expect(service.cleaning_strategy).to eq(:none)
|
||||
end
|
||||
|
||||
it 'accepts custom date range with cleaning strategy' do
|
||||
start_time = 1.day.ago.beginning_of_day.to_i
|
||||
end_time = 1.day.ago.end_of_day.to_i
|
||||
service = described_class.new(user, start_at: start_time, end_at: end_time, cleaning_strategy: :daily)
|
||||
|
||||
expect(service.start_at).to eq(start_time)
|
||||
expect(service.end_at).to eq(end_time)
|
||||
expect(service.cleaning_strategy).to eq(:daily)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'with no points' do
|
||||
it 'returns 0 tracks created' do
|
||||
expect(service.call).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with insufficient points' do
|
||||
let!(:single_point) { create(:point, user: user, timestamp: 1.hour.ago.to_i) }
|
||||
|
||||
it 'returns 0 tracks created' do
|
||||
expect(service.call).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points that form a single track' do
|
||||
let(:base_time) { 1.hour.ago }
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: base_time.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)', altitude: 10),
|
||||
create(:point, user: user, timestamp: (base_time + 5.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)', altitude: 15),
|
||||
create(:point, user: user, timestamp: (base_time + 10.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0080 40.7132)', altitude: 20)
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates one track' do
|
||||
expect { service.call }.to change(Track, :count).by(1)
|
||||
end
|
||||
|
||||
it 'returns 1 track created' do
|
||||
expect(service.call).to eq(1)
|
||||
end
|
||||
|
||||
it 'sets track attributes correctly' do
|
||||
service.call
|
||||
track = Track.last
|
||||
|
||||
expect(track.user).to eq(user)
|
||||
expect(track.start_at).to be_within(1.second).of(base_time)
|
||||
expect(track.end_at).to be_within(1.second).of(base_time + 10.minutes)
|
||||
expect(track.duration).to eq(600) # 10 minutes in seconds
|
||||
expect(track.original_path).to be_present
|
||||
expect(track.distance).to be > 0
|
||||
expect(track.avg_speed).to be > 0
|
||||
expect(track.elevation_gain).to eq(10) # 20 - 10
|
||||
expect(track.elevation_loss).to eq(0)
|
||||
expect(track.elevation_max).to eq(20)
|
||||
expect(track.elevation_min).to eq(10)
|
||||
end
|
||||
|
||||
it 'associates points with the track' do
|
||||
service.call
|
||||
track = Track.last
|
||||
expect(points.map(&:reload).map(&:track)).to all(eq(track))
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points that should be split by time' do
|
||||
let(:base_time) { 2.hours.ago }
|
||||
let!(:points) do
|
||||
[
|
||||
# First track
|
||||
create(:point, user: user, timestamp: base_time.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
create(:point, user: user, timestamp: (base_time + 5.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)'),
|
||||
|
||||
# Gap > time threshold (default 30 minutes)
|
||||
create(:point, user: user, timestamp: (base_time + 45.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0080 40.7132)'),
|
||||
create(:point, user: user, timestamp: (base_time + 50.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0090 40.7134)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates two tracks' do
|
||||
expect { service.call }.to change(Track, :count).by(2)
|
||||
end
|
||||
|
||||
it 'returns 2 tracks created' do
|
||||
expect(service.call).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points that should be split by distance' do
|
||||
let(:base_time) { 1.hour.ago }
|
||||
let!(:points) do
|
||||
[
|
||||
# First track - close points
|
||||
create(:point, user: user, timestamp: base_time.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
create(:point, user: user, timestamp: (base_time + 1.minute).to_i,
|
||||
lonlat: 'POINT(-74.0061 40.7129)'),
|
||||
|
||||
# Far point (> distance threshold, but within time threshold)
|
||||
create(:point, user: user, timestamp: (base_time + 2.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0500 40.7500)'), # ~5km away
|
||||
create(:point, user: user, timestamp: (base_time + 3.minutes).to_i,
|
||||
lonlat: 'POINT(-74.0501 40.7501)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates two tracks' do
|
||||
expect { service.call }.to change(Track, :count).by(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with existing tracks' do
|
||||
let!(:existing_track) { create(:track, user: user) }
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
create(:point, user: user, timestamp: 50.minutes.ago.to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'destroys existing tracks and creates new ones' do
|
||||
expect { service.call }.to change(Track, :count).by(0) # -1 + 1
|
||||
expect(Track.exists?(existing_track.id)).to be false
|
||||
end
|
||||
|
||||
context 'with none cleaning strategy' do
|
||||
let(:service) { described_class.new(user, cleaning_strategy: :none) }
|
||||
|
||||
it 'preserves existing tracks and creates new ones' do
|
||||
expect { service.call }.to change(Track, :count).by(1) # +1, existing preserved
|
||||
expect(Track.exists?(existing_track.id)).to be true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with different cleaning strategies' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
create(:point, user: user, timestamp: 50.minutes.ago.to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'works with replace strategy (default)' do
|
||||
service = described_class.new(user, cleaning_strategy: :replace)
|
||||
expect { service.call }.to change(Track, :count).by(1)
|
||||
end
|
||||
|
||||
it 'works with daily strategy' do
|
||||
# Create points within the daily range we're testing
|
||||
start_time = 1.day.ago.beginning_of_day.to_i
|
||||
end_time = 1.day.ago.end_of_day.to_i
|
||||
|
||||
# Create test points within the daily range
|
||||
create(:point, user: user, timestamp: start_time + 1.hour.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)')
|
||||
create(:point, user: user, timestamp: start_time + 2.hours.to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)')
|
||||
|
||||
# Create an existing track that overlaps with our time window
|
||||
existing_track = create(:track, user: user,
|
||||
start_at: Time.zone.at(start_time - 1.hour),
|
||||
end_at: Time.zone.at(start_time + 30.minutes))
|
||||
|
||||
service = described_class.new(user, start_at: start_time, end_at: end_time, cleaning_strategy: :daily)
|
||||
|
||||
# Daily cleaning should handle existing tracks properly and create new ones
|
||||
expect { service.call }.to change(Track, :count).by(0) # existing cleaned and new created
|
||||
end
|
||||
|
||||
it 'works with none strategy' do
|
||||
service = described_class.new(user, cleaning_strategy: :none)
|
||||
expect { service.call }.to change(Track, :count).by(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with mixed elevation data' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)', altitude: 100),
|
||||
create(:point, user: user, timestamp: 50.minutes.ago.to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)', altitude: 150),
|
||||
create(:point, user: user, timestamp: 40.minutes.ago.to_i,
|
||||
lonlat: 'POINT(-74.0080 40.7132)', altitude: 120)
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates elevation correctly' do
|
||||
service.call
|
||||
track = Track.last
|
||||
|
||||
expect(track.elevation_gain).to eq(50) # 150 - 100
|
||||
expect(track.elevation_loss).to eq(30) # 150 - 120
|
||||
expect(track.elevation_max).to eq(150)
|
||||
expect(track.elevation_min).to eq(100)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points missing altitude data' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i,
|
||||
lonlat: 'POINT(-74.0060 40.7128)', altitude: nil),
|
||||
create(:point, user: user, timestamp: 50.minutes.ago.to_i,
|
||||
lonlat: 'POINT(-74.0070 40.7130)', altitude: nil)
|
||||
]
|
||||
end
|
||||
|
||||
it 'uses default elevation values' do
|
||||
service.call
|
||||
track = Track.last
|
||||
|
||||
expect(track.elevation_gain).to eq(0)
|
||||
expect(track.elevation_loss).to eq(0)
|
||||
expect(track.elevation_max).to eq(0)
|
||||
expect(track.elevation_min).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'private methods' do
|
||||
describe '#should_start_new_track?' do
|
||||
let(:point1) { build(:point, timestamp: 1.hour.ago.to_i, lonlat: 'POINT(-74.0060 40.7128)') }
|
||||
let(:point2) { build(:point, timestamp: 50.minutes.ago.to_i, lonlat: 'POINT(-74.0070 40.7130)') }
|
||||
|
||||
it 'returns false when previous point is nil' do
|
||||
result = service.send(:should_start_new_track?, point1, nil)
|
||||
expect(result).to be false
|
||||
end
|
||||
|
||||
it 'returns true when time threshold is exceeded' do
|
||||
# Create a point > 30 minutes later (default threshold)
|
||||
later_point = build(:point, timestamp: 29.minutes.ago.to_i, lonlat: 'POINT(-74.0070 40.7130)')
|
||||
|
||||
result = service.send(:should_start_new_track?, later_point, point1)
|
||||
expect(result).to be true
|
||||
end
|
||||
|
||||
it 'returns true when distance threshold is exceeded' do
|
||||
# Create a point far away (> 500m default threshold)
|
||||
far_point = build(:point, timestamp: 59.minutes.ago.to_i, lonlat: 'POINT(-74.0500 40.7500)')
|
||||
|
||||
result = service.send(:should_start_new_track?, far_point, point1)
|
||||
expect(result).to be true
|
||||
end
|
||||
|
||||
it 'returns false when both thresholds are not exceeded' do
|
||||
result = service.send(:should_start_new_track?, point2, point1)
|
||||
expect(result).to be false
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_distance_kilometers' do
|
||||
let(:point1) { build(:point, lonlat: 'POINT(-74.0060 40.7128)') }
|
||||
let(:point2) { build(:point, lonlat: 'POINT(-74.0070 40.7130)') }
|
||||
|
||||
it 'calculates distance between two points in kilometers' do
|
||||
distance = service.send(:calculate_distance_kilometers, point1, point2)
|
||||
expect(distance).to be > 0
|
||||
expect(distance).to be < 0.2 # Should be small distance for close points (in km)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_average_speed' do
|
||||
it 'calculates speed correctly' do
|
||||
# 1000 meters in 100 seconds = 10 m/s = 36 km/h
|
||||
speed = service.send(:calculate_average_speed, 1000, 100)
|
||||
expect(speed).to eq(36.0)
|
||||
end
|
||||
|
||||
it 'returns 0 for zero duration' do
|
||||
speed = service.send(:calculate_average_speed, 1000, 0)
|
||||
expect(speed).to eq(0.0)
|
||||
end
|
||||
|
||||
it 'returns 0 for zero distance' do
|
||||
speed = service.send(:calculate_average_speed, 0, 100)
|
||||
expect(speed).to eq(0.0)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_track_distance' do
|
||||
let(:points) do
|
||||
[
|
||||
build(:point, lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
build(:point, lonlat: 'POINT(-74.0070 40.7130)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'stores distance in meters by default' do
|
||||
distance = service.send(:calculate_track_distance, points)
|
||||
expect(distance).to eq(87)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,257 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::Generator do
|
||||
let(:user) { create(:user) }
|
||||
let(:point_loader) { double('PointLoader') }
|
||||
let(:incomplete_segment_handler) { double('IncompleteSegmentHandler') }
|
||||
let(:track_cleaner) { double('Cleaner') }
|
||||
|
||||
let(:generator) do
|
||||
described_class.new(
|
||||
user,
|
||||
point_loader: point_loader,
|
||||
incomplete_segment_handler: incomplete_segment_handler,
|
||||
track_cleaner: track_cleaner
|
||||
)
|
||||
end
|
||||
|
||||
before do
|
||||
allow_any_instance_of(Users::SafeSettings).to receive(:meters_between_routes).and_return(500)
|
||||
allow_any_instance_of(Users::SafeSettings).to receive(:minutes_between_routes).and_return(60)
|
||||
allow_any_instance_of(Users::SafeSettings).to receive(:distance_unit).and_return('km')
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'with no points to process' do
|
||||
before do
|
||||
allow(track_cleaner).to receive(:cleanup)
|
||||
allow(point_loader).to receive(:load_points).and_return([])
|
||||
end
|
||||
|
||||
it 'returns 0 tracks created' do
|
||||
result = generator.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
|
||||
it 'does not call incomplete segment handler' do
|
||||
expect(incomplete_segment_handler).not_to receive(:should_finalize_segment?)
|
||||
expect(incomplete_segment_handler).not_to receive(:handle_incomplete_segment)
|
||||
expect(incomplete_segment_handler).not_to receive(:cleanup_processed_data)
|
||||
|
||||
generator.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with points that create tracks' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 1.hour.ago.to_i, latitude: 40.7128, longitude: -74.0060),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0050 40.7138)', timestamp: 30.minutes.ago.to_i, latitude: 40.7138, longitude: -74.0050),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0040 40.7148)', timestamp: 10.minutes.ago.to_i, latitude: 40.7148, longitude: -74.0040)
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
allow(track_cleaner).to receive(:cleanup)
|
||||
allow(point_loader).to receive(:load_points).and_return(points)
|
||||
allow(incomplete_segment_handler).to receive(:should_finalize_segment?).and_return(true)
|
||||
allow(incomplete_segment_handler).to receive(:cleanup_processed_data)
|
||||
end
|
||||
|
||||
it 'creates tracks from segments' do
|
||||
expect { generator.call }.to change { Track.count }.by(1)
|
||||
end
|
||||
|
||||
it 'returns the number of tracks created' do
|
||||
result = generator.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
|
||||
it 'calls cleanup on processed data' do
|
||||
expect(incomplete_segment_handler).to receive(:cleanup_processed_data)
|
||||
generator.call
|
||||
end
|
||||
|
||||
it 'assigns points to the created track' do
|
||||
generator.call
|
||||
points.each(&:reload)
|
||||
track_ids = points.map(&:track_id).uniq.compact
|
||||
expect(track_ids.size).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with incomplete segments' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 5.minutes.ago.to_i, latitude: 40.7128, longitude: -74.0060),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0050 40.7138)', timestamp: 4.minutes.ago.to_i, latitude: 40.7138, longitude: -74.0050)
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
allow(track_cleaner).to receive(:cleanup)
|
||||
allow(point_loader).to receive(:load_points).and_return(points)
|
||||
allow(incomplete_segment_handler).to receive(:should_finalize_segment?).and_return(false)
|
||||
allow(incomplete_segment_handler).to receive(:handle_incomplete_segment)
|
||||
allow(incomplete_segment_handler).to receive(:cleanup_processed_data)
|
||||
end
|
||||
|
||||
it 'does not create tracks' do
|
||||
expect { generator.call }.not_to change { Track.count }
|
||||
end
|
||||
|
||||
it 'handles incomplete segments' do
|
||||
expect(incomplete_segment_handler).to receive(:handle_incomplete_segment).with(points)
|
||||
generator.call
|
||||
end
|
||||
|
||||
it 'returns 0 tracks created' do
|
||||
result = generator.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with mixed complete and incomplete segments' do
|
||||
let!(:old_points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 2.hours.ago.to_i, latitude: 40.7128, longitude: -74.0060),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0050 40.7138)', timestamp: 1.hour.ago.to_i, latitude: 40.7138, longitude: -74.0050)
|
||||
]
|
||||
end
|
||||
|
||||
let!(:recent_points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0040 40.7148)', timestamp: 3.minutes.ago.to_i, latitude: 40.7148, longitude: -74.0040),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0030 40.7158)', timestamp: 2.minutes.ago.to_i, latitude: 40.7158, longitude: -74.0030)
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
allow(track_cleaner).to receive(:cleanup)
|
||||
allow(point_loader).to receive(:load_points).and_return(old_points + recent_points)
|
||||
|
||||
# First segment (old points) should be finalized
|
||||
# Second segment (recent points) should be incomplete
|
||||
call_count = 0
|
||||
allow(incomplete_segment_handler).to receive(:should_finalize_segment?) do |segment_points|
|
||||
call_count += 1
|
||||
call_count == 1 # Only finalize first segment
|
||||
end
|
||||
|
||||
allow(incomplete_segment_handler).to receive(:handle_incomplete_segment)
|
||||
allow(incomplete_segment_handler).to receive(:cleanup_processed_data)
|
||||
end
|
||||
|
||||
it 'creates tracks for complete segments only' do
|
||||
expect { generator.call }.to change { Track.count }.by(1)
|
||||
end
|
||||
|
||||
it 'handles incomplete segments' do
|
||||
# Note: The exact behavior depends on segmentation logic
|
||||
# The important thing is that the method can be called without errors
|
||||
generator.call
|
||||
# Test passes if no exceptions are raised
|
||||
expect(true).to be_truthy
|
||||
end
|
||||
|
||||
it 'returns the correct number of tracks created' do
|
||||
result = generator.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with insufficient points for track creation' do
|
||||
let!(:single_point) do
|
||||
[create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 1.hour.ago.to_i, latitude: 40.7128, longitude: -74.0060)]
|
||||
end
|
||||
|
||||
before do
|
||||
allow(track_cleaner).to receive(:cleanup)
|
||||
allow(point_loader).to receive(:load_points).and_return(single_point)
|
||||
allow(incomplete_segment_handler).to receive(:should_finalize_segment?).and_return(true)
|
||||
allow(incomplete_segment_handler).to receive(:cleanup_processed_data)
|
||||
end
|
||||
|
||||
it 'does not create tracks with less than 2 points' do
|
||||
expect { generator.call }.not_to change { Track.count }
|
||||
end
|
||||
|
||||
it 'returns 0 tracks created' do
|
||||
result = generator.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'error handling' do
|
||||
before do
|
||||
allow(track_cleaner).to receive(:cleanup)
|
||||
allow(point_loader).to receive(:load_points).and_raise(StandardError, 'Point loading failed')
|
||||
end
|
||||
|
||||
it 'propagates errors from point loading' do
|
||||
expect { generator.call }.to raise_error(StandardError, 'Point loading failed')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'strategy pattern integration' do
|
||||
context 'with bulk processing strategies' do
|
||||
let(:bulk_loader) { Tracks::PointLoaders::BulkLoader.new(user) }
|
||||
let(:ignore_handler) { Tracks::IncompleteSegmentHandlers::IgnoreHandler.new(user) }
|
||||
let(:replace_cleaner) { Tracks::Cleaners::ReplaceCleaner.new(user) }
|
||||
|
||||
let(:bulk_generator) do
|
||||
described_class.new(
|
||||
user,
|
||||
point_loader: bulk_loader,
|
||||
incomplete_segment_handler: ignore_handler,
|
||||
track_cleaner: replace_cleaner
|
||||
)
|
||||
end
|
||||
|
||||
let!(:existing_track) { create(:track, user: user) }
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 1.hour.ago.to_i, latitude: 40.7128, longitude: -74.0060),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0050 40.7138)', timestamp: 30.minutes.ago.to_i, latitude: 40.7138, longitude: -74.0050)
|
||||
]
|
||||
end
|
||||
|
||||
it 'behaves like bulk processing' do
|
||||
initial_count = Track.count
|
||||
bulk_generator.call
|
||||
# Bulk processing replaces existing tracks with new ones
|
||||
# The final count depends on how many valid tracks can be created from the points
|
||||
expect(Track.count).to be >= 0
|
||||
end
|
||||
end
|
||||
|
||||
context 'with incremental processing strategies' do
|
||||
let(:incremental_loader) { Tracks::PointLoaders::IncrementalLoader.new(user) }
|
||||
let(:buffer_handler) { Tracks::IncompleteSegmentHandlers::BufferHandler.new(user, Date.current, 5) }
|
||||
let(:noop_cleaner) { Tracks::Cleaners::NoOpCleaner.new(user) }
|
||||
|
||||
let(:incremental_generator) do
|
||||
described_class.new(
|
||||
user,
|
||||
point_loader: incremental_loader,
|
||||
incomplete_segment_handler: buffer_handler,
|
||||
track_cleaner: noop_cleaner
|
||||
)
|
||||
end
|
||||
|
||||
let!(:existing_track) { create(:track, user: user) }
|
||||
|
||||
before do
|
||||
# Mock the incremental loader to return some points
|
||||
allow(incremental_loader).to receive(:load_points).and_return([])
|
||||
end
|
||||
|
||||
it 'behaves like incremental processing' do
|
||||
expect { incremental_generator.call }.not_to change { Track.count }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,238 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::RedisBuffer do
|
||||
let(:user_id) { 123 }
|
||||
let(:day) { Date.current }
|
||||
let(:buffer) { described_class.new(user_id, day) }
|
||||
|
||||
describe '#initialize' do
|
||||
it 'stores user_id and converts day to Date' do
|
||||
expect(buffer.user_id).to eq(user_id)
|
||||
expect(buffer.day).to eq(day)
|
||||
expect(buffer.day).to be_a(Date)
|
||||
end
|
||||
|
||||
it 'handles string date input' do
|
||||
buffer = described_class.new(user_id, '2024-01-15')
|
||||
expect(buffer.day).to eq(Date.parse('2024-01-15'))
|
||||
end
|
||||
|
||||
it 'handles Time input' do
|
||||
time = Time.current
|
||||
buffer = described_class.new(user_id, time)
|
||||
expect(buffer.day).to eq(time.to_date)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#store' do
|
||||
let(:user) { create(:user) }
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 1.hour.ago.to_i),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0070 40.7130)', timestamp: 30.minutes.ago.to_i)
|
||||
]
|
||||
end
|
||||
|
||||
it 'stores points in Redis cache' do
|
||||
expect(Rails.cache).to receive(:write).with(
|
||||
"track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}",
|
||||
anything,
|
||||
expires_in: 7.days
|
||||
)
|
||||
|
||||
buffer.store(points)
|
||||
end
|
||||
|
||||
it 'serializes points correctly' do
|
||||
buffer.store(points)
|
||||
|
||||
stored_data = Rails.cache.read("track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}")
|
||||
|
||||
expect(stored_data).to be_an(Array)
|
||||
expect(stored_data.size).to eq(2)
|
||||
|
||||
first_point = stored_data.first
|
||||
expect(first_point[:id]).to eq(points.first.id)
|
||||
expect(first_point[:timestamp]).to eq(points.first.timestamp)
|
||||
expect(first_point[:lat]).to eq(points.first.lat)
|
||||
expect(first_point[:lon]).to eq(points.first.lon)
|
||||
expect(first_point[:user_id]).to eq(points.first.user_id)
|
||||
end
|
||||
|
||||
it 'does nothing when given empty array' do
|
||||
expect(Rails.cache).not_to receive(:write)
|
||||
buffer.store([])
|
||||
end
|
||||
|
||||
it 'logs debug message when storing points' do
|
||||
expect(Rails.logger).to receive(:debug).with(
|
||||
"Stored 2 points in buffer for user #{user_id}, day #{day}"
|
||||
)
|
||||
|
||||
buffer.store(points)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#retrieve' do
|
||||
context 'when buffer exists' do
|
||||
let(:stored_data) do
|
||||
[
|
||||
{
|
||||
id: 1,
|
||||
lonlat: 'POINT(-74.0060 40.7128)',
|
||||
timestamp: 1.hour.ago.to_i,
|
||||
lat: 40.7128,
|
||||
lon: -74.0060,
|
||||
altitude: 100,
|
||||
velocity: 5.0,
|
||||
battery: 80,
|
||||
user_id: user_id
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
lonlat: 'POINT(-74.0070 40.7130)',
|
||||
timestamp: 30.minutes.ago.to_i,
|
||||
lat: 40.7130,
|
||||
lon: -74.0070,
|
||||
altitude: 105,
|
||||
velocity: 6.0,
|
||||
battery: 75,
|
||||
user_id: user_id
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
Rails.cache.write(
|
||||
"track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}",
|
||||
stored_data
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns the stored point data' do
|
||||
result = buffer.retrieve
|
||||
|
||||
expect(result).to eq(stored_data)
|
||||
expect(result.size).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when buffer does not exist' do
|
||||
it 'returns empty array' do
|
||||
result = buffer.retrieve
|
||||
expect(result).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when Redis read fails' do
|
||||
before do
|
||||
allow(Rails.cache).to receive(:read).and_raise(StandardError.new('Redis error'))
|
||||
end
|
||||
|
||||
it 'returns empty array and logs error' do
|
||||
expect(Rails.logger).to receive(:error).with(
|
||||
"Failed to retrieve buffered points for user #{user_id}, day #{day}: Redis error"
|
||||
)
|
||||
|
||||
result = buffer.retrieve
|
||||
expect(result).to eq([])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#clear' do
|
||||
before do
|
||||
Rails.cache.write(
|
||||
"track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}",
|
||||
[{ id: 1, timestamp: 1.hour.ago.to_i }]
|
||||
)
|
||||
end
|
||||
|
||||
it 'deletes the buffer from cache' do
|
||||
buffer.clear
|
||||
|
||||
expect(Rails.cache.read("track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}")).to be_nil
|
||||
end
|
||||
|
||||
it 'logs debug message' do
|
||||
expect(Rails.logger).to receive(:debug).with(
|
||||
"Cleared buffer for user #{user_id}, day #{day}"
|
||||
)
|
||||
|
||||
buffer.clear
|
||||
end
|
||||
end
|
||||
|
||||
describe '#exists?' do
|
||||
context 'when buffer exists' do
|
||||
before do
|
||||
Rails.cache.write(
|
||||
"track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}",
|
||||
[{ id: 1 }]
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns true' do
|
||||
expect(buffer.exists?).to be true
|
||||
end
|
||||
end
|
||||
|
||||
context 'when buffer does not exist' do
|
||||
it 'returns false' do
|
||||
expect(buffer.exists?).to be false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'buffer key generation' do
|
||||
it 'generates correct Redis key format' do
|
||||
expected_key = "track_buffer:#{user_id}:#{day.strftime('%Y-%m-%d')}"
|
||||
|
||||
# Access private method for testing
|
||||
actual_key = buffer.send(:buffer_key)
|
||||
|
||||
expect(actual_key).to eq(expected_key)
|
||||
end
|
||||
|
||||
it 'handles different date formats consistently' do
|
||||
date_as_string = '2024-03-15'
|
||||
date_as_date = Date.parse(date_as_string)
|
||||
|
||||
buffer1 = described_class.new(user_id, date_as_string)
|
||||
buffer2 = described_class.new(user_id, date_as_date)
|
||||
|
||||
expect(buffer1.send(:buffer_key)).to eq(buffer2.send(:buffer_key))
|
||||
end
|
||||
end
|
||||
|
||||
describe 'integration test' do
|
||||
let(:user) { create(:user) }
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)', timestamp: 2.hours.ago.to_i),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0070 40.7130)', timestamp: 1.hour.ago.to_i)
|
||||
]
|
||||
end
|
||||
|
||||
it 'stores and retrieves points correctly' do
|
||||
# Store points
|
||||
buffer.store(points)
|
||||
expect(buffer.exists?).to be true
|
||||
|
||||
# Retrieve points
|
||||
retrieved_points = buffer.retrieve
|
||||
expect(retrieved_points.size).to eq(2)
|
||||
|
||||
# Verify data integrity
|
||||
expect(retrieved_points.first[:id]).to eq(points.first.id)
|
||||
expect(retrieved_points.last[:id]).to eq(points.last.id)
|
||||
|
||||
# Clear buffer
|
||||
buffer.clear
|
||||
expect(buffer.exists?).to be false
|
||||
expect(buffer.retrieve).to eq([])
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,326 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Tracks::TrackBuilder do
|
||||
# Create a test class that includes the concern for testing
|
||||
let(:test_class) do
|
||||
Class.new do
|
||||
include Tracks::TrackBuilder
|
||||
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
end
|
||||
|
||||
let(:user) { create(:user) }
|
||||
let(:builder) { test_class.new(user) }
|
||||
|
||||
before do
|
||||
# Set up user settings for consistent testing
|
||||
allow_any_instance_of(Users::SafeSettings).to receive(:distance_unit).and_return('km')
|
||||
end
|
||||
|
||||
describe '#create_track_from_points' do
|
||||
context 'with valid points' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)',
|
||||
timestamp: 2.hours.ago.to_i, altitude: 100),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0070 40.7130)',
|
||||
timestamp: 1.hour.ago.to_i, altitude: 110),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0080 40.7132)',
|
||||
timestamp: 30.minutes.ago.to_i, altitude: 105)
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates a track with correct attributes' do
|
||||
track = builder.create_track_from_points(points)
|
||||
|
||||
expect(track).to be_persisted
|
||||
expect(track.user).to eq(user)
|
||||
expect(track.start_at).to be_within(1.second).of(Time.zone.at(points.first.timestamp))
|
||||
expect(track.end_at).to be_within(1.second).of(Time.zone.at(points.last.timestamp))
|
||||
expect(track.distance).to be > 0
|
||||
expect(track.duration).to eq(90.minutes.to_i)
|
||||
expect(track.avg_speed).to be > 0
|
||||
expect(track.original_path).to be_present
|
||||
end
|
||||
|
||||
it 'calculates elevation statistics correctly' do
|
||||
track = builder.create_track_from_points(points)
|
||||
|
||||
expect(track.elevation_gain).to eq(10) # 110 - 100
|
||||
expect(track.elevation_loss).to eq(5) # 110 - 105
|
||||
expect(track.elevation_max).to eq(110)
|
||||
expect(track.elevation_min).to eq(100)
|
||||
end
|
||||
|
||||
it 'associates points with the track' do
|
||||
track = builder.create_track_from_points(points)
|
||||
|
||||
points.each(&:reload)
|
||||
expect(points.map(&:track)).to all(eq(track))
|
||||
end
|
||||
end
|
||||
|
||||
context 'with insufficient points' do
|
||||
let(:single_point) { [create(:point, user: user)] }
|
||||
|
||||
it 'returns nil for single point' do
|
||||
result = builder.create_track_from_points(single_point)
|
||||
expect(result).to be_nil
|
||||
end
|
||||
|
||||
it 'returns nil for empty array' do
|
||||
result = builder.create_track_from_points([])
|
||||
expect(result).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when track save fails' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, timestamp: 1.hour.ago.to_i),
|
||||
create(:point, user: user, timestamp: 30.minutes.ago.to_i)
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
allow_any_instance_of(Track).to receive(:save).and_return(false)
|
||||
end
|
||||
|
||||
it 'returns nil and logs error' do
|
||||
expect(Rails.logger).to receive(:error).with(
|
||||
/Failed to create track for user #{user.id}/
|
||||
)
|
||||
|
||||
result = builder.create_track_from_points(points)
|
||||
expect(result).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#build_path' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
create(:point, lonlat: 'POINT(-74.0070 40.7130)')
|
||||
]
|
||||
end
|
||||
|
||||
it 'builds path using Tracks::BuildPath service' do
|
||||
expect(Tracks::BuildPath).to receive(:new).with(
|
||||
points.map(&:lonlat)
|
||||
).and_call_original
|
||||
|
||||
result = builder.build_path(points)
|
||||
expect(result).to respond_to(:as_text) # RGeo geometry object
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_track_distance' do
|
||||
let(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)'),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0070 40.7130)')
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
# Mock Point.total_distance to return distance in meters
|
||||
allow(Point).to receive(:total_distance).and_return(1500) # 1500 meters
|
||||
end
|
||||
|
||||
it 'stores distance in meters regardless of user unit preference' do
|
||||
result = builder.calculate_track_distance(points)
|
||||
expect(result).to eq(1500) # Always stored as meters
|
||||
end
|
||||
|
||||
it 'rounds distance to nearest meter' do
|
||||
allow(Point).to receive(:total_distance).and_return(1500.7)
|
||||
result = builder.calculate_track_distance(points)
|
||||
expect(result).to eq(1501) # Rounded to nearest meter
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_duration' do
|
||||
let(:start_time) { 2.hours.ago.to_i }
|
||||
let(:end_time) { 1.hour.ago.to_i }
|
||||
let(:points) do
|
||||
[
|
||||
double(timestamp: start_time),
|
||||
double(timestamp: end_time)
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates duration in seconds' do
|
||||
result = builder.calculate_duration(points)
|
||||
expect(result).to eq(1.hour.to_i)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_average_speed' do
|
||||
context 'with valid distance and duration' do
|
||||
it 'calculates speed in km/h' do
|
||||
distance_meters = 1000 # 1 km
|
||||
duration_seconds = 3600 # 1 hour
|
||||
|
||||
result = builder.calculate_average_speed(distance_meters, duration_seconds)
|
||||
expect(result).to eq(1.0) # 1 km/h
|
||||
end
|
||||
|
||||
it 'rounds to 2 decimal places' do
|
||||
distance_meters = 1500 # 1.5 km
|
||||
duration_seconds = 1800 # 30 minutes
|
||||
|
||||
result = builder.calculate_average_speed(distance_meters, duration_seconds)
|
||||
expect(result).to eq(3.0) # 3 km/h
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid inputs' do
|
||||
it 'returns 0.0 for zero duration' do
|
||||
result = builder.calculate_average_speed(1000, 0)
|
||||
expect(result).to eq(0.0)
|
||||
end
|
||||
|
||||
it 'returns 0.0 for zero distance' do
|
||||
result = builder.calculate_average_speed(0, 3600)
|
||||
expect(result).to eq(0.0)
|
||||
end
|
||||
|
||||
it 'returns 0.0 for negative duration' do
|
||||
result = builder.calculate_average_speed(1000, -3600)
|
||||
expect(result).to eq(0.0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_elevation_stats' do
|
||||
context 'with elevation data' do
|
||||
let(:points) do
|
||||
[
|
||||
double(altitude: 100),
|
||||
double(altitude: 150),
|
||||
double(altitude: 120),
|
||||
double(altitude: 180),
|
||||
double(altitude: 160)
|
||||
]
|
||||
end
|
||||
|
||||
it 'calculates elevation gain correctly' do
|
||||
result = builder.calculate_elevation_stats(points)
|
||||
expect(result[:gain]).to eq(110) # (150-100) + (180-120) = 50 + 60 = 110
|
||||
end
|
||||
|
||||
it 'calculates elevation loss correctly' do
|
||||
result = builder.calculate_elevation_stats(points)
|
||||
expect(result[:loss]).to eq(50) # (150-120) + (180-160) = 30 + 20 = 50
|
||||
end
|
||||
|
||||
it 'finds max elevation' do
|
||||
result = builder.calculate_elevation_stats(points)
|
||||
expect(result[:max]).to eq(180)
|
||||
end
|
||||
|
||||
it 'finds min elevation' do
|
||||
result = builder.calculate_elevation_stats(points)
|
||||
expect(result[:min]).to eq(100)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with no elevation data' do
|
||||
let(:points) do
|
||||
[
|
||||
double(altitude: nil),
|
||||
double(altitude: nil)
|
||||
]
|
||||
end
|
||||
|
||||
it 'returns default elevation stats' do
|
||||
result = builder.calculate_elevation_stats(points)
|
||||
expect(result).to eq({
|
||||
gain: 0,
|
||||
loss: 0,
|
||||
max: 0,
|
||||
min: 0
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
context 'with mixed elevation data' do
|
||||
let(:points) do
|
||||
[
|
||||
double(altitude: 100),
|
||||
double(altitude: nil),
|
||||
double(altitude: 150)
|
||||
]
|
||||
end
|
||||
|
||||
it 'ignores nil values' do
|
||||
result = builder.calculate_elevation_stats(points)
|
||||
expect(result[:gain]).to eq(50) # 150 - 100
|
||||
expect(result[:loss]).to eq(0)
|
||||
expect(result[:max]).to eq(150)
|
||||
expect(result[:min]).to eq(100)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#default_elevation_stats' do
|
||||
it 'returns hash with zero values' do
|
||||
result = builder.default_elevation_stats
|
||||
expect(result).to eq({
|
||||
gain: 0,
|
||||
loss: 0,
|
||||
max: 0,
|
||||
min: 0
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
describe 'user method requirement' do
|
||||
let(:invalid_class) do
|
||||
Class.new do
|
||||
include Tracks::TrackBuilder
|
||||
# Does not implement user method
|
||||
end
|
||||
end
|
||||
|
||||
it 'raises NotImplementedError when user method is not implemented' do
|
||||
invalid_builder = invalid_class.new
|
||||
expect { invalid_builder.send(:user) }.to raise_error(
|
||||
NotImplementedError,
|
||||
"Including class must implement user method"
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'integration test' do
|
||||
let!(:points) do
|
||||
[
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0060 40.7128)',
|
||||
timestamp: 2.hours.ago.to_i, altitude: 100),
|
||||
create(:point, user: user, lonlat: 'POINT(-74.0070 40.7130)',
|
||||
timestamp: 1.hour.ago.to_i, altitude: 120)
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates a complete track end-to-end' do
|
||||
expect { builder.create_track_from_points(points) }.to change(Track, :count).by(1)
|
||||
|
||||
track = Track.last
|
||||
expect(track.user).to eq(user)
|
||||
expect(track.points).to match_array(points)
|
||||
expect(track.distance).to be > 0
|
||||
expect(track.duration).to eq(1.hour.to_i)
|
||||
expect(track.elevation_gain).to eq(20)
|
||||
end
|
||||
end
|
||||
end
|
||||
184
tracks-feature-analysis.md
Normal file
184
tracks-feature-analysis.md
Normal file
@@ -0,0 +1,184 @@
|
||||
# Tracks Feature Analysis
|
||||
|
||||
## Overview
|
||||
|
||||
The Tracks feature in Dawarich is a streamlined GPS route tracking system that automatically organizes location data into meaningful journeys. It transforms raw GPS points into structured track records that represent individual trips, walks, drives, or any other movement patterns using a simplified, unified architecture.
|
||||
|
||||
## Core Concept
|
||||
|
||||
A **Track** represents a continuous journey or route taken by a user. Unlike individual GPS points that are timestamped location coordinates, tracks provide higher-level information about complete journeys including start/end times, total distance, duration, speed statistics, and elevation changes.
|
||||
|
||||
## Key Components
|
||||
|
||||
### Track Data Structure
|
||||
|
||||
Each track contains:
|
||||
- **Temporal Information**: Start and end timestamps marking the journey boundaries
|
||||
- **Spatial Information**: Geographic path represented as a LineString containing all route coordinates
|
||||
- **Distance Metrics**: Total distance traveled (stored in meters for consistency)
|
||||
- **Speed Analytics**: Average speed throughout the journey (stored in km/h)
|
||||
- **Duration Data**: Total time spent on the journey (in seconds)
|
||||
- **Elevation Statistics**: Gain, loss, maximum, and minimum altitude measurements
|
||||
- **User Association**: Links each track to its owner
|
||||
|
||||
### Simplified Track Generation Process
|
||||
|
||||
The system uses a unified, streamlined approach to create tracks:
|
||||
|
||||
1. **Unified Processing**: Single service handles both bulk and incremental processing
|
||||
2. **Smart Segmentation**: Analyzes point sequences to identify natural break points between journeys
|
||||
3. **Real-time Creation**: Immediately processes new GPS data as it arrives for responsive user experience
|
||||
4. **Intelligent Batching**: Optimizes processing load while maintaining responsiveness
|
||||
|
||||
### Segmentation Intelligence
|
||||
|
||||
The system uses intelligent algorithms to determine where one track ends and another begins:
|
||||
|
||||
- **Time-based Segmentation**: Identifies gaps in GPS data that exceed configurable time thresholds (default: 60 minutes)
|
||||
- **Distance-based Segmentation**: Detects sudden location jumps that indicate teleportation or data gaps (default: 500 meters)
|
||||
- **Configurable Thresholds**: Users can adjust sensitivity through distance and time parameters
|
||||
- **Minimum Requirements**: Ensures tracks have sufficient data points to be meaningful (minimum 2 points)
|
||||
|
||||
### Statistics Calculation
|
||||
|
||||
For each track, the system calculates comprehensive statistics:
|
||||
|
||||
- **Distance Calculation**: Uses geographical formulas to compute accurate distances between points
|
||||
- **Speed Analysis**: Calculates average speed while handling stationary periods appropriately
|
||||
- **Elevation Processing**: Analyzes altitude changes to determine climbs and descents
|
||||
- **Duration Computation**: Accounts for actual movement time vs. total elapsed time
|
||||
|
||||
## Processing Modes
|
||||
|
||||
### Bulk Processing
|
||||
- Processes all unassigned GPS points for a user at once
|
||||
- Suitable for initial setup or historical data migration
|
||||
- Optimized for performance with large datasets
|
||||
- Triggered via scheduled job or manual execution
|
||||
|
||||
### Incremental Processing
|
||||
- Handles new GPS points as they arrive in real-time
|
||||
- Maintains system responsiveness during continuous tracking
|
||||
- Uses smart batching to optimize performance
|
||||
- Provides immediate user feedback
|
||||
|
||||
### Smart Real-time Processing
|
||||
- **Immediate Processing**: Triggers instant track creation for obvious track boundaries (30+ minute gaps, 1+ km jumps)
|
||||
- **Batched Processing**: Groups continuous tracking points for efficient processing
|
||||
- **Automatic Optimization**: Reduces system load while maintaining user experience
|
||||
|
||||
## User Experience Features
|
||||
|
||||
### Interactive Map Visualization
|
||||
- **Track Rendering**: Displays tracks as colored paths on interactive maps
|
||||
- **Hover Information**: Shows track details when users hover over routes
|
||||
- **Click Interactions**: Provides detailed statistics and journey markers
|
||||
- **Start/End Markers**: Visual indicators for journey beginning and completion points
|
||||
|
||||
### Real-time Updates
|
||||
- **WebSocket Integration**: Pushes track updates to connected clients immediately
|
||||
- **Live Tracking**: Shows new tracks as they're created from incoming GPS data
|
||||
- **Automatic Refresh**: Updates map display without requiring page reloads
|
||||
|
||||
### Filtering and Navigation
|
||||
- **Time-based Filtering**: Allows users to view tracks within specific date ranges
|
||||
- **Distance Filtering**: Enables filtering by journey length or duration
|
||||
- **Visual Controls**: Provides opacity and visibility toggles for track display
|
||||
|
||||
## Technical Architecture
|
||||
|
||||
### Simplified Design
|
||||
- **Single Service**: Unified `TrackService` handles all track operations
|
||||
- **Single Job**: `TrackProcessingJob` manages both bulk and incremental processing
|
||||
- **Minimal Dependencies**: Eliminated Redis buffering and complex strategy patterns
|
||||
- **Streamlined Architecture**: Reduced from 16 files to 4 core components
|
||||
|
||||
### Core Components
|
||||
- **TrackService**: Main service class containing all track processing logic
|
||||
- **TrackProcessingJob**: Background job for asynchronous track processing
|
||||
- **Point Model**: Simplified with smart track processing triggers
|
||||
- **Track Model**: Unchanged, maintains existing functionality and WebSocket broadcasting
|
||||
|
||||
### Processing Intelligence
|
||||
- **Smart Triggering**: Immediate processing for track boundaries, batched for continuous tracking
|
||||
- **Threshold-based Segmentation**: Configurable time (60 min) and distance (500m) thresholds
|
||||
- **Automatic Optimization**: Reduces database load while maintaining responsiveness
|
||||
- **Error Handling**: Comprehensive error management and reporting
|
||||
|
||||
## Data Management
|
||||
|
||||
### Storage Architecture
|
||||
- **Efficient Schema**: Optimized database structure for track storage and retrieval
|
||||
- **Geographic Indexing**: Enables fast spatial queries for map-based operations
|
||||
- **User Isolation**: Ensures each user's tracks remain private and separate
|
||||
|
||||
### Import/Export Capabilities
|
||||
- **GPX Support**: Imports tracks from standard GPS Exchange Format files
|
||||
- **Multiple Sources**: Handles data from various GPS tracking applications
|
||||
- **Format Conversion**: Transforms different input formats into standardized track records
|
||||
|
||||
### Performance Optimization
|
||||
- **Unified Processing**: Single service eliminates complexity and reduces overhead
|
||||
- **Smart Batching**: Job deduplication prevents queue overflow during high activity
|
||||
- **Efficient Queries**: Optimized database queries for point loading and track creation
|
||||
- **Minimal Memory Usage**: Eliminated Redis buffering in favor of direct processing
|
||||
|
||||
## Integration Points
|
||||
|
||||
### GPS Data Sources
|
||||
- **OwnTracks Integration**: Processes location data from OwnTracks applications
|
||||
- **File Imports**: Handles GPX and other standard GPS file formats
|
||||
- **API Endpoints**: Accepts GPS data from external applications and services
|
||||
|
||||
### Real-time Features
|
||||
- **WebSocket Broadcasting**: Immediate track updates to connected clients
|
||||
- **Live Tracking**: Shows new tracks as they're created from incoming GPS data
|
||||
- **Automatic Refresh**: Updates map display without requiring page reloads
|
||||
|
||||
## Quality Assurance
|
||||
|
||||
### Data Validation
|
||||
- **Input Validation**: Ensures GPS points meet quality standards before processing
|
||||
- **Duplicate Detection**: Prevents creation of redundant tracks from the same data
|
||||
- **Error Handling**: Gracefully manages corrupted or incomplete GPS data
|
||||
|
||||
### System Reliability
|
||||
- **Simplified Testing**: Reduced complexity enables comprehensive test coverage
|
||||
- **Performance Monitoring**: Built-in logging and error reporting
|
||||
- **Graceful Degradation**: System continues functioning even with individual point failures
|
||||
|
||||
## Configuration and Customization
|
||||
|
||||
### User Settings
|
||||
- **Threshold Configuration**: Allows users to adjust segmentation sensitivity
|
||||
- **Display Preferences**: Customizes how tracks appear on maps
|
||||
- **Privacy Controls**: Manages track visibility and sharing settings
|
||||
|
||||
### System Configuration
|
||||
- **Performance Tuning**: Adjusts processing parameters for optimal performance
|
||||
- **Resource Management**: Controls background job execution and resource usage
|
||||
- **Scaling Options**: Configures system behavior for different usage patterns
|
||||
|
||||
## Benefits and Applications
|
||||
|
||||
### Personal Tracking
|
||||
- **Journey Documentation**: Creates permanent records of personal travels and activities
|
||||
- **Activity Analysis**: Provides insights into movement patterns and habits
|
||||
- **Historical Records**: Maintains searchable archive of past journeys
|
||||
|
||||
### Real-time Experience
|
||||
- **Immediate Feedback**: Tracks appear instantly for obvious journey boundaries
|
||||
- **Responsive Interface**: Smart batching maintains UI responsiveness
|
||||
- **Live Updates**: Real-time track creation and broadcasting
|
||||
|
||||
### Data Organization
|
||||
- **Automatic Categorization**: Organizes raw GPS data into meaningful journey segments
|
||||
- **Reduced Complexity**: Simplifies large datasets into manageable track records
|
||||
- **Enhanced Searchability**: Enables efficient searching and filtering of location history
|
||||
|
||||
### Visualization Enhancement
|
||||
- **Map Clarity**: Reduces visual clutter by grouping related GPS points
|
||||
- **Interactive Features**: Provides rich interaction capabilities for exploring journey data
|
||||
- **Statistical Insights**: Offers comprehensive analytics about travel patterns
|
||||
|
||||
The Tracks feature represents a streamlined approach to GPS data management that transforms raw location information into meaningful, organized, and interactive journey records while maintaining high performance and providing real-time user experience through simplified, maintainable architecture.
|
||||
Reference in New Issue
Block a user