Merge pull request #1668 from Freika/feature/unify-import-form

Unify imports form
Evgenii Burmakin
2025-08-22 20:53:01 +02:00
committed by GitHub
19 changed files with 659 additions and 132 deletions

View File

@@ -13,6 +13,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
- User can now delete a visit by clicking on the delete button in the visit popup.
- Import failure now throws an internal server error.
## Changed
- The source of imports is now detected automatically.
# [0.30.9] - 2025-08-19

View File

@@ -12,14 +12,14 @@ class Api::V1::VisitsController < ApiController
def create
service = Visits::Create.new(current_api_user, visit_params)
if service.call
result = service.call
if result
render json: Api::VisitSerializer.new(service.visit).call
else
render json: {
error: 'Failed to create visit',
errors: service.errors
}, status: :unprocessable_entity
error_message = service.errors || 'Failed to create visit'
render json: { error: error_message }, status: :unprocessable_entity
end
end
@@ -77,11 +77,11 @@ class Api::V1::VisitsController < ApiController
def destroy
visit = current_api_user.visits.find(params[:id])
if visit.destroy
head :no_content
else
render json: {
error: 'Failed to delete visit',
errors: visit.errors.full_messages
}, status: :unprocessable_entity

View File

@@ -43,8 +43,7 @@ class ImportsController < ApplicationController
raw_files = Array(files_params).reject(&:blank?)
if raw_files.empty?
redirect_to new_import_path, alert: 'No files were selected for upload', status: :unprocessable_entity
return
redirect_to new_import_path, alert: 'No files were selected for upload', status: :unprocessable_entity and return
end
created_imports = []
@@ -59,11 +58,11 @@ class ImportsController < ApplicationController
if created_imports.any?
redirect_to imports_url,
notice: "#{created_imports.size} files are queued to be imported in background",
status: :see_other
status: :see_other and return
else
redirect_to new_import_path,
alert: 'No valid file references were found. Please upload files using the file selector.',
status: :unprocessable_entity
status: :unprocessable_entity and return
end
rescue StandardError => e
if created_imports.present?
@@ -95,7 +94,7 @@ class ImportsController < ApplicationController
end
def import_params
params.require(:import).permit(:name, :source, files: [])
params.require(:import).permit(:name, files: [])
end
def create_import_from_signed_id(signed_id)
@@ -103,18 +102,29 @@ class ImportsController < ApplicationController
blob = ActiveStorage::Blob.find_signed(signed_id)
import = current_user.imports.build(
name: blob.filename.to_s,
source: params[:import][:source]
)
import = current_user.imports.build(name: blob.filename.to_s)
import.file.attach(blob)
import.source = detect_import_source(import.file) if import.source.blank?
import.save!
import
end
def detect_import_source(file_attachment)
temp_file_path = Imports::SecureFileDownloader.new(file_attachment).download_to_temp_file
Imports::SourceDetector.new_from_file_header(temp_file_path).detect_source
rescue StandardError => e
Rails.logger.warn "Failed to auto-detect import source for #{file_attachment.filename}: #{e.message}"
nil
ensure
# Cleanup temp file
if temp_file_path && File.exist?(temp_file_path)
File.unlink(temp_file_path)
end
end
def validate_points_limit
limit_exceeded = PointsLimitExceeded.new(current_user).call

View File

@@ -2,19 +2,19 @@
class Geojson::Importer
include Imports::Broadcaster
include Imports::FileLoader
include PointValidation
attr_reader :import, :user_id
attr_reader :import, :user_id, :file_path
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user_id = user_id
@file_path = file_path
end
def call
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
json = Oj.load(file_content)
json = load_json_data
data = Geojson::Params.new(json).call
data.each.with_index(1) do |point, index|

View File

@@ -2,12 +2,14 @@
class GoogleMaps::PhoneTakeoutImporter
include Imports::Broadcaster
include Imports::FileLoader
attr_reader :import, :user_id
attr_reader :import, :user_id, :file_path
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user_id = user_id
@file_path = file_path
end
def call
@@ -46,9 +48,7 @@ class GoogleMaps::PhoneTakeoutImporter
raw_signals = []
raw_array = []
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
json = Oj.load(file_content)
json = load_json_data
if json.is_a?(Array)
raw_array = parse_raw_array(json)

View File

@@ -4,11 +4,14 @@
# via the UI, vs the CLI, which uses the `GoogleMaps::RecordsImporter` class.
class GoogleMaps::RecordsStorageImporter
include Imports::FileLoader
BATCH_SIZE = 1000
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user = User.find_by(id: user_id)
@file_path = file_path
end
def call
@@ -20,21 +23,16 @@ class GoogleMaps::RecordsStorageImporter
private
attr_reader :import, :user
attr_reader :import, :user, :file_path
def process_file_in_batches
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
locations = parse_file(file_content)
parsed_file = load_json_data
return unless parsed_file.is_a?(Hash) && parsed_file['locations']
locations = parsed_file['locations']
process_locations_in_batches(locations) if locations.present?
end
def parse_file(file_content)
parsed_file = Oj.load(file_content, mode: :compat)
return nil unless parsed_file.is_a?(Hash) && parsed_file['locations']
parsed_file['locations']
end
def process_locations_in_batches(locations)
batch = []
index = 0

View File

@@ -2,13 +2,15 @@
class GoogleMaps::SemanticHistoryImporter
include Imports::Broadcaster
include Imports::FileLoader
BATCH_SIZE = 1000
attr_reader :import, :user_id
attr_reader :import, :user_id, :file_path
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user_id = user_id
@file_path = file_path
@current_index = 0
end
@@ -61,8 +63,7 @@ class GoogleMaps::SemanticHistoryImporter
end
def points_data
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
json = Oj.load(file_content)
json = load_json_data
json['timelineObjects'].flat_map do |timeline_object|
parse_timeline_object(timeline_object)

View File

@@ -4,16 +4,18 @@ require 'rexml/document'
class Gpx::TrackImporter
include Imports::Broadcaster
include Imports::FileLoader
attr_reader :import, :user_id
attr_reader :import, :user_id, :file_path
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user_id = user_id
@file_path = file_path
end
def call
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
file_content = load_file_content
json = Hash.from_xml(file_content)
tracks = json['gpx']['trk']

View File

@@ -14,7 +14,10 @@ class Imports::Create
import.update!(status: :processing)
broadcast_status_update
importer(import.source).new(import, user.id).call
temp_file_path = Imports::SecureFileDownloader.new(import.file).download_to_temp_file
source = import.source.presence || detect_source_from_file(temp_file_path)
importer(source).new(import, user.id, temp_file_path).call
schedule_stats_creating(user.id)
schedule_visit_suggesting(user.id, import)
@@ -27,6 +30,10 @@ class Imports::Create
create_import_failed_notification(import, user, e)
ensure
if temp_file_path && File.exist?(temp_file_path)
File.unlink(temp_file_path)
end
if import.processing?
import.update!(status: :completed)
broadcast_status_update
@@ -81,6 +88,11 @@ class Imports::Create
).call
end
def detect_source_from_file(temp_file_path)
detector = Imports::SourceDetector.new_from_file_header(temp_file_path)
detector.detect_source!
end
def import_failed_message(import, error)
if DawarichSettings.self_hosted?
"Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"

View File

@@ -0,0 +1,26 @@
# frozen_string_literal: true
module Imports
module FileLoader
extend ActiveSupport::Concern
private
def load_json_data
if file_path && File.exist?(file_path)
Oj.load_file(file_path, mode: :compat)
else
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
Oj.load(file_content, mode: :compat)
end
end
def load_file_content
if file_path && File.exist?(file_path)
File.read(file_path)
else
Imports::SecureFileDownloader.new(import.file).download_with_verification
end
end
end
end
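For context on how this concern is consumed: each importer stores the optional file_path handed to it by Imports::Create and lets the concern fall back to the remote blob when no local path is available. A minimal hypothetical includer (the class name is illustrative, not part of this PR):

class ExampleJsonImporter
  include Imports::FileLoader

  attr_reader :import, :user_id, :file_path

  def initialize(import, user_id, file_path = nil)
    @import = import
    @user_id = user_id
    # When present, load_json_data reads this path with Oj.load_file;
    # when nil, it downloads the blob via SecureFileDownloader instead.
    @file_path = file_path
  end

  def call
    json = load_json_data
    # ...process the parsed payload...
    json
  end
end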

View File

@@ -9,6 +9,63 @@ class Imports::SecureFileDownloader
end
def download_with_verification
file_content = download_to_string
verify_file_integrity(file_content)
file_content
end
def download_to_temp_file
retries = 0
temp_file = nil
begin
Timeout.timeout(DOWNLOAD_TIMEOUT) do
temp_file = create_temp_file
# Download directly to temp file
storage_attachment.download do |chunk|
temp_file.write(chunk)
end
temp_file.rewind
# If file is empty, try alternative download method
if temp_file.size == 0
Rails.logger.warn('No content received from block download, trying alternative method')
temp_file.write(storage_attachment.blob.download)
temp_file.rewind
end
end
rescue Timeout::Error => e
retries += 1
if retries <= MAX_RETRIES
Rails.logger.warn("Download timeout, attempt #{retries} of #{MAX_RETRIES}")
cleanup_temp_file(temp_file)
retry
else
Rails.logger.error("Download failed after #{MAX_RETRIES} attempts")
cleanup_temp_file(temp_file)
raise
end
rescue StandardError => e
Rails.logger.error("Download error: #{e.message}")
cleanup_temp_file(temp_file)
raise
end
raise 'Download completed but no content was received' if temp_file.size == 0
verify_temp_file_integrity(temp_file)
temp_file.path
ensure
# Keep temp file open so it can be read by other processes
# Caller is responsible for cleanup
end
private
attr_reader :storage_attachment
def download_to_string
retries = 0
file_content = nil
@@ -51,13 +108,23 @@ class Imports::SecureFileDownloader
raise 'Download completed but no content was received' if file_content.nil? || file_content.empty?
verify_file_integrity(file_content)
file_content
end
private
def create_temp_file
extension = File.extname(storage_attachment.filename.to_s)
basename = File.basename(storage_attachment.filename.to_s, extension)
Tempfile.new(["#{basename}_#{Time.now.to_i}", extension], binmode: true)
end
attr_reader :storage_attachment
def cleanup_temp_file(temp_file)
return unless temp_file
temp_file.close unless temp_file.closed?
temp_file.unlink if File.exist?(temp_file.path)
rescue StandardError => e
Rails.logger.warn("Failed to cleanup temp file: #{e.message}")
end
def verify_file_integrity(file_content)
return if file_content.nil? || file_content.empty?
@@ -78,4 +145,26 @@ class Imports::SecureFileDownloader
raise "Checksum mismatch: expected #{expected_checksum}, got #{actual_checksum}"
end
def verify_temp_file_integrity(temp_file)
return if temp_file.nil? || temp_file.size == 0
# Verify file size
expected_size = storage_attachment.blob.byte_size
actual_size = temp_file.size
if expected_size != actual_size
raise "Incomplete download: expected #{expected_size} bytes, got #{actual_size} bytes"
end
# Verify checksum
expected_checksum = storage_attachment.blob.checksum
temp_file.rewind
actual_checksum = Base64.strict_encode64(Digest::MD5.digest(temp_file.read))
temp_file.rewind
return unless expected_checksum != actual_checksum
raise "Checksum mismatch: expected #{expected_checksum}, got #{actual_checksum}"
end
end
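Because download_to_temp_file intentionally leaves the file on disk (see the ensure comment above), every caller owns the cleanup. A small caller-side sketch under that contract, with `attachment` assumed to be an ActiveStorage attachment:

path = Imports::SecureFileDownloader.new(attachment).download_to_temp_file
begin
  # e.g. feed just the header to the source detector
  header = File.open(path, 'rb') { |f| f.read(2048) }
  # ...use the downloaded file...
ensure
  # Mirrors the cleanup done in ImportsController and Imports::Create
  File.unlink(path) if path && File.exist?(path)
end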

View File

@@ -0,0 +1,235 @@
# frozen_string_literal: true
class Imports::SourceDetector
class UnknownSourceError < StandardError; end
DETECTION_RULES = {
google_semantic_history: {
required_keys: ['timelineObjects'],
nested_patterns: [
['timelineObjects', 0, 'activitySegment'],
['timelineObjects', 0, 'placeVisit']
]
},
google_records: {
required_keys: ['locations'],
nested_patterns: [
['locations', 0, 'latitudeE7'],
['locations', 0, 'longitudeE7']
]
},
google_phone_takeout: {
alternative_patterns: [
# Pattern 1: Object with semanticSegments
{
required_keys: ['semanticSegments'],
nested_patterns: [['semanticSegments', 0, 'startTime']]
},
# Pattern 2: Object with rawSignals
{
required_keys: ['rawSignals']
},
# Pattern 3: Array format with visit/activity objects
{
structure: :array,
nested_patterns: [
[0, 'visit', 'topCandidate', 'placeLocation'],
[0, 'activity']
]
}
]
},
geojson: {
required_keys: ['type', 'features'],
required_values: { 'type' => 'FeatureCollection' },
nested_patterns: [
['features', 0, 'type'],
['features', 0, 'geometry'],
['features', 0, 'properties']
]
},
owntracks: {
structure: :rec_file_lines,
line_pattern: /"_type":"location"/
}
}.freeze
def initialize(file_content, filename = nil, file_path = nil)
@file_content = file_content
@filename = filename
@file_path = file_path
end
def self.new_from_file_header(file_path)
filename = File.basename(file_path)
# For detection, read only first 2KB to optimize performance
header_content = File.open(file_path, 'rb') { |f| f.read(2048) }
new(header_content, filename, file_path)
end
def detect_source
return :gpx if gpx_file?
return :owntracks if owntracks_file?
json_data = parse_json
return nil unless json_data
DETECTION_RULES.each do |format, rules|
next if format == :owntracks # Already handled above
if matches_format?(json_data, rules)
return format
end
end
nil
end
def detect_source!
format = detect_source
raise UnknownSourceError, 'Unable to detect file format' unless format
format
end
private
attr_reader :file_content, :filename, :file_path
def gpx_file?
return false unless filename
# Must have .gpx extension AND contain GPX XML structure
return false unless filename.downcase.end_with?('.gpx')
# Check content for GPX structure
content_to_check = if file_path && File.exist?(file_path)
# Read first 1KB for GPX detection
File.open(file_path, 'rb') { |f| f.read(1024) }
else
file_content
end
content_to_check.strip.start_with?('<?xml') && content_to_check.include?('<gpx')
end
def owntracks_file?
return false unless filename
# Check for .rec extension first (fastest check)
return true if filename.downcase.end_with?('.rec')
# Check for specific OwnTracks line format in content
content_to_check = if file_path && File.exist?(file_path)
# For OwnTracks, read first few lines only
File.open(file_path, 'r') { |f| f.read(2048) }
else
file_content
end
content_to_check.lines.any? { |line| line.include?('"_type":"location"') }
end
def parse_json
# If we have a file path, use streaming for better memory efficiency
if file_path && File.exist?(file_path)
Oj.load_file(file_path, mode: :compat)
else
Oj.load(file_content, mode: :compat)
end
rescue Oj::ParseError, JSON::ParserError
# If full file parsing fails but we have a file path, try with just the header
if file_path && file_content.length < 2048
begin
File.open(file_path, 'rb') do |f|
partial_content = f.read(4096) # Try a bit more content
Oj.load(partial_content, mode: :compat)
end
rescue Oj::ParseError, JSON::ParserError
nil
end
else
nil
end
end
def matches_format?(json_data, rules)
# Handle alternative patterns (for google_phone_takeout)
if rules[:alternative_patterns]
return rules[:alternative_patterns].any? { |pattern| matches_pattern?(json_data, pattern) }
end
matches_pattern?(json_data, rules)
end
def matches_pattern?(json_data, pattern)
# Check structure requirements
return false unless structure_matches?(json_data, pattern[:structure])
# Check required keys
if pattern[:required_keys]
return false unless has_required_keys?(json_data, pattern[:required_keys])
end
# Check required values
if pattern[:required_values]
return false unless has_required_values?(json_data, pattern[:required_values])
end
# Check nested patterns
if pattern[:nested_patterns]
return false unless has_nested_patterns?(json_data, pattern[:nested_patterns])
end
true
end
def structure_matches?(json_data, required_structure)
case required_structure
when :array
json_data.is_a?(Array)
when nil
true # No specific structure required
else
true # Default to no restriction
end
end
def has_required_keys?(json_data, keys)
return false unless json_data.is_a?(Hash)
keys.all? { |key| json_data.key?(key) }
end
def has_required_values?(json_data, values)
return false unless json_data.is_a?(Hash)
values.all? { |key, expected_value| json_data[key] == expected_value }
end
def has_nested_patterns?(json_data, patterns)
patterns.any? { |pattern| nested_key_exists?(json_data, pattern) }
end
def nested_key_exists?(data, key_path)
current = data
key_path.each do |key|
return false unless current
if current.is_a?(Array)
return false if key >= current.length
current = current[key]
elsif current.is_a?(Hash)
return false unless current.key?(key)
current = current[key]
else
return false
end
end
!current.nil?
end
end
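As a usage note, the detector exposes two entry points: header-based detection from a local path and detection from an in-memory string. A short hedged sketch of both (file paths are illustrative):

# From a local file: only the first 2KB of the header is read for detection
Imports::SourceDetector.new_from_file_header('/tmp/records_123.json').detect_source
# => :google_records when the header matches, nil when unrecognized

# From in-memory content, filename included so the .rec/.gpx shortcuts can apply
Imports::SourceDetector.new('{"_type":"location","lat":52.2,"lon":13.3}', 'points.json').detect_source
# => :owntracks

# Raising variant, as used by Imports::Create
Imports::SourceDetector.new_from_file_header('/tmp/unknown.bin').detect_source!
# raises Imports::SourceDetector::UnknownSourceError, 'Unable to detect file format'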

View File

@@ -2,16 +2,18 @@
class OwnTracks::Importer
include Imports::Broadcaster
include Imports::FileLoader
attr_reader :import, :user_id
attr_reader :import, :user_id, :file_path
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user_id = user_id
@file_path = file_path
end
def call
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
file_content = load_file_content
parsed_data = OwnTracks::RecParser.new(file_content).call
points_data = parsed_data.map do |point|

View File

@@ -2,17 +2,18 @@
class Photos::Importer
include Imports::Broadcaster
include Imports::FileLoader
include PointValidation
attr_reader :import, :user_id
attr_reader :import, :user_id, :file_path
def initialize(import, user_id)
def initialize(import, user_id, file_path = nil)
@import = import
@user_id = user_id
@file_path = file_path
end
def call
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
json = Oj.load(file_content)
json = load_json_data
json.each.with_index(1) { |point, index| create_point(point, index) }
end

View File

@@ -8,6 +8,7 @@ module Visits
@user = user
@params = params.respond_to?(:with_indifferent_access) ? params.with_indifferent_access : params
@visit = nil
@errors = nil
end
def call
@@ -15,10 +16,19 @@ module Visits
place = find_or_create_place
return false unless place
create_visit(place)
visit = create_visit(place)
visit
end
rescue ActiveRecord::RecordInvalid => e
ExceptionReporter.call(e, "Failed to create visit: #{e.message}")
@errors = "Failed to create visit: #{e.message}"
false
rescue StandardError => e
ExceptionReporter.call(e, 'Failed to create visit')
ExceptionReporter.call(e, "Failed to create visit: #{e.message}")
@errors = "Failed to create visit: #{e.message}"
false
end
@@ -56,7 +66,7 @@ module Visits
place
rescue StandardError => e
ExceptionReporter.call(e, 'Failed to create place')
ExceptionReporter.call(e, "Failed to create place: #{e.message}")
nil
end

View File

@@ -4,68 +4,6 @@
direct_upload_user_trial_value: current_user.trial?,
direct_upload_target: "form"
} do |form| %>
<div class="form-control w-full">
<label class="label">
<span class="label-text">Select source</span>
</label>
<div class="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-4">
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
<div class="form-control">
<label class="label cursor-pointer space-x-3">
<%= form.radio_button :source, :google_semantic_history, class: "radio radio-primary" %>
<span class="label-text">Google Semantic History</span>
</label>
<p class="text-sm mt-2">JSON files from your Takeout/Location History/Semantic Location History/YEAR</p>
</div>
</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
<div class="form-control">
<label class="label cursor-pointer space-x-3">
<%= form.radio_button :source, :google_records, class: "radio radio-primary" %>
<span class="label-text">Google Records</span>
</label>
<p class="text-sm mt-2">The Records.json file from your Google Takeout</p>
</div>
</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
<div class="form-control">
<label class="label cursor-pointer space-x-3">
<%= form.radio_button :source, :google_phone_takeout, class: "radio radio-primary" %>
<span class="label-text">Google Phone Takeout</span>
</label>
<p class="text-sm mt-2">A JSON file you received after your request for Takeout from your mobile device</p>
</div>
</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
<div class="form-control">
<label class="label cursor-pointer space-x-3">
<%= form.radio_button :source, :owntracks, class: "radio radio-primary" %>
<span class="label-text">Owntracks</span>
</label>
<p class="text-sm mt-2">A .REC file you could find in your volumes/owntracks-recorder/store/rec/USER/TOPIC directory</p>
</div>
</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
<div class="form-control">
<label class="label cursor-pointer space-x-3">
<%= form.radio_button :source, :geojson, class: "radio radio-primary" %>
<span class="label-text">GeoJSON</span>
</label>
<p class="text-sm mt-2">A valid GeoJSON file. For example, a file, exported from a Dawarich instance</p>
</div>
</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
<div class="form-control">
<label class="label cursor-pointer space-x-3">
<%= form.radio_button :source, :gpx, class: "radio radio-primary" %>
<span class="label-text">GPX</span>
</label>
<p class="text-sm mt-2">GPX track file</p>
</div>
</div>
</div>
</div>
<label class="form-control w-full max-w-xs my-5">
<div class="label">
<span class="label-text">Select one or multiple files</span>

View File

@@ -139,8 +139,8 @@ RSpec.describe 'Api::V1::Visits', type: :request do
post '/api/v1/visits', params: missing_name_params, headers: auth_headers
json_response = JSON.parse(response.body)
expect(json_response['error']).to eq('Failed to create visit')
expect(json_response['errors']).to include("Name can't be blank")
end
it 'does not create a visit' do

View File

@@ -17,6 +17,7 @@ RSpec.describe Imports::Create do
it 'sets status to processing at start' do
service.call
expect(import.reload.status).to eq('processing').or eq('completed')
end
@@ -29,7 +30,7 @@ RSpec.describe Imports::Create do
context 'when import fails' do
before do
allow(OwnTracks::Importer).to receive(:new).with(import, user.id).and_raise(StandardError)
allow(OwnTracks::Importer).to receive(:new).with(import, user.id, kind_of(String)).and_raise(StandardError)
end
it 'sets status to failed' do
@@ -51,7 +52,7 @@ RSpec.describe Imports::Create do
it 'calls the GoogleMaps::SemanticHistoryImporter' do
expect(GoogleMaps::SemanticHistoryImporter).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
@@ -62,10 +63,16 @@ RSpec.describe Imports::Create do
context 'when source is google_phone_takeout' do
let(:import) { create(:import, source: 'google_phone_takeout') }
let(:file_path) { Rails.root.join('spec/fixtures/files/google/phone-takeout.json') }
before do
import.file.attach(io: File.open(file_path), filename: 'phone-takeout.json',
content_type: 'application/json')
end
it 'calls the GoogleMaps::PhoneTakeoutImporter' do
expect(GoogleMaps::PhoneTakeoutImporter).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
end
@@ -81,7 +88,7 @@ RSpec.describe Imports::Create do
it 'calls the OwnTracks::Importer' do
expect(OwnTracks::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
@@ -102,7 +109,7 @@ RSpec.describe Imports::Create do
context 'when import fails' do
before do
allow(OwnTracks::Importer).to receive(:new).with(import, user.id).and_raise(StandardError)
allow(OwnTracks::Importer).to receive(:new).with(import, user.id, kind_of(String)).and_raise(StandardError)
end
context 'when self-hosted' do
@@ -153,37 +160,55 @@ RSpec.describe Imports::Create do
it 'calls the Gpx::TrackImporter' do
expect(Gpx::TrackImporter).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
end
context 'when source is geojson' do
let(:import) { create(:import, source: 'geojson') }
let(:file_path) { Rails.root.join('spec/fixtures/files/geojson/export.json') }
before do
import.file.attach(io: File.open(file_path), filename: 'export.json',
content_type: 'application/json')
end
it 'calls the Geojson::Importer' do
expect(Geojson::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
end
context 'when source is immich_api' do
let(:import) { create(:import, source: 'immich_api') }
let(:file_path) { Rails.root.join('spec/fixtures/files/immich/geodata.json') }
before do
import.file.attach(io: File.open(file_path), filename: 'geodata.json',
content_type: 'application/json')
end
it 'calls the Photos::Importer' do
expect(Photos::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
end
context 'when source is photoprism_api' do
let(:import) { create(:import, source: 'photoprism_api') }
let(:file_path) { Rails.root.join('spec/fixtures/files/immich/geodata.json') }
before do
import.file.attach(io: File.open(file_path), filename: 'geodata.json',
content_type: 'application/json')
end
it 'calls the Photos::Importer' do
expect(Photos::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
receive(:new).with(import, user.id, kind_of(String)).and_return(double(call: true))
service.call
end
end

View File

@@ -0,0 +1,174 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Imports::SourceDetector do
let(:detector) { described_class.new(file_content, filename) }
let(:filename) { nil }
describe '#detect_source' do
context 'with Google Semantic History format' do
let(:file_content) { file_fixture('google/semantic_history.json').read }
it 'detects google_semantic_history format' do
expect(detector.detect_source).to eq(:google_semantic_history)
end
end
context 'with Google Records format' do
let(:file_content) { file_fixture('google/records.json').read }
it 'detects google_records format' do
expect(detector.detect_source).to eq(:google_records)
end
end
context 'with Google Phone Takeout format' do
let(:file_content) { file_fixture('google/phone-takeout.json').read }
it 'detects google_phone_takeout format' do
expect(detector.detect_source).to eq(:google_phone_takeout)
end
end
context 'with Google Phone Takeout array format' do
let(:file_content) { file_fixture('google/location-history.json').read }
it 'detects google_phone_takeout format' do
expect(detector.detect_source).to eq(:google_phone_takeout)
end
end
context 'with GeoJSON format' do
let(:file_content) { file_fixture('geojson/export.json').read }
it 'detects geojson format' do
expect(detector.detect_source).to eq(:geojson)
end
end
context 'with OwnTracks REC file' do
let(:file_content) { file_fixture('owntracks/2024-03.rec').read }
let(:filename) { 'test.rec' }
it 'detects owntracks format' do
expect(detector.detect_source).to eq(:owntracks)
end
end
context 'with OwnTracks content without .rec extension' do
let(:file_content) { '{"_type":"location","lat":52.225,"lon":13.332}' }
let(:filename) { 'test.json' }
it 'detects owntracks format based on content' do
expect(detector.detect_source).to eq(:owntracks)
end
end
context 'with GPX file' do
let(:file_content) { file_fixture('gpx/gpx_track_single_segment.gpx').read }
let(:filename) { 'test.gpx' }
it 'detects gpx format' do
expect(detector.detect_source).to eq(:gpx)
end
end
context 'with invalid JSON' do
let(:file_content) { 'invalid json content' }
it 'returns nil for invalid JSON' do
expect(detector.detect_source).to be_nil
end
end
context 'with unknown JSON format' do
let(:file_content) { '{"unknown": "format", "data": []}' }
it 'returns nil for unknown format' do
expect(detector.detect_source).to be_nil
end
end
context 'with empty content' do
let(:file_content) { '' }
it 'returns nil for empty content' do
expect(detector.detect_source).to be_nil
end
end
end
describe '#detect_source!' do
context 'with valid format' do
let(:file_content) { file_fixture('google/records.json').read }
it 'returns the detected format' do
expect(detector.detect_source!).to eq(:google_records)
end
end
context 'with unknown format' do
let(:file_content) { '{"unknown": "format"}' }
it 'raises UnknownSourceError' do
expect { detector.detect_source! }.to raise_error(
Imports::SourceDetector::UnknownSourceError,
'Unable to detect file format'
)
end
end
end
describe '.new_from_file_header' do
context 'with Google Records file' do
let(:fixture_path) { file_fixture('google/records.json').to_s }
it 'detects source correctly from file path' do
detector = described_class.new_from_file_header(fixture_path)
expect(detector.detect_source).to eq(:google_records)
end
it 'can detect source efficiently from file' do
detector = described_class.new_from_file_header(fixture_path)
# Verify it can detect correctly using file-based approach
expect(detector.detect_source).to eq(:google_records)
end
end
context 'with GeoJSON file' do
let(:fixture_path) { file_fixture('geojson/export.json').to_s }
it 'detects source correctly from file path' do
detector = described_class.new_from_file_header(fixture_path)
expect(detector.detect_source).to eq(:geojson)
end
end
end
describe 'detection accuracy with real fixture files' do
shared_examples 'detects format correctly' do |expected_format, fixture_path|
it "detects #{expected_format} format for #{fixture_path}" do
file_content = file_fixture(fixture_path).read
filename = File.basename(fixture_path)
detector = described_class.new(file_content, filename)
expect(detector.detect_source).to eq(expected_format)
end
end
# Test various Google Semantic History variations
include_examples 'detects format correctly', :google_semantic_history, 'google/location-history/with_activitySegment_with_startLocation.json'
include_examples 'detects format correctly', :google_semantic_history, 'google/location-history/with_placeVisit_with_location_with_coordinates.json'
# Test GeoJSON variations
include_examples 'detects format correctly', :geojson, 'geojson/export_same_points.json'
include_examples 'detects format correctly', :geojson, 'geojson/gpslogger_example.json'
# Test GPX files
include_examples 'detects format correctly', :gpx, 'gpx/arc_example.gpx'
include_examples 'detects format correctly', :gpx, 'gpx/garmin_example.gpx'
include_examples 'detects format correctly', :gpx, 'gpx/gpx_track_multiple_segments.gpx'
end
end