208 lines
6.6 KiB
Python
208 lines
6.6 KiB
Python
#!/usr/bin/env python3
|
|
import json
|
|
import os
|
|
from datetime import datetime
|
|
from collections import defaultdict
|
|
|
|
def parse_coordinates(latlng_str):
    """
    Parse a LatLng string of the form "50.6335999°, 6.6983381°".

    Returns a (latitude, longitude) tuple of floats, or (None, None)
    when the input is empty, has the wrong number of components, or
    contains non-numeric values.
    """
    if not latlng_str:
        return None, None

    try:
        # Drop degree symbols, then split into the two comma-separated parts.
        pieces = [piece.strip() for piece in latlng_str.replace('°', '').split(',')]
        if len(pieces) != 2:
            return None, None
        return float(pieces[0]), float(pieces[1])
    except (ValueError, IndexError):
        # Non-numeric content — report "no coordinates" rather than raising.
        return None, None
|
|
|
|
def create_gpx_header():
    """Create a GPX file header with proper namespaces.

    Returns the XML declaration plus the opening <gpx> root element
    (GPX 1.1, Topografix schema). The document must later be closed
    with create_gpx_footer().
    """
    # NOTE(review): attribute continuation lines appear flush-left in this
    # source; XML allows arbitrary whitespace between attributes, so the
    # output is valid either way — confirm against the original formatting.
    return '''<?xml version="1.0" encoding="UTF-8"?>
<gpx version="1.1"
creator="Timeline-to-GPX converter"
xmlns="http://www.topografix.com/GPX/1/1"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.topografix.com/GPX/1/1
http://www.topografix.com/GPX/1/1/gpx.xsd">
'''
|
|
|
|
def create_gpx_footer():
    """Return the closing </gpx> tag that terminates a GPX document."""
    closing_tag = '</gpx>'
    return closing_tag
|
|
|
|
def create_track_header(date):
    """Create a track header with the date as name.

    Opens a <trk> element named "Track <date>" and its first <trkseg>;
    the pair is closed by create_track_footer().
    """
    return f'''  <trk>
    <name>Track {date}</name>
    <trkseg>
'''
|
|
|
|
def create_track_footer():
    """Create a track footer: closes the <trkseg> and <trk> elements
    opened by create_track_header()."""
    return '''    </trkseg>
  </trk>
'''
|
|
|
|
def create_trackpoint(lat, lon, altitude, timestamp):
    """
    Build a single GPX <trkpt> element for the given position.

    The <ele> child is emitted only when altitude is not None, and the
    <time> child only when timestamp is non-empty.
    """
    fragments = [f'      <trkpt lat="{lat}" lon="{lon}">\n']

    if altitude is not None:
        fragments.append(f'        <ele>{altitude}</ele>\n')

    if timestamp:
        fragments.append(f'        <time>{timestamp}</time>\n')

    fragments.append('      </trkpt>\n')
    return ''.join(fragments)
|
|
|
|
def process_timeline_json(input_file):
    """
    Process a Timeline JSON export and generate one GPX file per day.

    Points are gathered from two sections of the export — ``rawSignals``
    (which may carry altitude) and ``semanticSegments[].timelinePath``
    (no altitude) — grouped by the calendar date of each timestamp,
    sorted chronologically, and written to
    ``<year>/<month>/track_<YYYY-MM-DD>.gpx``.

    Prints progress and a final summary; returns None. A missing or
    unparseable input file is reported and aborts processing.
    """
    print(f"Processing {input_file}...")

    try:
        with open(input_file, 'r') as f:
            data = json.load(f)
    except (json.JSONDecodeError, FileNotFoundError) as e:
        print(f"Error reading JSON file: {e}")
        return

    points_by_date = defaultdict(list)
    # Shared mutable counters so the helper can update them in place.
    counters = {'total': 0, 'skipped': 0}

    print("Processing rawSignals data...")
    for signal in data.get('rawSignals', []):
        position = signal.get('position', {})
        _collect_point(points_by_date, counters,
                       position.get('LatLng'), position.get('timestamp'),
                       position.get('altitude'), 'rawSignal')

    print("Processing semanticSegments timelinePath data...")
    for segment in data.get('semanticSegments', []):
        for path_point in segment.get('timelinePath', []):
            # timelinePath entries carry no altitude information.
            _collect_point(points_by_date, counters,
                           path_point.get('point'), path_point.get('time'),
                           None, 'semanticSegment')

    for date, points in points_by_date.items():
        if points:
            _write_day_gpx(date, points)

    print(f"Processing complete. Created {len(points_by_date)} GPX files.")
    print(f"Total points processed: {counters['total']}")
    if counters['skipped'] > 0:
        print(f"Skipped {counters['skipped']} points due to missing or invalid data.")


def _collect_point(points_by_date, counters, latlng_str, timestamp_str,
                   altitude, source):
    """Validate one raw point and file it under its date.

    Increments counters['total'] on success, counters['skipped'] when the
    timestamp or coordinates are missing/unparseable (skips never raise).
    """
    if not latlng_str or not timestamp_str:
        counters['skipped'] += 1
        return

    try:
        # fromisoformat cannot handle a trailing 'Z' before Python 3.11,
        # so normalize it to an explicit UTC offset first.
        timestamp_dt = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
        date_str = timestamp_dt.strftime('%Y-%m-%d')

        lat, lng = parse_coordinates(latlng_str)
        if lat is None or lng is None:
            counters['skipped'] += 1
            return

        points_by_date[date_str].append({
            'lat': lat,
            'lon': lng,
            'altitude': altitude,
            'timestamp': timestamp_str,
            'source': source,
        })
        counters['total'] += 1
    except (ValueError, TypeError) as e:
        print(f"Error processing {source} point: {e}")
        counters['skipped'] += 1


def _write_day_gpx(date, points):
    """Write one day's points (sorted by timestamp) to
    <year>/<month>/track_<date>.gpx, creating directories as needed."""
    # date is formatted YYYY-MM-DD; use its parts for the directory layout.
    year, month, _ = date.split('-')
    dir_path = os.path.join(year, month)
    os.makedirs(dir_path, exist_ok=True)

    output_file = os.path.join(dir_path, f"track_{date}.gpx")
    print(f"Creating {output_file} with {len(points)} trackpoints...")

    points.sort(key=lambda p: p.get('timestamp', ''))

    with open(output_file, 'w') as f:
        f.write(create_gpx_header())
        f.write(create_track_header(date))
        for point in points:
            f.write(create_trackpoint(
                point['lat'],
                point['lon'],
                point['altitude'],
                point['timestamp'],
            ))
        f.write(create_track_footer())
        f.write(create_gpx_footer())
|
|
|
|
if __name__ == "__main__":
    import sys

    # Generalization: allow an optional input path on the command line;
    # running with no arguments preserves the original behavior.
    input_path = sys.argv[1] if len(sys.argv) > 1 else "Timeline.json"
    process_timeline_json(input_path)
|
|
|