Compare commits
14 Commits
6b86052998
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| bfc244e1f0 | |||
| abc3b9fcaf | |||
| 19aa910c60 | |||
| a8c85f1b74 | |||
| 6839bfe316 | |||
| 9f84754a23 | |||
| d9ec1405d7 | |||
| be20ba8c41 | |||
| 674fc57795 | |||
| be4a87303a | |||
| 92e85f9faf | |||
| d84f72b936 | |||
| 8ffb1735f5 | |||
| 61d36c3beb |
66
.gitignore
vendored
Normal file
66
.gitignore
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
# PocketBase data and migrations
|
||||
pb_data/
|
||||
pb_migrations/
|
||||
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# Virtual environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Node.js
|
||||
node_modules/
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# IDEs
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
.DS_Store?
|
||||
._*
|
||||
.Spotlight-V100
|
||||
.Trashes
|
||||
ehthumbs.db
|
||||
Thumbs.db
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
logs/
|
||||
|
||||
# Temporary files
|
||||
*.tmp
|
||||
*.temp
|
||||
167
apps/streamlit_data_viewer/app.py
Normal file
167
apps/streamlit_data_viewer/app.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""Streamlit viewer for the exported Google Timeline CSV datasets.

Scans every subdirectory of the timeline_csv folder for ``*.csv`` files,
lets the user pick one, and renders:

* a folium map (for datasets with known lat/lon columns),
* coordinate statistics,
* a data preview, per-column info, and numeric summary statistics.
"""
import streamlit as st
import pandas as pd
import os
from pathlib import Path
import folium
from streamlit_folium import folium_static
import numpy as np

st.set_page_config(page_title="Timeline CSV Viewer", layout="wide")

st.title("Timeline CSV Viewer")

# Path to the timeline_csv folder, relative to the process working directory.
# NOTE(review): the repo stores the data under data/timeline_csv/, so this
# "../timeline_csv" only resolves if the app is launched from a sibling
# directory of timeline_csv — TODO confirm the intended working directory.
timeline_csv_path = Path("../timeline_csv")

# Collect every CSV file found one level down (timeline_csv/<subdir>/*.csv).
csv_files = []
if timeline_csv_path.exists():
    for subdir in timeline_csv_path.iterdir():
        if subdir.is_dir():
            csv_files.extend(subdir.glob("*.csv"))

if not csv_files:
    st.error("No CSV files found in the timeline_csv folder.")
    st.stop()

# Known geospatial datasets and the columns holding their coordinates and
# timestamps.  ``None`` means the dataset has no such column.
GEOSPATIAL_FILES = {
    'timeline_path_points.csv': {'lat': 'lat', 'lon': 'lon', 'time': 'time'},
    'visits.csv': {'lat': 'top_lat', 'lon': 'top_lon', 'time': 'startTime'},
    'raw_signals.csv': {'lat': 'lat', 'lon': 'lon', 'time': 'timestamp'},
    'frequent_places.csv': {'lat': 'lat', 'lon': 'lon', 'time': None},
    'semantic_segments.csv': {'lat': None, 'lon': None, 'time': 'startTime'}
}

# Marker color per dataset; anything not listed falls back to orange.
MARKER_COLORS = {
    'timeline_path_points.csv': 'blue',
    'visits.csv': 'red',
    'raw_signals.csv': 'green',
}

# Decorate geospatial file names so they stand out in the selectbox.
enhanced_file_names = []
for f in csv_files:
    if f.name in GEOSPATIAL_FILES:
        enhanced_file_names.append(f"🗺️ {f.name} (Geospatial)")
    else:
        enhanced_file_names.append(f.name)

selected_enhanced_name = st.selectbox("Select a CSV file to view:", enhanced_file_names)

# Strip the decoration back off to recover the actual filename.
selected_file_name = selected_enhanced_name.replace('🗺️ ', '').replace(' (Geospatial)', '')

# Map the bare filename back to its full path.
selected_file_path = None
for file_path in csv_files:
    if file_path.name == selected_file_name:
        selected_file_path = file_path
        break

if selected_file_path:
    st.write(f"**File:** {selected_file_path}")

    try:
        # Read the CSV file
        df = pd.read_csv(selected_file_path)

        # Display basic info
        is_geospatial = selected_file_name in GEOSPATIAL_FILES
        if is_geospatial:
            st.success(f"🗺️ **Geospatial Dataset Detected** - {df.shape[0]} rows × {df.shape[1]} columns")
        else:
            st.write(f"**Shape:** {df.shape[0]} rows × {df.shape[1]} columns")

        # Show geospatial visualization if applicable.
        # (is_geospatial already encodes membership in GEOSPATIAL_FILES,
        # so no second membership check is needed.)
        if is_geospatial:
            geo_config = GEOSPATIAL_FILES[selected_file_name]
            lat_col = geo_config['lat']
            lon_col = geo_config['lon']
            time_col = geo_config['time']

            if lat_col and lon_col and lat_col in df.columns and lon_col in df.columns:
                st.subheader("🗺️ Map Visualization")

                # Filter out null coordinates
                geo_df = df.dropna(subset=[lat_col, lon_col])

                if len(geo_df) > 0:
                    # Remember how many valid points exist BEFORE sampling so
                    # the info message reports the true total (len(df) would
                    # overstate it when rows have null coordinates).
                    total_points = len(geo_df)
                    # Sample data if too large for performance
                    if total_points > 1000:
                        geo_df = geo_df.sample(n=1000)
                        st.info(f"Showing 1000 randomly sampled points out of {total_points} total points for performance")

                    # Create map centered on mean coordinates
                    center_lat = geo_df[lat_col].mean()
                    center_lon = geo_df[lon_col].mean()

                    m = folium.Map(location=[center_lat, center_lon], zoom_start=10)

                    # Color is per-dataset, not per-row: resolve it once
                    # before the marker loop instead of re-branching per row.
                    color = MARKER_COLORS.get(selected_file_name, 'orange')

                    # Add points to map
                    for idx, row in geo_df.iterrows():
                        popup_text = f"Index: {idx}"
                        if time_col and time_col in df.columns:
                            popup_text += f"<br>Time: {row[time_col]}"

                        folium.CircleMarker(
                            location=[row[lat_col], row[lon_col]],
                            radius=3,
                            popup=popup_text,
                            color=color,
                            fillColor=color,
                            fillOpacity=0.7
                        ).add_to(m)

                    folium_static(m)

                    # Show coordinate statistics
                    st.subheader("📍 Coordinate Statistics")
                    coord_stats = pd.DataFrame({
                        'Statistic': ['Count', 'Min Lat', 'Max Lat', 'Min Lon', 'Max Lon', 'Center Lat', 'Center Lon'],
                        'Value': [
                            len(geo_df),
                            f"{geo_df[lat_col].min():.6f}",
                            f"{geo_df[lat_col].max():.6f}",
                            f"{geo_df[lon_col].min():.6f}",
                            f"{geo_df[lon_col].max():.6f}",
                            f"{center_lat:.6f}",
                            f"{center_lon:.6f}"
                        ]
                    })
                    st.dataframe(coord_stats)
                else:
                    st.warning("No valid coordinates found in this dataset")
            else:
                if selected_file_name == 'semantic_segments.csv':
                    st.info("📅 This dataset contains temporal data that links to spatial information in other datasets")
                else:
                    st.warning(f"Expected coordinate columns ({lat_col}, {lon_col}) not found in this dataset")

        # Show first few rows
        st.subheader("Data Preview")
        st.dataframe(df.head(100))

        # Show column info
        st.subheader("Column Information")
        col_info = pd.DataFrame({
            'Column': df.columns,
            'Data Type': df.dtypes,
            'Non-Null Count': df.count(),
            'Null Count': df.isnull().sum()
        })
        st.dataframe(col_info)

        # Show basic statistics for numeric columns
        numeric_cols = df.select_dtypes(include=['number']).columns
        if len(numeric_cols) > 0:
            st.subheader("Numeric Column Statistics")
            st.dataframe(df[numeric_cols].describe())

    except Exception as e:
        st.error(f"Error reading the CSV file: {str(e)}")
|
||||
11
data/timeline_csv/frequent_places.csv
Normal file
11
data/timeline_csv/frequent_places.csv
Normal file
@@ -0,0 +1,11 @@
|
||||
placeId,label,lat,lon
|
||||
ChIJAAAAAAAAAAARjhi7lioZa2Y,HOME,51.6658192,-0.4056977
|
||||
ChIJAAAAAAAAAAARqv1PBlyKQqU,,51.5280408,-0.1333271
|
||||
ChIJAAAAAAAAAAAR4xVxQ-iFuSc,WORK,51.5157699,-0.1316355
|
||||
ChIJAAAAAAAAAAAREvwoXLLyhU0,,51.6636753,-0.3960678
|
||||
ChIJAAAAAAAAAAAR-aFGg1la334,,51.6541167,-0.3928591
|
||||
ChIJAAAAAAAAAAARVbhj9-fMCgA,,51.512069,-0.1321399
|
||||
ChIJAAAAAAAAAAARrsPaSQqNays,,48.1381556,11.5602535
|
||||
ChIJAAAAAAAAAAARwsyEMxqdhzg,,48.13921,11.57766
|
||||
ChIJAAAAAAAAAAARCPYgxYn5PKk,,51.6698017,-0.4085657
|
||||
ChIJAAAAAAAAAAAR5gk8mcL7EqQ,,51.5347377,-0.1382573
|
||||
|
34
data/timeline_csv/frequent_trip_mode_distribution.csv
Normal file
34
data/timeline_csv/frequent_trip_mode_distribution.csv
Normal file
@@ -0,0 +1,34 @@
|
||||
trip_index,entry_index,raw_json
|
||||
0,0,"{""mode"": ""WALKING"", ""rate"": 0.46875}"
|
||||
0,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.3125}"
|
||||
0,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.15625}"
|
||||
0,3,"{""mode"": ""CYCLING"", ""rate"": 0.0625}"
|
||||
1,0,"{""mode"": ""WALKING"", ""rate"": 0.4333333373069763}"
|
||||
1,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.3333333432674408}"
|
||||
1,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.13333334028720856}"
|
||||
1,3,"{""mode"": ""CYCLING"", ""rate"": 0.10000000149011612}"
|
||||
2,0,"{""mode"": ""WALKING"", ""rate"": 0.3888888955116272}"
|
||||
2,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.3055555522441864}"
|
||||
2,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.25}"
|
||||
2,3,"{""mode"": ""CYCLING"", ""rate"": 0.0555555559694767}"
|
||||
3,0,"{""mode"": ""WALKING"", ""rate"": 0.42105263471603394}"
|
||||
3,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.31578946113586426}"
|
||||
3,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.21052631735801697}"
|
||||
3,3,"{""mode"": ""CYCLING"", ""rate"": 0.05263157933950424}"
|
||||
4,0,"{""mode"": ""WALKING"", ""rate"": 0.5}"
|
||||
4,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.3499999940395355}"
|
||||
4,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.10000000149011612}"
|
||||
4,3,"{""mode"": ""CYCLING"", ""rate"": 0.05000000074505806}"
|
||||
5,0,"{""mode"": ""WALKING"", ""rate"": 0.6111111044883728}"
|
||||
5,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.2777777910232544}"
|
||||
5,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.1111111119389534}"
|
||||
6,0,"{""mode"": ""WALKING"", ""rate"": 0.5769230723381042}"
|
||||
6,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.3076923191547394}"
|
||||
6,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.11538461595773697}"
|
||||
7,0,"{""mode"": ""WALKING"", ""rate"": 0.5263158082962036}"
|
||||
7,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.2631579041481018}"
|
||||
7,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.15789473056793213}"
|
||||
7,3,"{""mode"": ""IN_PASSENGER_VEHICLE"", ""rate"": 0.05263157933950424}"
|
||||
8,0,"{""mode"": ""WALKING"", ""rate"": 0.5454545617103577}"
|
||||
8,1,"{""mode"": ""IN_TRAIN"", ""rate"": 0.27272728085517883}"
|
||||
8,2,"{""mode"": ""IN_SUBWAY"", ""rate"": 0.09090909361839294}"
|
||||
|
45
data/timeline_csv/frequent_trip_waypoints.csv
Normal file
45
data/timeline_csv/frequent_trip_waypoints.csv
Normal file
@@ -0,0 +1,45 @@
|
||||
trip_index,waypoint_order,waypoint_id
|
||||
0,0,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
0,1,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
0,2,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
0,3,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
1,0,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
1,1,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
1,2,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
1,3,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
2,0,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
2,1,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
2,2,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
2,3,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
3,0,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
3,1,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
3,2,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
3,3,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
4,0,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
4,1,ChIJAAAAAAAAAAARCO-FF_-eZUc
|
||||
4,2,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
4,3,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
4,4,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
5,0,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
5,1,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
5,2,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
5,3,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
6,0,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
6,1,ChIJAAAAAAAAAAARDpW2KdCKr_0
|
||||
6,2,ChIJAAAAAAAAAAARlCV8K3CctEM
|
||||
6,3,ChIJAAAAAAAAAAARFS254-AYGeQ
|
||||
6,4,ChIJAAAAAAAAAAARYgJcSNWXRnI
|
||||
6,5,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
6,6,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
6,7,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
7,0,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
7,1,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
7,2,ChIJAAAAAAAAAAARyKARfrOcmkE
|
||||
7,3,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
7,4,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
8,0,ChIJAAAAAAAAAAAR4xVxQ-iFuSc
|
||||
8,1,ChIJAAAAAAAAAAARqv1PBlyKQqU
|
||||
8,2,ChIJAAAAAAAAAAARlCV8K3CctEM
|
||||
8,3,ChIJAAAAAAAAAAARDpW2KdCKr_0
|
||||
8,4,ChIJAAAAAAAAAAAREvwoXLLyhU0
|
||||
8,5,ChIJAAAAAAAAAAARjhi7lioZa2Y
|
||||
|
10
data/timeline_csv/frequent_trips.csv
Normal file
10
data/timeline_csv/frequent_trips.csv
Normal file
@@ -0,0 +1,10 @@
|
||||
trip_index,startTimeMinutes,endTimeMinutes,durationMinutes,confidence,commuteDirection,waypoint_count,mode_dist_count
|
||||
0,504,551,47,0.0,COMMUTE_DIRECTION_HOME_TO_WORK,4,4
|
||||
1,1947,1992,45,0.0,COMMUTE_DIRECTION_HOME_TO_WORK,4,4
|
||||
2,3383,3429,46,0.0,COMMUTE_DIRECTION_HOME_TO_WORK,4,4
|
||||
3,4828,4875,46,0.0,COMMUTE_DIRECTION_HOME_TO_WORK,4,4
|
||||
4,6276,6325,48,0.0,COMMUTE_DIRECTION_HOME_TO_WORK,5,4
|
||||
5,1158,1198,40,0.0,COMMUTE_DIRECTION_WORK_TO_HOME,4,3
|
||||
6,2592,2658,66,0.0,COMMUTE_DIRECTION_WORK_TO_HOME,8,3
|
||||
7,4002,4063,61,0.0,COMMUTE_DIRECTION_WORK_TO_HOME,5,4
|
||||
8,5514,5586,71,0.0,COMMUTE_DIRECTION_WORK_TO_HOME,6,3
|
||||
|
9550
data/timeline_csv/raw_signals.csv
Normal file
9550
data/timeline_csv/raw_signals.csv
Normal file
File diff suppressed because it is too large
Load Diff
43296
data/timeline_csv/semantic_segments.csv
Normal file
43296
data/timeline_csv/semantic_segments.csv
Normal file
File diff suppressed because it is too large
Load Diff
122326
data/timeline_csv/timeline_path_points.csv
Normal file
122326
data/timeline_csv/timeline_path_points.csv
Normal file
File diff suppressed because it is too large
Load Diff
8
data/timeline_csv/travel_mode_affinities.csv
Normal file
8
data/timeline_csv/travel_mode_affinities.csv
Normal file
@@ -0,0 +1,8 @@
|
||||
mode,affinity
|
||||
WALKING,0.4837758243083954
|
||||
IN_TRAIN,0.2743362784385681
|
||||
IN_PASSENGER_VEHICLE,0.2418879121541977
|
||||
IN_SUBWAY,0.12389380484819412
|
||||
CYCLING,0.06489675492048264
|
||||
IN_BUS,0.014749262481927872
|
||||
FLYING,0.0029498524963855743
|
||||
|
14891
data/timeline_csv/visits.csv
Normal file
14891
data/timeline_csv/visits.csv
Normal file
File diff suppressed because it is too large
Load Diff
14
node_modules/.package-lock.json
generated
vendored
Normal file
14
node_modules/.package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "personal-tracker",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/pocketbase": {
|
||||
"version": "0.26.2",
|
||||
"resolved": "https://registry.npmjs.org/pocketbase/-/pocketbase-0.26.2.tgz",
|
||||
"integrity": "sha512-WA8EOBc3QnSJh8rJ3iYoi9DmmPOMFIgVfAmIGux7wwruUEIzXgvrO4u0W2htfQjGIcyezJkdZOy5Xmh7SxAftw==",
|
||||
"license": "MIT"
|
||||
}
|
||||
}
|
||||
}
|
||||
823
node_modules/pocketbase/CHANGELOG.md
generated
vendored
Normal file
823
node_modules/pocketbase/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,823 @@
|
||||
## 0.26.2
|
||||
|
||||
- Allow body object without constructor ([#352](https://github.com/pocketbase/js-sdk/issues/352)).
|
||||
|
||||
|
||||
## 0.26.1
|
||||
|
||||
- Set the `cause` property of `ClientResponseError` to the original thrown error/data for easier debugging ([#349](https://github.com/pocketbase/js-sdk/pull/349); thanks @shish).
|
||||
|
||||
|
||||
## 0.26.0
|
||||
|
||||
- Ignore `undefined` properties when submitting an object that has `Blob`/`File` fields (_which is under the hood converted to `FormData`_)
|
||||
for consistency with how `JSON.stringify` works (see [pocketbase#6731](https://github.com/pocketbase/pocketbase/issues/6731#issuecomment-2812382827)).
|
||||
|
||||
|
||||
## 0.25.2
|
||||
|
||||
- Removed unnecessary checks in `serializeQueryParams` and added automated tests.
|
||||
|
||||
|
||||
## 0.25.1
|
||||
|
||||
- Ignore query parameters with `undefined` value ([#330](https://github.com/pocketbase/js-sdk/issues/330)).
|
||||
|
||||
|
||||
## 0.25.0
|
||||
|
||||
- Added `pb.crons` service to interact with the cron Web APIs.
|
||||
|
||||
|
||||
## 0.24.0
|
||||
|
||||
- Added support for assigning `FormData` as body to individual batch requests ([pocketbase#6145](https://github.com/pocketbase/pocketbase/discussions/6145)).
|
||||
|
||||
|
||||
## 0.23.0
|
||||
|
||||
- Added optional `pb.realtime.onDisconnect` hook function.
|
||||
_Note that the realtime client autoreconnect on its own and this hook is useful only for the cases where you want to apply a special behavior on server error or after closing the realtime connection._
|
||||
|
||||
|
||||
## 0.22.1
|
||||
|
||||
- Fixed old `pb.authStore.isAdmin`/`pb.authStore.isAuthRecord` and marked them as deprecated in favour of `pb.authStore.isSuperuser` ([#323](https://github.com/pocketbase/js-sdk/issues/323)).
|
||||
_Note that with PocketBase v0.23.0 superusers are converted to a system auth collection so you can always simply check the value of `pb.authStore.record?.collectionName`._
|
||||
|
||||
|
||||
## 0.22.0
|
||||
|
||||
**⚠️ This release introduces some breaking changes and works only with PocketBase v0.23.0+.**
|
||||
|
||||
- Added support for sending batch/transactional create/updated/delete/**upsert** requests with the new batch Web APIs.
|
||||
```js
|
||||
const batch = pb.createBatch();
|
||||
|
||||
batch.collection("example1").create({ ... });
|
||||
batch.collection("example2").update("RECORD_ID", { ... });
|
||||
batch.collection("example3").delete("RECORD_ID");
|
||||
batch.collection("example4").upsert({ ... });
|
||||
|
||||
const result = await batch.send();
|
||||
```
|
||||
|
||||
- Added support for authenticating with OTP (email code):
|
||||
```js
|
||||
const result = await pb.collection("users").requestOTP("test@example.com");
|
||||
|
||||
// ... show a modal for users to check their email and to enter the received code ...
|
||||
|
||||
await pb.collection("users").authWithOTP(result.otpId, "EMAIL_CODE");
|
||||
```
|
||||
|
||||
Note that PocketBase v0.23.0 comes also with Multi-factor authentication (MFA) support.
|
||||
When enabled from the dashboard, the first auth attempt will result in 401 response and a `mfaId` response,
|
||||
that will have to be submitted with the second auth request. For example:
|
||||
```js
|
||||
try {
|
||||
await pb.collection("users").authWithPassword("test@example.com", "1234567890");
|
||||
} catch (err) {
|
||||
const mfaId = err.response?.mfaId;
|
||||
if (!mfaId) {
|
||||
throw err; // not mfa -> rethrow
|
||||
}
|
||||
|
||||
// the user needs to authenticate again with another auth method, for example OTP
|
||||
const result = await pb.collection("users").requestOTP("test@example.com");
|
||||
// ... show a modal for users to check their email and to enter the received code ...
|
||||
await pb.collection("users").authWithOTP(result.otpId, "EMAIL_CODE", { "mfaId": mfaId });
|
||||
}
|
||||
```
|
||||
|
||||
- Added new `pb.collection("users").impersonate("RECORD_ID")` method for superusers.
|
||||
It authenticates with the specified record id and returns a new client with the impersonated auth state loaded in a memory store.
|
||||
```js
|
||||
// authenticate as superusers (with v0.23.0 admins is converted to a special system auth collection "_superusers"):
|
||||
await pb.collection("_superusers").authWithPassword("test@example.com", "1234567890");
|
||||
|
||||
// impersonate
|
||||
const impersonateClient = pb.collection("users").impersonate("USER_RECORD_ID", 3600 /* optional token duration in seconds */)
|
||||
|
||||
// log the impersonate token and user data
|
||||
console.log(impersonateClient.authStore.token);
|
||||
console.log(impersonateClient.authStore.record);
|
||||
|
||||
// send requests as the impersonated user
|
||||
impersonateClient.collection("example").getFullList();
|
||||
```
|
||||
|
||||
- Added new `pb.collections.getScaffolds()` method to retrieve a type indexed map with the collection models (base, auth, view) loaded with their defaults.
|
||||
|
||||
- Added new `pb.collections.truncate(idOrName)` to delete all records associated with the specified collection.
|
||||
|
||||
- Added the submitted fetch options as 3rd last argument in the `pb.afterSend` hook.
|
||||
|
||||
- Instead of replacing the entire `pb.authStore.record`, on auth record update we now only replace the available returned response record data ([pocketbase#5638](https://github.com/pocketbase/pocketbase/issues/5638)).
|
||||
|
||||
- ⚠️ Admins are converted to `_superusers` auth collection and there is no longer `AdminService` and `AdminModel` types.
|
||||
`pb.admins` is soft-deprecated and aliased to `pb.collection("_superusers")`.
|
||||
```js
|
||||
// before -> after
|
||||
pb.admins.* -> pb.collection("_superusers").*
|
||||
```
|
||||
|
||||
- ⚠️ `pb.authStore.model` is soft-deprecated and superseded by `pb.authStore.record`.
|
||||
|
||||
- ⚠️ Soft-deprecated the OAuth2 success auth `meta.avatarUrl` response field in favour of `meta.avatarURL` for consistency with the Go conventions.
|
||||
|
||||
- ⚠️ Changed `AuthMethodsList` interface fields to accommodate the new auth methods and `listAuthMethods()` response.
|
||||
```
|
||||
{
|
||||
"mfa": {
|
||||
"duration": 100,
|
||||
"enabled": true
|
||||
},
|
||||
"otp": {
|
||||
"duration": 0,
|
||||
"enabled": false
|
||||
},
|
||||
"password": {
|
||||
"enabled": true,
|
||||
"identityFields": ["email", "username"]
|
||||
},
|
||||
"oauth2": {
|
||||
"enabled": true,
|
||||
"providers": [{"name": "gitlab", ...}, {"name": "google", ...}]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- ⚠️ Require specifying collection id or name when sending test email because the email templates can be changed per collection.
|
||||
```js
|
||||
// old
|
||||
pb.settings.testEmail(email, "verification")
|
||||
|
||||
// new
|
||||
pb.settings.testEmail("users", email, "verification")
|
||||
```
|
||||
|
||||
- ⚠️ Soft-deprecated and aliased `*Url()` -> `*URL()` methods for consistency with other similar native JS APIs and the accepted Go conventions.
|
||||
_The old methods still works but you may get a console warning to replace them because they will be removed in the future._
|
||||
```js
|
||||
pb.baseUrl -> pb.baseURL
|
||||
pb.buildUrl() -> pb.buildURL()
|
||||
pb.files.getUrl() -> pb.files.getURL()
|
||||
pb.backups.getDownloadUrl() -> pb.backups.getDownloadURL()
|
||||
```
|
||||
|
||||
- ⚠️ Renamed `CollectionModel.schema` to `CollectionModel.fields`.
|
||||
|
||||
- ⚠️ Renamed type `SchemaField` to `CollectionField`.
|
||||
|
||||
|
||||
## 0.21.5
|
||||
|
||||
- Shallow copy the realtime subscribe `options` argument for consistency with the other methods ([#308](https://github.com/pocketbase/js-sdk/issues/308)).
|
||||
|
||||
|
||||
## 0.21.4
|
||||
|
||||
- Fixed the `requestKey` handling in `authWithOAuth2({...})` to allow manually cancelling the entire OAuth2 pending request flow using `pb.cancelRequest(requestKey)`.
|
||||
_Due to the [`window.close` caveats](https://developer.mozilla.org/en-US/docs/Web/API/Window/close) note that the OAuth2 popup window may still remain open depending on which stage of the OAuth2 flow the cancellation has been invoked._
|
||||
|
||||
|
||||
## 0.21.3
|
||||
|
||||
- Enforce temporary the `atob` polyfill for ReactNative until [Expo 51+ and React Native v0.74+ `atob` fix get released](https://github.com/reactwg/react-native-releases/issues/287).
|
||||
|
||||
|
||||
## 0.21.2
|
||||
|
||||
- Exported `HealthService` types ([#289](https://github.com/pocketbase/js-sdk/issues/289)).
|
||||
|
||||
|
||||
## 0.21.1
|
||||
|
||||
- Manually update the verified state of the current matching `AuthStore` model on successful "confirm-verification" call.
|
||||
|
||||
- Manually clear the current matching `AuthStore` on "confirm-email-change" call because previous tokens are always invalidated.
|
||||
|
||||
- Updated the `fetch` mock tests to check also the sent body params.
|
||||
|
||||
- Formatted the source and tests with prettier.
|
||||
|
||||
|
||||
## 0.21.0
|
||||
|
||||
**⚠️ This release works only with PocketBase v0.21.0+ due to changes of how the `multipart/form-data` body is handled.**
|
||||
|
||||
- Properly sent json body with `multipart/form-data` requests.
|
||||
_This should fix the edge cases mentioned in the v0.20.3 release._
|
||||
|
||||
- Gracefully handle OAuth2 redirect error with the `authWithOAuth2()` call.
|
||||
|
||||
|
||||
## 0.20.3
|
||||
|
||||
- Partial and temporary workaround for the auto `application/json` -> `multipart/form-data` request serialization of a `json` field when a `Blob`/`File` is found in the request body ([#274](https://github.com/pocketbase/js-sdk/issues/274)).
|
||||
|
||||
The "fix" is partial because there are still 2 edge cases that are not handled - when a `json` field value is empty array (eg. `[]`) or array of strings (eg. `["a","b"]`).
|
||||
The reason for this is because the SDK doesn't have information about the field types and doesn't know which field is a `json` or an arrayable `select`, `file` or `relation`, so it can't serialize it properly on its own as `FormData` string value.
|
||||
|
||||
If you are having troubles with persisting `json` values as part of a `multipart/form-data` request the easiest fix for now is to manually stringify the `json` field value:
|
||||
```js
|
||||
await pb.collection("example").create({
|
||||
// having a Blob/File as object value will convert the request to multipart/form-data
|
||||
"someFileField": new Blob([123]),
|
||||
"someJsonField": JSON.stringify(["a","b","c"]),
|
||||
})
|
||||
```
|
||||
|
||||
A proper fix for this will be implemented with PocketBase v0.21.0 where we'll have support for a special `@jsonPayload` multipart body key, which will allow us to submit mixed `multipart/form-data` content (_kindof similar to the `multipart/mixed` MIME_).
|
||||
|
||||
|
||||
## 0.20.2
|
||||
|
||||
- Throw 404 error for `getOne("")` when invoked with empty id ([#271](https://github.com/pocketbase/js-sdk/issues/271)).
|
||||
|
||||
- Added `@throw {ClientResponseError}` jsdoc annotation to the regular request methods ([#262](https://github.com/pocketbase/js-sdk/issues/262)).
|
||||
|
||||
|
||||
## 0.20.1
|
||||
|
||||
- Propagate the `PB_CONNECT` event to allow listening to the realtime connect/reconnect events.
|
||||
```js
|
||||
pb.realtime.subscribe("PB_CONNECT", (e) => {
|
||||
console.log(e.clientId);
|
||||
})
|
||||
```
|
||||
|
||||
## 0.20.0
|
||||
|
||||
- Added `expand`, `filter`, `fields`, custom query and headers parameters support for the realtime subscriptions.
|
||||
```js
|
||||
pb.collection("example").subscribe("*", (e) => {
|
||||
...
|
||||
}, { filter: "someField > 10" });
|
||||
```
|
||||
_This works only with PocketBase v0.20.0+._
|
||||
|
||||
- Changes to the logs service methods in relation to the logs generalization in PocketBase v0.20.0+:
|
||||
```js
|
||||
pb.logs.getRequestsList(...) -> pb.logs.getList(...)
|
||||
pb.logs.getRequest(...) -> pb.logs.getOne(...)
|
||||
pb.logs.getRequestsStats(...) -> pb.logs.getStats(...)
|
||||
```
|
||||
|
||||
- Added missing `SchemaField.presentable` field.
|
||||
|
||||
- Added new `AuthProviderInfo.displayName` string field.
|
||||
|
||||
- Added new `AuthMethodsList.onlyVerified` bool field.
|
||||
|
||||
|
||||
## 0.19.0
|
||||
|
||||
- Added `pb.filter(rawExpr, params?)` helper to construct a filter string with placeholder parameters populated from an object.
|
||||
|
||||
```js
|
||||
const record = await pb.collection("example").getList(1, 20, {
|
||||
// the same as: "title ~ 'te\\'st' && (totalA = 123 || totalB = 123)"
|
||||
filter: pb.filter("title ~ {:title} && (totalA = {:num} || totalB = {:num})", { title: "te'st", num: 123 })
|
||||
})
|
||||
```
|
||||
|
||||
The supported placeholder parameter values are:
|
||||
|
||||
- `string` (_single quotes will be autoescaped_)
|
||||
- `number`
|
||||
- `boolean`
|
||||
- `Date` object (_will be stringified into the format expected by PocketBase_)
|
||||
- `null`
|
||||
- anything else is converted to a string using `JSON.stringify()`
|
||||
|
||||
|
||||
## 0.18.3
|
||||
|
||||
- Added optional generic support for the `RecordService` ([#251](https://github.com/pocketbase/js-sdk/issues/251)).
|
||||
This should allow specifying a single TypeScript definition for the client, eg. using type assertion:
|
||||
```ts
|
||||
interface Task {
|
||||
id: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface Post {
|
||||
id: string;
|
||||
title: string;
|
||||
active: boolean;
|
||||
}
|
||||
|
||||
interface TypedPocketBase extends PocketBase {
|
||||
collection(idOrName: string): RecordService // default fallback for any other collection
|
||||
collection(idOrName: 'tasks'): RecordService<Task>
|
||||
collection(idOrName: 'posts'): RecordService<Post>
|
||||
}
|
||||
|
||||
...
|
||||
|
||||
const pb = new PocketBase("http://127.0.0.1:8090") as TypedPocketBase;
|
||||
|
||||
// the same as pb.collection('tasks').getOne<Task>("RECORD_ID")
|
||||
await pb.collection('tasks').getOne("RECORD_ID") // -> results in Task
|
||||
|
||||
// the same as pb.collection('posts').getOne<Post>("RECORD_ID")
|
||||
await pb.collection('posts').getOne("RECORD_ID") // -> results in Post
|
||||
```
|
||||
|
||||
|
||||
## 0.18.2
|
||||
|
||||
- Added support for assigning a `Promise` as `AsyncAuthStore` initial value ([#249](https://github.com/pocketbase/js-sdk/issues/249)).
|
||||
|
||||
|
||||
## 0.18.1
|
||||
|
||||
- Fixed realtime subscriptions auto cancellation to use the proper `requestKey` param.
|
||||
|
||||
|
||||
## 0.18.0
|
||||
|
||||
- Added `pb.backups.upload(data)` action (_available with PocketBase v0.18.0_).
|
||||
|
||||
- Added _experimental_ `autoRefreshThreshold` option to auto refresh (or reauthenticate) the AuthStore when authenticated as admin.
|
||||
_This could be used as an alternative to fixed Admin API keys._
|
||||
```js
|
||||
await pb.admins.authWithPassword("test@example.com", "1234567890", {
|
||||
// This will trigger auto refresh or auto reauthentication in case
|
||||
// the token has expired or is going to expire in the next 30 minutes.
|
||||
autoRefreshThreshold: 30 * 60
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
## 0.17.3
|
||||
|
||||
- Loosen the type check when calling `pb.files.getUrl(user, filename)` to allow passing the `pb.authStore.model` without type assertion.
|
||||
|
||||
|
||||
## 0.17.2
|
||||
|
||||
- Fixed multiple File/Blob array values not transformed properly to their FormData equivalent when an object syntax is used.
|
||||
|
||||
|
||||
## 0.17.1
|
||||
|
||||
- Fixed typo in the deprecation console.warn messages ([#235](https://github.com/pocketbase/js-sdk/pull/235); thanks @heloineto).
|
||||
|
||||
|
||||
## 0.17.0
|
||||
|
||||
- To simplify file uploads, we now allow sending the `multipart/form-data` request body also as plain object if at least one of the object props has `File` or `Blob` value.
|
||||
```js
|
||||
// the standard way to create multipart/form-data body
|
||||
const data = new FormData();
|
||||
data.set("title", "lorem ipsum...")
|
||||
data.set("document", new File(...))
|
||||
|
||||
// this is the same as above
|
||||
// (it will be converted behind the scenes to FormData)
|
||||
const data = {
|
||||
"title": "lorem ipsum...",
|
||||
"document": new File(...),
|
||||
};
|
||||
|
||||
await pb.collection("example").create(data);
|
||||
```
|
||||
|
||||
- Added new `pb.authStore.isAdmin` and `pb.authStore.isAuthRecord` helpers to check the type of the current auth state.
|
||||
|
||||
- The default `LocalAuthStore` now listen to the browser [storage event](https://developer.mozilla.org/en-US/docs/Web/API/Window/storage_event),
|
||||
so that we can sync automatically the `pb.authStore` state between multiple tabs.
|
||||
|
||||
- Added new helper `AsyncAuthStore` class that can be used to integrate with any 3rd party async storage implementation (_usually this is needed when working with React Native_):
|
||||
```js
|
||||
import AsyncStorage from "@react-native-async-storage/async-storage";
|
||||
import PocketBase, { AsyncAuthStore } from "pocketbase";
|
||||
|
||||
const store = new AsyncAuthStore({
|
||||
save: async (serialized) => AsyncStorage.setItem("pb_auth", serialized),
|
||||
initial: AsyncStorage.getItem("pb_auth"),
|
||||
});
|
||||
|
||||
const pb = new PocketBase("https://example.com", store)
|
||||
```
|
||||
|
||||
- `pb.files.getUrl()` now returns empty string in case an empty filename is passed.
|
||||
|
||||
- ⚠️ All API actions now return plain object (POJO) as response, aka. the custom class wrapping was removed and you no longer need to manually call `structuredClone(response)` when using with SSR frameworks.
|
||||
|
||||
This could be a breaking change if you use the below classes (_and respectively their helper methods like `$isNew`, `$load()`, etc._) since they were replaced with plain TS interfaces:
|
||||
```ts
|
||||
class BaseModel -> interface BaseModel
|
||||
class Admin -> interface AdminModel
|
||||
class Record -> interface RecordModel
|
||||
class LogRequest -> interface LogRequestModel
|
||||
class ExternalAuth -> interface ExternalAuthModel
|
||||
class Collection -> interface CollectionModel
|
||||
class SchemaField -> interface SchemaField
|
||||
class ListResult -> interface ListResult
|
||||
```
|
||||
|
||||
_Side-note:_ If you use somewhere in your code the `Record` and `Admin` classes to determine the type of your `pb.authStore.model`,
|
||||
you can safely replace it with the new `pb.authStore.isAdmin` and `pb.authStore.isAuthRecord` getters.
|
||||
|
||||
- ⚠️ Added support for per-request `fetch` options, including also specifying completely custom `fetch` implementation.
|
||||
|
||||
In addition to the default [`fetch` options](https://developer.mozilla.org/en-US/docs/Web/API/fetch#options), the following configurable fields are supported:
|
||||
|
||||
```ts
|
||||
interface SendOptions extends RequestInit {
|
||||
// any other custom key will be merged with the query parameters
|
||||
// for backward compatibility and to minimize the verbosity
|
||||
[key: string]: any;
|
||||
|
||||
// optional custom fetch function to use for sending the request
|
||||
fetch?: (url: RequestInfo | URL, config?: RequestInit) => Promise<Response>;
|
||||
|
||||
// custom headers to send with the requests
|
||||
headers?: { [key: string]: string };
|
||||
|
||||
// the body of the request (serialized automatically for json requests)
|
||||
body?: any;
|
||||
|
||||
// query params that will be appended to the request url
|
||||
query?: { [key: string]: any };
|
||||
|
||||
// the request identifier that can be used to cancel pending requests
|
||||
requestKey?: string|null;
|
||||
|
||||
// @deprecated use `requestKey:string` instead
|
||||
$cancelKey?: string;
|
||||
|
||||
// @deprecated use `requestKey:null` instead
|
||||
$autoCancel?: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
For most users the above will not be a breaking change since there are available function overloads (_when possible_) to preserve the old behavior, but you can get a warning message in the console to update to the new format.
|
||||
For example:
|
||||
```js
|
||||
// OLD (should still work but with a warning in the console)
|
||||
await pb.collection("example").authRefresh({}, {
|
||||
"expand": "someRelField",
|
||||
})
|
||||
|
||||
// NEW
|
||||
await pb.collection("example").authRefresh({
|
||||
"expand": "someRelField",
|
||||
// send some additional header
|
||||
"headers": {
|
||||
"X-Custom-Header": "123",
|
||||
},
|
||||
"cache": "no-store" // also usually used by frameworks like Next.js
|
||||
})
|
||||
```
|
||||
|
||||
- Eagerly open the default OAuth2 signin popup in case no custom `urlCallback` is provided as a workaround for Safari.
|
||||
|
||||
- Internal refactoring (updated dev dependencies, refactored the tests to use Vitest instead of Mocha, etc.).
|
||||
|
||||
|
||||
## 0.16.0
|
||||
|
||||
- Added `skipTotal=1` query parameter by default for the `getFirstListItem()` and `getFullList()` requests.
|
||||
_Note that this have performance boost only with PocketBase v0.17+._
|
||||
|
||||
- Added optional `download=1` query parameter to force file urls with `Content-Disposition: attachment` (_supported with PocketBase v0.17+_).
|
||||
|
||||
|
||||
## 0.15.3
|
||||
|
||||
- Automatically resolve pending realtime connect `Promise`s in case `unsubscribe` is called before
|
||||
`subscribe` is being able to complete ([pocketbase#2897](https://github.com/pocketbase/pocketbase/discussions/2897#discussioncomment-6423818)).
|
||||
|
||||
|
||||
## 0.15.2
|
||||
|
||||
- Replaced `new URL(...)` with manual url parsing as it is not fully supported in React Native ([pocketbase#2484](https://github.com/pocketbase/pocketbase/discussions/2484#discussioncomment-6114540)).
|
||||
|
||||
- Fixed nested `ClientResponseError.originalError` wrapping and added `ClientResponseError` constructor tests.
|
||||
|
||||
|
||||
## 0.15.1
|
||||
|
||||
- Cancel any pending subscriptions submit requests on realtime disconnect ([#204](https://github.com/pocketbase/js-sdk/issues/204)).
|
||||
|
||||
|
||||
## 0.15.0
|
||||
|
||||
- Added `fields` to the optional query parameters for limiting the returned API fields (_available with PocketBase v0.16.0_).
|
||||
|
||||
- Added `pb.backups` service for the new PocketBase backup and restore APIs (_available with PocketBase v0.16.0_).
|
||||
|
||||
- Updated `pb.settings.testS3(filesystem)` to allow specifying a filesystem to test - `storage` or `backups` (_available with PocketBase v0.16.0_).
|
||||
|
||||
|
||||
## 0.14.4
|
||||
|
||||
- Removed the legacy aliased `BaseModel.isNew` getter since it conflicts with similarly named record fields ([pocketbase#2385](https://github.com/pocketbase/pocketbase/discussions/2385)).
|
||||
_This helper is mainly used in the Admin UI, but if you are also using it in your code you can replace it with the `$` prefixed version, aka. `BaseModel.$isNew`._
|
||||
|
||||
|
||||
## 0.14.3
|
||||
|
||||
- Added `OAuth2AuthConfig.query` prop to send optional query parameters with the `authWithOAuth2(config)` call.
|
||||
|
||||
|
||||
## 0.14.2
|
||||
|
||||
- Use `location.origin + location.pathname` instead of full `location.href` when constructing the browser absolute url to ignore any extra hash or query parameter passed to the base url.
|
||||
_This is a small addition to the earlier change from v0.14.1._
|
||||
|
||||
|
||||
## 0.14.1
|
||||
|
||||
- Use an absolute url when the SDK is initialized with a relative base path in a browser env to ensure that the generated OAuth2 redirect and file urls are absolute.
|
||||
|
||||
|
||||
## 0.14.0
|
||||
|
||||
- Added simplified `authWithOAuth2()` version without having to implement custom redirect, deeplink or even page reload:
|
||||
```js
|
||||
const authData = await pb.collection('users').authWithOAuth2({
|
||||
provider: 'google'
|
||||
})
|
||||
```
|
||||
|
||||
Works with PocketBase v0.15.0+.
|
||||
|
||||
This method initializes a one-off realtime subscription and will
|
||||
open a popup window with the OAuth2 vendor page to authenticate.
|
||||
Once the external OAuth2 sign-in/sign-up flow is completed, the popup
|
||||
window will be automatically closed and the OAuth2 data sent back
|
||||
to the user through the previously established realtime connection.
|
||||
|
||||
    _Side-note_: when creating the OAuth2 app in the provider dashboard
|
||||
you have to configure `https://yourdomain.com/api/oauth2-redirect`
|
||||
as redirect URL.
|
||||
|
||||
_The "manual" code exchange flow is still supported as `authWithOAuth2Code(provider, code, codeVerifier, redirectUrl)`._
|
||||
|
||||
_For backward compatibility it is also available as soft-deprecated function overload of `authWithOAuth2(provider, code, codeVerifier, redirectUrl)`._
|
||||
|
||||
- Added new `pb.files` service:
|
||||
```js
|
||||
// Builds and returns an absolute record file url for the provided filename.
|
||||
🔓 pb.files.getUrl(record, filename, queryParams = {});
|
||||
|
||||
// Requests a new private file access token for the current auth model (admin or record).
|
||||
🔐 pb.files.getToken(queryParams = {});
|
||||
```
|
||||
_`pb.getFileUrl()` is soft-deprecated and acts as alias calling `pb.files.getUrl()` under the hood._
|
||||
|
||||
Works with PocketBase v0.15.0+.
|
||||
|
||||
|
||||
## 0.13.1
|
||||
|
||||
- Added option to specify a generic `send()` return type and defined `SendOptions` type ([#171](https://github.com/pocketbase/js-sdk/pull/171); thanks @iamelevich).
|
||||
|
||||
- Deprecated `SchemaField.unique` prop since its function is replaced by `Collection.indexes` in the upcoming PocketBase v0.14.0 release.
|
||||
|
||||
|
||||
## 0.13.0
|
||||
|
||||
- Aliased all `BaseModel` helpers with `$` equivalent to avoid conflicts with the dynamic record props ([#169](https://github.com/pocketbase/js-sdk/issues/169)).
|
||||
```js
|
||||
isNew -> $isNew
|
||||
load(data) -> $load(data)
|
||||
clone() -> $clone()
|
||||
export() -> $export()
|
||||
// ...
|
||||
```
|
||||
_For backward compatibility, the old helpers will still continue to work if the record doesn't have a conflicting field name._
|
||||
|
||||
- Updated `pb.beforeSend` and `pb.afterSend` signatures to allow returning and awaiting an optional `Promise` ([#166](https://github.com/pocketbase/js-sdk/pull/166); thanks @Bobby-McBobface).
|
||||
|
||||
- Added `Collection.indexes` field for the new collection indexes support in the upcoming PocketBase v0.14.0.
|
||||
|
||||
- Added `pb.settings.generateAppleClientSecret()` for sending a request to generate Apple OAuth2 client secret in the upcoming PocketBase v0.14.0.
|
||||
|
||||
|
||||
## 0.12.1
|
||||
|
||||
- Fixed request `multipart/form-data` body check to allow the React Native Android and iOS custom `FormData` implementation as valid `fetch` body ([#2002](https://github.com/pocketbase/pocketbase/discussions/2002)).
|
||||
|
||||
|
||||
## 0.12.0
|
||||
|
||||
- Changed the return type of `pb.beforeSend` hook to allow modifying the request url ([#1930](https://github.com/pocketbase/pocketbase/discussions/1930)).
|
||||
```js
|
||||
// old
|
||||
pb.beforeSend = function (url, options) {
|
||||
...
|
||||
return options;
|
||||
}
|
||||
|
||||
// new
|
||||
pb.beforeSend = function (url, options) {
|
||||
...
|
||||
return { url, options };
|
||||
}
|
||||
```
|
||||
The old return format is soft-deprecated and will still work, but you'll get a `console.warn` message to replace it.
|
||||
|
||||
|
||||
## 0.11.1
|
||||
|
||||
- Exported the services class definitions to allow being used as argument types ([#153](https://github.com/pocketbase/js-sdk/issues/153)).
|
||||
```js
|
||||
CrudService
|
||||
AdminService
|
||||
CollectionService
|
||||
LogService
|
||||
RealtimeService
|
||||
RecordService
|
||||
SettingsService
|
||||
```
|
||||
|
||||
## 0.11.0
|
||||
|
||||
- Aliased/soft-deprecated `ClientResponseError.data` in favor of `ClientResponseError.response` to avoid the stuttering when accessing the inner error response `data` key (aka. `err.data.data` now is `err.response.data`).
|
||||
  The `ClientResponseError.data` will still work but it is recommended for new code to use the `response` key.
|
||||
|
||||
- Added `getFullList(queryParams = {})` overload since the default batch size in most cases doesn't need to change (it can be defined as query parameter).
|
||||
  The old form `getFullList(batch = 200, queryParams = {})` will still work, but it is recommended for new code to use the shorter form.
|
||||
|
||||
|
||||
## 0.10.2
|
||||
|
||||
- Updated `getFileUrl()` to accept custom types as record argument.
|
||||
|
||||
|
||||
## 0.10.1
|
||||
|
||||
- Added check for the collection name before auto updating the `pb.authStore` state on auth record update/delete.
|
||||
|
||||
|
||||
## 0.10.0
|
||||
|
||||
- Added more helpful message for the `ECONNREFUSED ::1` localhost error (related to [#21](https://github.com/pocketbase/js-sdk/issues/21)).
|
||||
|
||||
- Preserved the "original" function and class names in the minified output for those who rely on `*.prototype.name`.
|
||||
|
||||
- Allowed sending the existing valid auth token with the `authWithPassword()` calls.
|
||||
|
||||
- Updated the Nuxt3 SSR examples to use the built-in `useCookie()` helper.
|
||||
|
||||
|
||||
## 0.9.1
|
||||
|
||||
- Normalized nested `expand` items to `Record|Array<Record>` instances.
|
||||
|
||||
|
||||
## 0.9.0
|
||||
|
||||
- Added `pb.health.check()` that checks the health status of the API service (_available in PocketBase v0.10.0_)
|
||||
|
||||
|
||||
## 0.8.4
|
||||
|
||||
- Added type declarations for the action query parameters ([#102](https://github.com/pocketbase/js-sdk/pull/102); thanks @sewera).
|
||||
```js
|
||||
BaseQueryParams
|
||||
ListQueryParams
|
||||
RecordQueryParams
|
||||
RecordListQueryParams
|
||||
LogStatsQueryParams
|
||||
FileQueryParams
|
||||
```
|
||||
|
||||
|
||||
## 0.8.3
|
||||
|
||||
- Renamed the declaration file extension from `.d.ts` to `.d.mts` to prevent type resolution issues ([#92](https://github.com/pocketbase/js-sdk/issues/92)).
|
||||
|
||||
|
||||
## 0.8.2
|
||||
|
||||
- Allowed catching the initial realtime connect error as part of the `subscribe()` Promise resolution.
|
||||
|
||||
- Reimplemented the default `EventSource` retry mechanism for better control and more consistent behavior across different browsers.
|
||||
|
||||
|
||||
## 0.8.1
|
||||
|
||||
This release contains only documentation fixes:
|
||||
|
||||
- Fixed code comment typos.
|
||||
|
||||
- Added note about loadFromCookie that you may need to call authRefresh to validate the loaded cookie state server-side.
|
||||
|
||||
- Updated the SSR examples to show the authRefresh call. _For the examples the authRefresh call is not required but it is there to remind users that it needs to be called if you want to do permission checks in a node env (eg. SSR) and rely on the `pb.authStore.isValid`._
|
||||
|
||||
|
||||
## 0.8.0
|
||||
|
||||
> ⚠️ Please note that this release works only with the new PocketBase v0.8+ API!
|
||||
>
|
||||
> See the breaking changes below for what has changed since v0.7.x.
|
||||
|
||||
#### Non breaking changes
|
||||
|
||||
- Added support for optional custom `Record` types using TypeScript generics, eg.
|
||||
`pb.collection('example').getList<Tasks>()`.
|
||||
|
||||
- Added new `pb.autoCancellation(bool)` method to globally enable or disable auto cancellation (`true` by default).
|
||||
|
||||
- Added new crud method `getFirstListItem(filter)` to fetch a single item by a list filter.
|
||||
|
||||
- You can now set additional account `createData` when authenticating with OAuth2.
|
||||
|
||||
- Added `AuthMethodsList.usernamePassword` return field (we now support combined username/email authentication; see below `authWithPassword`).
|
||||
|
||||
#### Breaking changes
|
||||
|
||||
- Changed the constructor from `PocketBase(url, lang?, store?)` to `PocketBase(url, store?, lang?)` (aka. the `lang` option is now last).
|
||||
|
||||
- For easier and more conventional parsing, all DateTime strings now have `Z` as suffix, so that you can do directly `new Date('2022-01-01 01:02:03.456Z')`.
|
||||
|
||||
- Moved `pb.records.getFileUrl()` to `pb.getFileUrl()`.
|
||||
|
||||
- Moved all `pb.records.*` handlers under `pb.collection().*`:
|
||||
```
|
||||
pb.records.getFullList('example'); => pb.collection('example').getFullList();
|
||||
pb.records.getList('example'); => pb.collection('example').getList();
|
||||
pb.records.getOne('example', 'RECORD_ID'); => pb.collection('example').getOne('RECORD_ID');
|
||||
(no old equivalent) => pb.collection('example').getFirstListItem(filter);
|
||||
pb.records.create('example', {...}); => pb.collection('example').create({...});
|
||||
pb.records.update('example', 'RECORD_ID', {...}); => pb.collection('example').update('RECORD_ID', {...});
|
||||
pb.records.delete('example', 'RECORD_ID'); => pb.collection('example').delete('RECORD_ID');
|
||||
```
|
||||
|
||||
- The `pb.realtime` service has now a more general callback form so that it can be used with custom realtime handlers.
|
||||
  Dedicated record-specific subscriptions can be found under `pb.collection().*`:
|
||||
```
|
||||
pb.realtime.subscribe('example', callback) => pb.collection('example').subscribe("*", callback)
|
||||
pb.realtime.subscribe('example/RECORD_ID', callback) => pb.collection('example').subscribe('RECORD_ID', callback)
|
||||
pb.realtime.unsubscribe('example') => pb.collection('example').unsubscribe("*")
|
||||
pb.realtime.unsubscribe('example/RECORD_ID') => pb.collection('example').unsubscribe('RECORD_ID')
|
||||
(no old equivalent) => pb.collection('example').unsubscribe()
|
||||
```
|
||||
Additionally, `subscribe()` now return `UnsubscribeFunc` that could be used to unsubscribe only from a single subscription listener.
|
||||
|
||||
- Moved all `pb.users.*` handlers under `pb.collection().*`:
|
||||
```
|
||||
pb.users.listAuthMethods() => pb.collection('users').listAuthMethods()
|
||||
pb.users.authViaEmail(email, password) => pb.collection('users').authWithPassword(usernameOrEmail, password)
|
||||
pb.users.authViaOAuth2(provider, code, codeVerifier, redirectUrl) => pb.collection('users').authWithOAuth2(provider, code, codeVerifier, redirectUrl, createData = {})
|
||||
pb.users.refresh() => pb.collection('users').authRefresh()
|
||||
pb.users.requestPasswordReset(email) => pb.collection('users').requestPasswordReset(email)
|
||||
pb.users.confirmPasswordReset(resetToken, newPassword, newPasswordConfirm) => pb.collection('users').confirmPasswordReset(resetToken, newPassword, newPasswordConfirm)
|
||||
pb.users.requestVerification(email) => pb.collection('users').requestVerification(email)
|
||||
pb.users.confirmVerification(verificationToken) => pb.collection('users').confirmVerification(verificationToken)
|
||||
pb.users.requestEmailChange(newEmail) => pb.collection('users').requestEmailChange(newEmail)
|
||||
pb.users.confirmEmailChange(emailChangeToken, password) => pb.collection('users').confirmEmailChange(emailChangeToken, password)
|
||||
pb.users.listExternalAuths(recordId) => pb.collection('users').listExternalAuths(recordId)
|
||||
pb.users.unlinkExternalAuth(recordId, provider) => pb.collection('users').unlinkExternalAuth(recordId, provider)
|
||||
```
|
||||
|
||||
- Changes in `pb.admins` for consistency with the new auth handlers in `pb.collection().*`:
|
||||
```
|
||||
pb.admins.authViaEmail(email, password); => pb.admins.authWithPassword(email, password);
|
||||
pb.admins.refresh(); => pb.admins.authRefresh();
|
||||
```
|
||||
|
||||
- To prevent confusion with the auth method responses, the following methods now returns 204 with empty body (previously 200 with token and auth model):
|
||||
```js
|
||||
pb.admins.confirmPasswordReset(...): Promise<bool>
|
||||
pb.collection("users").confirmPasswordReset(...): Promise<bool>
|
||||
pb.collection("users").confirmVerification(...): Promise<bool>
|
||||
pb.collection("users").confirmEmailChange(...): Promise<bool>
|
||||
```
|
||||
|
||||
- Removed the `User` model because users are now regular records (aka. `Record`).
|
||||
**The old user fields `lastResetSentAt`, `lastVerificationSentAt` and `profile` are no longer available**
|
||||
(the `profile` fields are available under the `Record.*` property like any other fields).
|
||||
|
||||
- Renamed the special `Record` props:
|
||||
```
|
||||
@collectionId => collectionId
|
||||
@collectionName => collectionName
|
||||
@expand => expand
|
||||
```
|
||||
|
||||
- Since there is no longer `User` model, `pb.authStore.model` can now be of type `Record`, `Admin` or `null`.
|
||||
|
||||
- Removed `lastResetSentAt` from the `Admin` model.
|
||||
|
||||
- Replaced `ExternalAuth.userId` with 2 new `recordId` and `collectionId` props.
|
||||
|
||||
- Removed the deprecated uppercase service aliases:
|
||||
```
|
||||
client.Users => client.collection(*)
|
||||
client.Records => client.collection(*)
|
||||
client.AuthStore => client.authStore
|
||||
client.Realtime => client.realtime
|
||||
client.Admins => client.admins
|
||||
client.Collections => client.collections
|
||||
client.Logs => client.logs
|
||||
client.Settings => client.settings
|
||||
```
|
||||
17
node_modules/pocketbase/LICENSE.md
generated
vendored
Normal file
17
node_modules/pocketbase/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2022 - present, Gani Georgiev
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
|
||||
and associated documentation files (the "Software"), to deal in the Software without restriction,
|
||||
including without limitation the rights to use, copy, modify, merge, publish, distribute,
|
||||
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or
|
||||
substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
|
||||
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
2053
node_modules/pocketbase/README.html
generated
vendored
Normal file
2053
node_modules/pocketbase/README.html
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1091
node_modules/pocketbase/README.md
generated
vendored
Normal file
1091
node_modules/pocketbase/README.md
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1465
node_modules/pocketbase/dist/pocketbase.cjs.d.ts
generated
vendored
Normal file
1465
node_modules/pocketbase/dist/pocketbase.cjs.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2
node_modules/pocketbase/dist/pocketbase.cjs.js
generated
vendored
Normal file
2
node_modules/pocketbase/dist/pocketbase.cjs.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/pocketbase/dist/pocketbase.cjs.js.map
generated
vendored
Normal file
1
node_modules/pocketbase/dist/pocketbase.cjs.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1580
node_modules/pocketbase/dist/pocketbase.es.d.mts
generated
vendored
Normal file
1580
node_modules/pocketbase/dist/pocketbase.es.d.mts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1580
node_modules/pocketbase/dist/pocketbase.es.d.ts
generated
vendored
Normal file
1580
node_modules/pocketbase/dist/pocketbase.es.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2
node_modules/pocketbase/dist/pocketbase.es.js
generated
vendored
Normal file
2
node_modules/pocketbase/dist/pocketbase.es.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/pocketbase/dist/pocketbase.es.js.map
generated
vendored
Normal file
1
node_modules/pocketbase/dist/pocketbase.es.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/pocketbase/dist/pocketbase.es.mjs
generated
vendored
Normal file
2
node_modules/pocketbase/dist/pocketbase.es.mjs
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/pocketbase/dist/pocketbase.es.mjs.map
generated
vendored
Normal file
1
node_modules/pocketbase/dist/pocketbase.es.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1465
node_modules/pocketbase/dist/pocketbase.iife.d.ts
generated
vendored
Normal file
1465
node_modules/pocketbase/dist/pocketbase.iife.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2
node_modules/pocketbase/dist/pocketbase.iife.js
generated
vendored
Normal file
2
node_modules/pocketbase/dist/pocketbase.iife.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/pocketbase/dist/pocketbase.iife.js.map
generated
vendored
Normal file
1
node_modules/pocketbase/dist/pocketbase.iife.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1465
node_modules/pocketbase/dist/pocketbase.umd.d.ts
generated
vendored
Normal file
1465
node_modules/pocketbase/dist/pocketbase.umd.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2
node_modules/pocketbase/dist/pocketbase.umd.js
generated
vendored
Normal file
2
node_modules/pocketbase/dist/pocketbase.umd.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/pocketbase/dist/pocketbase.umd.js.map
generated
vendored
Normal file
1
node_modules/pocketbase/dist/pocketbase.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
47
node_modules/pocketbase/package.json
generated
vendored
Normal file
47
node_modules/pocketbase/package.json
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"version": "0.26.2",
|
||||
"name": "pocketbase",
|
||||
"description": "PocketBase JavaScript SDK",
|
||||
"author": "Gani Georgiev",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/pocketbase/js-sdk.git"
|
||||
},
|
||||
"exports": {
|
||||
".": "./dist/pocketbase.es.mjs",
|
||||
"./cjs": "./dist/pocketbase.cjs.js",
|
||||
"./umd": "./dist/pocketbase.umd.js"
|
||||
},
|
||||
"main": "./dist/pocketbase.es.mjs",
|
||||
"module": "./dist/pocketbase.es.mjs",
|
||||
"react-native": "./dist/pocketbase.es.js",
|
||||
"types": "./dist/pocketbase.es.d.mts",
|
||||
"keywords": [
|
||||
"pocketbase",
|
||||
"pocketbase-js",
|
||||
"js-sdk",
|
||||
"javascript-sdk",
|
||||
"pocketbase-sdk"
|
||||
],
|
||||
"prettier": {
|
||||
"tabWidth": 4,
|
||||
"printWidth": 90,
|
||||
"bracketSameLine": true
|
||||
},
|
||||
"scripts": {
|
||||
"format": "npx prettier ./src ./tests --write",
|
||||
"build": "rm -rf dist && rollup -c",
|
||||
"dev": "rollup -c -w",
|
||||
"test": "vitest",
|
||||
"prepublishOnly": "npm run build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-terser": "^0.4.3",
|
||||
"prettier": "3.2.4",
|
||||
"rollup": "^4.0.0",
|
||||
"rollup-plugin-ts": "^3.0.0",
|
||||
"typescript": "^5.1.6",
|
||||
"vitest": "^2.0.0"
|
||||
}
|
||||
}
|
||||
21
package-lock.json
generated
Normal file
21
package-lock.json
generated
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "personal-tracker",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "personal-tracker",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"pocketbase": "^0.26.2"
|
||||
}
|
||||
},
|
||||
"node_modules/pocketbase": {
|
||||
"version": "0.26.2",
|
||||
"resolved": "https://registry.npmjs.org/pocketbase/-/pocketbase-0.26.2.tgz",
|
||||
"integrity": "sha512-WA8EOBc3QnSJh8rJ3iYoi9DmmPOMFIgVfAmIGux7wwruUEIzXgvrO4u0W2htfQjGIcyezJkdZOy5Xmh7SxAftw==",
|
||||
"license": "MIT"
|
||||
}
|
||||
}
|
||||
}
|
||||
8
package.json
Normal file
8
package.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"name": "personal-tracker",
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"pocketbase": "^0.26.2"
|
||||
}
|
||||
}
|
||||
262
scripts/csv/export_timeline_to_csv.py
Normal file
262
scripts/csv/export_timeline_to_csv.py
Normal file
@@ -0,0 +1,262 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Export Timeline.json data into multiple normalized CSV files.
|
||||
|
||||
Generated CSVs (default names):
|
||||
semantic_segments.csv
|
||||
visits.csv
|
||||
timeline_path_points.csv
|
||||
raw_signals.csv
|
||||
frequent_places.csv
|
||||
frequent_trips.csv
|
||||
frequent_trip_waypoints.csv
|
||||
frequent_trip_mode_distribution.csv
|
||||
travel_mode_affinities.csv
|
||||
|
||||
Usage:
|
||||
python export_timeline_to_csv.py \
|
||||
--timeline ../data/Timeline.json \
|
||||
--outdir ./timeline_csv \
|
||||
--prefix timeline_
|
||||
|
||||
If --timeline is not supplied, the script searches recursively under the script directory for Timeline.json.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import csv
|
||||
import argparse
|
||||
from typing import Tuple, Any, Dict, List
|
||||
from datetime import datetime
|
||||
|
||||
# ---------------------------- Helpers ---------------------------------
|
||||
|
||||
def find_timeline_json(start_path: str) -> str:
    """Recursively scan *start_path* for a file named ``Timeline.json``.

    Returns the full path of the first match encountered by ``os.walk``
    (top-down traversal), or an empty string when no match exists.
    """
    for dirpath, _subdirs, filenames in os.walk(start_path):
        if 'Timeline.json' not in filenames:
            continue
        return os.path.join(dirpath, 'Timeline.json')
    return ''
|
||||
|
||||
def parse_coordinates(point_str: str) -> Tuple[Any, Any]:
    """Parse a Google Timeline coordinate string like ``"12.3°, -45.6°"``.

    Returns a ``(lat, lon)`` pair of floats, or ``(None, None)`` when the
    input is not a string, is empty after stripping the degree signs, does
    not split into exactly two comma-separated parts, or fails float
    conversion.
    """
    if not isinstance(point_str, str):
        return None, None
    try:
        cleaned = point_str.replace('°', '').strip()
        if not cleaned:
            return None, None
        pieces = [piece.strip() for piece in cleaned.split(',')]
        if len(pieces) == 2:
            return float(pieces[0]), float(pieces[1])
        return None, None
    except Exception:
        # Any malformed numeric text degrades to the sentinel pair.
        return None, None
|
||||
|
||||
def ensure_dir(path: str):
    """Create *path* (including missing parents); no-op if it already exists."""
    os.makedirs(path, exist_ok=True)
|
||||
|
||||
# ---------------------------- Export Functions -------------------------
|
||||
|
||||
def export_semantic_segments(data: Dict, writer):
    """Write one summary row per entry of ``data['semanticSegments']``.

    Each row carries the segment's position in the list, its start/end
    timestamps, and 0/1 flags telling whether the segment contains a
    ``visit`` or a ``timelinePath`` sub-object.
    """
    for index, segment in enumerate(data.get('semanticSegments', [])):
        row = {
            'segment_index': index,
            'startTime': segment.get('startTime'),
            'endTime': segment.get('endTime'),
            'has_visit': int('visit' in segment),
            'has_timeline_path': int('timelinePath' in segment),
        }
        writer.writerow(row)
|
||||
|
||||
def export_visits(data: Dict, writer):
    """Write one row per semantic segment that contains a ``visit`` object.

    Columns cover the visit's hierarchy level and probability, the top
    place candidate (id, semantic type, probability, parsed lat/lon from
    ``placeLocation.latLng``), and the segment's start/end timestamps.
    """
    for index, segment in enumerate(data.get('semanticSegments', [])):
        if 'visit' not in segment:
            continue
        visit = segment.get('visit', {})
        candidate = visit.get('topCandidate', {})
        lat, lon = parse_coordinates(candidate.get('placeLocation', {}).get('latLng'))
        writer.writerow({
            'segment_index': index,
            'hierarchyLevel': visit.get('hierarchyLevel'),
            'visit_probability': visit.get('probability'),
            'top_place_id': candidate.get('placeId'),
            'top_semantic_type': candidate.get('semanticType'),
            'top_probability': candidate.get('probability'),
            'top_lat': lat,
            'top_lon': lon,
            'startTime': segment.get('startTime'),
            'endTime': segment.get('endTime'),
        })
|
||||
|
||||
def export_timeline_path_points(data: Dict, writer):
    """Write one row per GPS point in each segment's ``timelinePath`` list.

    Segments whose ``timelinePath`` is missing or not a list are skipped.
    Each row keeps both the raw coordinate string and its parsed lat/lon.
    """
    for seg_index, segment in enumerate(data.get('semanticSegments', [])):
        trail = segment.get('timelinePath')
        if not isinstance(trail, list):
            continue
        for point_index, entry in enumerate(trail):
            raw = entry.get('point')
            lat, lon = parse_coordinates(raw)
            writer.writerow({
                'segment_index': seg_index,
                'point_index': point_index,
                'time': entry.get('time'),
                'raw_point': raw,
                'lat': lat,
                'lon': lon,
            })
|
||||
|
||||
def export_raw_signals(data: Dict, writer):
    """Write one row per entry of data['rawSignals'].

    Each row carries the signal's position fix: timestamp, coordinates,
    accuracy/altitude/speed, and the reporting source.
    """
    for raw_index, signal in enumerate(data.get('rawSignals', [])):
        position = signal.get('position', {})
        # The raw-signal coordinate key has been observed as 'LatLng';
        # fall back to 'latLng' to match the other record types.
        coords = position.get('LatLng') or position.get('latLng')
        lat, lon = parse_coordinates(coords)
        writer.writerow({
            'raw_index': raw_index,
            'timestamp': position.get('timestamp'),
            'lat': lat,
            'lon': lon,
            'accuracyMeters': position.get('accuracyMeters'),
            'altitudeMeters': position.get('altitudeMeters'),
            'speedMetersPerSecond': position.get('speedMetersPerSecond'),
            'source': position.get('source'),
        })
|
||||
|
||||
def export_frequent_places(data: Dict, writer):
    """Write one (placeId, label, lat, lon) row per frequent place in the user profile."""
    places = data.get('userLocationProfile', {}).get('frequentPlaces', []) or []
    for place in places:
        lat, lon = parse_coordinates(place.get('placeLocation'))
        writer.writerow({
            'placeId': place.get('placeId'),
            'label': place.get('label'),
            'lat': lat,
            'lon': lon,
        })
|
||||
|
||||
def export_frequent_trips(data: Dict, trips_writer, waypoints_writer, mode_dist_writer):
    """Export frequent trips into three related CSVs.

    trips_writer receives one summary row per trip; waypoints_writer one
    row per (trip, waypoint); mode_dist_writer one row per mode-distribution
    entry, serialized as JSON because its exact structure is unknown.
    """
    profile = data.get('userLocationProfile', {})
    for trip_index, trip in enumerate(profile.get('frequentTrips', []) or []):
        waypoints = trip.get('waypointIds') or []
        modes = trip.get('modeDistribution') or []

        trips_writer.writerow({
            'trip_index': trip_index,
            'startTimeMinutes': trip.get('startTimeMinutes'),
            'endTimeMinutes': trip.get('endTimeMinutes'),
            'durationMinutes': trip.get('durationMinutes'),
            'confidence': trip.get('confidence'),
            'commuteDirection': trip.get('commuteDirection'),
            'waypoint_count': len(waypoints),
            'mode_dist_count': len(modes),
        })

        for order, waypoint_id in enumerate(waypoints):
            waypoints_writer.writerow({
                'trip_index': trip_index,
                'waypoint_order': order,
                'waypoint_id': waypoint_id,
            })

        for entry_index, entry in enumerate(modes):
            # Structure is undocumented; keep the raw JSON so nothing is lost.
            mode_dist_writer.writerow({
                'trip_index': trip_index,
                'entry_index': entry_index,
                'raw_json': json.dumps(entry, ensure_ascii=False),
            })
|
||||
|
||||
def export_travel_mode_affinities(data: Dict, writer):
    """Write one (mode, affinity) row per travel-mode affinity in the user's persona."""
    persona = data.get('userLocationProfile', {}).get('persona', {})
    for entry in persona.get('travelModeAffinities', []) or []:
        writer.writerow({
            'mode': entry.get('mode'),
            'affinity': entry.get('affinity'),
        })
|
||||
|
||||
# ---------------------------- Main ------------------------------------
|
||||
|
||||
def export_all(data: Dict, outdir: str, prefix: str):
    """Export every dataset in *data* to CSV files under *outdir*.

    One CSV per dataset, named '<prefix><name>.csv'. Every opened file is
    closed even if an export step raises.
    """
    ensure_dir(outdir)

    files = []  # every opened handle; closed in the finally block below

    def open_csv(name: str, fieldnames: List[str]):
        # Open the target CSV, register the handle for cleanup, and emit the
        # header row. The handle is registered BEFORE writeheader() so that a
        # failure while writing the header cannot leak the open file (the
        # previous version only tracked the handle after open_csv returned).
        fpath = os.path.join(outdir, f"{prefix}{name}.csv")
        f = open(fpath, 'w', encoding='utf-8', newline='')
        files.append(f)
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        return writer

    try:
        export_semantic_segments(data, open_csv(
            'semantic_segments',
            ['segment_index', 'startTime', 'endTime', 'has_visit', 'has_timeline_path']))

        export_visits(data, open_csv(
            'visits',
            ['segment_index', 'hierarchyLevel', 'visit_probability', 'top_place_id',
             'top_semantic_type', 'top_probability', 'top_lat', 'top_lon',
             'startTime', 'endTime']))

        export_timeline_path_points(data, open_csv(
            'timeline_path_points',
            ['segment_index', 'point_index', 'time', 'raw_point', 'lat', 'lon']))

        export_raw_signals(data, open_csv(
            'raw_signals',
            ['raw_index', 'timestamp', 'lat', 'lon', 'accuracyMeters',
             'altitudeMeters', 'speedMetersPerSecond', 'source']))

        export_frequent_places(data, open_csv(
            'frequent_places', ['placeId', 'label', 'lat', 'lon']))

        # Frequent trips fan out into three related CSVs written together.
        w_trips = open_csv(
            'frequent_trips',
            ['trip_index', 'startTimeMinutes', 'endTimeMinutes', 'durationMinutes',
             'confidence', 'commuteDirection', 'waypoint_count', 'mode_dist_count'])
        w_way = open_csv(
            'frequent_trip_waypoints', ['trip_index', 'waypoint_order', 'waypoint_id'])
        w_md = open_csv(
            'frequent_trip_mode_distribution', ['trip_index', 'entry_index', 'raw_json'])
        export_frequent_trips(data, w_trips, w_way, w_md)

        export_travel_mode_affinities(data, open_csv(
            'travel_mode_affinities', ['mode', 'affinity']))
    finally:
        for f in files:
            f.close()
|
||||
|
||||
|
||||
def main():
    """CLI entry point: locate Timeline.json and export it to CSV files."""
    parser = argparse.ArgumentParser(description='Export Timeline.json to multiple CSV files.')
    parser.add_argument('--timeline', type=str, help='Path to Timeline.json (auto-detect if omitted)')
    parser.add_argument('--outdir', type=str, default='timeline_csv', help='Output directory for CSV files')
    parser.add_argument('--prefix', type=str, default='', help='Filename prefix for CSV files')
    args = parser.parse_args()

    if args.timeline:
        timeline_path = args.timeline
    else:
        # Auto-detect: search from the repository root (two levels up).
        repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
        timeline_path = find_timeline_json(repo_root)

    if not timeline_path or not os.path.isfile(timeline_path):
        raise SystemExit('Timeline.json not found. Provide --timeline or place file in repository.')

    print(f'Loading {timeline_path} ...')
    with open(timeline_path, 'r', encoding='utf-8') as handle:
        data = json.load(handle)

    # A timestamped subdirectory keeps successive exports from clobbering each other.
    stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    target_dir = os.path.join(args.outdir, stamp)
    print(f'Exporting CSV files to: {target_dir}')
    export_all(data, target_dir, args.prefix)
    print('Done.')


if __name__ == '__main__':
    main()
|
||||
643
scripts/ingest/PocketBase_Integration_Guide.txt
Normal file
643
scripts/ingest/PocketBase_Integration_Guide.txt
Normal file
@@ -0,0 +1,643 @@
|
||||
# PocketBase Integration Guide
|
||||
# Complete guide for creating schemas and uploading CSV data to PocketBase
|
||||
|
||||
## Table of Contents
|
||||
1. Authentication Setup
|
||||
2. Collection Schema Creation
|
||||
3. Data Upload Process
|
||||
4. Common Issues and Solutions
|
||||
5. Field Types and Validation
|
||||
6. Best Practices
|
||||
7. Code Examples
|
||||
8. Troubleshooting
|
||||
|
||||
================================================================================
|
||||
## 1. AUTHENTICATION SETUP
|
||||
================================================================================
|
||||
|
||||
### Superuser Authentication Required
|
||||
- Creating collections requires SUPERUSER authentication (not regular user auth)
|
||||
- Use the "_superusers" collection for authentication
|
||||
|
||||
```javascript
|
||||
import PocketBase from 'pocketbase';
|
||||
|
||||
const pb = new PocketBase('http://localhost:8090');
|
||||
|
||||
// CRITICAL: Must authenticate as superuser for collection operations
|
||||
await pb.collection("_superusers").authWithPassword('your-email@example.com', 'your-password');
|
||||
```
|
||||
|
||||
### Authentication Error Handling
|
||||
```javascript
|
||||
try {
|
||||
await pb.collection("_superusers").authWithPassword('email', 'password');
|
||||
console.log('Authenticated as superuser');
|
||||
} catch (error) {
|
||||
console.error('Failed to authenticate:', error.message);
|
||||
throw error;
|
||||
}
|
||||
```
|
||||
|
||||
================================================================================
|
||||
## 2. COLLECTION SCHEMA CREATION
|
||||
================================================================================
|
||||
|
||||
### Basic Collection Structure
|
||||
```javascript
|
||||
const collectionData = {
|
||||
name: 'collection_name', // Must be unique
|
||||
type: 'base', // 'base', 'auth', or 'view'
|
||||
fields: [...], // Array of field definitions
|
||||
indexes: [...], // Optional database indexes
|
||||
listRule: '', // API access rules (empty = public)
|
||||
viewRule: '',
|
||||
createRule: '',
|
||||
updateRule: '',
|
||||
deleteRule: ''
|
||||
};
|
||||
```
|
||||
|
||||
### Field Types and Properties
|
||||
```javascript
|
||||
// TEXT FIELD
|
||||
{
|
||||
name: 'field_name',
|
||||
type: 'text',
|
||||
required: false, // IMPORTANT: Use false for optional fields
|
||||
max: 255 // Maximum character length
|
||||
}
|
||||
|
||||
// NUMBER FIELD
|
||||
{
|
||||
name: 'numeric_field',
|
||||
type: 'number',
|
||||
required: false // CRITICAL: Set to false if field can be 0 or null
|
||||
}
|
||||
|
||||
// DATE FIELD
|
||||
{
|
||||
name: 'date_field',
|
||||
type: 'date',
|
||||
required: true
|
||||
}
|
||||
|
||||
// BOOLEAN FIELD
|
||||
{
|
||||
name: 'boolean_field',
|
||||
type: 'bool',
|
||||
required: false
|
||||
}
|
||||
```
|
||||
|
||||
### Database Indexes (Optional but Recommended)
|
||||
```javascript
|
||||
indexes: [
|
||||
'CREATE INDEX idx_tablename_field ON tablename (field_name)',
|
||||
'CREATE INDEX idx_tablename_date ON tablename (date_field)',
|
||||
'CREATE INDEX idx_tablename_composite ON tablename (field1, field2)'
|
||||
]
|
||||
```
|
||||
|
||||
### Complete Collection Creation Example
|
||||
```javascript
|
||||
async function createCollection() {
|
||||
const pb = new PocketBase('http://localhost:8090');
|
||||
await pb.collection("_superusers").authWithPassword('email', 'password');
|
||||
|
||||
const collectionData = {
|
||||
name: 'visits',
|
||||
type: 'base',
|
||||
fields: [
|
||||
{
|
||||
name: 'segment_index',
|
||||
type: 'number',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'hierarchyLevel',
|
||||
type: 'number',
|
||||
required: false // IMPORTANT: Allow 0 values
|
||||
},
|
||||
{
|
||||
name: 'visit_probability',
|
||||
type: 'number',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'top_place_id',
|
||||
type: 'text',
|
||||
required: false,
|
||||
max: 255
|
||||
},
|
||||
{
|
||||
name: 'startTime',
|
||||
type: 'date',
|
||||
required: true
|
||||
}
|
||||
],
|
||||
indexes: [
|
||||
'CREATE INDEX idx_visits_segment_index ON visits (segment_index)',
|
||||
'CREATE INDEX idx_visits_start_time ON visits (startTime)'
|
||||
]
|
||||
};
|
||||
|
||||
try {
|
||||
const collection = await pb.collections.create(collectionData);
|
||||
console.log('Collection created:', collection.name);
|
||||
} catch (error) {
|
||||
if (error.message.includes('already exists')) {
|
||||
console.log('Collection already exists');
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
================================================================================
|
||||
## 3. DATA UPLOAD PROCESS
|
||||
================================================================================
|
||||
|
||||
### CSV File Reading and Parsing
|
||||
```javascript
|
||||
import fs from 'fs';
|
||||
|
||||
// Read CSV file
|
||||
const csvData = fs.readFileSync('/path/to/file.csv', 'utf8');
|
||||
const lines = csvData.trim().split('\n');
|
||||
|
||||
// CRITICAL: Clean headers to remove carriage returns and whitespace
|
||||
const headers = lines[0].split(',').map(h => h.trim().replace(/\r$/, ''));
|
||||
```
|
||||
|
||||
### CSV Line Parsing (Handle Quoted Fields)
|
||||
```javascript
|
||||
function parseCsvLine(line) {
|
||||
const result = [];
|
||||
let current = '';
|
||||
let inQuotes = false;
|
||||
|
||||
for (let i = 0; i < line.length; i++) {
|
||||
const char = line[i];
|
||||
|
||||
if (char === '"') {
|
||||
inQuotes = !inQuotes;
|
||||
} else if (char === ',' && !inQuotes) {
|
||||
result.push(current.trim());
|
||||
current = '';
|
||||
} else {
|
||||
current += char;
|
||||
}
|
||||
}
|
||||
|
||||
result.push(current.trim());
|
||||
return result;
|
||||
}
|
||||
```
|
||||
|
||||
### Data Type Conversion (CRITICAL)
|
||||
```javascript
|
||||
// Process each row
|
||||
const record = {};
|
||||
headers.forEach((header, index) => {
|
||||
const value = values[index];
|
||||
|
||||
switch (header) {
|
||||
case 'segment_index':
|
||||
case 'hierarchyLevel':
|
||||
// IMPORTANT: Check for empty string, not falsy values
|
||||
record[header] = value !== '' ? parseInt(value) : null;
|
||||
break;
|
||||
|
||||
case 'visit_probability':
|
||||
case 'top_probability':
|
||||
case 'top_lat':
|
||||
case 'top_lon':
|
||||
record[header] = value !== '' ? parseFloat(value) : null;
|
||||
break;
|
||||
|
||||
case 'startTime':
|
||||
case 'endTime':
|
||||
// Convert to ISO string format
|
||||
record[header] = value ? new Date(value).toISOString() : null;
|
||||
break;
|
||||
|
||||
default:
|
||||
record[header] = value || null;
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### Batch Upload with Error Handling
|
||||
```javascript
|
||||
async function uploadData() {
|
||||
const pb = new PocketBase('http://localhost:8090');
|
||||
await pb.collection("_superusers").authWithPassword('email', 'password');
|
||||
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
for (let i = 1; i < lines.length; i++) { // Skip header row
|
||||
const values = parseCsvLine(lines[i]);
|
||||
|
||||
// Create record object (see data type conversion above)
const record = {}; // populate per-header using the conversion switch shown earlier
|
||||
|
||||
try {
|
||||
await pb.collection('collection_name').create(record);
|
||||
successCount++;
|
||||
|
||||
if (successCount % 100 === 0) {
|
||||
console.log(`Uploaded ${successCount} records...`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Row ${i} failed:`, error.message);
|
||||
|
||||
// Log detailed PocketBase error
|
||||
if (error.response && error.response.data) {
|
||||
console.error(`PocketBase error:`, JSON.stringify(error.response.data, null, 2));
|
||||
}
|
||||
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Success: ${successCount}, Errors: ${errorCount}`);
|
||||
}
|
||||
```
|
||||
|
||||
================================================================================
|
||||
## 4. COMMON ISSUES AND SOLUTIONS
|
||||
================================================================================
|
||||
|
||||
### Issue 1: "Cannot be blank" for Zero Values
|
||||
PROBLEM: PocketBase treats 0 as blank for required number fields
|
||||
SOLUTION: Set number fields to required: false, handle validation in app logic
|
||||
|
||||
```javascript
|
||||
// WRONG
|
||||
{
|
||||
name: 'hierarchyLevel',
|
||||
type: 'number',
|
||||
required: true // This will reject 0 values
|
||||
}
|
||||
|
||||
// CORRECT
|
||||
{
|
||||
name: 'hierarchyLevel',
|
||||
type: 'number',
|
||||
required: false // Allows 0 values
|
||||
}
|
||||
```
|
||||
|
||||
### Issue 2: Carriage Return in CSV Headers
|
||||
PROBLEM: CSV files may have \r characters in headers
|
||||
SOLUTION: Clean headers when parsing
|
||||
|
||||
```javascript
|
||||
// Clean headers
|
||||
const headers = lines[0].split(',').map(h => h.trim().replace(/\r$/, ''));
|
||||
```
|
||||
|
||||
### Issue 3: Date Format Issues
|
||||
PROBLEM: Date strings not recognized by PocketBase
|
||||
SOLUTION: Convert to ISO format
|
||||
|
||||
```javascript
|
||||
// Convert any date string to ISO format
|
||||
record[header] = value ? new Date(value).toISOString() : null;
|
||||
```
|
||||
|
||||
### Issue 4: Authentication Errors
|
||||
PROBLEM: Using wrong authentication method
|
||||
SOLUTION: Always use _superusers collection for admin operations
|
||||
|
||||
```javascript
|
||||
// WRONG
|
||||
await pb.collection("users").authWithPassword(...);
|
||||
|
||||
// CORRECT
|
||||
await pb.collection("_superusers").authWithPassword(...);
|
||||
```
|
||||
|
||||
### Issue 5: Collection Already Exists
|
||||
PROBLEM: Script fails when collection exists
|
||||
SOLUTION: Handle the error gracefully
|
||||
|
||||
```javascript
|
||||
try {
|
||||
await pb.collections.create(collectionData);
|
||||
} catch (error) {
|
||||
if (error.message.includes('already exists')) {
|
||||
console.log('Collection already exists');
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
================================================================================
|
||||
## 5. FIELD TYPES AND VALIDATION
|
||||
================================================================================
|
||||
|
||||
### Available Field Types
|
||||
- text: String values (max length configurable)
|
||||
- number: Integer or float values
|
||||
- bool: Boolean true/false
|
||||
- date: ISO date strings
|
||||
- json: JSON objects
|
||||
- file: File uploads
|
||||
- relation: References to other collections
|
||||
- select: Predefined options
|
||||
- autodate: Auto-generated timestamps
|
||||
|
||||
### Validation Rules
|
||||
```javascript
|
||||
// Text field with validation
|
||||
{
|
||||
name: 'email',
|
||||
type: 'text',
|
||||
required: true,
|
||||
pattern: '^[\\w\\.-]+@[\\w\\.-]+\\.[a-zA-Z]{2,}$', // Email regex
|
||||
max: 255
|
||||
}
|
||||
|
||||
// Number field with min/max
|
||||
{
|
||||
name: 'age',
|
||||
type: 'number',
|
||||
required: false,
|
||||
min: 0,
|
||||
max: 150
|
||||
}
|
||||
|
||||
// Select field with options
|
||||
{
|
||||
name: 'status',
|
||||
type: 'select',
|
||||
required: true,
|
||||
values: ['active', 'inactive', 'pending']
|
||||
}
|
||||
```
|
||||
|
||||
================================================================================
|
||||
## 6. BEST PRACTICES
|
||||
================================================================================
|
||||
|
||||
### Schema Design
|
||||
1. Use descriptive field names (snake_case or camelCase consistently)
|
||||
2. Set required: false for fields that can be 0, null, or empty
|
||||
3. Add appropriate indexes for frequently queried fields
|
||||
4. Use appropriate field types (don't store numbers as text)
|
||||
5. Set reasonable max lengths for text fields
|
||||
|
||||
### Data Upload
|
||||
1. Always authenticate as superuser for admin operations
|
||||
2. Parse CSV data carefully, handling quotes and special characters
|
||||
3. Convert data types explicitly before uploading
|
||||
4. Use batch processing with progress logging
|
||||
5. Implement proper error handling and logging
|
||||
6. Validate data before uploading when possible
|
||||
|
||||
### Error Handling
|
||||
1. Log detailed error messages including PocketBase responses
|
||||
2. Handle common errors gracefully (collection exists, auth failures)
|
||||
3. Stop processing after multiple consecutive errors to debug
|
||||
4. Provide meaningful error messages for troubleshooting
|
||||
|
||||
### Performance
|
||||
1. Use database indexes for frequently queried fields
|
||||
2. Process uploads in batches with progress indicators
|
||||
3. Consider using transactions for large data sets
|
||||
4. Monitor memory usage for very large CSV files
|
||||
|
||||
================================================================================
|
||||
## 7. COMPLETE CODE EXAMPLES
|
||||
================================================================================
|
||||
|
||||
### Collection Creation Script Template
|
||||
```javascript
|
||||
import PocketBase from 'pocketbase';
|
||||
|
||||
async function createMyCollection() {
|
||||
const pb = new PocketBase('http://localhost:8090');
|
||||
|
||||
// Authenticate
|
||||
try {
|
||||
await pb.collection("_superusers").authWithPassword('your-email', 'your-password');
|
||||
console.log('Authenticated as superuser');
|
||||
} catch (error) {
|
||||
console.error('Authentication failed:', error.message);
|
||||
throw error;
|
||||
}
|
||||
|
||||
const collectionData = {
|
||||
name: 'your_collection_name',
|
||||
type: 'base',
|
||||
fields: [
|
||||
// Define your fields here based on CSV structure
|
||||
],
|
||||
indexes: [
|
||||
// Add indexes for performance
|
||||
],
|
||||
listRule: '', // Set access rules as needed
|
||||
viewRule: '',
|
||||
createRule: '',
|
||||
updateRule: '',
|
||||
deleteRule: ''
|
||||
};
|
||||
|
||||
try {
|
||||
const collection = await pb.collections.create(collectionData);
|
||||
console.log('Collection created successfully:', collection.name);
|
||||
return collection;
|
||||
} catch (error) {
|
||||
if (error.message.includes('already exists')) {
|
||||
console.log('Collection already exists');
|
||||
} else {
|
||||
console.error('Error creating collection:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Export for use in other scripts
|
||||
export { createMyCollection };
|
||||
|
||||
// Run if called directly
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
createMyCollection()
|
||||
.then(() => {
|
||||
console.log('Script completed successfully');
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('Script failed:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Data Upload Script Template
|
||||
```javascript
|
||||
import PocketBase from 'pocketbase';
|
||||
import fs from 'fs';
|
||||
|
||||
async function uploadDataToCollection() {
|
||||
const pb = new PocketBase('http://localhost:8090');
|
||||
|
||||
// Authenticate
|
||||
await pb.collection("_superusers").authWithPassword('your-email', 'your-password');
|
||||
|
||||
// Read CSV
|
||||
const csvPath = '/path/to/your/data.csv';
|
||||
const csvData = fs.readFileSync(csvPath, 'utf8');
|
||||
const lines = csvData.trim().split('\n');
|
||||
const headers = lines[0].split(',').map(h => h.trim().replace(/\r$/, ''));
|
||||
|
||||
console.log(`Found ${lines.length - 1} rows to upload`);
|
||||
console.log('Headers:', headers);
|
||||
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
// Process each row
|
||||
for (let i = 1; i < lines.length; i++) {
|
||||
const values = parseCsvLine(lines[i]);
|
||||
|
||||
if (values.length !== headers.length) {
|
||||
console.warn(`Row ${i}: Column count mismatch, skipping`);
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Build record object
|
||||
const record = {};
|
||||
headers.forEach((header, index) => {
|
||||
const value = values[index];
|
||||
|
||||
// Customize data type conversion based on your schema
|
||||
switch (header) {
|
||||
case 'id_field':
|
||||
case 'count_field':
|
||||
record[header] = value !== '' ? parseInt(value) : null;
|
||||
break;
|
||||
case 'decimal_field':
|
||||
case 'percentage_field':
|
||||
record[header] = value !== '' ? parseFloat(value) : null;
|
||||
break;
|
||||
case 'date_field':
|
||||
case 'timestamp_field':
|
||||
record[header] = value ? new Date(value).toISOString() : null;
|
||||
break;
|
||||
default:
|
||||
record[header] = value || null;
|
||||
}
|
||||
});
|
||||
|
||||
// Upload record
|
||||
try {
|
||||
await pb.collection('your_collection_name').create(record);
|
||||
successCount++;
|
||||
|
||||
if (successCount % 100 === 0) {
|
||||
console.log(`Uploaded ${successCount} records...`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Row ${i} failed:`, error.message);
|
||||
if (error.response && error.response.data) {
|
||||
console.error(`Details:`, JSON.stringify(error.response.data, null, 2));
|
||||
}
|
||||
errorCount++;
|
||||
|
||||
// Stop after too many errors to debug
|
||||
if (errorCount >= 5) {
|
||||
console.log('Too many errors, stopping to debug...');
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nUpload completed:`);
|
||||
console.log(`Success: ${successCount} records`);
|
||||
console.log(`Errors: ${errorCount} records`);
|
||||
}
|
||||
|
||||
function parseCsvLine(line) {
|
||||
const result = [];
|
||||
let current = '';
|
||||
let inQuotes = false;
|
||||
|
||||
for (let i = 0; i < line.length; i++) {
|
||||
const char = line[i];
|
||||
|
||||
if (char === '"') {
|
||||
inQuotes = !inQuotes;
|
||||
} else if (char === ',' && !inQuotes) {
|
||||
result.push(current.trim());
|
||||
current = '';
|
||||
} else {
|
||||
current += char;
|
||||
}
|
||||
}
|
||||
|
||||
result.push(current.trim());
|
||||
return result;
|
||||
}
|
||||
|
||||
export { uploadDataToCollection };
|
||||
```
|
||||
|
||||
================================================================================
|
||||
## 8. TROUBLESHOOTING CHECKLIST
|
||||
================================================================================
|
||||
|
||||
### Before Running Scripts
|
||||
□ PocketBase server is running on correct port (default: 8090)
|
||||
□ Superuser account exists with correct credentials
|
||||
□ CSV file path is correct and file is accessible
|
||||
□ Node.js modules are installed (npm install pocketbase)
|
||||
□ Project is configured for ES modules (package.json has "type": "module")
|
||||
|
||||
### If Collection Creation Fails
|
||||
□ Check authentication credentials
|
||||
□ Verify collection name doesn't already exist
|
||||
□ Ensure field definitions are valid
|
||||
□ Check for typos in field types ('number' not 'Number')
|
||||
□ Verify required vs optional field settings
|
||||
|
||||
### If Data Upload Fails
|
||||
□ Collection exists and schema matches CSV structure
|
||||
□ CSV headers are clean (no carriage returns)
|
||||
□ Data types are being converted correctly
|
||||
□ Check for null/empty value handling
|
||||
□ Verify date formats are valid
|
||||
□ Look at detailed PocketBase error messages
|
||||
|
||||
### Common Error Messages
|
||||
- "Cannot be blank": Field is required but receiving null/0/empty value
|
||||
- "Failed to create record": Generic error, check detailed error response
|
||||
- "The request requires valid record authorization token": Authentication failed
|
||||
- "Collection not found": Typo in collection name or collection doesn't exist
|
||||
|
||||
### Performance Issues
|
||||
□ Add database indexes for frequently queried fields
|
||||
□ Process large files in smaller batches
|
||||
□ Monitor memory usage with very large CSVs
|
||||
□ Consider using database transactions for consistency
|
||||
|
||||
================================================================================
|
||||
## FINAL NOTES
|
||||
================================================================================
|
||||
|
||||
1. Always test with a small subset of data first
|
||||
2. Keep backups of your CSV files before processing
|
||||
3. Monitor PocketBase logs for additional error details
|
||||
4. Use version control for your scripts
|
||||
5. Document any custom data transformations needed for your specific use case
|
||||
6. Consider creating utility functions for common operations
|
||||
7. Test error scenarios (network issues, invalid data, etc.)
|
||||
|
||||
Remember: PocketBase validation is strict. It's better to be permissive in your schema
|
||||
and validate in your application logic than to fight with PocketBase's validation rules.
|
||||
113
scripts/ingest/create-visits-collection.js
Normal file
113
scripts/ingest/create-visits-collection.js
Normal file
@@ -0,0 +1,113 @@
|
||||
import PocketBase from 'pocketbase';
|
||||
|
||||
// Creates the "visits" collection used by the timeline CSV importer.
// Superuser credentials are read from the environment so that real
// secrets are never committed to version control.
async function createVisitsCollection() {
  const url = process.env.POCKETBASE_URL || 'http://localhost:8090';
  const email = process.env.PB_SUPERUSER_EMAIL;
  const password = process.env.PB_SUPERUSER_PASSWORD;
  if (!email || !password) {
    throw new Error('Set PB_SUPERUSER_EMAIL and PB_SUPERUSER_PASSWORD before running this script.');
  }

  const pb = new PocketBase(url);

  // Creating collections requires superuser (not regular user) authentication.
  try {
    await pb.collection('_superusers').authWithPassword(email, password);
    console.log('Authenticated as superuser');
  } catch (error) {
    console.error('Failed to authenticate as superuser:', error.message);
    console.error('Check the PB_SUPERUSER_EMAIL / PB_SUPERUSER_PASSWORD environment variables.');
    throw error;
  }

  const collectionData = {
    name: 'visits',
    type: 'base',
    fields: [
      {
        name: 'segment_index',
        type: 'number',
        required: true
      },
      {
        // required: false so PocketBase accepts a hierarchyLevel of 0
        // (required number fields reject zero values).
        name: 'hierarchyLevel',
        type: 'number',
        required: false
      },
      {
        // NOTE(review): required: true will reject a probability of exactly 0;
        // confirm that 0 never occurs in the data, or relax to false.
        name: 'visit_probability',
        type: 'number',
        required: true
      },
      {
        name: 'top_place_id',
        type: 'text',
        required: false,
        max: 255
      },
      {
        name: 'top_semantic_type',
        type: 'text',
        required: false,
        max: 100
      },
      {
        name: 'top_probability',
        type: 'number',
        required: false
      },
      {
        name: 'top_lat',
        type: 'number',
        required: false
      },
      {
        name: 'top_lon',
        type: 'number',
        required: false
      },
      {
        name: 'startTime',
        type: 'date',
        required: true
      },
      {
        name: 'endTime',
        type: 'date',
        required: true
      }
    ],
    indexes: [
      'CREATE INDEX idx_visits_segment_index ON visits (segment_index)',
      'CREATE INDEX idx_visits_start_time ON visits (startTime)',
      'CREATE INDEX idx_visits_place_id ON visits (top_place_id)'
    ],
    listRule: '',
    viewRule: '',
    createRule: '',
    updateRule: '',
    deleteRule: ''
  };

  try {
    console.log('Creating visits collection...');
    const collection = await pb.collections.create(collectionData);
    console.log('Collection created successfully:', collection.name);
    return collection;
  } catch (error) {
    // Re-running the script against an existing collection is not an error.
    if (error.message.includes('already exists')) {
      console.log('Collection "visits" already exists');
    } else {
      console.error('Error creating collection:', error);
      throw error;
    }
  }
}

// Run directly: node create-visits-collection.js
if (import.meta.url === `file://${process.argv[1]}`) {
  createVisitsCollection()
    .then(() => {
      console.log('Script completed successfully');
      process.exit(0);
    })
    .catch((error) => {
      console.error('Script failed:', error);
      process.exit(1);
    });
}

export { createVisitsCollection };
|
||||
65
scripts/ingest/delete-collections.js
Normal file
65
scripts/ingest/delete-collections.js
Normal file
@@ -0,0 +1,65 @@
|
||||
import PocketBase from 'pocketbase';
|
||||
|
||||
// Deletes every USER-DEFINED collection (and its records) from PocketBase.
// System collections (names starting with "_", e.g. "_superusers") are
// skipped: the previous version iterated them too, which could delete the
// very superuser account this script is authenticated with.
// Credentials come from the environment so no secrets live in the repo.
async function deleteAllCollections() {
  const url = process.env.POCKETBASE_URL || 'http://localhost:8090';
  const email = process.env.PB_SUPERUSER_EMAIL;
  const password = process.env.PB_SUPERUSER_PASSWORD;
  if (!email || !password) {
    throw new Error('Set PB_SUPERUSER_EMAIL and PB_SUPERUSER_PASSWORD before running this script.');
  }

  const pb = new PocketBase(url);

  // Deleting collections requires superuser authentication.
  try {
    await pb.collection('_superusers').authWithPassword(email, password);
    console.log('Authenticated as superuser');
  } catch (error) {
    console.error('Failed to authenticate as superuser:', error.message);
    throw error;
  }

  try {
    console.log('Fetching all collections...');
    const collections = await pb.collections.getFullList();

    // Keep only user-defined collections; never touch PocketBase internals.
    const deletable = collections.filter((c) => !c.name.startsWith('_'));
    console.log(`Found ${deletable.length} user collections to delete`);

    for (const collection of deletable) {
      try {
        console.log(`Deleting records from collection: ${collection.name}`);

        // Delete records individually first so progress is visible per record.
        const records = await pb.collection(collection.name).getFullList();
        console.log(`  Found ${records.length} records in ${collection.name}`);
        for (const record of records) {
          await pb.collection(collection.name).delete(record.id);
        }
        console.log(`  Deleted all records from ${collection.name}`);

        // Then drop the collection itself.
        await pb.collections.delete(collection.id);
        console.log(`✓ Deleted collection: ${collection.name}`);
      } catch (error) {
        console.error(`Error deleting collection ${collection.name}:`, error.message);
      }
    }

    console.log('All user collections and records deleted successfully');
  } catch (error) {
    console.error('Error fetching collections:', error);
    throw error;
  }
}

// Run directly: node delete-collections.js
if (import.meta.url === `file://${process.argv[1]}`) {
  deleteAllCollections()
    .then(() => {
      console.log('Script completed successfully');
      process.exit(0);
    })
    .catch((error) => {
      console.error('Script failed:', error);
      process.exit(1);
    });
}

export { deleteAllCollections };
|
||||
185
scripts/ingest/geospatial_data_relationships.txt
Normal file
185
scripts/ingest/geospatial_data_relationships.txt
Normal file
@@ -0,0 +1,185 @@
|
||||
GEOSPATIAL DATA RELATIONSHIPS IN PERSONAL TRACKER
|
||||
==================================================
|
||||
|
||||
This document explains how the geospatial datasets in the timeline_csv folder are interconnected
|
||||
and structured to provide a complete picture of location-based personal tracking data.
|
||||
|
||||
OVERVIEW
|
||||
--------
|
||||
The location tracking system uses a hierarchical approach with semantic segments as the master
|
||||
index that coordinates different types of location data. The data is organized into movement
|
||||
periods (travel) and stationary periods (visits), creating a complete chronological timeline.
|
||||
|
||||
CORE DATASETS AND THEIR RELATIONSHIPS
|
||||
=====================================
|
||||
|
||||
1. SEMANTIC_SEGMENTS.CSV - The Master Index
|
||||
--------------------------------------------
|
||||
Purpose: Acts as the central orchestrator that defines time-based segments
|
||||
Key Fields:
|
||||
- segment_index: Unique identifier linking all other datasets
|
||||
- startTime/endTime: Time boundaries for each segment
|
||||
- has_visit: Boolean indicating if segment contains visit data
|
||||
- has_timeline_path: Boolean indicating if segment contains movement data
|
||||
|
||||
This dataset defines the temporal structure and determines which other datasets contain
|
||||
data for each time period.
|
||||
|
||||
2. TIMELINE_PATH_POINTS.CSV - Movement Data
|
||||
--------------------------------------------
|
||||
Purpose: GPS tracking data during travel/movement periods
|
||||
Key Fields:
|
||||
- segment_index: Links to semantic_segments
|
||||
- point_index: Order of GPS points within a segment
|
||||
- time: Precise timestamp for each GPS reading
|
||||
- lat/lon: Geographic coordinates
|
||||
- raw_point: Original coordinate string
|
||||
|
||||
Relationship: Contains data ONLY for segments where has_timeline_path=1 in semantic_segments.
|
||||
These represent periods when the person was moving between locations.
|
||||
|
||||
3. VISITS.CSV - Stationary Location Data
|
||||
-----------------------------------------
|
||||
Purpose: Information about places where the person stayed for extended periods
|
||||
Key Fields:
|
||||
- segment_index: Links to semantic_segments
|
||||
- top_place_id: Google Places API identifier
|
||||
- top_semantic_type: Category (HOME, WORK, UNKNOWN, etc.)
|
||||
- top_lat/top_lon: Geographic coordinates of the visit location
|
||||
- startTime/endTime: Duration of the visit
|
||||
- visit_probability: Confidence that this was actually a visit
|
||||
|
||||
Relationship: Contains data ONLY for segments where has_visit=1 in semantic_segments.
|
||||
These represent periods when the person was stationary at a specific location.
|
||||
|
||||
4. FREQUENT_PLACES.CSV - Location Reference Data
|
||||
------------------------------------------------
|
||||
Purpose: Registry of commonly visited locations with semantic labels
|
||||
Key Fields:
|
||||
- placeId: Google Places API identifier (links to visits.top_place_id)
|
||||
- label: Semantic meaning (HOME, WORK, or empty for unlabeled)
|
||||
- lat/lon: Geographic coordinates
|
||||
|
||||
Relationship: Acts as a lookup table for visits.csv. The placeId field provides
|
||||
cross-references to identify and categorize frequently visited locations.
|
||||
|
||||
5. RAW_SIGNALS.CSV - Raw GPS Data
|
||||
---------------------------------
|
||||
Purpose: Unprocessed GPS signals from the device
|
||||
Key Fields:
|
||||
- raw_index: Sequential identifier
|
||||
- timestamp: When the GPS signal was recorded
|
||||
- lat/lon: Geographic coordinates
|
||||
- accuracyMeters: GPS accuracy measurement
|
||||
- altitudeMeters: Elevation data
|
||||
- speedMetersPerSecond: Movement speed
|
||||
- source: Data source type
|
||||
|
||||
Relationship: This is the foundation data that gets processed into timeline_path_points
|
||||
and visits. It represents the raw GPS signals before semantic interpretation.
|
||||
|
||||
SUPPORTING DATASETS
|
||||
===================
|
||||
|
||||
6. FREQUENT_TRIPS.CSV - Trip Pattern Analysis
|
||||
----------------------------------------------
|
||||
Purpose: Analysis of regular travel patterns (like commutes)
|
||||
Key Fields:
|
||||
- trip_index: Unique identifier for trip patterns
|
||||
- startTimeMinutes/endTimeMinutes: Time of day patterns
|
||||
- durationMinutes: Typical trip duration
|
||||
- commuteDirection: HOME_TO_WORK or WORK_TO_HOME
|
||||
- waypoint_count: Number of stops in the trip
|
||||
|
||||
7. FREQUENT_TRIP_WAYPOINTS.CSV - Trip Waypoint Details
|
||||
------------------------------------------------------
|
||||
Purpose: Specific locations that are part of frequent trips
|
||||
Key Fields:
|
||||
- trip_index: Links to frequent_trips.csv
|
||||
- waypoint_order: Sequence of stops in the trip
|
||||
- waypoint_id: Links to frequent_places.placeId
|
||||
|
||||
8. FREQUENT_TRIP_MODE_DISTRIBUTION.CSV - Transportation Analysis
|
||||
---------------------------------------------------------------
|
||||
Purpose: Analysis of transportation methods used
|
||||
Key Fields:
|
||||
- trip_index: Links to frequent_trips.csv
|
||||
- mode: Transportation type (WALKING, DRIVING, etc.)
|
||||
- percentage: How often this mode was used for this trip
|
||||
|
||||
9. TRAVEL_MODE_AFFINITIES.CSV - User Preferences
|
||||
------------------------------------------------
|
||||
Purpose: User's preferred transportation methods
|
||||
Key Fields:
|
||||
- mode: Transportation type
|
||||
- affinity: Preference score
|
||||
|
||||
DATA FLOW AND RELATIONSHIPS
|
||||
============================
|
||||
|
||||
1. RAW COLLECTION:
|
||||
raw_signals.csv contains all GPS pings from the device
|
||||
|
||||
2. TEMPORAL SEGMENTATION:
|
||||
semantic_segments.csv divides time into logical periods based on movement patterns
|
||||
|
||||
3. MOVEMENT vs. STATIONARY CLASSIFICATION:
|
||||
- Movement periods → timeline_path_points.csv (detailed GPS tracking)
|
||||
- Stationary periods → visits.csv (location identification and categorization)
|
||||
|
||||
4. LOCATION IDENTIFICATION:
|
||||
frequent_places.csv provides semantic meaning to visited locations
|
||||
|
||||
5. PATTERN ANALYSIS:
|
||||
frequent_trips.csv, frequent_trip_waypoints.csv, and frequent_trip_mode_distribution.csv
|
||||
analyze regular patterns and transportation preferences
|
||||
|
||||
EXAMPLE DATA FLOW
|
||||
==================
|
||||
|
||||
Segment 0 (Movement): 2013-12-31 22:00 - 2014-01-01 00:00
|
||||
- semantic_segments: has_timeline_path=1, has_visit=0
|
||||
- timeline_path_points: Contains GPS coordinates during this travel period
|
||||
- visits: No data for this segment
|
||||
|
||||
Segment 1 (Visit): 2013-12-31 22:29 - 2014-01-01 17:10
|
||||
- semantic_segments: has_timeline_path=0, has_visit=1
|
||||
- timeline_path_points: No data for this segment
|
||||
- visits: Shows visit to place ChIJyaJWtZVqdkgRZHVIi0HKLto (HOME)
|
||||
- frequent_places: Confirms this placeId is labeled as "HOME"
|
||||
|
||||
QUERYING STRATEGIES
|
||||
===================
|
||||
|
||||
To get complete journey information:
|
||||
1. Query semantic_segments for time range
|
||||
2. For movement segments: Join with timeline_path_points on segment_index
|
||||
3. For visit segments: Join with visits on segment_index
|
||||
4. Enhance visit data by joining visits.top_place_id with frequent_places.placeId
|
||||
|
||||
To analyze location patterns:
|
||||
1. Use frequent_places for location categories
|
||||
2. Use frequent_trips for commute patterns
|
||||
3. Use travel_mode_affinities for transportation preferences
|
||||
|
||||
COORDINATE SYSTEMS
|
||||
==================
|
||||
All latitude/longitude data uses WGS84 decimal degrees:
|
||||
- Latitude: Positive = North, Negative = South
|
||||
- Longitude: Positive = East, Negative = West
|
||||
- Precision: Typically 6-7 decimal places (meter-level accuracy)
|
||||
|
||||
TIME ZONES
|
||||
==========
|
||||
All timestamps include timezone information (typically +00:00 or +01:00 for UK data).
|
||||
Time ranges in semantic_segments define the boundaries for linking other datasets.
|
||||
|
||||
DATA COMPLETENESS
|
||||
=================
|
||||
- Not all segments have both movement and visit data
|
||||
- Some segments may have neither (gaps in tracking)
|
||||
- Visit probability scores indicate confidence levels
|
||||
- Missing coordinates in raw_signals are represented as empty fields
|
||||
|
||||
This hierarchical structure allows for both detailed movement tracking and high-level
|
||||
pattern analysis while maintaining semantic meaning about the places visited.
|
||||
137
scripts/ingest/upload-visits-data.js
Normal file
137
scripts/ingest/upload-visits-data.js
Normal file
@@ -0,0 +1,137 @@
|
||||
import PocketBase from 'pocketbase';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
/**
 * Upload rows from the visits CSV export into the PocketBase `visits`
 * collection, coercing columns to the field types the collection expects.
 *
 * @param {string} [csvPath] - Path to the visits CSV file. Defaults to the
 *   original hard-coded location so existing callers keep working, but can
 *   now be overridden for other environments.
 * @throws if authentication fails or the CSV file cannot be read;
 *   per-row upload errors are logged, and the loop stops after 3 of them
 *   to keep debugging output manageable.
 */
async function uploadVisitsData(csvPath = '/Users/azeem/repos/personal-tracker/data/timeline_csv/visits.csv') {
  const pb = new PocketBase('http://localhost:8090');

  // Authenticate as superuser
  try {
    await pb.collection("_superusers").authWithPassword('azeem.fidahusein@gmail.com', 'azsxdcazsxdc');
    console.log('Authenticated as superuser');
  } catch (error) {
    console.error('Failed to authenticate as superuser:', error.message);
    throw error;
  }

  try {
    const csvData = fs.readFileSync(csvPath, 'utf8');
    const lines = csvData.trim().split('\n');
    // Strip stray carriage returns so CRLF files yield clean header names.
    const headers = lines[0].split(',').map(h => h.trim().replace(/\r$/, ''));

    console.log(`Found ${lines.length - 1} rows to upload`);
    console.log('Headers:', headers);

    let successCount = 0;
    let errorCount = 0;

    // Process each row (skip header)
    for (let i = 1; i < lines.length; i++) {
      const values = parseCsvLine(lines[i]);

      if (values.length !== headers.length) {
        console.warn(`Row ${i}: Column count mismatch, skipping`);
        errorCount++;
        continue;
      }

      // Build the record, coercing each value based on its field name.
      const record = {};
      headers.forEach((header, index) => {
        const value = values[index];

        switch (header) {
          case 'segment_index':
          case 'hierarchyLevel':
            // Explicit radix avoids legacy octal-parsing surprises.
            record[header] = value !== '' ? parseInt(value, 10) : null;
            break;
          case 'visit_probability':
          case 'top_probability':
          case 'top_lat':
          case 'top_lon':
            record[header] = value !== '' ? parseFloat(value) : null;
            break;
          case 'startTime':
          case 'endTime':
            // Convert ISO string to proper date format
            record[header] = value ? new Date(value).toISOString() : null;
            break;
          default:
            // Empty strings are stored as null rather than "".
            record[header] = value || null;
        }
      });

      try {
        await pb.collection('visits').create(record);
        successCount++;

        if (successCount % 100 === 0) {
          console.log(`Uploaded ${successCount} records...`);
        }
      } catch (error) {
        console.error(`Row ${i} failed:`, error.message);
        if (error.response && error.response.data) {
          console.error(`PocketBase error:`, JSON.stringify(error.response.data, null, 2));
        }
        console.error(`Record data:`, JSON.stringify(record, null, 2));
        errorCount++;

        // Stop after first few errors to debug
        if (errorCount >= 3) {
          console.log('Stopping after 3 errors to debug...');
          break;
        }
      }
    }

    console.log(`\nUpload completed:`);
    console.log(`Success: ${successCount} records`);
    console.log(`Errors: ${errorCount} records`);

  } catch (error) {
    console.error('Error reading CSV file:', error.message);
    throw error;
  }
}
|
||||
|
||||
/**
 * Parse a single CSV line into an array of field strings.
 *
 * Handles commas inside quoted fields, and (fix) RFC 4180 escaped quotes:
 * `""` inside a quoted field yields a literal `"` instead of corrupting the
 * in-quotes state for the rest of the line. Each field is trimmed, matching
 * the previous behaviour.
 *
 * @param {string} line - One raw CSV line (no trailing newline).
 * @returns {string[]} The trimmed field values.
 */
function parseCsvLine(line) {
  const result = [];
  let current = '';
  let inQuotes = false;

  for (let i = 0; i < line.length; i++) {
    const char = line[i];

    if (char === '"') {
      if (inQuotes && line[i + 1] === '"') {
        // Escaped quote inside a quoted field: emit a literal " and
        // consume the second quote character.
        current += '"';
        i++;
      } else {
        inQuotes = !inQuotes;
      }
    } else if (char === ',' && !inQuotes) {
      result.push(current.trim());
      current = '';
    } else {
      current += char;
    }
  }

  // Flush the final field (also yields [''] for an empty line).
  result.push(current.trim());
  return result;
}
|
||||
|
||||
// Check if this file is being run directly
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
uploadVisitsData()
|
||||
.then(() => {
|
||||
console.log('Upload script completed successfully');
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('Upload script failed:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
export { uploadVisitsData };
|
||||
Reference in New Issue
Block a user