diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1897df1..39f0517 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -81,6 +81,8 @@ jobs: env: INTEGRATION_TEST_DATABASE_ID: ${{ secrets.INTEGRATION_TEST_DATABASE_ID }} + INTEGRATION_TEST_APIKEY: ${{ secrets.INTEGRATION_TEST_APIKEY }} + INTEGRATION_TEST_CLOUDSYNC_ADDRESS: ${{ secrets.INTEGRATION_TEST_CLOUDSYNC_ADDRESS }} INTEGRATION_TEST_OFFLINE_DATABASE_ID: ${{ secrets.INTEGRATION_TEST_OFFLINE_DATABASE_ID }} steps: @@ -126,6 +128,8 @@ jobs: -v ${{ github.workspace }}:/workspace \ -w /workspace \ -e INTEGRATION_TEST_DATABASE_ID="${{ env.INTEGRATION_TEST_DATABASE_ID }}" \ + -e INTEGRATION_TEST_APIKEY="${{ env.INTEGRATION_TEST_APIKEY }}" \ + -e INTEGRATION_TEST_CLOUDSYNC_ADDRESS="${{ env.INTEGRATION_TEST_CLOUDSYNC_ADDRESS }}" \ -e INTEGRATION_TEST_OFFLINE_DATABASE_ID="${{ env.INTEGRATION_TEST_OFFLINE_DATABASE_ID }}" \ alpine:latest \ tail -f /dev/null @@ -194,9 +198,12 @@ jobs: echo "::group::prepare the test script" make test PLATFORM=$PLATFORM ARCH=$ARCH || echo "It should fail. Running remaining commands in the emulator" cat > commands.sh << EOF + set -e mv -f /data/local/tmp/sqlite3 /system/xbin cd /data/local/tmp export INTEGRATION_TEST_DATABASE_ID="$INTEGRATION_TEST_DATABASE_ID" + export INTEGRATION_TEST_APIKEY="$INTEGRATION_TEST_APIKEY" + export INTEGRATION_TEST_CLOUDSYNC_ADDRESS="$INTEGRATION_TEST_CLOUDSYNC_ADDRESS" export INTEGRATION_TEST_OFFLINE_DATABASE_ID="$INTEGRATION_TEST_OFFLINE_DATABASE_ID" $(make test PLATFORM=$PLATFORM ARCH=$ARCH -n) EOF @@ -212,7 +219,8 @@ jobs: adb root adb remount adb push ${{ github.workspace }}/. /data/local/tmp/ - adb shell "sh /data/local/tmp/commands.sh" + adb shell "sh /data/local/tmp/commands.sh; echo EXIT_CODE=\$?" 
| tee /tmp/adb_output.log + grep -q "EXIT_CODE=0" /tmp/adb_output.log - name: test sqlite-sync if: contains(matrix.name, 'linux') || matrix.name == 'windows' || ( matrix.name == 'macos' && matrix.arch != 'x86_64' ) diff --git a/Makefile b/Makefile index ebdee48..8f190c5 100644 --- a/Makefile +++ b/Makefile @@ -218,12 +218,8 @@ $(BUILD_TEST)/%.o: %.c $(CC) $(T_CFLAGS) -c $< -o $@ # Run code coverage (--css-file $(CUSTOM_CSS)) -test: $(TARGET) $(TEST_TARGET) unittest - @if [ -f .env ]; then \ - export $$(grep -v '^#' .env | xargs); \ - fi; \ - set -e; $(SQLITE3) ":memory:" -cmd ".bail on" ".load ./$<" "SELECT cloudsync_version();" # && \ - #for t in $(TEST_TARGET); do ./$$t; done +test: $(TARGET) $(TEST_TARGET) unittest e2e + set -e; $(SQLITE3) ":memory:" -cmd ".bail on" ".load ./$<" "SELECT cloudsync_version();" ifneq ($(COVERAGE),false) mkdir -p $(COV_DIR) lcov --capture --directory . --output-file $(COV_DIR)/coverage.info $(subst src, --include src,${COV_FILES}) @@ -234,6 +230,13 @@ endif unittest: $(TARGET) $(DIST_DIR)/unit$(EXE) @./$(DIST_DIR)/unit$(EXE) +# Run end-to-end integration tests +e2e: $(TARGET) $(DIST_DIR)/integration$(EXE) + @if [ -f .env ]; then \ + export $$(grep -v '^#' .env | xargs); \ + fi; \ + ./$(DIST_DIR)/integration$(EXE) + OPENSSL_TARBALL = $(OPENSSL_DIR)/$(OPENSSL_VERSION).tar.gz $(OPENSSL_TARBALL): @@ -448,4 +451,4 @@ help: # Include PostgreSQL extension targets include docker/Makefile.postgresql -.PHONY: all clean test unittest extension help version xcframework aar +.PHONY: all clean test unittest e2e extension help version xcframework aar diff --git a/docs/postgresql/SPORT_APP_README_SUPABASE.md b/docs/postgresql/SPORT_APP_README_SUPABASE.md index 573911d..e0ffb9e 100644 --- a/docs/postgresql/SPORT_APP_README_SUPABASE.md +++ b/docs/postgresql/SPORT_APP_README_SUPABASE.md @@ -1,6 +1,6 @@ # Sport Tracker app with SQLite Sync 🚵 -A Vite/React demonstration app showcasing [**SQLite Sync (Dev)**](https://github.com/sqliteai/sqlite-sync) 
implementation for **offline-first** data synchronization across multiple devices. This example illustrates how to integrate SQLite AI's sync capabilities into modern web applications with proper authentication via [Access Token](https://docs.sqlitecloud.io/docs/access-tokens) and [Row-Level Security (RLS)](https://docs.sqlitecloud.io/docs/rls). +A Vite/React demonstration app showcasing [**SQLite Sync**](https://github.com/sqliteai/sqlite-sync) implementation for **offline-first** data synchronization across multiple devices. This example illustrates how to integrate SQLite AI's sync capabilities into modern web applications with proper authentication via [Access Token](https://docs.sqlitecloud.io/docs/access-tokens) and [Row-Level Security (RLS)](https://docs.sqlitecloud.io/docs/rls). > This app uses the packed WASM version of SQLite with the [SQLite Sync extension enabled](https://www.npmjs.com/package/@sqliteai/sqlite-wasm). diff --git a/examples/simple-todo-db/README.md b/examples/simple-todo-db/README.md index 6c7e977..56f4d8f 100644 --- a/examples/simple-todo-db/README.md +++ b/examples/simple-todo-db/README.md @@ -20,11 +20,15 @@ Before using the local CLI, you need to set up your cloud database: 2. Name your database (e.g., "todo_app.sqlite") 3. Click **"Create"** -### 1.3 Get Connection Details -1. Copy the **Connection String** (format: `sqlitecloud://projectid.sqlite.cloud/database.sqlite`) +### 1.3 Enable OffSync +1. Click the **OffSync** button next to your database, then **Enable OffSync** and confirm with the **Enable** button +2. In the **Configuration** tab copy the **Database ID** (format: `db_*`) + +### 1.4 Get Auth Details +1. In your project dashboard, click **Settings**, then **API Keys** 2. Copy an **API Key** -### 1.4 Configure Row-Level Security (Optional) +### 1.5 Configure Row-Level Security (Optional) 1. In your database dashboard, go to **"Security"** → **"Row-Level Security"** 2. Enable RLS for tables you want to secure 3. 
Create policies to control user access (e.g., users can only see their own tasks) @@ -104,11 +108,11 @@ SELECT cloudsync_is_enabled('tasks'); ```sql -- Configure connection to SQLite Cloud --- Replace with your managedDatabaseId from the OffSync page on the SQLiteCloud dashboard +-- Replace with your managedDatabaseId from the OffSync page on the SQLiteCloud dashboard from Step 1.3 SELECT cloudsync_network_init('your-managed-database-id'); -- Configure authentication: --- Set your API key from Step 1.3 +-- Set your API key from Step 1.4 SELECT cloudsync_network_set_apikey('your-api-key-here'); -- Or use token authentication (required for Row-Level Security) -- SELECT cloudsync_network_set_token('your_auth_token'); diff --git a/examples/to-do-app/README.md b/examples/to-do-app/README.md index bc77123..0e3d0c0 100644 --- a/examples/to-do-app/README.md +++ b/examples/to-do-app/README.md @@ -24,10 +24,10 @@ cd MyApp Rename the `.env.example` into `.env` and fill with your values. -> **⚠️ SECURITY WARNING**: This example puts database connection strings directly in `.env` files for demonstration purposes only. **Do not use this pattern in production.** +> **⚠️ SECURITY WARNING**: This example puts database API Keys directly in `.env` files for demonstration purposes only. 
**Do not use this pattern in production.** > > **Why this is unsafe:** -> - Connection strings contain sensitive credentials +> - API Keys are sensitive credentials that grant access to your database > - Client-side apps expose all environment variables to users > - Anyone can inspect your app and extract database credentials > diff --git a/examples/to-do-app/hooks/useCategories.js b/examples/to-do-app/hooks/useCategories.js index dc608bd..519f7ec 100644 --- a/examples/to-do-app/hooks/useCategories.js +++ b/examples/to-do-app/hooks/useCategories.js @@ -1,7 +1,7 @@ import { useState, useEffect } from 'react' import { Platform } from 'react-native'; import { db } from "../db/dbConnection"; -import { ANDROID_MANAGED_DATABASE_ID, MANAGED_DATABASE_ID, API_TOKEN } from "@env"; +import { MANAGED_DATABASE_ID, API_TOKEN } from "@env"; import { getDylibPath } from "@op-engineering/op-sqlite"; import { randomUUID } from 'expo-crypto'; import { useSyncContext } from '../components/SyncContext'; @@ -72,8 +72,8 @@ const useCategories = () => { await db.execute('INSERT OR IGNORE INTO tags (uuid, name) VALUES (?, ?)', ['work', 'Work']) await db.execute('INSERT OR IGNORE INTO tags (uuid, name) VALUES (?, ?)', ['personal', 'Personal']) - if ((ANDROID_MANAGED_DATABASE_ID || MANAGED_DATABASE_ID) && API_TOKEN) { - await db.execute(`SELECT cloudsync_network_init('${Platform.OS == 'android' && ANDROID_MANAGED_DATABASE_ID ?
ANDROID_MANAGED_DATABASE_ID : MANAGED_DATABASE_ID}');`); + if (MANAGED_DATABASE_ID && API_TOKEN) { + await db.execute(`SELECT cloudsync_network_init('${MANAGED_DATABASE_ID}');`); await db.execute(`SELECT cloudsync_network_set_token('${API_TOKEN}');`) } else { throw new Error('No valid MANAGED_DATABASE_ID or API_TOKEN provided, cloudsync_network_init will not be called'); diff --git a/examples/to-do-app/package.json b/examples/to-do-app/package.json index b314840..cb05893 100644 --- a/examples/to-do-app/package.json +++ b/examples/to-do-app/package.json @@ -1,6 +1,6 @@ { "name": "@sqliteai/todoapp", - "version": "1.0.6", + "version": "1.0.7", "description": "An Expo template for building apps with the SQLite CloudSync extension", "repository": { "type": "git", diff --git a/test/integration.c b/test/integration.c index 064c91f..979f64a 100644 --- a/test/integration.c +++ b/test/integration.c @@ -224,17 +224,31 @@ int test_init (const char *db_path, int init) { rc = db_exec(db, "SELECT cloudsync_init('activities');"); RCHECK rc = db_exec(db, "SELECT cloudsync_init('workouts');"); RCHECK - // init network with JSON connection string + // init network char network_init[1024]; const char* test_db_id = getenv("INTEGRATION_TEST_DATABASE_ID"); if (!test_db_id) { fprintf(stderr, "Error: INTEGRATION_TEST_DATABASE_ID not set.\n"); exit(1); } - snprintf(network_init, sizeof(network_init), - "SELECT cloudsync_network_init('%s');", test_db_id); + const char* custom_address = getenv("INTEGRATION_TEST_CLOUDSYNC_ADDRESS"); + if (custom_address) { + snprintf(network_init, sizeof(network_init), + "SELECT cloudsync_network_init_custom('%s', '%s');", custom_address, test_db_id); + } else { + snprintf(network_init, sizeof(network_init), + "SELECT cloudsync_network_init('%s');", test_db_id); + } rc = db_exec(db, network_init); RCHECK + const char* apikey = getenv("INTEGRATION_TEST_APIKEY"); + if (apikey) { + char set_apikey[512]; + snprintf(set_apikey, sizeof(set_apikey), + "SELECT 
cloudsync_network_set_apikey('%s');", apikey); + rc = db_exec(db, set_apikey); RCHECK + } + rc = db_expect_int(db, "SELECT COUNT(*) as count FROM activities;", 0); RCHECK rc = db_expect_int(db, "SELECT COUNT(*) as count FROM workouts;", 0); RCHECK char value[UUID_STR_MAXLEN]; @@ -294,17 +308,31 @@ int test_enable_disable(const char *db_path) { snprintf(sql, sizeof(sql), "INSERT INTO users (id, name) VALUES ('%s-should-sync', '%s-should-sync');", value, value); rc = db_exec(db, sql); RCHECK - // init network with JSON connection string + // init network char network_init[1024]; const char* test_db_id = getenv("INTEGRATION_TEST_DATABASE_ID"); if (!test_db_id) { fprintf(stderr, "Error: INTEGRATION_TEST_DATABASE_ID not set.\n"); exit(1); } - snprintf(network_init, sizeof(network_init), - "SELECT cloudsync_network_init('%s');", test_db_id); + const char* custom_address = getenv("INTEGRATION_TEST_CLOUDSYNC_ADDRESS"); + if (custom_address) { + snprintf(network_init, sizeof(network_init), + "SELECT cloudsync_network_init_custom('%s', '%s');", custom_address, test_db_id); + } else { + snprintf(network_init, sizeof(network_init), + "SELECT cloudsync_network_init('%s');", test_db_id); + } rc = db_exec(db, network_init); RCHECK + const char* apikey = getenv("INTEGRATION_TEST_APIKEY"); + if (apikey) { + char set_apikey[512]; + snprintf(set_apikey, sizeof(set_apikey), + "SELECT cloudsync_network_set_apikey('%s');", apikey); + rc = db_exec(db, set_apikey); RCHECK + } + rc = db_exec(db, "SELECT cloudsync_network_send_changes();"); RCHECK rc = db_exec(db, "SELECT cloudsync_cleanup('users');"); RCHECK rc = db_exec(db, "SELECT cloudsync_cleanup('activities');"); RCHECK @@ -324,6 +352,13 @@ int test_enable_disable(const char *db_path) { // init network with connection string + apikey rc = db_exec(db2, network_init); RCHECK + if (apikey) { + char set_apikey2[512]; + snprintf(set_apikey2, sizeof(set_apikey2), + "SELECT cloudsync_network_set_apikey('%s');", apikey); + rc = db_exec(db2, 
set_apikey2); RCHECK + } + rc = db_expect_gt0(db2, "SELECT cloudsync_network_sync(250,10) ->> '$.receive.rows';"); RCHECK snprintf(sql, sizeof(sql), "SELECT COUNT(*) FROM users WHERE name='%s';", value); @@ -362,10 +397,26 @@ int test_offline_error(const char *db_path) { } char network_init[512]; - snprintf(network_init, sizeof(network_init), "SELECT cloudsync_network_init('%s');", offline_db_id); + const char* custom_address = getenv("INTEGRATION_TEST_CLOUDSYNC_ADDRESS"); + if (custom_address) { + snprintf(network_init, sizeof(network_init), + "SELECT cloudsync_network_init_custom('%s', '%s');", custom_address, offline_db_id); + } else { + snprintf(network_init, sizeof(network_init), + "SELECT cloudsync_network_init('%s');", offline_db_id); + } rc = db_exec(db, network_init); RCHECK + const char* apikey = getenv("INTEGRATION_TEST_APIKEY"); + if (apikey) { + char set_apikey[512]; + snprintf(set_apikey, sizeof(set_apikey), + "SELECT cloudsync_network_set_apikey('%s');", apikey); + rc = db_exec(db, set_apikey); + RCHECK + } + // Try to sync - this should fail with the expected error char *errmsg = NULL; rc = sqlite3_exec(db, "SELECT cloudsync_network_sync();", NULL, NULL, &errmsg); @@ -376,17 +427,35 @@ int test_offline_error(const char *db_path) { goto abort_test; } - // Verify the error message contains the expected text - const char *expected_error = "cloudsync_network_send_changes unable to upload BLOB changes to remote host"; - if (!errmsg || strstr(errmsg, expected_error) == NULL) { - printf("Error: Expected error message containing '%s', but got '%s'\n", - expected_error, errmsg ? 
errmsg : "NULL"); - if (errmsg) sqlite3_free(errmsg); + // Verify the error JSON contains expected fields using SQLite JSON extraction + if (!errmsg) { + printf("Error: Expected an error message, but got NULL\n"); rc = SQLITE_ERROR; goto abort_test; } - if (errmsg) sqlite3_free(errmsg); + char verify_sql[1024]; + snprintf(verify_sql, sizeof(verify_sql), + "SELECT json_extract('%s', '$.errors[0].status');", errmsg); + rc = db_expect_str(db, verify_sql, "500"); + if (rc != SQLITE_OK) { printf("Offline error: unexpected status in: %s\n", errmsg); sqlite3_free(errmsg); goto abort_test; } + + snprintf(verify_sql, sizeof(verify_sql), + "SELECT json_extract('%s', '$.errors[0].code');", errmsg); + rc = db_expect_str(db, verify_sql, "internal_server_error"); + if (rc != SQLITE_OK) { printf("Offline error: unexpected code in: %s\n", errmsg); sqlite3_free(errmsg); goto abort_test; } + + snprintf(verify_sql, sizeof(verify_sql), + "SELECT json_extract('%s', '$.errors[0].title');", errmsg); + rc = db_expect_str(db, verify_sql, "Internal Server Error"); + if (rc != SQLITE_OK) { printf("Offline error: unexpected title in: %s\n", errmsg); sqlite3_free(errmsg); goto abort_test; } + + snprintf(verify_sql, sizeof(verify_sql), + "SELECT json_extract('%s', '$.errors[0].detail');", errmsg); + rc = db_expect_str(db, verify_sql, "failed to resolve token data: failed to resolve db user for api key: db: connect sqlitecloud failed after 3 attempts: Your free node has been paused due to inactivity. To resume usage, please restart your node from your dashboard: https://dashboard.sqlitecloud.io"); + if (rc != SQLITE_OK) { printf("Offline error: unexpected detail in: %s\n", errmsg); sqlite3_free(errmsg); goto abort_test; } + + sqlite3_free(errmsg); rc = SQLITE_OK; ABORT_TEST @@ -588,4 +657,4 @@ int main (void) { printf("\n"); return rc; -} +} \ No newline at end of file