Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
- package-ecosystem: "gomod" # See documentation for possible values
directory: "/client" # Location of package manifests
schedule:
interval: "weekly"
161 changes: 111 additions & 50 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
@@ -1,16 +1,25 @@
name: Multi-Platform Build

on:
push:
branches: [main, develop]
pull_request:
branches: [main]
branches:
- main
- "v*"
release:
types: [published]

jobs:
securityIntention:
name: Security Intention
runs-on: ubuntu-latest
steps:
- name: Security Intention
run: |
echo "This workflow is intended to build the project in a secure manner:"
echo " - Only installs absolutely essential and trusted dependencies. (steps \"Install *\")"
echo " - Uses HTTPS for direct package downloads"
echo " - Only uses official Github Actions \"actions/*\""
build:
name: Build for ${{ matrix.os }}
name: Build for ${{ matrix.os }}-${{matrix.arch}}
runs-on: ${{ matrix.runs-on }}
strategy:
matrix:
Expand All @@ -24,15 +33,7 @@ jobs:
- os: linux
runs-on: ubuntu-latest
arch: x86_64

steps:
- name: Security Intention
run: |
echo "This workflow is intended to build the project in a secure manner:"
echo " - Only installs absolutely essential and trusted dependencies. (steps \"Install *\")"
echo " - Uses HTTPS for direct package downloads"
echo " - Only uses official Github Actions \"actions/*\""

- name: Checkout code
uses: actions/checkout@v4

Expand Down Expand Up @@ -133,6 +134,66 @@ jobs:
# Use make with MSYS2/MinGW
bash -c "make build"

- name: Test sqlrsync --version
run: |
echo "Testing sqlrsync --version..."
./client/bin/sqlrsync --version

- name: Test sqlrsync help
run: |
echo "Testing sqlrsync help..."
./client/bin/sqlrsync || true

- name: Test sqlrsync with usgs.gov/earthquakes.db
run: |
echo "Testing sqlrsync usgs.gov/earthquakes.db..."
./client/bin/sqlrsync usgs.gov/earthquakes.db

- name: Test sqlrsync with subscribe for 10 seconds (Linux)
if: matrix.os == 'linux'
run: |
echo "Testing sqlrsync usgs.gov/earthquakes.db --subscribe for 10 seconds..."
timeout 10s ./client/bin/sqlrsync usgs.gov/earthquakes.db --subscribe > subscribe_output.log 2>&1 || true

- name: Test sqlrsync with subscribe for 10 seconds (macOS)
if: matrix.os == 'darwin'
run: |
echo "Testing sqlrsync usgs.gov/earthquakes.db --subscribe for 10 seconds..."
# macOS doesn't have timeout, use gtimeout or alternative
if command -v gtimeout &> /dev/null; then
gtimeout 10s ./client/bin/sqlrsync usgs.gov/earthquakes.db --subscribe > subscribe_output.log 2>&1 || true
else
# Fallback: run in background and kill after 10 seconds
./client/bin/sqlrsync usgs.gov/earthquakes.db --subscribe > subscribe_output.log 2>&1 &
PID=$!
sleep 10
kill $PID 2>/dev/null || true
wait $PID 2>/dev/null || true
fi

- name: Test sqlrsync with subscribe for 10 seconds (Windows)
if: matrix.os == 'windows'
run: |
echo "Testing sqlrsync usgs.gov/earthquakes.db --subscribe for 10 seconds..."
# Windows doesn't have timeout, use PowerShell equivalent
$job = Start-Job { ./client/bin/sqlrsync.exe usgs.gov/earthquakes.db --subscribe }
Wait-Job $job -Timeout 10
Stop-Job $job
Receive-Job $job > subscribe_output.log 2>&1 || $true

- name: Verify subscribe output (Unix)
if: matrix.os != 'windows'
run: |
echo "Checking for 'Sync complete' in output..."
cat subscribe_output.log
if grep -q "Sync complete" subscribe_output.log; then
echo "✅ SUCCESS: Found 'Sync complete' in output"
else
echo "❌ FAILURE: 'Sync complete' not found in output"
echo "Full output:"
cat subscribe_output.log
exit 1
fi
- name: Create release directory
run: |
mkdir -p release
Expand Down Expand Up @@ -162,47 +223,47 @@ jobs:
release:
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
needs: build
permissions:
permissions:
contents: write
packages: write
issues: write
pull-requests: write
actions: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Extract version from main.go
id: extract-version
run: |
VERSION=$(grep 'var VERSION = ' client/main.go | sed 's/var VERSION = "\(.*\)"/\1/')
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Extracted version: $VERSION"
- name: Check if tag exists
id: tag-check
run: |
VERSION=${{ steps.extract-version.outputs.version }}
if git rev-parse "v$VERSION" >/dev/null 2>&1; then
echo "Tag v$VERSION already exists"
echo "tag-created=false" >> $GITHUB_OUTPUT
else
echo "Tag v$VERSION does not exist, will create"
echo "tag-created=true" >> $GITHUB_OUTPUT
fi
- name: Download all release artifacts
if: steps.tag-check.outputs.tag-created == 'true'
uses: actions/download-artifact@v5
- name: Create tag and GitHub Release, attach artifact
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG=v${{ steps.extract-version.outputs.version }}
git config user.name "${{ github.actor }}"
git config user.email "${{ github.actor }}@users.noreply.github.com"
git tag -a $TAG -m "Release $TAG"
git push origin $TAG
# create the release and attach the artifact (gh CLI)
gh release create $TAG --generate-notes sqlrsync-*/sqlrsync-*
- uses: actions/checkout@v5

- name: Extract version from main.go
id: extract-version
run: |
VERSION=$(grep 'var VERSION = ' client/main.go | sed 's/var VERSION = "\(.*\)"/\1/')
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Extracted version: $VERSION"

- name: Check if tag exists
id: tag-check
run: |
VERSION=${{ steps.extract-version.outputs.version }}
if git rev-parse "v$VERSION" >/dev/null 2>&1; then
echo "Tag v$VERSION already exists"
echo "tag-created=false" >> $GITHUB_OUTPUT
else
echo "Tag v$VERSION does not exist, will create"
echo "tag-created=true" >> $GITHUB_OUTPUT
fi

- name: Download all release artifacts
if: steps.tag-check.outputs.tag-created == 'true'
uses: actions/download-artifact@v5

- name: Create tag and GitHub Release, attach artifact
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG=v${{ steps.extract-version.outputs.version }}
git config user.name "${{ github.actor }}"
git config user.email "${{ github.actor }}@users.noreply.github.com"
git tag -a $TAG -m "Release $TAG"
git push origin $TAG
# create the release and attach the artifact (gh CLI)
gh release create $TAG --generate-notes sqlrsync-*/sqlrsync-*
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,5 @@ tmp/
client/sqlrsync
client/sqlrsync
client/sqlrsync_simple
asciinema/
examples/earthquakes/nohup.out
30 changes: 30 additions & 0 deletions bridge/cgo_bridge.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,36 @@ func cgoGetDatabaseInfo(dbPath string) (*DatabaseInfo, error) {

return info, nil
}
// SQLRSYNC
// CheckIntegrity checks the database integrity using PRAGMA integrity_check.
// It is the exported entry point and simply forwards to the cgo-backed
// implementation. Returns (ok, detail, err): ok is true when the check
// passes; detail carries the corruption description when ok is false and
// err is nil; err is non-nil only when the check could not be run at all
// (e.g. the file could not be opened or prepared).
func CheckIntegrity(dbPath string) (bool, string, error) {
	return cgoCheckIntegrity(dbPath)
}

// SQLRSYNC
// cgoCheckIntegrity runs PRAGMA integrity_check on the database at dbPath
// through the C wrapper sqlite_rsync_check_integrity.
//
// Return convention mirrors the C side: (true, "", nil) when the database
// is healthy; (false, detail, nil) when the check reports corruption;
// (false, detail, *SQLiteRsyncError) when the check itself failed.
func cgoCheckIntegrity(dbPath string) (bool, string, error) {
	cPath := C.CString(dbPath)
	defer C.free(unsafe.Pointer(cPath))

	// Fixed-size scratch buffer the C side fills with a human-readable
	// description on corruption or error.
	const bufLen = 1024
	buf := make([]byte, bufLen)
	cBuf := (*C.char)(unsafe.Pointer(&buf[0]))

	rc := C.sqlite_rsync_check_integrity(cPath, cBuf, C.int(bufLen))

	if rc == 0 {
		// Database passed the integrity check.
		return true, "", nil
	}
	if rc == 1 {
		// Check ran to completion but found corruption.
		return false, C.GoString(cBuf), nil
	}
	// Any other code means the check could not be performed.
	return false, C.GoString(cBuf), &SQLiteRsyncError{
		Code:    int(rc),
		Message: "failed to check database integrity",
	}
}

// RunOriginSync wraps the C function to run origin synchronization
func RunOriginSync(dbPath string, dryRun bool, client *BridgeClient) error {
Expand Down
25 changes: 24 additions & 1 deletion bridge/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package bridge

import (
"fmt"
"os"

"go.uber.org/zap"
)
Expand Down Expand Up @@ -70,6 +71,28 @@ func (c *BridgeClient) GetDatabaseInfo() (*DatabaseInfo, error) {
return info, nil
}

// CheckIntegrity checks the database integrity using PRAGMA integrity_check.
// It does not return an error: a missing database file, a failure to run the
// check, or a failed check all terminate the process via Logger.Fatal.
func (c *BridgeClient) CheckIntegrity() {
	dbPath := c.Config.DatabasePath
	c.Logger.Debug("Checking database integrity", zap.String("path", dbPath))

	// Stat first so a missing file produces a targeted message rather than
	// a generic open failure from the C layer.
	if _, statErr := os.Stat(dbPath); os.IsNotExist(statErr) {
		c.Logger.Fatal("database file does not exist", zap.String("path", dbPath))
	}

	ok, detail, err := CheckIntegrity(dbPath)
	if err != nil {
		// The check itself could not be run (open/prepare failure).
		c.Logger.Fatal("fatal error while checking integrity", zap.Error(err))
	}
	if !ok {
		// The check ran and reported corruption; detail describes it.
		c.Logger.Fatal("database integrity check failed",
			zap.String("database", dbPath),
			zap.String("error", detail))
	}

	c.Logger.Debug("Database integrity check passed", zap.String("database", dbPath))
}

// RunPushSync runs the origin-side synchronization with provided I/O functions
func (c *BridgeClient) RunPushSync(readFunc ReadFunc, writeFunc WriteFunc) error {
c.Logger.Info("Starting origin sync", zap.String("database", c.Config.DatabasePath))
Expand Down Expand Up @@ -117,7 +140,7 @@ func (c *BridgeClient) RunPullSync(readFunc ReadFunc, writeFunc WriteFunc) error
return err
}

c.Logger.Info("Replica sync completed successfully")
c.Logger.Info("Replica sync completed")
return nil
}

Expand Down
57 changes: 57 additions & 0 deletions bridge/sqlite_rsync_wrapper.c
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,63 @@ int sqlite_rsync_get_db_info(const char *db_path, sqlite_db_info_t *info)
sqlite3_close(db);
return 0;
}
// SQLRSYNC
// Check database integrity using PRAGMA integrity_check
int sqlite_rsync_check_integrity(const char *db_path, char *error_msg, int error_msg_size)
{
if (!db_path || !error_msg)
{
return -1;
}

// Initialize error message
error_msg[0] = '\0';

sqlite3 *db;
int rc = sqlite3_open_v2(db_path, &db, SQLITE_OPEN_READONLY, NULL);
if (rc != SQLITE_OK)
{
if (error_msg_size > 0)
{
snprintf(error_msg, error_msg_size, "Cannot open database: %s", sqlite3_errmsg(db));
}
if (db)
sqlite3_close(db);
return -1;
}

sqlite3_stmt *stmt;
rc = sqlite3_prepare_v2(db, "PRAGMA integrity_check", -1, &stmt, NULL);
if (rc != SQLITE_OK)
{
if (error_msg_size > 0)
{
snprintf(error_msg, error_msg_size, "Cannot prepare integrity check: %s", sqlite3_errmsg(db));
}
sqlite3_close(db);
return -1;
}

int result = 0; // Assume OK
while (sqlite3_step(stmt) == SQLITE_ROW)
{
const char *result_text = (const char *)sqlite3_column_text(stmt, 0);
if (result_text && strcmp(result_text, "ok") != 0)
{
// Database is corrupted
result = 1;
if (error_msg_size > 0)
{
snprintf(error_msg, error_msg_size, "Integrity check failed: %s", result_text);
}
break;
}
}

sqlite3_finalize(stmt);
sqlite3_close(db);
return result;
}

// Cleanup resources
void sqlite_rsync_cleanup(void)
Expand Down
4 changes: 4 additions & 0 deletions bridge/sqlite_rsync_wrapper.h
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,10 @@ extern "C"

int sqlite_rsync_get_db_info(const char *db_path, sqlite_db_info_t *info);

// SQLRSYNC: Check database integrity using PRAGMA integrity_check
// Returns 0 if OK, 1 if corrupted, -1 on error
int sqlite_rsync_check_integrity(const char *db_path, char *error_msg, int error_msg_size);

// Cleanup resources
void sqlite_rsync_cleanup(void);

Expand Down
Loading
Loading