Ruby Client
Official SDK · v0.1.0 · Ruby 2.7+
Getting Started
Installation
Using RubyGems:
gem install solidb
In your Gemfile:
gem 'solidb', '~> 0.1.0'
Requirements: Ruby 2.7 or higher. The client uses the msgpack gem for binary serialization.
Quick Start
require 'solidb'
# Create client instance
client = SoliDB::Client.new('127.0.0.1', 6745)
# Establish connection
client.connect
# Authenticate
client.auth('_system', 'admin', 'password')
# Set database context (required for sub-clients)
client.use_database('mydb')
# Basic CRUD operations
doc = client.insert('mydb', 'users', { name: 'Alice', age: 30 })
puts "Created: #{doc['_key']}"
user = client.get('mydb', 'users', doc['_key'])
puts "Retrieved: #{user['name']}"
client.update('mydb', 'users', doc['_key'], { age: 31 })
# Query with SDBQL
results = client.query('mydb', 'FOR u IN users FILTER u.age > @min RETURN u', { min: 25 })
puts "Found #{results.length} users"
# Use management sub-clients
scripts = client.scripts.list
triggers = client.triggers.list
# Clean up
client.close
Connection Management
# Initialize with host and port
client = SoliDB::Client.new('127.0.0.1', 6745)
# Connect (establishes TCP socket)
client.connect
# Check connection latency (returns ms)
latency = client.ping
puts "Latency: #{latency.round(2)}ms"
# Close connection when done
client.close
| Method | Returns | Description |
|---|---|---|
| new(host, port) | Client | Create client instance |
| connect | nil | Establish TCP connection |
| ping | Float | Latency in milliseconds |
| close | nil | Close connection |
| use_database(name) | self | Set database context for sub-clients |
Authentication
# Authenticate with database, username, and password
client.auth('_system', 'admin', 'password')
# Authentication is required for most operations
# The session remains authenticated until disconnected
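Since the session is tied to the connection, it is convenient to keep connect, auth, and close in one place. A minimal sketch built only from the calls documented on this page; the with_solidb helper itself is not part of the SDK:
# Illustrative helper, not part of the SDK: yields an authenticated
# client and always closes the socket, even if an error is raised.
def with_solidb(host, port, db, user, pass)
  client = SoliDB::Client.new(host, port)
  client.connect
  client.auth(db, user, pass)
  yield client
ensure
  client&.close
end
with_solidb('127.0.0.1', 6745, '_system', 'admin', 'password') do |c|
  puts c.list_databases.inspect
end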
Core Operations
Database Operations
# List all databases
databases = client.list_databases
# => ["_system", "mydb", "testdb"]
# Create a new database
client.create_database('analytics')
# Delete a database
client.delete_database('old_db')
| Method | Returns | Description |
|---|---|---|
| list_databases | Array | List all database names |
| create_database(name) | nil | Create new database |
| delete_database(name) | nil | Delete database |
Collection Operations
# List collections in a database
collections = client.list_collections('mydb')
# => [{"name"=>"users", "type"=>"document"}, ...]
# Create a document collection
client.create_collection('mydb', 'products')
# Create an edge collection (for graphs)
client.create_collection('mydb', 'relationships', 'edge')
# Get collection statistics
stats = client.collection_stats('mydb', 'users')
# => {"count"=>1523, "size"=>245760, ...}
# Delete a collection
client.delete_collection('mydb', 'old_collection')
| Method | Returns | Description |
|---|---|---|
| list_collections(db) | Array | List collections in database |
| create_collection(db, name, type=nil) | nil | Create collection (type: document/edge) |
| collection_stats(db, name) | Hash | Get collection statistics |
| delete_collection(db, name) | nil | Delete collection |
Document Operations (CRUD)
# INSERT - Create a new document
doc = client.insert('mydb', 'users', {
name: 'Alice',
email: '[email protected]',
age: 30
})
puts doc['_key'] # Auto-generated key
# INSERT with custom key
doc = client.insert('mydb', 'users', { name: 'Bob' }, 'custom-key-123')
# GET - Retrieve a document by key
user = client.get('mydb', 'users', 'custom-key-123')
# => {"_key"=>"custom-key-123", "name"=>"Bob", ...}
# UPDATE - Modify a document (merge by default)
client.update('mydb', 'users', 'custom-key-123', { age: 25 })
# UPDATE - Replace entire document (merge: false)
client.update('mydb', 'users', 'custom-key-123', { name: 'Robert' }, false)
# DELETE - Remove a document
client.delete('mydb', 'users', 'custom-key-123')
# LIST - Paginated document listing
docs = client.list('mydb', 'users', 50, 0) # limit: 50, offset: 0
| Method | Returns | Description |
|---|---|---|
| insert(db, col, doc, key=nil) | Hash | Insert document, returns doc with _key |
| get(db, col, key) | Hash | Get document by key |
| update(db, col, key, doc, merge=true) | nil | Update document (merge or replace) |
| delete(db, col, key) | nil | Delete document |
| list(db, col, limit=50, offset=0) | Array | List documents with pagination |
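Because list is offset-based, scanning a large collection means advancing the offset until a short page comes back. A sketch using only the list signature above; the each_document helper is hypothetical:
# Walk an entire collection page by page (illustrative helper).
def each_document(client, db, col, page_size: 100)
  offset = 0
  loop do
    page = client.list(db, col, page_size, offset)
    page.each { |doc| yield doc }
    break if page.length < page_size # short page means we reached the end
    offset += page_size
  end
end
each_document(client, 'mydb', 'users') { |u| puts u['name'] }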
SDBQL Queries
# Simple query
users = client.query('mydb', 'FOR u IN users RETURN u')
# Query with bind variables (recommended for security)
results = client.query('mydb', '
FOR u IN users
FILTER u.age >= @min_age AND u.status == @status
SORT u.created_at DESC
LIMIT @limit
RETURN { name: u.name, email: u.email }
', {
min_age: 18,
status: 'active',
limit: 100
})
# Aggregation query
stats = client.query('mydb', '
FOR u IN users
COLLECT status = u.status WITH COUNT INTO count
RETURN { status, count }
')
# Join query
orders = client.query('mydb', '
FOR o IN orders
FOR u IN users FILTER u._key == o.user_id
RETURN { order: o, user: u.name }
')
# Explain query plan (for optimization)
plan = client.explain('mydb', 'FOR u IN users FILTER u.age > 25 RETURN u')
ACID Transactions
# Begin a transaction
tx_id = client.begin_transaction('mydb', 'read_committed')
# Isolation levels: read_uncommitted, read_committed, repeatable_read, serializable
begin
# Perform operations within transaction
client.insert('mydb', 'accounts', { id: 1, balance: 1000 })
client.insert('mydb', 'accounts', { id: 2, balance: 500 })
# Commit if all operations succeed
client.commit_transaction(tx_id)
puts "Transaction committed"
rescue => e
# Rollback on any error
client.rollback_transaction(tx_id)
puts "Transaction rolled back: #{e.message}"
end
| Method | Returns | Description |
|---|---|---|
| begin_transaction(db, isolation) | String | Start transaction, returns tx_id |
| commit_transaction(tx_id) | nil | Commit transaction |
| rollback_transaction(tx_id) | nil | Rollback transaction |
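Under stricter isolation levels, concurrent writers can conflict, and a common pattern is to retry the whole transaction a few times with backoff. A sketch that assumes conflicts surface as SoliDB::ServerError; check which error your server actually raises for write conflicts:
# Retry a transactional block on conflict (illustrative helper).
def with_retries(client, db, attempts: 3)
  attempts.times do |i|
    tx_id = client.begin_transaction(db, 'serializable')
    begin
      yield tx_id
      client.commit_transaction(tx_id)
      return
    rescue SoliDB::ServerError
      client.rollback_transaction(tx_id)
      raise if i == attempts - 1
      sleep(0.1 * (2**i)) # brief exponential backoff before retrying
    end
  end
end
with_retries(client, 'mydb') do |tx_id|
  client.insert('mydb', 'accounts', { id: 1, balance: 1000 })
end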
Index Management
# Create an index
# create_index(db, col, name, fields, unique, sparse)
client.create_index('mydb', 'users', 'idx_email', ['email'], true, false)
# List indexes on a collection
indexes = client.list_indexes('mydb', 'users')
# Delete an index
client.delete_index('mydb', 'users', 'idx_email')
Management Sub-Clients
Sub-clients provide namespaced access to management APIs.
Important: Call use_database(name) first to set the database context.
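Because use_database returns self (see the connection table above), context setup chains naturally:
# Set the context and call a sub-client in one expression
scripts = client.use_database('mydb').scripts.list
# Switching context later re-points every sub-client
client.use_database('analytics')
client.jobs.list_queues # now runs against 'analytics'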
client.scripts
Lua Script Endpoints
client.use_database('mydb')
# Create a Lua script endpoint
script = client.scripts.create(
name: 'hello',
path: '/api/hello',
methods: ['GET', 'POST'],
code: 'return { message = "Hello, " .. (req.params.name or "World") }',
description: 'Greeting endpoint', # optional
collection: 'users' # optional: restrict to collection
)
puts "Created script: #{script['_key']}"
# List all scripts
scripts = client.scripts.list
scripts.each { |s| puts "#{s['name']} -> #{s['path']}" }
# Get a specific script
script = client.scripts.get('script_key')
# Update script code
client.scripts.update('script_key', {
code: 'return { message = "Updated!" }',
methods: ['GET']
})
# Delete a script
client.scripts.delete('script_key')
# Get execution statistics
stats = client.scripts.get_stats
puts "Total calls: #{stats['total_calls']}"
| Method | Parameters | Description |
|---|---|---|
| create | name:, path:, methods:, code:, description:, collection: | Create Lua endpoint |
| list | - | List all scripts |
| get(script_id) | script_id | Get script details |
| update(script_id, updates) | script_id, Hash | Update script properties |
| delete(script_id) | script_id | Delete script |
| get_stats | - | Execution statistics |
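For repeatable deployments it helps to make script creation idempotent: look the script up by name, then update it if it exists or create it otherwise. A sketch built from the list/create/update calls above; deploy_script is a hypothetical helper:
# Idempotent deploy: update an existing script, create it otherwise.
def deploy_script(client, attrs)
  existing = client.scripts.list.find { |s| s['name'] == attrs[:name] }
  if existing
    client.scripts.update(existing['_key'], attrs.slice(:code, :methods))
  else
    client.scripts.create(**attrs)
  end
end
deploy_script(client,
  name: 'hello',
  path: '/api/hello',
  methods: ['GET'],
  code: 'return { message = "Hello!" }'
)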
client.jobs & client.cron
Background Processing
client.use_database('mydb')
# === JOBS ===
# List all queues
queues = client.jobs.list_queues
# => [{"name"=>"default", "pending"=>5, "running"=>2}, ...]
# List jobs in a queue with filters
jobs = client.jobs.list_jobs('default',
status: 'pending', # pending, running, completed, failed
limit: 50,
offset: 0
)
# Enqueue a new job
job = client.jobs.enqueue('default',
script_path: '/scripts/process-order',
params: { order_id: 12345 },
priority: 10, # optional: higher = more urgent
run_at: nil # optional: ISO8601 datetime for delayed execution
)
puts "Job ID: #{job['_key']}"
# Get job details
job = client.jobs.get('job_id')
puts "Status: #{job['status']}"
# Cancel a pending job
client.jobs.cancel('job_id')
# === CRON ===
# List scheduled jobs
crons = client.cron.list
# Create a cron job
cron = client.cron.create(
name: 'daily-cleanup',
schedule: '0 2 * * *', # Every day at 2 AM
script_path: '/scripts/cleanup',
params: { days_old: 30 }, # optional
enabled: true, # optional
description: 'Remove old records' # optional
)
# Get cron job details
cron = client.cron.get('cron_id')
# Update cron schedule
client.cron.update('cron_id', { schedule: '0 3 * * *' })
# Toggle cron job on/off
client.cron.toggle('cron_id', false) # disable
client.cron.toggle('cron_id', true) # enable
# Delete cron job
client.cron.delete('cron_id')
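Jobs run asynchronously, so callers often poll jobs.get until the job reaches a terminal status. A sketch using the statuses documented above (pending, running, completed, failed); wait_for_job is a hypothetical helper:
# Poll a job until it completes or fails, with a timeout.
def wait_for_job(client, job_id, timeout: 60, interval: 1)
  deadline = Time.now + timeout
  while Time.now < deadline
    job = client.jobs.get(job_id)
    return job if %w[completed failed].include?(job['status'])
    sleep interval
  end
  raise "Timed out waiting for job #{job_id}"
end
result = wait_for_job(client, job['_key'])
puts "Finished with status: #{result['status']}"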
client.triggers
Database Triggers
client.use_database('mydb')
# List all triggers
triggers = client.triggers.list
# List triggers for a specific collection
triggers = client.triggers.list_by_collection('users')
# Create a trigger
trigger = client.triggers.create(
name: 'on_user_created',
collection: 'users',
event: 'insert', # insert, update, delete
timing: 'after', # before, after
script_path: '/scripts/on-user-create',
enabled: true # optional
)
# Get trigger details
trigger = client.triggers.get('trigger_id')
# Update trigger
client.triggers.update('trigger_id', {
script_path: '/scripts/new-handler',
enabled: false
})
# Toggle trigger on/off
client.triggers.toggle('trigger_id', true) # enable
client.triggers.toggle('trigger_id', false) # disable
# Delete trigger
client.triggers.delete('trigger_id')
| Event | Timing | Description |
|---|---|---|
| insert | before / after | Fires on document creation |
| update | before / after | Fires on document modification |
| delete | before / after | Fires on document removal |
client.roles & client.users
Role-Based Access Control
# === ROLES ===
# List all roles
roles = client.roles.list
# Create a role with permissions
role = client.roles.create(
name: 'editor',
permissions: [
{ action: 'read', scope: 'database', database: 'mydb' },
{ action: 'write', scope: 'collection', database: 'mydb', collection: 'articles' },
{ action: 'execute', scope: 'script', database: 'mydb' }
],
description: 'Content editor role'
)
# Get role details
role = client.roles.get('editor')
# Update role permissions
client.roles.update('editor',
permissions: [
{ action: 'read', scope: 'database', database: 'mydb' },
{ action: 'write', scope: 'database', database: 'mydb' }
]
)
# Delete role
client.roles.delete('editor')
# === USERS ===
# List all users
users = client.users.list
# Create a user
user = client.users.create(
username: 'john',
password: 'secure_password',
roles: ['editor', 'viewer'] # optional
)
# Get user details
user = client.users.get('john')
# Get user's assigned roles
roles = client.users.get_roles('john')
# Assign a role to user
client.users.assign_role('john', 'admin', database: 'mydb')
# Revoke a role from user
client.users.revoke_role('john', 'admin', database: 'mydb')
# Get current authenticated user
me = client.users.me
# Get current user's permissions
permissions = client.users.my_permissions
# Change password
client.users.change_password('john', 'old_password', 'new_password')
# Delete user
client.users.delete('john')
| Action | Scopes | Description |
|---|---|---|
| read | database, collection | Read documents and query |
| write | database, collection | Create, update, delete documents |
| admin | database, collection | Manage indexes, schema, etc. |
| execute | script | Execute Lua scripts |
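Putting the two sub-clients together, provisioning a read-only reporting user is a role plus a user. The names and password below are illustrative:
# Read-only role scoped to one database, assigned at creation
client.roles.create(
  name: 'viewer',
  permissions: [{ action: 'read', scope: 'database', database: 'mydb' }],
  description: 'Read-only access to mydb'
)
client.users.create(username: 'report_bot', password: 'a-long-secret', roles: ['viewer'])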
Advanced Features
client.vector
Vector Search & AI
client.use_database('mydb')
# Create a vector index
index = client.vector.create_index('products',
name: 'product_embeddings',
field: 'embedding',
dimensions: 1536,
metric: 'cosine' # cosine, euclidean, dot_product
)
# Search by vector (semantic search)
embedding = get_embedding("wireless headphones") # your embedding function; a sample stub follows this section
results = client.vector.search('products',
vector: embedding,
limit: 10,
filter: 'doc.category == "electronics"' # optional SDBQL filter
)
results.each do |result|
puts "#{result['doc']['name']} - Score: #{result['score']}"
end
# Search by existing document (find similar)
similar = client.vector.search_by_document('products',
doc_key: 'product-123',
field: 'embedding',
limit: 5
)
# Quantize index (reduce memory usage)
client.vector.quantize('products', 'product_embeddings', 'binary')
# Dequantize (restore full precision)
client.vector.dequantize('products', 'product_embeddings')
# Get index info
info = client.vector.get_index_info('products', 'product_embeddings')
# List vector indexes
indexes = client.vector.list_indexes('products')
# Delete index
client.vector.delete_index('products', 'product_embeddings')
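The search examples above call get_embedding, which this SDK does not provide. A hypothetical stub against an OpenAI-style /v1/embeddings endpoint; the URL, model name, and response shape are assumptions to adapt to your embedding provider:
require 'net/http'
require 'json'
# Hypothetical embedding helper (not part of the SDK). Posts text to an
# OpenAI-style embeddings API and returns the vector as an Array of Floats.
def get_embedding(text)
  uri = URI('https://api.openai.com/v1/embeddings')
  req = Net::HTTP::Post.new(uri)
  req['Content-Type'] = 'application/json'
  req['Authorization'] = "Bearer #{ENV['OPENAI_API_KEY']}"
  req.body = { model: 'text-embedding-3-small', input: text }.to_json
  res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
  JSON.parse(res.body).dig('data', 0, 'embedding')
end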
client.geo
Geospatial Queries
client.use_database('mydb')
# Create a geo index
client.geo.create_index('stores',
name: 'location_idx',
fields: ['location'], # Field containing [lat, lon] or GeoJSON
geo_json: true # optional: true if using GeoJSON format
)
# Find nearby locations (radius search)
nearby = client.geo.near('stores',
latitude: 48.8566,
longitude: 2.3522,
radius: 5000, # meters
limit: 20 # optional
)
nearby.each do |result|
puts "#{result['doc']['name']} - #{result['distance']}m away"
end
# Find within polygon
polygon = {
type: 'Polygon',
coordinates: [[[2.3, 48.8], [2.4, 48.8], [2.4, 48.9], [2.3, 48.9], [2.3, 48.8]]]
}
within = client.geo.within('stores', geometry: polygon)
# Find intersecting geometries
intersects = client.geo.intersects('zones', geometry: polygon)
# Calculate distance between two points
distance = client.geo.distance(
lat1: 48.8566, lon1: 2.3522,
lat2: 51.5074, lon2: -0.1278
)
puts "Paris to London: #{distance / 1000}km"
# List geo indexes
indexes = client.geo.list_indexes('stores')
# Delete index
client.geo.delete_index('stores', 'location_idx')
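If you want to sanity-check client.geo.distance locally, the haversine formula gives the great-circle distance between two points. A self-contained sketch:
# Client-side great-circle distance in meters via the haversine formula.
def haversine(lat1, lon1, lat2, lon2)
  r = 6_371_000.0 # mean Earth radius in meters
  to_rad = ->(deg) { deg * Math::PI / 180 }
  dlat = to_rad.(lat2 - lat1)
  dlon = to_rad.(lon2 - lon1)
  a = Math.sin(dlat / 2)**2 +
      Math.cos(to_rad.(lat1)) * Math.cos(to_rad.(lat2)) * Math.sin(dlon / 2)**2
  2 * r * Math.asin(Math.sqrt(a))
end
puts haversine(48.8566, 2.3522, 51.5074, -0.1278).round # ~343,000 m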
client.ttl
Time-To-Live Indexes
client.use_database('mydb')
# Create TTL index (auto-expire documents)
client.ttl.create_index('sessions',
name: 'session_ttl',
field: 'created_at', # DateTime field to check
expire_after_seconds: 3600 # Expire after 1 hour
)
# Update expiration time
client.ttl.update_expiration('sessions', 'session_ttl', 7200) # 2 hours
# Get index info
info = client.ttl.get_index_info('sessions', 'session_ttl')
puts "Expires after: #{info['expire_after_seconds']}s"
# Manually trigger cleanup (normally runs automatically)
result = client.ttl.run_cleanup('sessions')
puts "Deleted #{result['deleted']} expired documents"
# List TTL indexes
indexes = client.ttl.list_indexes('sessions')
# Delete TTL index
client.ttl.delete_index('sessions', 'session_ttl')
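TTL expiry is driven by the indexed datetime field, so documents must actually carry it. A minimal session write that the session_ttl index above can expire; the token field is illustrative:
require 'time'
require 'securerandom'
# The document must include the indexed 'created_at' field as a datetime
# value, or the TTL index has nothing to evaluate.
client.insert('mydb', 'sessions', {
  token: SecureRandom.hex(16),
  created_at: Time.now.utc.iso8601
})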
client.columnar
Columnar/Analytics Storage
client.use_database('mydb')
# Create a columnar table (optimized for analytics)
table = client.columnar.create('metrics', [
{ name: 'timestamp', type: 'datetime' },
{ name: 'metric_name', type: 'string' },
{ name: 'value', type: 'float' },
{ name: 'tags', type: 'string' }
])
# Insert rows (batch insert is efficient)
client.columnar.insert('metrics', [
{ timestamp: '2024-01-15T10:00:00Z', metric_name: 'cpu_usage', value: 45.2, tags: 'server1' },
{ timestamp: '2024-01-15T10:01:00Z', metric_name: 'cpu_usage', value: 47.8, tags: 'server1' },
{ timestamp: '2024-01-15T10:00:00Z', metric_name: 'memory', value: 72.1, tags: 'server1' }
])
# Query with SQL-like syntax
results = client.columnar.query('metrics',
'SELECT * FROM metrics WHERE value > @min ORDER BY timestamp DESC LIMIT 100',
params: { min: 40.0 }
)
# Aggregation
agg = client.columnar.aggregate('metrics', {
group_by: ['metric_name', 'tags'],
metrics: [
{ column: 'value', function: 'avg' },
{ column: 'value', function: 'max' },
{ column: 'value', function: 'min' },
{ column: 'value', function: 'count' }
],
filters: { metric_name: 'cpu_usage' } # optional
})
# Get table statistics
stats = client.columnar.stats('metrics')
puts "Row count: #{stats['row_count']}, Size: #{stats['size_bytes']}"
# Add a column
client.columnar.add_column('metrics',
column_name: 'host',
column_type: 'string',
default_value: 'unknown' # optional
)
# Drop a column
client.columnar.drop_column('metrics', 'host')
# Create index on columnar table
client.columnar.create_index('metrics',
index_name: 'idx_timestamp',
column: 'timestamp',
index_type: 'btree' # optional
)
# List indexes
indexes = client.columnar.list_indexes('metrics')
# Delete index
client.columnar.delete_index('metrics', 'idx_timestamp')
# List all columnar tables
tables = client.columnar.list
# Get table info
table = client.columnar.get('metrics')
# Delete table
client.columnar.delete('metrics')
client.cluster
Cluster Management
# Get cluster status
status = client.cluster.status
puts "Mode: #{status['mode']}" # standalone, cluster
puts "Nodes: #{status['node_count']}"
# Get detailed cluster info
info = client.cluster.info
# Get all nodes
nodes = client.cluster.get_nodes
nodes.each do |node|
puts "#{node['id']}: #{node['address']} (#{node['status']})"
end
# Get shard distribution
shards = client.cluster.get_shards
# Remove a node from cluster
client.cluster.remove_node('node-id-to-remove')
# Trigger data rebalancing
client.cluster.rebalance
# Cleanup orphaned data
client.cluster.cleanup
# Reshard cluster
client.cluster.reshard(16) # new number of shards
client.collections_ops
Advanced Collection Operations
client.use_database('mydb')
# Truncate collection (delete all documents)
client.collections_ops.truncate('logs')
# Compact collection (reclaim disk space)
client.collections_ops.compact('users')
# Repair collection (fix inconsistencies)
client.collections_ops.repair('orders')
# Get collection statistics
stats = client.collections_ops.stats('users')
# Prune old documents
client.collections_ops.prune('logs',
older_than: '2024-01-01T00:00:00Z',
field: 'created_at'
)
# Recount documents
client.collections_ops.recount('users')
# Set JSON schema validation
client.collections_ops.set_schema('users', {
type: 'object',
required: ['name', 'email'],
properties: {
name: { type: 'string', minLength: 1 },
email: { type: 'string', format: 'email' },
age: { type: 'integer', minimum: 0 }
}
})
# Get current schema
schema = client.collections_ops.get_schema('users')
# Remove schema validation
client.collections_ops.delete_schema('users')
# Export collection
data = client.collections_ops.export('users', 'json') # json, csv, msgpack
# Import data
client.collections_ops.import('users_backup', data, 'json')
# Get sharding configuration
sharding = client.collections_ops.get_sharding('orders')
# Configure sharding
client.collections_ops.set_sharding('orders', {
num_shards: 8,
shard_key: 'user_id'
})
client.env
Environment Variables
client.use_database('mydb')
# List environment variables (for Lua scripts)
vars = client.env.list
# Set an environment variable
client.env.set('API_KEY', 'sk-xxx-your-api-key')
client.env.set('WEBHOOK_URL', 'https://example.com/webhook')
# Delete an environment variable
client.env.delete('OLD_VAR')
Error Handling
require 'solidb'
begin
client = SoliDB::Client.new('127.0.0.1', 6745)
client.connect
client.auth('mydb', 'user', 'password')
doc = client.get('mydb', 'users', 'nonexistent-key')
rescue SoliDB::ConnectionError => e
# Network/connection issues
puts "Connection failed: #{e.message}"
rescue SoliDB::ServerError => e
# Server-side errors (not found, validation, etc.)
puts "Server error: #{e.message}"
rescue SoliDB::ProtocolError => e
# Protocol/serialization errors
puts "Protocol error: #{e.message}"
ensure
client&.close
end
ConnectionError
Network failures, connection refused, timeouts, disconnections
ServerError
Document not found, permission denied, validation errors
ProtocolError
Invalid response format, message too large, serialization issues
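Of the three classes, only ConnectionError is generally safe to retry. A sketch of a reconnect-and-retry wrapper with exponential backoff; with_reconnect is a hypothetical helper, and you should re-run client.auth after reconnecting if your session requires it:
# Retry only transient connection failures; server and protocol errors
# are deliberately re-raised immediately.
def with_reconnect(client, attempts: 3)
  attempts.times do |i|
    begin
      return yield
    rescue SoliDB::ConnectionError
      raise if i == attempts - 1
      sleep(0.5 * (2**i))
      client.connect # re-establish the TCP socket (re-auth here if needed)
    end
  end
end
users = with_reconnect(client) { client.query('mydb', 'FOR u IN users RETURN u') }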