Node.js / Bun Client

Official SDK v0.1.0 TypeScript / JavaScript

Getting Started

Installation

npm

npm install solidb-client

yarn

yarn add solidb-client

Bun

bun add solidb-client

Requirements: Node.js 18+ or Bun 1.0+. The client communicates with the server over a TCP socket and uses the @msgpack/msgpack package for binary (MessagePack) serialization.

Quick Start

import { Client } from 'solidb-client';

// Create client instance
const client = new Client('127.0.0.1', 6745);

// Establish connection
await client.connect();

// Authenticate
await client.auth('_system', 'admin', 'password');

// Set database context (required for sub-clients)
client.useDatabase('mydb');

// Basic CRUD operations
const doc = await client.insert('mydb', 'users', { name: 'Alice', age: 30 });
console.log('Created:', doc._key);

const user = await client.get('mydb', 'users', doc._key);
console.log('Retrieved:', user.name);

await client.update('mydb', 'users', doc._key, { age: 31 });

// Query with SDBQL
const results = await client.query('mydb', 'FOR u IN users FILTER u.age > @min RETURN u', { min: 25 });
console.log('Found', results.length, 'users');

// Use management sub-clients
const scripts = await client.scripts.list();
const triggers = await client.triggers.list();

// Clean up
client.close();

Connection Management

import { Client } from 'solidb-client';

// Initialize with host and port
const client = new Client('127.0.0.1', 6745);

// Connect (establishes TCP socket)
await client.connect();

// Check connection latency (returns ms)
const latency = await client.ping();
console.log(`Latency: ${latency.toFixed(2)}ms`);

// Close connection when done
client.close();

Method                   Returns            Description
new Client(host, port)   Client             Create client instance
connect()                Promise<void>      Establish TCP connection
ping()                   Promise<number>    Latency in milliseconds
close()                  void               Close connection
useDatabase(name)        this               Set database context for sub-clients

Authentication

// Authenticate with database, username, and password
await client.auth('_system', 'admin', 'password');

// Authentication is required for most operations
// The session remains authenticated until disconnected

// Get authentication token for HTTP requests (if needed)
const token = client.getToken();
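
If you also call the server over HTTP (for example, Lua script endpoints), the token can typically be sent as a Bearer credential. This is a minimal sketch; the port, path, and header scheme are assumptions for illustration, not part of the SDK:

// Hypothetical: reuse the token as a Bearer credential against an HTTP endpoint.
// The port (8080), path, and Authorization header format are assumptions only.
const response = await fetch('http://127.0.0.1:8080/api/hello', {
  headers: { Authorization: `Bearer ${token}` },
});
console.log(await response.json());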

Core Operations

Database Operations

// List all databases
const databases = await client.listDatabases();
// => ['_system', 'mydb', 'testdb']

// Create a new database
await client.createDatabase('analytics');

// Delete a database
await client.deleteDatabase('old_db');

Method                 Returns             Description
listDatabases()        Promise<string[]>   List all database names
createDatabase(name)   Promise<void>       Create new database
deleteDatabase(name)   Promise<void>       Delete database

Collection Operations

// List collections in a database
const collections = await client.listCollections('mydb');
// => [{name: 'users', type: 'document'}, ...]

// Create a document collection
await client.createCollection('mydb', 'products');

// Create an edge collection (for graphs)
await client.createCollection('mydb', 'relationships', 'edge');

// Get collection statistics
const stats = await client.collectionStats('mydb', 'users');
// => {count: 1523, size: 245760, ...}

// Delete a collection
await client.deleteCollection('mydb', 'old_collection');

Method                              Returns                 Description
listCollections(db)                 Promise<Collection[]>   List collections in database
createCollection(db, name, type?)   Promise<void>           Create collection (type: document or edge)
collectionStats(db, name)           Promise<Stats>          Get collection statistics
deleteCollection(db, name)          Promise<void>           Delete collection
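
A small convenience pattern, sketched with the documented calls above: create a collection only if it does not already exist. The helper name is ours; the {name, type} shape follows the listCollections example.

// Sketch: idempotent collection setup using only listCollections/createCollection.
async function ensureCollection(db, name, type = 'document') {
  const collections = await client.listCollections(db);
  if (!collections.some(c => c.name === name)) {
    await client.createCollection(db, name, type);
  }
}

await ensureCollection('mydb', 'products');
await ensureCollection('mydb', 'relationships', 'edge');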

Document Operations (CRUD)

// INSERT - Create a new document
const doc = await client.insert('mydb', 'users', {
  name: 'Alice',
  email: 'alice@example.com',
  age: 30
});
console.log(doc._key);  // Auto-generated key

// INSERT with custom key
const doc2 = await client.insert('mydb', 'users', { name: 'Bob' }, 'custom-key-123');

// GET - Retrieve a document by key
const user = await client.get('mydb', 'users', 'custom-key-123');
// => {_key: 'custom-key-123', name: 'Bob', ...}

// UPDATE - Modify a document (merge by default)
await client.update('mydb', 'users', 'custom-key-123', { age: 25 });

// UPDATE - Replace entire document (merge: false)
await client.update('mydb', 'users', 'custom-key-123', { name: 'Robert' }, false);

// DELETE - Remove a document
await client.delete('mydb', 'users', 'custom-key-123');

// LIST - Paginated document listing
const docs = await client.list('mydb', 'users', 50, 0);  // limit: 50, offset: 0

Method                              Returns               Description
insert(db, col, doc, key?)          Promise<Document>     Insert document, returns doc with _key
get(db, col, key)                   Promise<Document>     Get document by key
update(db, col, key, doc, merge?)   Promise<void>         Update document (merge or replace)
delete(db, col, key)                Promise<void>         Delete document
list(db, col, limit?, offset?)      Promise<Document[]>   List documents with pagination
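
An insert-or-update helper can be sketched from the documented calls, assuming get() rejects with ServerError for a missing key (see Error Handling below). The helper name is ours, not part of the SDK.

import { ServerError } from 'solidb-client';

// Sketch: upsert by key. Assumes a missing key surfaces as ServerError.
async function upsert(db, col, key, doc) {
  try {
    await client.get(db, col, key);
    await client.update(db, col, key, doc);   // exists: merge into the document
  } catch (error) {
    if (!(error instanceof ServerError)) throw error;
    await client.insert(db, col, doc, key);   // not found: create with the given key
  }
}

await upsert('mydb', 'users', 'custom-key-123', { name: 'Bob', age: 26 });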

SDBQL Queries

// Simple query
const users = await client.query('mydb', 'FOR u IN users RETURN u');

// Query with bind variables (recommended for security)
const results = await client.query('mydb', `
  FOR u IN users
  FILTER u.age >= @minAge AND u.status == @status
  SORT u.createdAt DESC
  LIMIT @limit
  RETURN { name: u.name, email: u.email }
`, {
  minAge: 18,
  status: 'active',
  limit: 100
});

// Aggregation query
const stats = await client.query('mydb', `
  FOR u IN users
  COLLECT status = u.status WITH COUNT INTO count
  RETURN { status, count }
`);

// Join query
const orders = await client.query('mydb', `
  FOR o IN orders
  FOR u IN users FILTER u._key == o.userId
  RETURN { order: o, user: u.name }
`);

// Explain query plan (for optimization)
const plan = await client.explain('mydb', 'FOR u IN users FILTER u.age > 25 RETURN u');

ACID Transactions

// Begin a transaction
const txId = await client.beginTransaction('mydb', 'read_committed');
// Isolation levels: read_uncommitted, read_committed, repeatable_read, serializable

try {
  // Perform operations within transaction
  await client.insert('mydb', 'accounts', { id: 1, balance: 1000 });
  await client.insert('mydb', 'accounts', { id: 2, balance: 500 });

  // Commit if all operations succeed
  await client.commitTransaction(txId);
  console.log('Transaction committed');
} catch (error) {
  // Rollback on any error
  await client.rollbackTransaction(txId);
  console.log('Transaction rolled back:', error.message);
}

Method                             Returns           Description
beginTransaction(db, isolation?)   Promise<string>   Start transaction, returns txId
commitTransaction(txId)            Promise<void>     Commit transaction
rollbackTransaction(txId)          Promise<void>     Rollback transaction
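
The commit-or-rollback pattern can be wrapped in a small helper built only from the documented methods above. The helper name is ours, not part of the SDK:

// Sketch: run a callback inside a transaction, committing on success and
// rolling back on any thrown error.
async function withTransaction(db, isolation, fn) {
  const txId = await client.beginTransaction(db, isolation);
  try {
    const result = await fn(txId);
    await client.commitTransaction(txId);
    return result;
  } catch (error) {
    await client.rollbackTransaction(txId);
    throw error;
  }
}

await withTransaction('mydb', 'serializable', async () => {
  await client.insert('mydb', 'accounts', { id: 3, balance: 250 });
});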

Index Management

// Create an index
await client.createIndex('mydb', 'users', 'idx_email', ['email'], true, false);
//                       db      col       name       fields   unique  sparse

// List indexes on a collection
const indexes = await client.listIndexes('mydb', 'users');

// Delete an index
await client.deleteIndex('mydb', 'users', 'idx_email');

Management Sub-Clients

Sub-clients provide namespaced access to management APIs. Important: Call useDatabase(name) first to set the database context.

client.scripts

Lua Script Endpoints
client.useDatabase('mydb');

// Create a Lua script endpoint
const script = await client.scripts.create({
  name: 'hello',
  path: '/api/hello',
  methods: ['GET', 'POST'],
  code: 'return { message = "Hello, " .. (req.params.name or "World") }',
  description: 'Greeting endpoint',   // optional
  collection: 'users'                 // optional: restrict to collection
});
console.log('Created script:', script._key);

// List all scripts
const scripts = await client.scripts.list();
scripts.forEach(s => console.log(`${s.name} -> ${s.path}`));

// Get a specific script
const scriptDetails = await client.scripts.get('script_key');

// Update script code
await client.scripts.update('script_key', {
  code: 'return { message = "Updated!" }',
  methods: ['GET']
});

// Delete a script
await client.scripts.delete('script_key');

// Get execution statistics
const stats = await client.scripts.getStats();
console.log('Total calls:', stats.totalCalls);

Method                      Parameters                                              Description
create(options)             name, path, methods, code, description?, collection?   Create Lua endpoint
list()                      -                                                       List all scripts
get(scriptId)               scriptId                                                Get script details
update(scriptId, updates)   scriptId, object                                        Update script properties
delete(scriptId)            scriptId                                                Delete script
getStats()                  -                                                       Execution statistics

client.jobs & client.cron

Background Processing
client.useDatabase('mydb');

// === JOBS ===

// List all queues
const queues = await client.jobs.listQueues();
// => [{name: 'default', pending: 5, running: 2}, ...]

// List jobs in a queue with filters
const jobs = await client.jobs.listJobs('default', {
  status: 'pending',  // pending, running, completed, failed
  limit: 50,
  offset: 0
});

// Enqueue a new job
const job = await client.jobs.enqueue('default', {
  scriptPath: '/scripts/process-order',
  params: { orderId: 12345 },
  priority: 10,       // optional: higher = more urgent
  runAt: null         // optional: ISO8601 datetime for delayed execution
});
console.log('Job ID:', job._key);

// Get job details
const jobDetails = await client.jobs.get('job_id');
console.log('Status:', jobDetails.status);

// Cancel a pending job
await client.jobs.cancel('job_id');

// === CRON ===

// List scheduled jobs
const crons = await client.cron.list();

// Create a cron job
const cron = await client.cron.create({
  name: 'daily-cleanup',
  schedule: '0 2 * * *',              // Every day at 2 AM
  scriptPath: '/scripts/cleanup',
  params: { daysOld: 30 },            // optional
  enabled: true,                      // optional
  description: 'Remove old records'   // optional
});

// Get cron job details
const cronDetails = await client.cron.get('cron_id');

// Update cron schedule
await client.cron.update('cron_id', { schedule: '0 3 * * *' });

// Toggle cron job on/off
await client.cron.toggle('cron_id', false);  // disable
await client.cron.toggle('cron_id', true);   // enable

// Delete cron job
await client.cron.delete('cron_id');

client.triggers

Database Triggers
client.useDatabase('mydb');

// List all triggers
const triggers = await client.triggers.list();

// List triggers for a specific collection
const userTriggers = await client.triggers.listByCollection('users');

// Create a trigger
const trigger = await client.triggers.create({
  name: 'on_user_created',
  collection: 'users',
  event: 'insert',                    // insert, update, delete
  timing: 'after',                    // before, after
  scriptPath: '/scripts/on-user-create',
  enabled: true                       // optional
});

// Get trigger details
const triggerDetails = await client.triggers.get('trigger_id');

// Update trigger
await client.triggers.update('trigger_id', {
  scriptPath: '/scripts/new-handler',
  enabled: false
});

// Toggle trigger on/off
await client.triggers.toggle('trigger_id', true);   // enable
await client.triggers.toggle('trigger_id', false);  // disable

// Delete trigger
await client.triggers.delete('trigger_id');

Event    Timing           Description
insert   before / after   Fires on document creation
update   before / after   Fires on document modification
delete   before / after   Fires on document removal

client.roles & client.users

Role-Based Access Control
// === ROLES ===

// List all roles
const roles = await client.roles.list();

// Create a role with permissions
const role = await client.roles.create({
  name: 'editor',
  permissions: [
    { action: 'read', scope: 'database', database: 'mydb' },
    { action: 'write', scope: 'collection', database: 'mydb', collection: 'articles' },
    { action: 'execute', scope: 'script', database: 'mydb' }
  ],
  description: 'Content editor role'
});

// Get role details
const roleDetails = await client.roles.get('editor');

// Update role permissions
await client.roles.update('editor', {
  permissions: [
    { action: 'read', scope: 'database', database: 'mydb' },
    { action: 'write', scope: 'database', database: 'mydb' }
  ]
});

// Delete role
await client.roles.delete('editor');

// === USERS ===

// List all users
const users = await client.users.list();

// Create a user
const user = await client.users.create({
  username: 'john',
  password: 'secure_password',
  roles: ['editor', 'viewer']  // optional
});

// Get user details
const userDetails = await client.users.get('john');

// Get user's assigned roles
const userRoles = await client.users.getRoles('john');

// Assign a role to user
await client.users.assignRole('john', 'admin', { database: 'mydb' });

// Revoke a role from user
await client.users.revokeRole('john', 'admin', { database: 'mydb' });

// Get current authenticated user
const me = await client.users.me();

// Get current user's permissions
const permissions = await client.users.myPermissions();

// Change password
await client.users.changePassword('john', 'old_password', 'new_password');

// Delete user
await client.users.delete('john');

Action    Scopes                 Description
read      database, collection   Read documents and query
write     database, collection   Create, update, delete documents
admin     database, collection   Manage indexes, schema, etc.
execute   script                 Execute Lua scripts

Advanced Features

client.vector

Vector Search & AI
client.useDatabase('mydb');

// Create a vector index
const index = await client.vector.createIndex('products', {
  name: 'product_embeddings',
  field: 'embedding',
  dimensions: 1536,
  metric: 'cosine'  // cosine, euclidean, dot_product
});

// Search by vector (semantic search)
const embedding = await getEmbedding("wireless headphones");  // Your embedding function
const results = await client.vector.search('products', {
  vector: embedding,
  limit: 10,
  filter: 'doc.category == "electronics"'  // optional SDBQL filter
});

results.forEach(result => {
  console.log(`${result.doc.name} - Score: ${result.score}`);
});

// Search by existing document (find similar)
const similar = await client.vector.searchByDocument('products', {
  docKey: 'product-123',
  field: 'embedding',
  limit: 5
});

// Quantize index (reduce memory usage)
await client.vector.quantize('products', 'product_embeddings', 'binary');

// Dequantize (restore full precision)
await client.vector.dequantize('products', 'product_embeddings');

// Get index info
const info = await client.vector.getIndexInfo('products', 'product_embeddings');

// List vector indexes
const indexes = await client.vector.listIndexes('products');

// Delete index
await client.vector.deleteIndex('products', 'product_embeddings');

client.geo

Geospatial Queries
client.useDatabase('mydb');

// Create a geo index
await client.geo.createIndex('stores', {
  name: 'location_idx',
  fields: ['location'],     // Field containing [lat, lon] or GeoJSON
  geoJson: true             // optional: true if using GeoJSON format
});

// Find nearby locations (radius search)
const nearby = await client.geo.near('stores', {
  latitude: 48.8566,
  longitude: 2.3522,
  radius: 5000,      // meters
  limit: 20          // optional
});

nearby.forEach(result => {
  console.log(`${result.doc.name} - ${result.distance}m away`);
});

// Find within polygon
const polygon = {
  type: 'Polygon',
  coordinates: [[[2.3, 48.8], [2.4, 48.8], [2.4, 48.9], [2.3, 48.9], [2.3, 48.8]]]
};
const within = await client.geo.within('stores', { geometry: polygon });

// Find intersecting geometries
const intersects = await client.geo.intersects('zones', { geometry: polygon });

// Calculate distance between two points
const distance = await client.geo.distance({
  lat1: 48.8566, lon1: 2.3522,
  lat2: 51.5074, lon2: -0.1278
});
console.log(`Paris to London: ${distance / 1000}km`);

// List geo indexes
const indexes = await client.geo.listIndexes('stores');

// Delete index
await client.geo.deleteIndex('stores', 'location_idx');

client.ttl

Time-To-Live Indexes
client.useDatabase('mydb');

// Create TTL index (auto-expire documents)
await client.ttl.createIndex('sessions', {
  name: 'session_ttl',
  field: 'createdAt',             // DateTime field to check
  expireAfterSeconds: 3600        // Expire after 1 hour
});

// Update expiration time
await client.ttl.updateExpiration('sessions', 'session_ttl', 7200);  // 2 hours

// Get index info
const info = await client.ttl.getIndexInfo('sessions', 'session_ttl');
console.log('Expires after:', info.expireAfterSeconds, 's');

// Manually trigger cleanup (normally runs automatically)
const result = await client.ttl.runCleanup('sessions');
console.log('Deleted', result.deleted, 'expired documents');

// List TTL indexes
const indexes = await client.ttl.listIndexes('sessions');

// Delete TTL index
await client.ttl.deleteIndex('sessions', 'session_ttl');

client.columnar

Columnar/Analytics Storage
client.useDatabase('mydb');

// Create a columnar table (optimized for analytics)
const table = await client.columnar.create('metrics', [
  { name: 'timestamp', type: 'datetime' },
  { name: 'metricName', type: 'string' },
  { name: 'value', type: 'float' },
  { name: 'tags', type: 'string' }
]);

// Insert rows (batch insert is efficient)
await client.columnar.insert('metrics', [
  { timestamp: '2024-01-15T10:00:00Z', metricName: 'cpu_usage', value: 45.2, tags: 'server1' },
  { timestamp: '2024-01-15T10:01:00Z', metricName: 'cpu_usage', value: 47.8, tags: 'server1' },
  { timestamp: '2024-01-15T10:00:00Z', metricName: 'memory', value: 72.1, tags: 'server1' }
]);

// Query with SQL-like syntax
const results = await client.columnar.query('metrics',
  'SELECT * FROM metrics WHERE value > @min ORDER BY timestamp DESC LIMIT 100',
  { params: { min: 40.0 } }
);

// Aggregation
const agg = await client.columnar.aggregate('metrics', {
  groupBy: ['metricName', 'tags'],
  metrics: [
    { column: 'value', function: 'avg' },
    { column: 'value', function: 'max' },
    { column: 'value', function: 'min' },
    { column: 'value', function: 'count' }
  ],
  filters: { metricName: 'cpu_usage' }  // optional
});

// Get table statistics
const stats = await client.columnar.stats('metrics');
console.log('Row count:', stats.rowCount, 'Size:', stats.sizeBytes);

// Add a column
await client.columnar.addColumn('metrics', {
  columnName: 'host',
  columnType: 'string',
  defaultValue: 'unknown'  // optional
});

// Drop a column
await client.columnar.dropColumn('metrics', 'host');

// Create index on columnar table
await client.columnar.createIndex('metrics', {
  indexName: 'idx_timestamp',
  column: 'timestamp',
  indexType: 'btree'  // optional
});

// List indexes
const indexes = await client.columnar.listIndexes('metrics');

// Delete index
await client.columnar.deleteIndex('metrics', 'idx_timestamp');

// List all columnar tables
const tables = await client.columnar.list();

// Get table info
const tableInfo = await client.columnar.get('metrics');

// Delete table
await client.columnar.delete('metrics');

client.cluster

Cluster Management
// Get cluster status
const status = await client.cluster.status();
console.log('Mode:', status.mode);  // standalone, cluster
console.log('Nodes:', status.nodeCount);

// Get detailed cluster info
const info = await client.cluster.info();

// Get all nodes
const nodes = await client.cluster.getNodes();
nodes.forEach(node => {
  console.log(`${node.id}: ${node.address} (${node.status})`);
});

// Get shard distribution
const shards = await client.cluster.getShards();

// Remove a node from cluster
await client.cluster.removeNode('node-id-to-remove');

// Trigger data rebalancing
await client.cluster.rebalance();

// Cleanup orphaned data
await client.cluster.cleanup();

// Reshard cluster
await client.cluster.reshard(16);  // new number of shards

client.collectionsOps

Advanced Collection Operations
client.useDatabase('mydb');

// Truncate collection (delete all documents)
await client.collectionsOps.truncate('logs');

// Compact collection (reclaim disk space)
await client.collectionsOps.compact('users');

// Repair collection (fix inconsistencies)
await client.collectionsOps.repair('orders');

// Get collection statistics
const stats = await client.collectionsOps.stats('users');

// Prune old documents
await client.collectionsOps.prune('logs', {
  olderThan: '2024-01-01T00:00:00Z',
  field: 'createdAt'
});

// Recount documents
await client.collectionsOps.recount('users');

// Set JSON schema validation
await client.collectionsOps.setSchema('users', {
  type: 'object',
  required: ['name', 'email'],
  properties: {
    name: { type: 'string', minLength: 1 },
    email: { type: 'string', format: 'email' },
    age: { type: 'integer', minimum: 0 }
  }
});

// Get current schema
const schema = await client.collectionsOps.getSchema('users');

// Remove schema validation
await client.collectionsOps.deleteSchema('users');

// Export collection
const data = await client.collectionsOps.export('users', 'json');  // json, csv, msgpack

// Import data
await client.collectionsOps.import('users_backup', data, 'json');

// Get sharding configuration
const sharding = await client.collectionsOps.getSharding('orders');

// Configure sharding
await client.collectionsOps.setSharding('orders', {
  numShards: 8,
  shardKey: 'userId'
});

client.env

Environment Variables
client.useDatabase('mydb');

// List environment variables (for Lua scripts)
const vars = await client.env.list();

// Set an environment variable
await client.env.set('API_KEY', 'sk-xxx-your-api-key');
await client.env.set('WEBHOOK_URL', 'https://example.com/webhook');

// Delete an environment variable
await client.env.delete('OLD_VAR');

Error Handling

import { Client, ConnectionError, ServerError, ProtocolError } from 'solidb-client';

const client = new Client('127.0.0.1', 6745);

try {
  await client.connect();
  await client.auth('mydb', 'user', 'password');

  const doc = await client.get('mydb', 'users', 'nonexistent-key');

} catch (error) {
  if (error instanceof ConnectionError) {
    // Network/connection issues
    console.log('Connection failed:', error.message);

  } else if (error instanceof ServerError) {
    // Server-side errors (not found, validation, etc.)
    console.log('Server error:', error.message);

  } else if (error instanceof ProtocolError) {
    // Protocol/serialization errors
    console.log('Protocol error:', error.message);
  }

} finally {
  client.close();
}

ConnectionError

Network failures, connection refused, timeouts, disconnections

ServerError

Document not found, permission denied, validation errors

ProtocolError

Invalid response format, message too large, serialization issues
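
A ConnectionError usually warrants a reconnect. Below is a minimal retry sketch using only the documented connect() call; the attempt count and backoff delays are arbitrary choices, not SDK defaults.

// Sketch: reconnect with a simple linear backoff on ConnectionError.
async function connectWithRetry(client, attempts = 5) {
  for (let i = 0; i < attempts; i++) {
    try {
      await client.connect();
      return;
    } catch (error) {
      if (!(error instanceof ConnectionError) || i === attempts - 1) throw error;
      await new Promise(resolve => setTimeout(resolve, 500 * (i + 1)));
    }
  }
}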