Language Bindings
One engine, every language.
curl -fsSL https://oz.com/install | sh # Native binary
pip install arcflow # Python
npm install arcflow # TypeScript
docker run ghcr.io/oz-global/arcflow:latest # Docker
All bindings wrap a single C ABI shared library (libarcflow). Direct function calls when in-process. TCP/HTTP/MCP when you need network access. Docker when you need isolation.
Rust SDK#
The primary interface.
use arcflow::{open, open_concurrent, compile, Label, Properties};
// In-memory graph — direct store access
let mut store = arcflow::open();
let id = store.create_node(
vec![Label::new("Person")],
Properties::new(),
);
// Thread-safe concurrent access — recommended for applications
let db = arcflow::open_concurrent();
db.execute("CREATE (n:Person {name: 'Alice', age: 30})").unwrap();
let result = db.execute("MATCH (n:Person) RETURN n.name").unwrap();
for row in &result.rows {
println!("{}", row.get("name").unwrap());
}
Key Types#
| Type | Description |
|---|---|
GraphStore | Single-threaded graph store with full low-level API |
ConcurrentStore | Thread-safe wrapper with execute() — auto-detects read vs write |
Engine | Read-only query engine |
MutableEngine | Read-write query engine |
QueryResult | Rows + columns from query execution |
compile() | Parse WorldCypher text into executable IR |
Compile + Execute#
let db = arcflow::open_concurrent();
db.execute("CREATE (n:Person {name: 'Alice'})").unwrap();
// Pre-compile for repeated execution
let query = arcflow::compile("MATCH (n:Person) RETURN n.name").unwrap();
// Execute the compiled query — skips parsing on every repeated run
let result = db.execute_compiled(&query).unwrap();
assert!(!result.rows.is_empty());
Python#
Native bindings over the C ABI. No compile step needed — loads the shared library at runtime.
from arcflow import ArcFlow
# In-memory database
db = ArcFlow()
# Or persistent
db = ArcFlow("/tmp/mydb")
# Execute queries
db.execute("CREATE (n:Person {name: 'Alice', age: 30})")
result = db.execute("MATCH (n:Person) RETURN n.name, n.age")
for row in result:
print(row) # {'name': 'Alice', 'age': '30'}
# Algorithms work the same way
result = db.execute("CALL algo.pageRank()")
for row in result:
print(f"{row['name']}: {row['score']}")
# Context manager for automatic cleanup
with ArcFlow() as db:
db.execute("CALL db.demo()")
result = db.execute("MATCH (n) RETURN n.name")
print(result.row_count)
# Version check
print(ArcFlow.version()) # "1.5.0"
TypeScript / Node.js#
napi-rs native addon. The Rust engine runs in-process inside Node.js — function calls, not HTTP, no serialization, microsecond latency.
import { open, openInMemory, ArcflowError } from 'arcflow'
// In-memory (testing, short-lived)
const db = openInMemory()
// Persistent (WAL-journaled, survives crashes)
const persistentDb = open('./data/graph')
// Queries return typed values — numbers are numbers, not strings
db.mutate("CREATE (n:Person {name: 'Alice', age: 30})")
const result = db.query("MATCH (n:Person) RETURN n.name, n.age")
result.rows[0].get('name') // "Alice"
result.rows[0].get('age') // 30 (number, not "30")
// Parameters (prevent injection)
db.query("MATCH (n:Person {name: $name}) RETURN n", { name: 'Alice' })
// Atomic batch mutations
db.batchMutate([
"MERGE (a:Person {id: 'p1', name: 'Alice'})",
"MERGE (b:Org {id: 'o1', name: 'Acme'})",
])
// Paginated cursor for large result sets
const cursor = db.cursor('MATCH (n:Log) RETURN n.ts ORDER BY n.ts', undefined, 500)
let page
while ((page = cursor.next()) !== null) process(page.rows)
cursor.close()
// Live subscriptions — callback fires with added/removed events on each relevant mutation
const sub = db.subscribe(
'MATCH (n:Alert) WHERE n.level = "critical" RETURN n.id, n.message',
({ added, removed }) => console.log('new alerts', added)
)
// later:
sub.cancel()
// Sync
db.syncPending() // 0 if up to date
db.fingerprint() // hash of current state
// Error handling
try { db.query("INVALID") } catch (e) {
if (e instanceof ArcflowError) console.log(e.code, e.category, e.suggestion)
}
db.close()
Install#
npm install arcflow
React#
React hooks for live graph data in components.
npm install arcflow @arcflow/react
import { openInMemory } from 'arcflow'
import { useQuery, useLiveQuery } from '@arcflow/react'
const db = openInMemory()
db.mutate("CREATE (n:Alert {level: 'critical', message: 'Disk 95%'})")
function AlertPanel() {
// One-time query
const { data, loading, error } = useQuery(db, 'MATCH (n:Alert) RETURN n.level, n.message')
// Live subscription — re-renders on every graph mutation that affects results
const { rows } = useLiveQuery(db, 'MATCH (n:Alert) WHERE n.level = "critical" RETURN n.message')
if (loading) return <div>Loading...</div>
return (
<ul>
{rows?.map((row, i) => <li key={i}>{String(row.message)}</li>)}
</ul>
)
}
| Hook | Returns | Notes |
|---|---|---|
useQuery(db, query, params?, deps?) | { data, loading, error } | Runs once, re-runs when deps change |
useLiveQuery(db, query, deps?, pollIntervalMs?) | { rows, loading, error } | Subscribes to live view, re-renders on changes |
C ABI#
The foundation all bindings build on. Stable extern "C" interface with opaque handles.
Header: arcflow.h#
#include "arcflow.h"
// Open runtime (NULL for in-memory, or path for persistent)
arcflow_runtime_t* rt = arcflow_open(NULL);
// Open session
arcflow_session_t* session = arcflow_session_open(rt);
// Execute query
arcflow_result_t* result = arcflow_execute(session, "MATCH (n) RETURN n.name");
// Read results
int64_t rows = arcflow_result_row_count(result);
int64_t cols = arcflow_result_column_count(result);
for (int64_t r = 0; r < rows; r++) {
const char* name = arcflow_result_get_string(result, r, 0);
printf("%s\n", name);
}
// Cleanup
arcflow_result_free(result);
arcflow_session_close(session);
arcflow_close(rt);
// Error handling
const char* err = arcflow_last_error();
Functions#
| Function | Description |
|---|---|
arcflow_open(path) | Open runtime. NULL = in-memory |
arcflow_close(rt) | Close runtime and free memory |
arcflow_session_open(rt) | Open lightweight session on runtime |
arcflow_session_close(session) | Close session |
arcflow_execute(session, query) | Execute WorldCypher query |
arcflow_result_row_count(result) | Number of result rows |
arcflow_result_column_count(result) | Number of result columns |
arcflow_result_column_name(result, idx) | Column name by index |
arcflow_result_get_string(result, row, col) | Cell value as string |
arcflow_result_free(result) | Free result memory |
arcflow_last_error() | Last error message (thread-local) |
arcflow_version() | Engine version string |
C++#
Header-only RAII wrapper over the C ABI. Include arcflow.hpp and link with -larcflow.
#include "arcflow.hpp"
int main() {
// RAII: runtime auto-freed on scope exit
arcflow::Runtime rt; // in-memory
// arcflow::Runtime rt("/path/to/data"); // persistent
auto session = rt.session();
session.execute("CREATE (n:Person {name: 'Alice'})");
auto result = session.execute("MATCH (n:Person) RETURN n.name");
for (int64_t r = 0; r < result.row_count(); ++r) {
std::cout << result.get(r, 0) << std::endl; // "Alice"
}
// result, session, rt auto-freed on scope exit
}
Build#
g++ -std=c++17 \
-I include/arcflow \
main.cpp \
-L /path/to/arcflow/lib -larcflow \
-o myapp
Docker#
Scratch base image, statically linked binary, under 20MB. NVIDIA bare-metal strategy — the container adds zero runtime overhead.
docker pull ghcr.io/oz-global/arcflow:latest
# Run with data directory mounted
docker run -v /data/arcflow:/data \
ghcr.io/oz-global/arcflow:latest \
--data-dir /data
# The ArcFlow Dockerfile
FROM scratch
COPY arcflow /arcflow
ENTRYPOINT ["/arcflow"]
Volume-mount your data directory. The binary is fully static (musl libc). No dependencies inside the container.
MCP Server#
For cloud chat interfaces (ChatGPT, Claude.ai, Gemini web) that have no local shell. See MCP Server for setup, tools, and configuration.
Architecture#
All bindings target one engine. Pick the layer that matches your use case:
ArcFlow Rust engine
│
C ABI (libarcflow.so)
│
┌───────────────────┼──────────────────────┐
│ │ │
Python TypeScript / React C++
(ctypes) (napi-rs — in-process) (arcflow.hpp)
│
WASM
(browser / edge — zero-copy)
One engine. No protocol translation. Function calls go directly into the Rust engine. For the browser, the WASM build runs the same engine with zero serialization via the WASM memory model.
See Also#
- Installation — pre-built binaries for all platforms
- Platform — runtime environments: browser WASM, Docker, Cloudflare Workers, mobile
- Agent-Native Database — integration surfaces: napi-rs, CLI binary, MCP server
- MCP Server — cloud chat UI integration (the fourth binding surface)