API Reference
Complete TypeScript SDK API.
Module exports#
import { open, openInMemory, ArcflowError } from 'arcflow'

open(dataDir: string): ArcflowDB#
Open a persistent graph database. Data is WAL-journaled to disk.
const db = open('./data/graph')

openInMemory(): ArcflowDB#
Open an in-memory graph database. Data is lost when the process exits. Ideal for testing.
const db = openInMemory()

ArcflowDB#
The main database interface.
db.version(): string#
Returns the engine version string.
db.query(cypher: string, params?: QueryParams): QueryResult#
Execute a read query. Returns typed results.
const result = db.query("MATCH (n:Person) RETURN n.name, n.age")
const result = db.query("MATCH (n {id: $id}) RETURN n", { id: 'p1' })

db.mutate(cypher: string, params?: QueryParams): MutationResult#
Execute a mutating query (CREATE, MERGE, SET, DELETE).
db.mutate("CREATE (n:Person {name: $name})", { name: 'Alice' })

db.batchMutate(queries: string[]): number#
Execute multiple mutations under a single write lock. Returns count of mutations applied.
const count = db.batchMutate([
"MERGE (a:Person {id: 'p1', name: 'Alice'})",
"MERGE (b:Person {id: 'p2', name: 'Bob'})",
])

db.isHealthy(): boolean#
Returns true if the database is operational. Returns false after close().
db.stats(): GraphStats#
Returns node, relationship, and index counts.
const s = db.stats()
console.log(s.nodes, s.relationships, s.indexes)

db.close(): void#
Close the database and flush WAL. All subsequent operations throw ArcflowError.
db.cursor(query: string, params?: QueryParams, pageSize?: number): QueryCursor#
Return a paginated cursor over a query. Useful for large result sets that shouldn't all be loaded into memory at once.
const cursor = db.cursor("MATCH (n:Person) RETURN n.name, n.age", {}, 100)
while (!cursor.done) {
const page = cursor.next()
for (const row of page!.rows) { ... }
}
cursor.close()

db.subscribe(query: string, handler: SubscriptionHandler, options?: SubscribeOptions): LiveQuery#
Subscribe to a live delta stream for a query. The handler fires on every change with the added rows, removed rows, and full current result set. Uses a CREATE LIVE VIEW under the hood.
const sub = db.subscribe(
"MATCH (n:Alert) WHERE n.severity = 'critical' RETURN n.id, n.message",
(event) => {
console.log('new alerts:', event.added)
console.log('resolved:', event.removed)
}
)
// Stop when done
sub.cancel()

db.syncPending(): number#
Returns the number of mutations pending sync to the cloud. Returns 0 if sync is disabled or fully caught up.
db.fingerprint(): string#
Returns the SHA-256 hash of the current full graph state. Two graphs with identical content produce identical fingerprints regardless of insertion order. Useful for sync verification and integrity checking.
const hash = db.fingerprint()
// "sha256:a1b2c3d4..."

db.ingestDelta(delta: NodeEdgeDelta): void#
Apply a structured delta (added nodes, removed nodes, added edges, removed edges) in a single atomic batch. Preferred for ingestion pipelines that track changes as deltas rather than full state.
db.impactSubgraph(rootIds: string[], edgeKinds: string[], maxDepth: number): ImpactResult#
Traverse the graph from root nodes following specified edge kinds up to maxDepth hops. Returns all affected nodes with their hop distance. Used for blast-radius analysis — "what does changing this symbol break?"
const impact = db.impactSubgraph(
['fn_login'],
['CALLS', 'TESTED_BY'],
4
)
for (const node of impact.nodes) {
console.log(`hop ${node.hop}: ${node.id}`)
}

QueryParams#
type QueryParams = Record<string, string | number | boolean | null>

QueryResult#
interface QueryResult {
columns: string[] // Column names
rows: TypedRow[] // Typed row accessors
rowCount: number // Number of rows
computeMs: number // Execution time (ms)
gqlstatus(): string // ISO GQL status: "00000" = data, "02000" = no data
}

gqlstatus() follows ISO/IEC 39075 GQL. "00000" means rows were returned; "02000" means the query succeeded but produced no rows (e.g. a MATCH with no matches, or a filter that eliminated everything).
MutationResult#
Extends QueryResult with mutation statistics.
interface MutationResult extends QueryResult {
nodesCreated: number
nodesDeleted: number
relationshipsCreated: number
relationshipsDeleted: number
propertiesSet: number
}

TypedRow#
row.get(column: string): string | number | boolean | null#
Get a typed value by column name. Supports both full (n.name) and short (name) column names.
row.toObject(): Record<string, string | number | boolean | null>#
Get all columns as a typed key-value object.
GraphStats#
interface GraphStats {
nodes: number
relationships: number
indexes: number
}

QueryCursor#
interface QueryCursor {
pageSize: number // Rows per page
pagesFetched: number // How many pages retrieved so far
done: boolean // True when all pages exhausted
next(): QueryResult | null // Fetch next page (null if done)
all(): QueryResult // Collect all remaining pages into one result
close(): void // Release the cursor
}

LiveQuery#
interface LiveQuery {
viewName: string // Internal live view name
cancel(): void // Stop the subscription and drop the live view
}

DeltaEvent#
interface DeltaEvent {
added: SubscriptionRow[] // Rows newly entering the result set
removed: SubscriptionRow[] // Rows leaving the result set
current: SubscriptionRow[] // Full current result set
frontier: number // Monotonic mutation sequence number
}
type SubscriptionRow = Record<string, string | number | boolean | null>
type SubscriptionHandler = (event: DeltaEvent) => void

SubscribeOptions#
interface SubscribeOptions {
pollIntervalMs?: number // How often to check for updates (default: 100ms)
}

ArcflowError#
class ArcflowError extends Error {
code: string // "EXPECTED_KEYWORD", "LOCK_POISONED", etc.
category: ErrorCategory // "parse" | "validation" | "execution" | "integration"
suggestion?: string // Recovery hint (when available)
}

ArcflowError.fromNapiError(err: unknown): ArcflowError#
Parse a raw napi error into a structured ArcflowError. Used internally by the SDK.
See Also#
- Code Intelligence Guide — using ingestDelta() and impactSubgraph() for codebase graphs
- Data Quality Guide — using subscribe(), cursor(), and fingerprint() in pipelines
- Sync — syncPending() and cloud sync integration
- Concepts: Graph Model — node/edge data model
- Concepts: Persistence — WAL, snapshots, data directory