cachly-dev/cachly-swift
Official Cachly SDK for Swift
Installation
// Package.swift
.package(url: "https://github.com/cachly-dev/sdk-swift.git", from: "0.1.0-beta.1"),
// target dependencies:
.product(name: "Cachly", package: "sdk-swift"),

Or in Xcode: File → Add Package Dependencies → paste the URL above.
Quick Start
import Cachly
let cache = try await CachlyClient.connect(url: ProcessInfo.processInfo.environment["CACHLY_URL"]!)
// Set with TTL
try await cache.set("user:42", value: User(name: "Alice", plan: "pro"), ttl: .seconds(300))
// Get
let user: User? = try await cache.get("user:42")
// Get-or-set
let report: Report = try await cache.getOrSet("report:monthly", ttl: .seconds(60)) {
try await db.heavyQuery()
}
// Atomic counter
let count = try await cache.incr("page:views")
// Delete
try await cache.del("user:42")

Semantic AI Cache (Speed / Business tiers)
Cache LLM responses by meaning, not just exact key. Cut OpenAI costs by 60%.
import Cachly
let result: SemanticResult<String> = try await cache.semantic.getOrSet(
userQuestion,
fn: { try await openAI.ask(userQuestion) },
embedFn: { text in try await openAI.embed(text) },
options: SemanticOptions(
similarityThreshold: 0.92,
ttl: .hours(1)
)
)
if result.hit {
print("⚡ Cache hit – similarity: \(result.similarity!)")
} else {
print("🔄 Fresh from LLM")
}
print(result.value)

Vapor Integration
import Vapor
import Cachly
// configure.swift
public func configure(_ app: Application) async throws {
let cache = try await CachlyClient.connect(
url: Environment.get("CACHLY_URL")!)
app.storage[CachlyKey.self] = cache
}
struct CachlyKey: StorageKey { typealias Value = CachlyClient }
extension Request {
var cachly: CachlyClient { application.storage[CachlyKey.self]! }
}
// In a route handler:
app.get("user", ":id") { req async throws -> User in
let id = req.parameters.get("id")!
return try await req.cachly.getOrSet("user:\(id)", ttl: .seconds(300)) {
try await User.find(id, on: req.db).unwrap(or: Abort(.notFound))
}
}

iOS Usage
// Works great in SwiftUI with `@MainActor` / `Task { }`:
struct ContentView: View {
@State private var answer = ""
var body: some View {
Button("Ask AI") {
Task {
let result: SemanticResult<String> = try await cache.semantic.getOrSet(
question,
fn: { try await openAI.ask(question) },
embedFn: { try await openAI.embed($0) }
)
answer = result.value
}
}
Text(answer)
}
}

API Reference
| Method | Description | |---|---| | connect(url:) | Async factory – connect to cachly instance | | get<T>( key) | Async – get Codable value (nil if not found) | | set( key, value:, ttl:) | Async – set Codable value | | del( keys...) | Async – delete keys, returns count | | exists( key) | Async – check existence | | expire( key, ttl:) | Async – update TTL | | incr( key) | Async – atomic increment | | getOrSet(_ key, ttl:, fn:) | Async – get-or-set pattern | | semantic.getOrSet(...) | Async – semantic AI cache | | semantic.flush(namespace:) | Async – flush namespace | | semantic.size(namespace:) | Async – entry count |
Batch API — Multiple Ops in One Round-Trip
Bundle GET/SET/DEL/EXISTS/TTL operations into one HTTP request or RediStack pipeline.
let cache = try await CachlyClient(
url: ProcessInfo.processInfo.environment["CACHLY_URL"]!,
batchURL: ProcessInfo.processInfo.environment["CACHLY_BATCH_URL"] // optional
)
let results = try await cache.batch([
.get("user:1"),
.get("config:app"),
.set("visits", value: "42", ttl: 86400),
.exists("session:xyz"),
.ttl("token:abc"),
])
let user : String? = results[0].value // nil on miss
let ok : Bool = results[2].ok
let found: Bool = results[3].exists
let secs : Int64 = results[4].ttlSeconds // -1 = no TTL, -2 = key missing

Environment Variables
CACHLY_URL=redis://:your-password@my-app.cachly.dev:30101
CACHLY_BATCH_URL=https://api.cachly.dev/v1/cache/YOUR_TOKEN # optional
# Speed / Business tier – Semantic AI Cache:
CACHLY_VECTOR_URL=https://api.cachly.dev/v1/sem/your-vector-token

Find both values in your cachly.dev dashboard.
Quality Gates
swift build # Build complete
swift test # All tests green (requires Xcode 16+ for @Test macro discovery)

Note: @Test macro discovery requires Xcode.app, not just the Command Line Tools. Tests compile and link correctly in both environments.
AI Dev Brain — Persistent Memory for Your Coding Assistant
cachly ships a 30-tool MCP server that gives Claude Code, Cursor, GitHub Copilot, and Windsurf a persistent memory across sessions — so they never forget your architecture, lessons learned, or last session context.
npx @cachly-dev/init

session_start(instance_id, focus) returns a full briefing in one call: last session summary, relevant lessons, open failures, and brain health. 60% fewer file reads, instant context, zero re-discovery.
→ Full docs: cachly.dev/docs/ai-memory
Links
MIT © cachly.dev
Package Metadata
Repository: cachly-dev/cachly-swift
Stars: 1
Forks: 0
Open issues: 1
Default branch: main
Primary language: swift
README: README.md