The programming language built for teams that run AI at scale.
Alpha — macOS — all releases
Designed for AI code generation. Short type aliases (I/S/B/F), compact function syntax, no unnecessary keywords. Every token counts.
Compiles to machine code via LLVM. Performance on par with Rust and Go — up to 30x faster than Python on compute-heavy tasks.
Built-in HTTP server/client, JSON, file I/O, concurrency with channels and mutexes, error handling with Result types.
Sans compiles itself. The compiler (~11,600 LOC) and entire runtime are written in Sans — zero C. A fully bootstrapped stage 0→1→2→3 pipeline with fixed-point output.
AI models pay per token. Sans uses 40-60% fewer tokens than Go or Rust for the same logic — meaning faster generation, lower cost, and fewer errors.
Sans:
fib(n:I) I = n <= 1 ? n : fib(n-1) + fib(n-2)

Go:
func fib(n int) int {
    if n <= 1 { return n }
    return fib(n-1) + fib(n-2)
}

Rust:
fn fib(n: i64) -> i64 {
    if n <= 1 { n }
    else { fib(n-1) + fib(n-2) }
}

Node:
function fib(n) {
    return n <= 1 ? n : fib(n - 1) + fib(n - 2)
}

Python:
def fib(n):
    return n if n <= 1 else fib(n - 1) + fib(n - 2)

Sans:
main() {
    j = jo()
    k := 0
    while k < 1000 {
        j.set(str(k), ji(k))
        k += 1
    }
    p(jp(jfy(j)).get("999").get_int())
}

Go:
func main() {
    m := make(map[string]int, 1000)
    for i := 0; i < 1000; i++ {
        m[fmt.Sprintf("key%d", i)] = i
    }
    data, _ := json.Marshal(m)
    var out map[string]int
    json.Unmarshal(data, &out)
}

Rust:
fn main() {
    let mut obj = serde_json::Map::new();
    for i in 0..1000 {
        obj.insert(format!("key{}", i), json!(i));
    }
    let s = serde_json::to_string(&obj).unwrap();
    let p: Value = serde_json::from_str(&s).unwrap();
}

Node:
const obj = {}
for (let k = 0; k < 1000; k++) {
    obj[String(k)] = k
}
const parsed = JSON.parse(JSON.stringify(obj))
console.log(parsed["999"])

Python:
import json
obj = {str(k): k for k in range(1000)}
parsed = json.loads(json.dumps(obj))
print(parsed["999"])

Sans:
handler(req:HR) S = "hello world"
main() {
    s = hs(8080)
    s.get("/", handler)
    s.start()
}

Go:
func main() {
    http.HandleFunc("/",
        func(w http.ResponseWriter,
            r *http.Request) {
            fmt.Fprint(w, "hello world")
        })
    http.ListenAndServe(":8080", nil)
}

Rust:
async fn handler() -> impl Responder {
    "hello world"
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new()
        .route("/", web::get().to(handler)))
        .bind("0.0.0.0:8080")?.run().await
}

Node:
const http = require("http")
http.createServer((req, res) => {
    res.end("hello world")
}).listen(8080)

Python:
from flask import Flask
app = Flask(__name__)

@app.route("/")
def handler():
    return "hello world"

app.run(port=8080)

| Example | Sans | Go | Rust | Node | Python | Savings |
|---|---|---|---|---|---|---|
| Fibonacci | 14 tokens | 26 tokens | 27 tokens | 22 tokens | 16 tokens | 13-48% |
| JSON roundtrip | 36 tokens | 65 tokens | 72 tokens | 40 tokens | 34 tokens | −6-50% |
| HTTP server | 18 tokens | 42 tokens | 58 tokens | 32 tokens | 26 tokens | 31-69% |