-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathCargo.toml
More file actions
56 lines (51 loc) · 1.66 KB
/
Cargo.toml
File metadata and controls
56 lines (51 loc) · 1.66 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
[package]
# Per Cargo convention: name, version, then remaining keys alphabetically,
# with description last.
name = "splintr"
version = "0.9.1"
categories = ["text-processing", "encoding"]
edition = "2021"
homepage = "https://github.com/ml-rust/splintr"
keywords = ["tokenizer", "bpe", "sentencepiece", "wordpiece", "llm"]
license = "MIT"
readme = "README.md"
repository = "https://github.com/ml-rust/splintr"
description = "Fast Rust tokenizer (BPE + SentencePiece + WordPiece) with Python bindings"

[lib]
name = "splintr"
# "cdylib" builds the shared library loaded as a Python extension module
# (pairs with the optional pyo3 dependency below); "rlib" keeps the crate
# usable as a regular Rust library dependency.
crate-type = ["cdylib", "rlib"]
[features]
# Parallel encoding (rayon) and JIT/SIMD regex are enabled by default.
default = ["rayon", "regexr-jit"]
# pyo3-based Python bindings (built as an extension module).
python = ["dep:pyo3"]
# Optional PCRE2 regex backend, kept for benchmarking comparisons.
pcre2 = ["dep:pcre2"]
# Internal parallelism via rayon.
rayon = ["dep:rayon"]
# JIT compilation + SIMD acceleration for the default regexr backend.
regexr-jit = ["regexr/jit", "regexr/simd"]
wasm = [] # disables rayon, uses scalar regex — use with --no-default-features
# Dependencies sorted alphabetically per Cargo convention; each entry keeps
# its purpose comment.
[dependencies]
# Aho-Corasick for fast multi-pattern special token matching
aho-corasick = "1.1"
# Base64 decoding for tiktoken vocab files
base64 = "0.22"
# LRU cache for frequent token sequences
lru = "0.16"
# PCRE2 regex with JIT support (optional, for benchmarking)
pcre2 = { version = "0.2", optional = true }
# Python bindings
pyo3 = { version = "0.27", features = ["extension-module"], optional = true }
# Rayon for internal parallelism
rayon = { version = "1.10", optional = true }
# regexr regex engine (default backend)
regexr = { version = "0.1", default-features = false }
# Fast hashing (FxHashMap)
rustc-hash = "2.0"
# Error handling
thiserror = "2.0"
# Unicode general category for punctuation detection
unicode-general-category = "1.0"
# Unicode normalization for WordPiece accent stripping
unicode-normalization = "0.1"

[dev-dependencies]
# Non-optional PCRE2 here so benchmarks/tests can compare against the
# default regexr backend without enabling the optional `pcre2` feature.
pcre2 = "0.2"
[profile.release]
opt-level = 3
# Fat LTO plus a single codegen unit: noticeably slower release builds in
# exchange for maximum cross-crate optimization.
lto = true
codegen-units = 1