Revert tokenizers (#423)

* Revert "fix: add missing "with" in README"

This reverts commit bf1e7f1f17.

* Revert "feat: tokenizers (#407)"

This reverts commit d2095ba267.
Author:    yetone
Committed: GitHub, 2024-08-31 22:45:31 +08:00
Parent:    bf1e7f1f17
Commit:    d5a4db8321

27 files changed, 27 insertions(+), 3554 deletions(-)

(File diff suppressed because it is too large.)


@@ -1,32 +0,0 @@
[lib]
crate-type = ["cdylib"]

[package]
name = "avante-tokenizers"
edition = { workspace = true }
version = { workspace = true }
rust-version = { workspace = true }
license = { workspace = true }

[lints]
workspace = true

[dependencies]
mlua = { version = "0.10.0-beta.1", features = [
    "module",
    "serialize",
], git = "https://github.com/mlua-rs/mlua.git", branch = "main" }
tiktoken-rs = "0.5.9"
tokenizers = { version = "0.20.0", features = [
    "esaxx_fast",
    "http",
    "unstable_wasm",
    "onig",
], default-features = false }

[features]
lua51 = ["mlua/lua51"]
lua52 = ["mlua/lua52"]
lua53 = ["mlua/lua53"]
lua54 = ["mlua/lua54"]
luajit = ["mlua/luajit"]


@@ -1 +0,0 @@
A simple crate to unify hf/tokenizers and tiktoken-rs


@@ -1,96 +0,0 @@
use mlua::prelude::*;
use std::sync::{Arc, Mutex};
use tiktoken_rs::{get_bpe_from_model, CoreBPE};
use tokenizers::Tokenizer;

// Thin wrapper around tiktoken-rs for OpenAI-style BPE models.
struct Tiktoken {
    bpe: CoreBPE,
}

impl Tiktoken {
    fn new(model: String) -> Self {
        // Panics if the model name is unknown to tiktoken-rs.
        let bpe = get_bpe_from_model(&model).unwrap();
        Tiktoken { bpe }
    }

    // Returns (token ids, token count, character count).
    fn encode(&self, text: String) -> (Vec<usize>, usize, usize) {
        let tokens = self.bpe.encode_with_special_tokens(&text);
        let num_tokens = tokens.len();
        let num_chars = text.chars().count();
        (tokens, num_tokens, num_chars)
    }
}

// Wrapper around a Hugging Face tokenizer fetched from the model hub.
struct HuggingFaceTokenizer {
    tokenizer: Tokenizer,
}

impl HuggingFaceTokenizer {
    fn new(model: String) -> Self {
        // Panics if the tokenizer cannot be downloaded or parsed.
        let tokenizer = Tokenizer::from_pretrained(model, None).unwrap();
        HuggingFaceTokenizer { tokenizer }
    }

    // Returns (token ids, token count, character count).
    fn encode(&self, text: String) -> (Vec<usize>, usize, usize) {
        let encoding = self.tokenizer.encode(text, false).unwrap();
        let tokens: Vec<usize> = encoding.get_ids().iter().map(|x| *x as usize).collect();
        let num_tokens = tokens.len();
        // Length of the encoded input, taken from the last token's end offset.
        let num_chars = encoding.get_offsets().last().unwrap().1;
        (tokens, num_tokens, num_chars)
    }
}

enum TokenizerType {
    Tiktoken(Tiktoken),
    HuggingFace(HuggingFaceTokenizer),
}

// Holds the active tokenizer, if any; the Mutex guards access from Lua callbacks.
struct State {
    tokenizer: Mutex<Option<TokenizerType>>,
}

impl State {
    fn new() -> Self {
        State {
            tokenizer: Mutex::new(None),
        }
    }
}

fn encode(state: &State, text: String) -> LuaResult<(Vec<usize>, usize, usize)> {
    let tokenizer = state.tokenizer.lock().unwrap();
    match tokenizer.as_ref() {
        Some(TokenizerType::Tiktoken(tokenizer)) => Ok(tokenizer.encode(text)),
        Some(TokenizerType::HuggingFace(tokenizer)) => Ok(tokenizer.encode(text)),
        None => Err(LuaError::RuntimeError(
            "Tokenizer not initialized".to_string(),
        )),
    }
}

fn from_pretrained(state: &State, model: String) -> LuaResult<()> {
    let mut tokenizer_mutex = state.tokenizer.lock().unwrap();
    // "gpt-4o" is served by tiktoken-rs; any other id is treated as a
    // Hugging Face model and fetched from the hub.
    *tokenizer_mutex = Some(match model.as_str() {
        "gpt-4o" => TokenizerType::Tiktoken(Tiktoken::new(model)),
        _ => TokenizerType::HuggingFace(HuggingFaceTokenizer::new(model)),
    });
    Ok(())
}

// Module entry point: loaded from Lua as `require("avante_tokenizers")`.
#[mlua::lua_module]
fn avante_tokenizers(lua: &Lua) -> LuaResult<LuaTable> {
    let core = State::new();
    let state = Arc::new(core);
    let state_clone = Arc::clone(&state);
    let exports = lua.create_table()?;
    exports.set(
        "from_pretrained",
        lua.create_function(move |_, model: String| from_pretrained(&state, model))?,
    )?;
    exports.set(
        "encode",
        lua.create_function(move |_, text: String| encode(&state_clone, text))?,
    )?;
    Ok(exports)
}
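
For context, a minimal sketch of how the reverted module was consumed from Lua. It assumes the crate has been built as a cdylib and placed on package.cpath (an assumption about the plugin's loader, not shown in this diff); the require name and function names follow the exports above.

-- Usage sketch (assumes the compiled library is on package.cpath).
local tokenizers = require("avante_tokenizers")

-- "gpt-4o" routes to tiktoken-rs; any other id is fetched from the
-- Hugging Face hub ("gpt2" would be a hypothetical example of such an id).
tokenizers.from_pretrained("gpt-4o")

-- encode returns three values: token ids, token count, character count.
local tokens, num_tokens, num_chars = tokenizers.encode("hello world")
print(num_tokens, num_chars)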