A helper library for interacting with Oadin (the Baize model framework).
English | 简体中文
- Check whether the Oadin service is available
- Check & download oadin.exe
- Start Oadin service
- Helpers to list, install, update, and delete services, models, and service providers
- Import / export configuration
- Chat / Text generation (streaming & non‑streaming)
- Text‑to‑Image
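
At a glance, a typical start-up flow looks like the sketch below. It is assembled from the helpers documented in the rest of this README and assumes that `isOadinAvailable()` resolves to a boolean and `isOadinExisted()` returns one, which the examples below do not state explicitly.

```javascript
const OadinLib = require('oadin-lib');

async function main() {
  const oadin = new OadinLib();

  // Make sure the Oadin service is running (download oadin.exe first if needed).
  if (!(await oadin.isOadinAvailable())) {
    if (!oadin.isOadinExisted()) {
      await oadin.downloadOadin();
    }
    await oadin.startOadin();
  }

  // Non-streaming chat request; see the chat section below for all options.
  const result = await oadin.chat({
    model: "deepseek-r1:7b",
    stream: false,
    messages: [{ role: "user", content: "Hello" }],
  });
  console.log(result);
}

main().catch(console.error);
```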

Install the package:

```bash
# from npm registry
npm install oadin-lib

# or from a local tarball
npm install oadin-lib-1.0.0.tgz
```

Require the library and create a client:

```javascript
const OadinLib = require('oadin-lib');

const oadin = new OadinLib();
```

Check whether the Oadin service is available:

```javascript
oadin.isOadinAvailable().then((result) => {
  console.log(result);
});
```

Check whether oadin.exe already exists locally:

```javascript
const existed = oadin.isOadinExisted();
console.log(existed);
```

Download oadin.exe:

```javascript
oadin.downloadOadin().then((result) => {
  console.log(result);
});
```

Start the Oadin service:

```javascript
oadin.startOadin().then((result) => {
  console.log(result);
});
```

List services:

```javascript
oadin.getServices().then((result) => {
  console.log(result);
});
```

Install a service:

```javascript
const data = {
service_name: "chat/embed/generate/text-to-image",
service_source: "remote/local",
hybrid_policy: "default/always_local/always_remote",
flavor_name: "ollama/openai/...",
provider_name: "local_ollama_chat/remote_openai_chat/...",
auth_type: "none/apikey",
auth_key: "your_api_key",
}; // Required: service_name, service_source, hybrid_policy, flavor_name, provider_name
oadin.installService(data).then((result) => {
console.log(result);
});const data = {
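
The values above are placeholders that enumerate the accepted options. For example, a local chat service backed by ollama might be installed like this (a sketch built only from the options shown above):

```javascript
// Example values only; pick the options that match your setup.
const chatService = {
  service_name: "chat",
  service_source: "local",
  hybrid_policy: "default",
  flavor_name: "ollama",
  provider_name: "local_ollama_chat",
  auth_type: "none",
};

oadin.installService(chatService).then((result) => {
  console.log(result);
});
```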
service_name: "chat/embed/generate/text-to-image",
hybrid_policy: "default/always_local/always_remote",
remote_provider: "",
local_provider: ""
}; // Required: service_name
oadin.updateService(data).then((result) => {
console.log(result);
});oadin.getModels().then((result) => {
console.log(result);
});const data = {
model_name: "llama2",
service_name: "chat/embed/generate/text-to-image",
service_source: "remote/local",
provider_name: "local_ollama_chat/remote_openai_chat/...",
}; // Required: model_name, service_name, service_source
oadin.installModel(data).then((result) => {
console.log(result);
});const data = {
model_name: "llama2",
service_name: "chat/embed/generate/text-to-image",
service_source: "remote/local",
provider_name: "local_ollama_chat/remote_openai_chat/...",
}; // Required: model_name, service_name, service_source
oadin.deleteModel(data).then((result) => {
console.log(result);
});oadin.getServiceProviders().then((result) => {
console.log(result);
});const data = {
service_name: "chat/embed/generate/text-to-image",
service_source: "remote/local",
flavor_name: "ollama/openai/...",
provider_name: "local_ollama_chat/remote_openai_chat/...",
desc: "",
method: "",
auth_type: "none/apikey",
auth_key: "your_api_key",
models: ["qwen2:7b", "deepseek-r1:7b", ...],
extra_headers: {},
extra_json_body: {},
properties: {}
}; // Required: service_name, service_source, flavor_name, provider_name
oadin.installServiceProvider(data).then((result) => {
console.log(result);
});const data = {
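
As with `installService`, the fields above enumerate the accepted options. A remote, OpenAI-flavored chat provider might look roughly like this (a sketch that uses only fields and option values shown above; the API key and model list are placeholders):

```javascript
// Example values only; auth_key and models are placeholders.
const provider = {
  service_name: "chat",
  service_source: "remote",
  flavor_name: "openai",
  provider_name: "remote_openai_chat",
  auth_type: "apikey",
  auth_key: "your_api_key",
  models: ["deepseek-r1:7b"],
  extra_headers: {},
  extra_json_body: {},
  properties: {}
};

oadin.installServiceProvider(provider).then((result) => {
  console.log(result);
});
```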
service_name: "chat/embed/generate/text-to-image",
service_source: "remote/local",
flavor_name: "ollama/openai/...",
provider_name: "local_ollama_chat/remote_openai_chat/...",
desc: "",
method: "",
auth_type: "none/apikey",
auth_key: "your_api_key",
models: ["qwen2:7b", "deepseek-r1:7b", ...],
extra_headers: {},
extra_json_body: {},
properties: {}
}; // Required: service_name, service_source, flavor_name, provider_name
oadin.updateServiceProvider(data).then((result) => {
console.log(result);
});const data = {
provider_name: ""
};
oadin.deleteServiceProvider(data).then((result) => {
console.log(result);
});oadin.importConfig("path/to/.oadin").then((result) => {
  console.log(result);
});
```

Export configuration:

```javascript
const data = {
  service_name: "chat/embed/generate/text-to-image"
};

oadin.exportConfig(data).then((result) => { // omit data to export everything
  console.log(result);
});
```

Get recommended models:

```javascript
oadin.getModelsRecommended().then((result) => {
  console.log(result);
});
```

Get supported models:

```javascript
const data = {
  service_source: "remote/local",
  flavor: "ollama/openai/..." // for local, defaults to ollama
}; // Required: service_source, flavor

oadin.getModelsSupported(data).then((result) => {
  console.log(result);
});
```

Chat (streaming):

```javascript
const data = {
model: "deepseek-r1:7b",
stream: true,
messages: [
{
role: "user",
content: "你好"
}
],
temperature: 0.7,
max_tokens: 100,
}
oadin.chat(data).then((chatStream) => {
chatStream.on('data', (data) => {
console.log(data);
});
chatStream.on('error', (error) => {
console.error(error);
});
chatStream.on('end', () => {
console.log('Chat stream ended');
});
});const data = {
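
If you prefer a promise over event listeners, the streaming result (which emits `data`, `error`, and `end` events as shown above) can be collected like this. The shape of each chunk is not specified here, so this sketch simply gathers the chunks into an array:

```javascript
// Collects every chunk emitted by a chat/generate stream into an array.
// Assumes the stream emits 'data', 'error', and 'end' events as shown above.
function collectStream(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('error', reject);
    stream.on('end', () => resolve(chunks));
  });
}

// Usage:
// const chunks = await collectStream(await oadin.chat(data));
```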
model: "deepseek-r1:7b",
stream: false,
messages: [
{
role: "user",
content: "你好"
}
],
temperature: 0.7,
max_tokens: 100,
}
oadin.chat(data).then((result) => {
console.log(result);
});const data = {
model: "deepseek-r1:7b",
stream: true,
prompt: "你好",
}
oadin.generate(data).then((generateStream) => {
generateStream.on('data', (data) => {
console.log(data);
});
generateStream.on('error', (error) => {
console.error(error);
});
generateStream.on('end', () => {
console.log('Generate stream ended');
});
});const data = {
model: "deepseek-r1:7b",
stream: false,
prompt: "你好",
}
oadin.generate(data).then((result) => {
console.log(result);
});const data = {
model: "wanx2.1-t2i-turbo",
prompt: "一间有着精致窗户的花店,漂亮的木质门,摆放着花朵",
}
oadin.textToImage(data).then((result) => {
console.log(result);
});oadin.getModelsAvailiable()has been removed or renamed. UsegetModels()instead.