node-llama-cpp

v3.0.0-beta.4
Run AI models locally on your machine with Node.js bindings for llama.cpp. Force a JSON schema on the model output at the generation level.
Keywords: llama, llama-cpp, llama.cpp, bindings, ai, cmake, cmake-js, prebuilt-binaries, llm (and 15 more)
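
The JSON schema enforcement mentioned in the description is exposed through the library's grammar API. Below is a minimal sketch of how that could look with the v3 beta; the `getLlama` and `LlamaChatSession` exports, the `createGrammarForJsonSchema` helper, the `contextSequence` option, and the model path are assumptions based on the v3 beta documentation and may differ between beta releases.

```typescript
import {getLlama, LlamaChatSession} from "node-llama-cpp";

// Assumption: getLlama() resolves the prebuilt (or locally built) llama.cpp binding.
const llama = await getLlama();

// "./models/model.gguf" is a placeholder path to a locally downloaded GGUF model.
const model = await llama.loadModel({modelPath: "./models/model.gguf"});
const context = await model.createContext();
const session = new LlamaChatSession({contextSequence: context.getSequence()});

// Turn a JSON schema into a grammar so the model can only generate matching output.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        summary: {type: "string"},
        sentiment: {enum: ["positive", "neutral", "negative"]}
    }
});

const response = await session.prompt("Summarize: the build finished without errors.", {grammar});
console.log(grammar.parse(response)); // parsed object constrained to the schema
```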

Scripts

lint

npm run lint:eslint

test

npm run test:typescript && npm run lint:eslint && npm run test:standalone && npm run test:modelDependent

build

tsc --build tsconfig.json --force

clean

rm -rf ./node_modules ./dist ./tsconfig.tsbuildinfo ./test/.models

watch

tsc --build tsconfig.json --watch --force

format

npm run lint:eslint -- --fix

prepare

[ "$CI" = true ] || [ -d '.husky/_' ] || husky install

docs:dev

npm run docs:generateTypedoc && vitepress dev

prebuild

rimraf ./dist ./tsconfig.tsbuildinfo

prewatch

rimraf ./dist ./tsconfig.tsbuildinfo

dev:build

npm run build && node ./dist/cli/cli.js build

dev:setup

npm run build && node ./dist/cli/cli.js download && npm run docs:generateTypedoc && npm run dev:setup:downloadAllTestModels

docs:build

npm run docs:generateTypedoc && vitepress build

lint:eslint

eslint --ext .js --ext .ts .

postinstall

node ./dist/cli/cli.js postinstall

docs:preview

npm run docs:generateTypedoc && vitepress preview

cmake-js-llama

cd llama && cmake-js

test:standalone

vitest run ./test/standalone

test:typescript

tsc --build tsconfig.json --dry --force

test:modelDependent

vitest run ./test/modelDependent

addPostinstallScript

npm pkg set scripts.postinstall="node ./dist/cli/cli.js postinstall"

docs:generateTypedoc

typedoc && rimraf ./docs/api/index.md ./docs/api/exports.md

test:standalone:interactive

vitest watch ./test/standalone

dev:setup:downloadAllTestModels

node --loader ts-node/esm test/utils/scripts/downloadAllTestModels.ts

test:modelDependent:interactive

vitest watch ./test/modelDependent

Metadata

  • License: MIT
  • Node.js: >=18.0.0
  • Author: Gilad S.
  • Released: 1/21/2024
