// forked from sheetjs/docs.sheetjs.com
import { existsSync } from 'fs';
import { ChatOllama } from "@langchain/community/chat_models/ollama";
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";
import { HNSWLib } from "@langchain/community/vectorstores/hnswlib";
import { SelfQueryRetriever } from "langchain/retrievers/self_query";
import { FunctionalTranslator } from "@langchain/core/structured_query";
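
// Custom SheetJS-backed document loader (defined in ./loadofsheet.mjs):
// it emits one Document per worksheet data row and exposes per-column
// metadata through `loader.attributes` for the self-query retriever below.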
import LoadOfSheet from "./loadofsheet.mjs";
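
// Chat model and embeddings both run against a local Ollama server
// (ChatOllama points at the default endpoint, http://localhost:11434),
// reusing the same model for generation and for embedding.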
const modelName = "llama3-chatqa:8b-v1.5-q8_0";

const model = new ChatOllama({ baseUrl: "http://localhost:11434", model: modelName });
const embeddings = new OllamaEmbeddings({ model: modelName });
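
// Load the workbook and convert its data rows into LangChain Documents.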
const loader = new LoadOfSheet("./cd.xls");
const docs = await loader.load();
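
// Build the HNSWLib vector store on the first run and persist it under ./store;
// later runs load the saved index instead of re-embedding every row.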
const vectorstore = await (async() => {
  if(!existsSync("store/hnswlib.index")) {
    const vectorstore = await HNSWLib.fromDocuments(docs, embeddings);
    await vectorstore.save("store");
    return vectorstore;
  }
  return await HNSWLib.load("store", embeddings);
})();
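
// The self-query retriever asks the LLM to turn the question into a structured
// filter over the worksheet columns (described by `loader.attributes`, the
// AttributeInfo list produced by the loader) and applies that filter to the
// vector store. As an illustration only (actual entries come from loadofsheet.mjs),
// one attribute might look like:
//   { name: "Miles_per_Gallon", description: "Miles_per_Gallon", type: "number" }
// `k` is raised far above the default of 4 so every matching row can be returned
// rather than only the nearest few.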
const selfQueryRetriever = SelfQueryRetriever.fromLLM({
  llm: model,
  vectorStore: vectorstore,
  documentContents: "Data rows from a worksheet",
  attributeInfo: loader.attributes,
  structuredQueryTranslator: new FunctionalTranslator(),
  searchParams: { k: 1024 } // default is 4
});
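
// Ask a natural-language question; each returned Document carries the original
// row's cells in its metadata, so print the Name and MPG values.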
const res = await selfQueryRetriever.invoke(
  "Which rows have over 40 miles per gallon?"
);
res.forEach(({ metadata }) => { console.log({ Name: metadata.Name, MPG: metadata.Miles_per_Gallon }); });
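
// Usage sketch (assumptions: the script is saved as query.mjs, a hypothetical
// name not fixed anywhere above; Ollama is running locally with the
// llama3-chatqa:8b-v1.5-q8_0 model pulled; cd.xls and loadofsheet.mjs sit next
// to the script):
//   node query.mjs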