- BaseAI
+ Base AI
The first Web AI Framework.
@@ -245,4 +245,4 @@ function CopyableCommand({ command }: CopyableCommandProps) {
);
-}
\ No newline at end of file
+}
diff --git a/apps/baseai.dev/src/components/home/webgl.tsx b/apps/baseai.dev/src/components/home/webgl.tsx
index c6d649ee..9b3099e9 100644
--- a/apps/baseai.dev/src/components/home/webgl.tsx
+++ b/apps/baseai.dev/src/components/home/webgl.tsx
@@ -29,6 +29,21 @@ const WebGLInitializer = () => {
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
+ // const fontFace = new FontFace(
+ // 'Grotesk',
+ // 'url(/AlteHaasGroteskBold.ttf)'
+ // );
+ // document.fonts.add(fontFace);
+
+ // const style = document.createElement('style');
+ // style.innerHTML = `
+ // @import url(/AlteHaasGroteskBold.ttf);
+ // body {
+ // font-family: 'Grotesk', sans-serif;
+ // }
+ // `;
+ // document.head.appendChild(style);
+
const textDiv = document.createElement('div');
textDiv.style.position = 'absolute';
textDiv.style.left = '0';
@@ -39,6 +54,7 @@ const WebGLInitializer = () => {
textDiv.style.fontFamily = 'Grotesk';
textDiv.style.color = 'rgba(255,255,255,1)';
textDiv.style.display = 'flex';
+ // textDiv.style.lineHeight = '0px';
textDiv.style.justifyContent = 'center';
textDiv.style.alignItems = 'center';
textDiv.textContent = 'BASE AI';
@@ -57,7 +73,7 @@ const WebGLInitializer = () => {
await document.fonts.ready;
document.body.appendChild(textDiv);
- const lineHeight = window.getComputedStyle(textDiv).lineHeight;
+ const lineHeight = window.getComputedStyle(textDiv).lineHeight; // e.g., "20px"
const y = parseFloat(lineHeight);
const canvas = await html2canvas(textDiv, {
@@ -68,6 +84,7 @@ const WebGLInitializer = () => {
logging: false,
y: y * 0,
x: 0,
+ // foreignObjectRendering: true,
onclone: document => {
Array.from(document.querySelectorAll('*')).forEach(e => {
let existingStyle = e.getAttribute('style') || '';
@@ -116,6 +133,7 @@ const WebGLInitializer = () => {
}
);
+ // Custom shader material for the enhanced liquid wavy effect
const material = new THREE.ShaderMaterial({
transparent: true,
uniforms: {
@@ -391,6 +409,7 @@ const WebGLInitializer = () => {
`
});
+ // Create a mesh with the geometry and material
const sphere = new THREE.Mesh(geometry, material);
scene.add(sphere);
@@ -399,7 +418,7 @@ const WebGLInitializer = () => {
function calculateCameraZ(screenWidth: number, screenHeight: number) {
let cameraZ;
-
+ // Breakpoints based on screen width and height
if (screenWidth <= 768) {
if (screen.availWidth < screen.availHeight) {
cameraZ = 4.5;
@@ -408,41 +427,46 @@ const WebGLInitializer = () => {
}
} else if (screenWidth > 768 && screenWidth <= 1920) {
if (screenHeight <= 1080) {
- cameraZ = 2;
+ cameraZ = 2; // Full HD screens (1920x1080)
} else {
- cameraZ = 1.9;
+ cameraZ = 1.9; // Higher aspect ratio or larger height
}
} else if (screenWidth > 1920 && screenWidth <= 2440) {
if (screenHeight <= 1080) {
- cameraZ = 1.75;
+ cameraZ = 1.75; // Wide screens with Full HD height
} else {
- cameraZ = 1.65;
+ cameraZ = 1.65; // Taller screens with higher resolutions
}
} else if (screenWidth > 2440) {
if (screenHeight <= 1440) {
- cameraZ = 1.5;
+ cameraZ = 1.5; // Ultra-wide or larger 2K displays
} else {
- cameraZ = 1.4;
+ cameraZ = 1.4; // 4K and above
}
}
return cameraZ;
}
+ // Get screen width and height
const screenWidth = window.innerWidth;
const screenHeight = window.innerHeight;
+ // Calculate camera Z position based on breakpoints
const cameraZ = calculateCameraZ(screenWidth, screenHeight);
if (cameraZ) camera.position.z = cameraZ;
+ // Raycaster setup
const raycaster = new THREE.Raycaster();
const mouse = new THREE.Vector2();
+ // Animation loop
const animate = () => {
requestAnimationFrame(animate);
- material.uniforms.u_time.value += 0.02;
+ material.uniforms.u_time.value += 0.02; // Update time for animation
material.uniforms.u_viewVector.value = camera.position;
+ // Update mouse position in the shader
raycaster.setFromCamera(mouse, camera);
const intersects = raycaster.intersectObject(sphere);
if (intersects.length > 0) {
@@ -477,6 +501,13 @@ const WebGLInitializer = () => {
updateCameraPosition();
+ // Update background texture with new dimensions
+ createHighResBackgroundTexture(width, height).then(texture => {
+ scene.background = texture;
+ if (material.uniforms && material.uniforms.u_background) {
+ material.uniforms.u_background.value = texture;
+ }
+ });
createHighResBackgroundTexture(width, height).then(texture => {
scene.background = texture;
if (material.uniforms && material.uniforms.u_background) {
@@ -498,4 +529,4 @@ const WebGLInitializer = () => {
return
;
};
-export default WebGLInitializer;
\ No newline at end of file
+export default WebGLInitializer;
diff --git a/apps/baseai.dev/src/components/mdx/InlineCodeCopy.tsx b/apps/baseai.dev/src/components/mdx/InlineCodeCopy.tsx
index a39ba5e1..6cfd7f7e 100644
--- a/apps/baseai.dev/src/components/mdx/InlineCodeCopy.tsx
+++ b/apps/baseai.dev/src/components/mdx/InlineCodeCopy.tsx
@@ -6,7 +6,6 @@ import {
} from '@heroicons/react/24/solid';
import { Button } from '../ui/button';
import { useCopyToClipboard } from '@/hooks/use-copy-to-clipboard';
-import cn from 'mxcn';
export function InlineCopy({
content,
@@ -16,7 +15,6 @@ export function InlineCopy({
children: React.ReactNode;
}) {
const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 });
- const totalChars = content.length;
const onCopy = () => {
navigator.clipboard.writeText(content);
@@ -26,7 +24,7 @@ export function InlineCopy({
return (
- 25 && 'w-[50%] sm:w-full overflow-scroll')}>{content}
+ {content}
{
},
table: (props: any) => {
return (
-
+
);
},
h2: (props: any) => {
diff --git a/apps/baseai.dev/src/components/support-button.tsx b/apps/baseai.dev/src/components/support-button.tsx
deleted file mode 100644
index 370acac5..00000000
--- a/apps/baseai.dev/src/components/support-button.tsx
+++ /dev/null
@@ -1,35 +0,0 @@
-'use client';
-
-import {
- Tooltip,
- TooltipContent,
- TooltipProvider,
- TooltipTrigger
-} from '@/components/ui/tooltip';
-
-export default function SupportButton() {
- return (
-
-
-
-
-
-
-
-
- Contact Support
-
-
-
- );
-}
\ No newline at end of file
diff --git a/apps/baseai.dev/src/mdx/languages.mjs b/apps/baseai.dev/src/mdx/languages.mjs
index 8074789c..2000aef5 100644
--- a/apps/baseai.dev/src/mdx/languages.mjs
+++ b/apps/baseai.dev/src/mdx/languages.mjs
@@ -1,23 +1,20 @@
-import { createRequire } from 'module';
-
-const require = createRequire(import.meta.url);
-const langGraphQL = require('shiki/languages/graphql.tmLanguage.json');
-const langJS = require('shiki/languages/javascript.tmLanguage.json');
-const langJSX = require('shiki/languages/jsx.tmLanguage.json');
-const langJSON = require('shiki/languages/json.tmLanguage.json');
-const langXML = require('shiki/languages/xml.tmLanguage.json');
-const langYAML = require('shiki/languages/yaml.tmLanguage.json');
-const langPHP = require('shiki/languages/php.tmLanguage.json');
-const langHTML = require('shiki/languages/html.tmLanguage.json');
-const langCSS = require('shiki/languages/css.tmLanguage.json');
-const langSCSS = require('shiki/languages/scss.tmLanguage.json');
-const langSASS = require('shiki/languages/sass.tmLanguage.json');
-const langLESS = require('shiki/languages/less.tmLanguage.json');
-const langMarkdown = require('shiki/languages/markdown.tmLanguage.json');
-const langTS = require('shiki/languages/typescript.tmLanguage.json');
-const langTSX = require('shiki/languages/tsx.tmLanguage.json');
-const langShell = require('shiki/languages/shellscript.tmLanguage.json');
-const langPy = require('shiki/languages/python.tmLanguage.json');
+import langGraphQL from 'shiki/languages/graphql.tmLanguage.json' assert { type: 'json' };
+import langJS from 'shiki/languages/javascript.tmLanguage.json' assert { type: 'json' };
+import langJSX from 'shiki/languages/jsx.tmLanguage.json' assert { type: 'json' };
+import langJSON from 'shiki/languages/json.tmLanguage.json' assert { type: 'json' };
+import langXML from 'shiki/languages/xml.tmLanguage.json' assert { type: 'json' };
+import langYAML from 'shiki/languages/yaml.tmLanguage.json' assert { type: 'json' };
+import langPHP from 'shiki/languages/php.tmLanguage.json' assert { type: 'json' };
+import langHTML from 'shiki/languages/html.tmLanguage.json' assert { type: 'json' };
+import langCSS from 'shiki/languages/css.tmLanguage.json' assert { type: 'json' };
+import langSCSS from 'shiki/languages/scss.tmLanguage.json' assert { type: 'json' };
+import langSASS from 'shiki/languages/sass.tmLanguage.json' assert { type: 'json' };
+import langLESS from 'shiki/languages/less.tmLanguage.json' assert { type: 'json' };
+import langMarkdown from 'shiki/languages/markdown.tmLanguage.json' assert { type: 'json' };
+import langTS from 'shiki/languages/typescript.tmLanguage.json' assert { type: 'json' };
+import langTSX from 'shiki/languages/tsx.tmLanguage.json' assert { type: 'json' };
+import langShell from 'shiki/languages/shellscript.tmLanguage.json' assert { type: 'json' };
+import langPy from 'shiki/languages/python.tmLanguage.json' assert { type: 'json' };
const lang = [
{
diff --git a/apps/baseai.dev/src/mdx/rehype.mjs b/apps/baseai.dev/src/mdx/rehype.mjs
index 34874934..ab15be45 100644
--- a/apps/baseai.dev/src/mdx/rehype.mjs
+++ b/apps/baseai.dev/src/mdx/rehype.mjs
@@ -1,17 +1,14 @@
-import { createRequire } from 'module';
import { slugifyWithCounter } from '@sindresorhus/slugify';
import * as acorn from 'acorn';
import { toString } from 'mdast-util-to-string';
import { mdxAnnotations } from 'mdx-annotations';
import shiki from 'shiki';
import { visit } from 'unist-util-visit';
+import theme from './themes/shades-of-purple.json' assert { type: 'json' };
import lang from './languages.mjs';
-const require = createRequire(import.meta.url);
-const theme = require('./themes/shades-of-purple.json');
-
export function rehypeParseCodeBlocks() {
- return tree => {
+ return (tree) => {
visit(tree, 'element', (node, _nodeIndex, parentNode) => {
if (node.tagName === 'code' && node.properties.className) {
parentNode.properties.language =
@@ -24,10 +21,10 @@ export function rehypeParseCodeBlocks() {
let highlighter;
export function rehypeShiki() {
- return async tree => {
+ return async (tree) => {
highlighter =
highlighter ??
- (await shiki.getHighlighter({ theme: theme, langs: lang }));
+ (await shiki.getHighlighter({ theme: theme , langs: lang }));
visit(tree, 'element', node => {
if (
@@ -59,7 +56,7 @@ export function rehypeShiki() {
}
export function rehypeSlugify() {
- return tree => {
+ return (tree) => {
let slugify = slugifyWithCounter();
visit(tree, 'element', node => {
if (node.tagName === 'h2' && !node.properties.id) {
@@ -70,7 +67,7 @@ export function rehypeSlugify() {
}
export function rehypeAddMDXExports(getExports) {
- return tree => {
+ return (tree) => {
let exports = Object.entries(getExports(tree));
for (let [name, value] of exports) {
@@ -126,7 +123,7 @@ export const rehypePlugins = [
rehypeSlugify,
[
rehypeAddMDXExports,
- tree => ({
+ (tree) => ({
sections: `[${getSections(tree).join()}]`
})
]
diff --git a/apps/baseai.dev/src/mdx/themes/index.mjs b/apps/baseai.dev/src/mdx/themes/index.mjs
index 90995857..98477e32 100644
--- a/apps/baseai.dev/src/mdx/themes/index.mjs
+++ b/apps/baseai.dev/src/mdx/themes/index.mjs
@@ -1,3 +1,3 @@
-import shadesOfPurple from './shades-of-purple.json' assert { type: 'json' };
+import shadesOfPurple from './shades-of-purple.json' assert { type: 'json' }
-export default { shadesOfPurple };
+export default { shadesOfPurple }
diff --git a/apps/baseai.dev/src/mdx/themes/index.ts b/apps/baseai.dev/src/mdx/themes/index.ts
index 90995857..98477e32 100644
--- a/apps/baseai.dev/src/mdx/themes/index.ts
+++ b/apps/baseai.dev/src/mdx/themes/index.ts
@@ -1,3 +1,3 @@
-import shadesOfPurple from './shades-of-purple.json' assert { type: 'json' };
+import shadesOfPurple from './shades-of-purple.json' assert { type: 'json' }
-export default { shadesOfPurple };
+export default { shadesOfPurple }
diff --git a/apps/baseai.dev/src/styles/global.css b/apps/baseai.dev/src/styles/global.css
index 7b656b4d..a4051b7a 100644
--- a/apps/baseai.dev/src/styles/global.css
+++ b/apps/baseai.dev/src/styles/global.css
@@ -75,6 +75,9 @@
body {
@apply bg-background text-foreground;
}
+ img {
+ display: inline-block !important;
+ }
}
::selection {
@@ -116,18 +119,3 @@ html {
font-weight: normal;
font-style: normal;
}
-
-/* Crisp Chat Widget Style Overrides */
-#crisp-chatbox .cc-1d4mk.cc-8mq05 {
- background-color: black !important;
-}
-
-/* Additional selector for better specificity */
-#crisp-chatbox [data-id="chat_opened"] .cc-1d4mk {
- background-color: black !important;
-}
-
-/* Target the chat bubble background */
-.crisp-client .cc-1d4mk {
- background-color: black !important;
-}
diff --git a/apps/baseai.dev/src/styles/webgl.css b/apps/baseai.dev/src/styles/webgl.css
index edc49551..32bc5626 100644
--- a/apps/baseai.dev/src/styles/webgl.css
+++ b/apps/baseai.dev/src/styles/webgl.css
@@ -3,6 +3,9 @@
img {
@apply inline-block;
}
+ img {
+ display: inline-block !important;
+ }
}
@tailwind components;
@tailwind utilities;
diff --git a/examples/agents/readme-writer-agent/.env.baseai.example b/examples/agents/human-in-the-loop-agent/.env.baseai.example
similarity index 98%
rename from examples/agents/readme-writer-agent/.env.baseai.example
rename to examples/agents/human-in-the-loop-agent/.env.baseai.example
index b0d9e992..8c643651 100644
--- a/examples/agents/readme-writer-agent/.env.baseai.example
+++ b/examples/agents/human-in-the-loop-agent/.env.baseai.example
@@ -19,4 +19,3 @@ GROQ_API_KEY=
MISTRAL_API_KEY=
PERPLEXITY_API_KEY=
TOGETHER_API_KEY=
-XAI_API_KEY=
diff --git a/examples/agents/human-in-the-loop-agent/.gitignore b/examples/agents/human-in-the-loop-agent/.gitignore
new file mode 100644
index 00000000..9b5994f6
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/.gitignore
@@ -0,0 +1,9 @@
+# baseai
+**/.baseai/
+node_modules
+.env
+package-lock.json
+pnpm-lock.yaml
+# env file
+.env
+
diff --git a/examples/agents/human-in-the-loop-agent/README.md b/examples/agents/human-in-the-loop-agent/README.md
new file mode 100644
index 00000000..5cd7f059
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/README.md
@@ -0,0 +1,76 @@
+![Human in the loop Agent by ⌘ BaseAI][cover]
+
+![License: MIT][mit] [![Fork on ⌘ Langbase][fork]][pipe]
+
+## Build a Human In The Loop (HITL) Agent for IT Support with BaseAI framework — ⌘ Langbase
+
+The **Human In The Loop (HITL) Agent** is a CLI-based application designed to handle IT support tasks efficiently while ensuring that complex or critical issues are escalated to human agents. This agent leverages a BaseAI pipe with tool calling capabilities, enabling it to interact dynamically with users, assess problem scenarios, and involve human agents when necessary.
+
+This AI Agent is built using the BaseAI framework. It leverages an agentic pipe that integrates over 30+ LLMs (including OpenAI, Gemini, Mistral, Llama, Gemma, etc.) and can handle any data, with context sizes of up to 10M+ tokens, supported by memory. The framework is compatible with any front-end framework (such as React, Remix, Astro, Next.js), giving you, as a developer, the freedom to tailor your AI application exactly as you envision.
+
+## How to use
+
+Navigate to `examples/agents/human-in-the-loop-agent` and run the following commands:
+
+```sh
+# Navigate to baseai/examples/agents/human-in-the-loop-agent
+cd examples/agents/human-in-the-loop-agent
+
+# Install the dependencies
+npm install
+
+# Make sure to copy .env.baseai.example file and
+# create .env file and add all the relevant API keys in it
+cp .env.baseai.example .env
+
+# Run the local baseai dev server to test the examples (uses localhost:9000 port)
+npx baseai dev
+
+# Run the agent
+tsx index.ts
+```
+
+## Features
+
+- Human In The Loop Agent for IT Support — Built with [BaseAI framework and agentic Pipe ⌘ ][qs]
+- Composable Agents — build and compose agents with BaseAI
+- Deploy this pipe and sync the deployed pipe on Langbase locally using `npx baseai@latest deploy` – [Learn more about deployment][deploy]
+
+## Learn more
+
+1. Check the [Learning path to build an agentic AI pipe with ⌘ BaseAI][learn]
+2. Read the [source code on GitHub][gh] for this agent example
+3. Go through the Documentation: [Pipe Quick Start][qs]
+4. Learn more about [Memory features in ⌘ BaseAI][memory]
+5. Learn more about [Tool calls support in ⌘ BaseAI][toolcalls]
+
+
+> NOTE:
+> This is a BaseAI project, you can deploy BaseAI pipes, memory and tool calls on Langbase.
+
+---
+
+## Authors
+
+This project is created by [Langbase][lb] team members, with contributions from:
+
+- Muhammad-Ali Danish - Software Engineer, [Langbase][lb]
+**_Built by ⌘ [Langbase.com][lb] — Ship hyper-personalized AI assistants with memory!_**
+
+[lb]: https://langbase.com
+[gh]: https://github.com/LangbaseInc/baseai/tree/main/examples/agents/human-in-the-loop-agent
+[cover]:https://raw.githubusercontent.com/LangbaseInc/docs-images/main/baseai/baseai-cover.png
+[download]:https://download-directory.github.io/?url=https://github.com/LangbaseInc/baseai/tree/main/examples/agents/human-in-the-loop-agent
+[learn]:https://baseai.dev/learn
+[memory]:https://baseai.dev/docs/memory/quickstart
+[toolcalls]:https://baseai.dev/docs/tools/quickstart
+[deploy]:https://baseai.dev/docs/deployment/authentication
+[signup]: https://langbase.fyi/io
+[qs]:https://baseai.dev/docs/pipe/quickstart
+[deploy]:https://baseai.dev/docs/deployment/authentication
+[docs]:https://baseai.dev/docs
+[xaa]:https://x.com/MrAhmadAwais
+[xab]:https://x.com/AhmadBilalDev
+[local]:http://localhost:9000
+[mit]: https://img.shields.io/badge/license-MIT-blue.svg?style=for-the-badge&color=%23000000
+[fork]: https://img.shields.io/badge/FORK%20ON-%E2%8C%98%20Langbase-000000.svg?style=for-the-badge&logo=%E2%8C%98%20Langbase&logoColor=000000
diff --git a/examples/agents/readme-writer-agent/baseai/baseai.config.ts b/examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts
similarity index 60%
rename from examples/agents/readme-writer-agent/baseai/baseai.config.ts
rename to examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts
index 4e65d3f5..f0ee748f 100644
--- a/examples/agents/readme-writer-agent/baseai/baseai.config.ts
+++ b/examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts
@@ -1,18 +1,18 @@
-import type {BaseAIConfig} from 'baseai';
+import type { BaseAIConfig } from 'baseai';
export const config: BaseAIConfig = {
log: {
- isEnabled: false,
+ isEnabled: true,
logSensitiveData: false,
pipe: true,
'pipe.completion': true,
'pipe.request': true,
'pipe.response': true,
tool: true,
- memory: true,
+ memory: true
},
memory: {
- useLocalEmbeddings: false,
+ useLocalEmbeddings: false
},
- envFilePath: '.env',
+ envFilePath: '.env'
};
diff --git a/examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts b/examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts
new file mode 100644
index 00000000..3129b9e2
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts
@@ -0,0 +1,49 @@
+import {PipeI} from '@baseai/core';
+import toolHandoffToHuman from '../tools/handoff-to-human';
+
+const pipeHumanInTheLoopSupportAgent = (): PipeI => ({
+ // Replace with your API key https://langbase.com/docs/api-reference/api-keys
+ apiKey: process.env.LANGBASE_API_KEY!,
+ name: 'human-in-the-loop-support-agent',
+ description:
+ 'An IT Support Human in the Loop (HITL) agent with ability to handoff to a human support resource based on issue severity or user request',
+ status: 'public',
+ model: 'openai:gpt-4o-mini',
+ stream: true,
+ json: false,
+ store: true,
+ moderate: true,
+ top_p: 1,
+ max_tokens: 1000,
+ temperature: 0.7,
+ presence_penalty: 1,
+ frequency_penalty: 1,
+ stop: [],
+ tool_choice: 'auto',
+ parallel_tool_calls: true,
+ messages: [
+ {
+ role: 'system',
+ content: `You are an IT Support Escalation Agent. Your role is to assist users by gathering information, diagnosing common IT issues, and providing clear troubleshooting steps. When an issue requires human intervention, you will escalate it using the needs_human_approval tool.
+
+Guidelines:
+
+1. Collect Key Information:
+ Ask the user for details such as the problem description, affected systems, error messages, and steps already attempted.
+
+2. Diagnose and Suggest Solutions:
+ Offer basic troubleshooting steps in clear, easy-to-follow instructions.
+
+3. Assess Escalation Need:
+ If an issue is complex, critical, or unresolved after initial steps, call the handoff_to_human tool with information about the issue's severity, number of affected users, and urgency.
+
+4. Be Empathetic and Transparent:
+ If escalating, reassure the user and let them know a specialist will handle the issue promptly.`,
+ },
+ ],
+ variables: [],
+ memory: [],
+ tools: [toolHandoffToHuman()],
+});
+
+export default pipeHumanInTheLoopSupportAgent;
diff --git a/examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts b/examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts
new file mode 100644
index 00000000..7aa244c5
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts
@@ -0,0 +1,72 @@
+import {ToolI} from '@baseai/core';
+
+export async function handoffToHuman() {
+ // Add your tool logic here
+ // This function will be called when the tool is executed
+}
+
+const toolHandoffToHuman = (): ToolI => ({
+ run: handoffToHuman,
+ type: 'function' as const,
+ function: {
+ name: 'handoff_to_human',
+ description:
+ 'Generates a structured summary of an IT issue for human agents to quickly understand and take action.',
+ parameters: {
+ type: 'object',
+ properties: {
+ issue_title: {
+ type: 'string',
+ description: 'A brief title summarizing the core issue.',
+ },
+ affected_systems: {
+ type: 'array',
+ items: {
+ type: 'string',
+ },
+ description:
+ 'A list of systems, applications, or services impacted by the issue.',
+ },
+ error_message: {
+ type: 'string',
+ description:
+ 'The specific error message(s) reported by the user or detected during troubleshooting.',
+ },
+ number_of_users_affected: {
+ type: 'number',
+ description:
+ 'The estimated number of users impacted by this issue.',
+ },
+ steps_attempted: {
+ type: 'array',
+ items: {
+ type: 'string',
+ },
+ description:
+ 'A list of troubleshooting steps that the AI agent suggested and that the user has already tried.',
+ },
+ severity_level: {
+ type: 'string',
+ enum: ['Low', 'Medium', 'High', 'Critical'],
+ description:
+ 'The severity of the issue based on the AI agent’s assessment.',
+ },
+ additional_notes: {
+ type: 'string',
+ description:
+ 'Any other contextual information, observations, or details provided by the user that may help the human agent.',
+ },
+ },
+ required: [
+ 'issue_title',
+ 'affected_systems',
+ 'error_message',
+ 'number_of_users_affected',
+ 'steps_attempted',
+ 'severity_level',
+ ],
+ },
+ },
+});
+
+export default toolHandoffToHuman;
diff --git a/examples/agents/human-in-the-loop-agent/index.ts b/examples/agents/human-in-the-loop-agent/index.ts
new file mode 100644
index 00000000..8fe900ba
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/index.ts
@@ -0,0 +1,51 @@
+import 'dotenv/config';
+import {Message, Pipe} from '@baseai/core';
+import inquirer from 'inquirer';
+import ora from 'ora';
+import chalk from 'chalk';
+import pipeHumanInTheLoopSupportAgent from './baseai/pipes/human-in-the-loop-support-agent';
+
+const pipe = new Pipe(pipeHumanInTheLoopSupportAgent());
+
+async function main() {
+ // Messages array for keeping track of the conversation
+ const messages: Message[] = [];
+
+ while (true) {
+ const {userMsg} = await inquirer.prompt([
+ {
+ type: 'input',
+ name: 'userMsg',
+ message: chalk.blue(
+ 'Enter your query (or type "exit" to quit):',
+ ),
+ },
+ ]);
+
+ if (userMsg.toLowerCase() === 'exit') {
+ console.log(chalk.green('Goodbye!'));
+ break;
+ }
+
+ const spinner = ora('Processing your request...').start();
+ messages.push({role: 'user', content: userMsg});
+
+ try {
+ const {completion} = await pipe.run({
+ messages,
+ });
+ messages.push({
+ role: 'assistant',
+ content: completion,
+ });
+ spinner.stop();
+ console.log(chalk.cyan('Agent:'));
+ console.log(completion);
+ } catch (error) {
+ spinner.stop();
+ console.error(chalk.red('Error processing your request:'), error);
+ }
+ }
+}
+
+main();
diff --git a/examples/agents/human-in-the-loop-agent/package.json b/examples/agents/human-in-the-loop-agent/package.json
new file mode 100644
index 00000000..402d2791
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/package.json
@@ -0,0 +1,22 @@
+{
+ "name": "it-systems-triage-agent",
+ "version": "1.0.0",
+ "main": "index.js",
+ "scripts": {
+ "baseai": "baseai"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "description": "",
+ "dependencies": {
+ "@baseai/core": "^0.9.19",
+ "chalk": "^5.3.0",
+ "dotenv": "^16.4.5",
+ "inquirer": "^12.0.0",
+ "ora": "^8.1.0"
+ },
+ "devDependencies": {
+ "baseai": "^0.9.19"
+ }
+}
diff --git a/examples/agents/readme-writer-agent/.gitignore b/examples/agents/readme-writer-agent/.gitignore
deleted file mode 100644
index 49056616..00000000
--- a/examples/agents/readme-writer-agent/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-# baseai
-**/.baseai/
-# env file
-.env
-/baseai/memory/code-files/documents
diff --git a/examples/agents/readme-writer-agent/baseai/memory/code-files/index.ts b/examples/agents/readme-writer-agent/baseai/memory/code-files/index.ts
deleted file mode 100644
index 83215427..00000000
--- a/examples/agents/readme-writer-agent/baseai/memory/code-files/index.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import {MemoryI} from '@baseai/core';
-
-const memoryCodeFiles = (): MemoryI => ({
- name: 'code-files',
- description: 'Memory that contains project files',
- git: {
- enabled: false,
- include: ['documents/**/*'],
- gitignore: true,
- deployedAt: '',
- embeddedAt: '',
- },
-});
-
-export default memoryCodeFiles;
diff --git a/examples/agents/readme-writer-agent/baseai/pipes/readme-writer.ts b/examples/agents/readme-writer-agent/baseai/pipes/readme-writer.ts
deleted file mode 100644
index d71520f3..00000000
--- a/examples/agents/readme-writer-agent/baseai/pipes/readme-writer.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-import {PipeI} from '@baseai/core';
-import memoryCodeFiles from '../memory/code-files';
-
-const pipeReadmeWriter = (): PipeI => ({
- // Replace with your API key https://langbase.com/docs/api-reference/api-keys
- apiKey: process.env.LANGBASE_API_KEY!,
- name: `readme-writer`,
- description: ``,
- status: `public`,
- model: `openai:gpt-4o-mini`,
- stream: true,
- json: false,
- store: true,
- moderate: true,
- top_p: 1,
- max_tokens: 1000,
- temperature: 0.7,
- presence_penalty: 0,
- frequency_penalty: 0,
- stop: [],
- tool_choice: 'auto',
- parallel_tool_calls: true,
- messages: [
- {
- role: 'system',
- content:
- 'Write a {{level}} README file for an open-source project that effectively communicates its purpose, usage, installation instructions, and contribution guidelines.\n\nThe README should include the following sections:\n\n- **Project Title**: A clear and concise title of the project.\n- **Description**: A brief overview of what the project does and its significance.\n- **Installation Instructions**: Step-by-step guidance on how to install the project.\n- **Usage**: Examples demonstrating how to use the project.\n- **Contributing**: Guidelines for contributing to the project, including how to report issues and submit pull requests.\n- **License**: Information about the project\'s license.\n\n# Output Format\n\nThe output should be structured as a Markdown document with the appropriate headings for each section. Aim for a length of approximately 500-800 words.\n\n# Examples\n\n**Example 1:**\n\n**Input:** Project Title: "WeatherApp"\n**Output:**\n\n# WeatherApp\n\n## Description\nWeatherApp is a simple application that provides real-time weather updates for any location. It uses data from various weather APIs to fetch and display the latest weather information.\n\n## Installation Instructions\n1. Clone the repository: \\`git clone https://github.com/user/weatherapp.git\\`\n2. Navigate to the project directory: \\`cd weatherapp\\`\n3. Install dependencies: \\`npm install\\`\n\n## Usage\nTo run the application, use the command: \\`npm start\\`. Open your browser and go to \\`http://localhost:3000\\`.\n\n## Contributing\nWe welcome contributions! Please fork the repository and submit a pull request for any changes.\n\n## License\nThis project is licensed under the MIT License.\n\n**Example 2:**\n\n**Input:** Project Title: "TaskTracker"\n**Output:**\n\n# TaskTracker\n\n## Description\nTaskTracker is a web application designed to help users manage their tasks efficiently. 
It offers features like task creation, categorization, and progress tracking.\n\n## Installation Instructions\n1. Clone the repository: \\`git clone https://github.com/user/tasktracker.git\\`\n2. Install the required packages: \\`pip install -r requirements.txt\\`\n3. Run the application: \\`python app.py\\`\n\n## Usage\nOnce the application is running, navigate to \\`http://localhost:5000\\` to access the TaskTracker interface.\n\n## Contributing\nTo contribute, please read our contribution guidelines in the \\`CONTRIBUTING.md\\` file.\n\n## License\nThis project is licensed under the GPL-3.0 License.\n\n(Examples should include detailed project descriptions, installation steps, and usage instructions as they would appear for real open-source projects, using appropriate placeholders for project-specific details.)',
- },
- {name: 'json', role: 'system', content: ''},
- {name: 'safety', role: 'system', content: ''},
- {
- name: 'opening',
- role: 'system',
- content: 'Welcome to Langbase. Prompt away!',
- },
- {
- name: 'rag',
- role: 'system',
- content: `Below is some CONTEXT for you to answer the questions. ONLY generate readme from the CONTEXT. CONTEXT consists of multiple information chunks. `,
- },
- ],
- variables: [{name: 'level', value: ''}],
- tools: [],
- memory: [memoryCodeFiles()],
-});
-
-export default pipeReadmeWriter;
diff --git a/examples/agents/readme-writer-agent/index.ts b/examples/agents/readme-writer-agent/index.ts
deleted file mode 100644
index 02636e99..00000000
--- a/examples/agents/readme-writer-agent/index.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env node
-import 'dotenv/config';
-import {init} from './utils/init';
-import {questions} from './utils/questions';
-import {startBaseAIDevServer} from './utils/start-baseai-server';
-import {copyProjectFiles} from './utils/copy-project-files';
-import {generateReadme} from './utils/generate-readme';
-import {exitServer} from './utils/exit-server';
-import {exit} from './utils/exit';
-import {generateEmbeddings} from './utils/generate-embeddings';
-import {dirName} from './utils/get-dirname';
-import {askOpenAIKey} from './utils/ask-openai-key';
-
-(async function () {
- // Show the welcome message
- init({
- title: `readme-writer-agent`,
- tagLine: `by Saad Irfan`,
- description: `An AI agent to help you write README files for open-source projects.`,
- version: `0.1.0`,
- clear: true,
- });
-
- // Ask for the OpenAI key if it doesn't exist
- await askOpenAIKey({dirName});
-
- // Ask for the readme level
- const {level} = await questions();
-
- // Start the baseAI server
- await startBaseAIDevServer();
-
- // Copy project files in the memory
- await copyProjectFiles({dirName});
-
- // Generate embeddings
- await generateEmbeddings({dirName});
-
- // Generate the readme
- const {path} = await generateReadme({level});
-
- // Exit the baseAI server
- await exitServer();
-
- // Exit the process
- exit({path});
-})();
diff --git a/examples/agents/readme-writer-agent/package.json b/examples/agents/readme-writer-agent/package.json
deleted file mode 100644
index e24cad5b..00000000
--- a/examples/agents/readme-writer-agent/package.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
- "name": "readme-writer-agent",
- "version": "0.1.0",
- "description": "An AI agent to help you write README files for open-source projects.",
- "type": "module",
- "main": "./dist/index.js",
- "module": "./dist/index.mjs",
- "types": "./dist/index.d.ts",
- "bin": {
- "write-readme": "dist/index.js"
- },
- "files": [
- "dist/**",
- "baseai"
- ],
- "scripts": {
- "build": "tsup",
- "dev": "tsup --watch",
- "write-readme": "NODE_NO_WARNINGS=1 npx tsx index.ts",
- "baseai": "baseai"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/LangbaseInc/baseai.git"
- },
- "keywords": [],
- "author": {
- "name": "Saad Irfan",
- "url": "https://github.com/msaaddev"
- },
- "license": "MIT",
- "dependencies": {
- "@baseai/core": "^0.9.20",
- "@clack/prompts": "^0.7.0",
- "chalk": "5.6.0",
- "clear-any-console": "^1.16.2",
- "figures": "^6.1.0",
- "picocolors": "^1.1.0",
- "tsup": "^8.3.0"
- },
- "devDependencies": {
- "baseai": "^0.9.20"
- }
-}
diff --git a/examples/agents/readme-writer-agent/readme.md b/examples/agents/readme-writer-agent/readme.md
deleted file mode 100644
index 71c26861..00000000
--- a/examples/agents/readme-writer-agent/readme.md
+++ /dev/null
@@ -1,59 +0,0 @@
- ![IT Systems Triage Agent by ⌘ BaseAI][cover]
-
- ![License: MIT][mit] [![Fork on ⌘ Langbase][fork]][pipe]
-
-## Description
-The `readme-writer-agent` is an AI-powered tool designed to assist developers in creating comprehensive README files for their open-source projects. This tool simplifies the process of structuring and writing documentation, ensuring that important information is communicated effectively.
-
-This AI Agent is built using the BaseAI framework. It leverages an agentic pipe that integrates over 30+ LLMs (including OpenAI, Gemini, Mistral, Llama, Gemma, etc.) and can handle any data, with context sizes of up to 10M+ tokens, supported by memory. The framework is compatible with any front-end framework (such as React, Remix, Astro, Next.js), giving you, as a developer, the freedom to tailor your AI application exactly as you envision.
-
-## How to use
-
-Navigate to `examples/agents/readme-writer-agent` and run the following commands:
-
-```sh
-# Navigate to baseai/examples/agents/readme-writer-agent
-cd examples/agents/readme-writer-agent
-
-# Install the dependencies
-npm install
-
-# Run the agent
-npm run write-readme
-```
-## Learn more
-
-1. Check the [Learning path to build an agentic AI pipe with ⌘ BaseAI][learn]
-2. Read the [source code on GitHub][gh] for this agent example
-3. Go through Documentaion: [Pipe Quick Start][qs]
-4. Learn more about [Memory features in ⌘ BaseAI][memory]
-5. Learn more about [Tool calls support in ⌘ BaseAI][toolcalls]
-
-
-> NOTE:
-> This is a BaseAI project, you can deploy BaseAI pipes, memory and tool calls on Langbase.
-
----
-
-## Authors
-
-This project is created by [Langbase][lb] team members, with contributions from:
-
-- [Saad Irfan](https://x.com/mrsaadirfan) - Founding Engineer, [Langbase][lb]
-
-
-
-
-[lb]: https://langbase.com
-[pipe]: https://langbase.com/saadirfan/readme-writer
-[gh]: https://github.com/LangbaseInc/baseai/tree/main/examples/agents/readme-writer-agent
-[cover]:https://raw.githubusercontent.com/LangbaseInc/docs-images/main/baseai/baseai-cover.png
-[learn]:https://baseai.dev/learn
-[memory]:https://baseai.dev/docs/memory/quickstart
-[toolcalls]:https://baseai.dev/docs/tools/quickstart
-[deploy]:https://baseai.dev/docs/deployment/authentication
-[signup]: https://langbase.fyi/io
-[qs]:https://baseai.dev/docs/pipe/quickstart
-[docs]:https://baseai.dev/docs
-[mit]: https://img.shields.io/badge/license-MIT-blue.svg?style=for-the-badge&color=%23000000
-[fork]: https://img.shields.io/badge/FORK%20ON-%E2%8C%98%20Langbase-000000.svg?style=for-the-badge&logo=%E2%8C%98%20Langbase&logoColor=000000
diff --git a/examples/agents/readme-writer-agent/tsup.config.ts b/examples/agents/readme-writer-agent/tsup.config.ts
deleted file mode 100644
index 48baa386..00000000
--- a/examples/agents/readme-writer-agent/tsup.config.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import {defineConfig} from 'tsup';
-
-export default defineConfig({
- clean: true,
- dts: true,
- entry: ['index.ts'],
- format: ['esm'],
- sourcemap: true,
- // target: 'esnext',
- target: 'node16',
- outDir: 'dist',
- splitting: false,
- bundle: true,
- minify: true,
- external: ['react', 'svelte', 'vue'],
-});
diff --git a/examples/agents/readme-writer-agent/utils/ask-openai-key.ts b/examples/agents/readme-writer-agent/utils/ask-openai-key.ts
deleted file mode 100644
index 6acfe9cd..00000000
--- a/examples/agents/readme-writer-agent/utils/ask-openai-key.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-import * as p from '@clack/prompts';
-import path from 'path';
-import fs from 'fs';
-
-export async function askOpenAIKey({dirName}: {dirName: string}) {
- const envPath = path.join(dirName, '.env');
- const hasEnv = fs.existsSync(envPath);
-
- if (hasEnv) {
- const envContent = fs.readFileSync(envPath, 'utf-8');
- const hasOpenAIKey = envContent
- .replace('OPENAI_API_KEY=', '')
- .trim()
- .includes('sk-');
- if (hasOpenAIKey) return;
- }
-
- const openai = await p.group(
- {
- key: () =>
- p.password({
- message: 'Enter your OpenAI API key',
- }),
- },
- {
- onCancel: () => {
- p.cancel('Operation cancelled.');
- process.exit(0);
- },
- },
- );
-
- fs.writeFileSync(envPath, `OPENAI_API_KEY="${openai.key.trim()}"\n`);
- p.log.success('OpenAI API key saved successfully.');
- p.log.info('Now you can run the agent.');
- process.exit(0);
-}
diff --git a/examples/agents/readme-writer-agent/utils/copy-project-files.ts b/examples/agents/readme-writer-agent/utils/copy-project-files.ts
deleted file mode 100644
index 5253ddc0..00000000
--- a/examples/agents/readme-writer-agent/utils/copy-project-files.ts
+++ /dev/null
@@ -1,27 +0,0 @@
-import * as p from '@clack/prompts';
-import path from 'path';
-import {execAsync} from './exec-sync';
-import {handleError} from './handle-error';
-
-export async function copyProjectFiles({dirName}: {dirName: string}) {
- const spinner = p.spinner();
- spinner.start('Copying project files...');
-
- const source = process.cwd();
- const destination = path.join(
- dirName,
- 'baseai',
- 'memory',
- 'code-files',
- 'documents',
- );
-
- try {
- await execAsync(`rm -rf ${destination}`);
- await execAsync(`mkdir -p ${destination}`);
- await execAsync(`cp -rp ${source}/* ${destination}`);
- spinner.stop('Project files copied successfully.');
- } catch (error) {
- handleError({spinner, error});
- }
-}
diff --git a/examples/agents/readme-writer-agent/utils/exec-sync.ts b/examples/agents/readme-writer-agent/utils/exec-sync.ts
deleted file mode 100644
index 5180e65a..00000000
--- a/examples/agents/readme-writer-agent/utils/exec-sync.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import {promisify} from 'util';
-import {exec} from 'child_process';
-export const execAsync = promisify(exec);
diff --git a/examples/agents/readme-writer-agent/utils/exit-server.ts b/examples/agents/readme-writer-agent/utils/exit-server.ts
deleted file mode 100644
index 44aafdd9..00000000
--- a/examples/agents/readme-writer-agent/utils/exit-server.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import * as p from '@clack/prompts';
-import {spawn} from 'child_process';
-
-export async function exitServer() {
- const spinner = p.spinner();
- spinner.start('Stopping AI server...');
- // Spawn the server process detached from the parent
- const serverProcess = spawn('npx', ['kill-port', '9000'], {
- // Detach the process so it runs independently
- detached: true,
- // Pipe stdout/stderr to files or ignore them
- stdio: 'ignore',
- shell: process.platform === 'win32',
- });
-
- // Unref the process so it won't keep the parent alive
- serverProcess.unref();
- spinner.stop('AI server stopped.');
-}
diff --git a/examples/agents/readme-writer-agent/utils/exit.ts b/examples/agents/readme-writer-agent/utils/exit.ts
deleted file mode 100644
index 8efee409..00000000
--- a/examples/agents/readme-writer-agent/utils/exit.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import pc from 'picocolors';
-import figures from 'figures';
-import * as p from '@clack/prompts';
-import {heading} from './heading';
-
-export async function exit({path}: {path: string}) {
- p.outro(
- heading({
- text: 'readme.md',
- sub: `instructions written in \n ${pc.dim(figures.pointer)} ${pc.italic(pc.dim(path))}`,
- green: true,
- }),
- );
- process.exit(0);
-}
diff --git a/examples/agents/readme-writer-agent/utils/generate-embeddings.ts b/examples/agents/readme-writer-agent/utils/generate-embeddings.ts
deleted file mode 100644
index 8fd7c79f..00000000
--- a/examples/agents/readme-writer-agent/utils/generate-embeddings.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-import * as p from '@clack/prompts';
-import {execAsync} from './exec-sync';
-import {handleError} from './handle-error';
-
-export async function generateEmbeddings({dirName}: {dirName: string}) {
- const spinner = p.spinner();
- spinner.start('Understanding your project codebase...');
-
- try {
- await execAsync(`npx baseai@latest embed -m code-files`, {
- cwd: dirName,
- });
-
- spinner.stop('Developed understanding of your project codebase.');
- } catch (error) {
- handleError({spinner, error});
- }
-}
diff --git a/examples/agents/readme-writer-agent/utils/generate-readme.ts b/examples/agents/readme-writer-agent/utils/generate-readme.ts
deleted file mode 100644
index 7df57105..00000000
--- a/examples/agents/readme-writer-agent/utils/generate-readme.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-import {getRunner, getTextContent, Pipe} from '@baseai/core';
-import {handleError} from './handle-error';
-import pipeReadmeWriter from '../baseai/pipes/readme-writer';
-import path from 'path';
-import fs from 'fs';
-import * as p from '@clack/prompts';
-import {execAsync} from './exec-sync';
-
-export async function generateReadme({level}: {level: string}) {
- const spinner = p.spinner();
- spinner.start('AI is thinking...');
-
- try {
- const pipe = new Pipe(pipeReadmeWriter());
- let readmeContent = '';
-
- const {stream} = await pipe.run({
- messages: [
- {
- role: 'user',
- content:
- 'Generate a carefully tailored readme that contains all the necessary information to get started with the project.',
- },
- ],
- variables: [{name: 'level', value: level}],
- stream: true,
- });
-
- // Convert the stream to a stream runner.
- const runner = getRunner(stream);
- spinner.stop(`Let's write the readme docs...`);
-
- const readmePath = path.join(process.cwd(), 'readme.md');
-
- const hasReadme = fs.existsSync(readmePath);
- if (hasReadme) {
- await execAsync(`rm ${readmePath}`);
- }
-
- spinner.start('Writing readme docs in project readme.md file...');
-
- for await (const chunk of runner) {
- const textPart = getTextContent(chunk);
- readmeContent += textPart;
- fs.writeFileSync(readmePath, readmeContent);
- }
-
- spinner.stop('Readme docs written successfully.');
- return {content: readmeContent, path: readmePath};
- } catch (error) {
- handleError({spinner, error});
- process.exit(1);
- }
-}
diff --git a/examples/agents/readme-writer-agent/utils/get-dirname.ts b/examples/agents/readme-writer-agent/utils/get-dirname.ts
deleted file mode 100644
index cb5491f7..00000000
--- a/examples/agents/readme-writer-agent/utils/get-dirname.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import {dirname} from 'path';
-import {fileURLToPath} from 'url';
-import path from 'path';
-
-const __filename = fileURLToPath(import.meta.url);
-let dirName = dirname(__filename);
-dirName = path.join(dirName, '..');
-
-export {dirName};
diff --git a/examples/agents/readme-writer-agent/utils/handle-error.ts b/examples/agents/readme-writer-agent/utils/handle-error.ts
deleted file mode 100644
index f92124c4..00000000
--- a/examples/agents/readme-writer-agent/utils/handle-error.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import * as p from '@clack/prompts';
-type Spinner = ReturnType;
-
-export async function handleError({
- spinner,
- error,
-}: {
- spinner: Spinner;
- error: Error;
-}) {
- spinner.stop();
- p.log.error(`ERROR: ${(error as Error).message}`);
- process.exit(1);
-}
diff --git a/examples/agents/readme-writer-agent/utils/heading.ts b/examples/agents/readme-writer-agent/utils/heading.ts
deleted file mode 100644
index f5afebf4..00000000
--- a/examples/agents/readme-writer-agent/utils/heading.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-import color from 'picocolors';
-
-export function heading({
- text,
- sub,
- dim,
- green,
-}: {
- text: string;
- sub?: string;
- dim?: boolean;
- green?: boolean;
-}) {
- if (green) {
- return `${color.bgGreen(color.black(` ${text} `))} ${sub && sub}`;
- }
- if (dim) {
- return `${color.bgBlack(color.white(` ${text} `))} ${sub && sub}`;
- }
- return `${color.bold(color.bgCyan(color.black(` ${text} `)))} ${sub && sub}`;
-}
diff --git a/examples/agents/readme-writer-agent/utils/init.ts b/examples/agents/readme-writer-agent/utils/init.ts
deleted file mode 100644
index 513c7281..00000000
--- a/examples/agents/readme-writer-agent/utils/init.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import chalk from 'chalk';
-import clearConsole from 'clear-any-console';
-
-export function init({clear, title, version, tagLine, description}) {
- clear && clearConsole();
- const bg = chalk.hex('#6CC644').inverse.bold;
- const clr = chalk.hex(`#000000`).bold;
-
- console.log();
- console.log(
- `${clr(`${bg(` ${title} `)}`)} v${version} ${chalk.dim(tagLine)}\n${chalk.dim(
- description,
- )}`,
- );
- console.log();
-}
diff --git a/examples/agents/readme-writer-agent/utils/questions.ts b/examples/agents/readme-writer-agent/utils/questions.ts
deleted file mode 100644
index 93cdc9b0..00000000
--- a/examples/agents/readme-writer-agent/utils/questions.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import * as p from '@clack/prompts';
-
-export async function questions() {
- const readme = await p.group(
- {
- level: () =>
- p.select({
- message:
- 'Choose the level of detail you want in the README.',
- options: [
- {label: 'Simple', value: 'simple' as unknown as any},
- {
- label: 'Detailed',
- value: 'detailed' as unknown as any,
- },
- ],
- }),
- },
- {
- onCancel: () => {
- p.cancel('Operation cancelled.');
- process.exit(0);
- },
- },
- );
-
- return {level: readme.level};
-}
diff --git a/examples/agents/readme-writer-agent/utils/start-baseai-server.ts b/examples/agents/readme-writer-agent/utils/start-baseai-server.ts
deleted file mode 100644
index f905bcb4..00000000
--- a/examples/agents/readme-writer-agent/utils/start-baseai-server.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-import {exec, spawn} from 'child_process';
-import * as p from '@clack/prompts';
-
-export async function startBaseAIDevServer() {
- const spinner = p.spinner();
- spinner.start('Starting AI server...');
- // Spawn the server process detached from the parent
- const serverProcess = spawn('npx', ['baseai', 'dev'], {
- // Detach the process so it runs independently
- detached: true,
- // Pipe stdout/stderr to files or ignore them
- stdio: 'ignore',
- shell: process.platform === 'win32',
- });
-
- // Unref the process so it won't keep the parent alive
- serverProcess.unref();
-
- // Wait a bit for the server to start
- return new Promise(resolve => {
- setTimeout(() => {
- spinner.stop('AI server started.');
- resolve(true);
- }, 2000);
- });
-}
diff --git a/examples/astro/baseai/memory/chat-with-docs/index.ts b/examples/astro/baseai/memory/chat-with-docs/index.ts
index 651e715f..3c9c53b3 100644
--- a/examples/astro/baseai/memory/chat-with-docs/index.ts
+++ b/examples/astro/baseai/memory/chat-with-docs/index.ts
@@ -1,14 +1,13 @@
import type {MemoryI} from '@baseai/core';
+import path from 'path';
const buidMemory = (): MemoryI => ({
name: 'chat-with-docs',
description: 'Chat with given docs',
- git: {
- enabled: false,
- include: ['documents/**/*'],
- gitignore: true,
- deployedAt: '',
- embeddedAt: '',
+ config: {
+ useGitRepo: false,
+ dirToTrack: path.posix.join(''),
+ extToTrack: ['*'],
},
});
diff --git a/examples/astro/package.json b/examples/astro/package.json
index 95c957d2..be22ef95 100644
--- a/examples/astro/package.json
+++ b/examples/astro/package.json
@@ -17,7 +17,7 @@
"@astrojs/react": "^3.6.2",
"@astrojs/tailwind": "^5.1.1",
"@astrojs/vercel": "^7.8.1",
- "@baseai/core": "^0.9.43",
+ "@baseai/core": "^0.9.19",
"@radix-ui/react-slot": "^1.1.0",
"@types/react": "^18.3.9",
"@types/react-dom": "^18.3.0",
@@ -33,6 +33,6 @@
"typescript": "^5.6.2"
},
"devDependencies": {
- "baseai": "^0.9.44"
+ "baseai": "^0.9.19"
}
}
diff --git a/examples/nextjs/app/api/langbase/pipes/run-stream/route.ts b/examples/nextjs/app/api/langbase/pipes/run-stream/route.ts
index a3f83d45..74390289 100644
--- a/examples/nextjs/app/api/langbase/pipes/run-stream/route.ts
+++ b/examples/nextjs/app/api/langbase/pipes/run-stream/route.ts
@@ -9,27 +9,14 @@ export async function POST(req: NextRequest) {
const pipe = new Pipe(pipeSummary());
// 2. Run the Pipe.
- try {
- const {stream, threadId} = await pipe.run(runOptions);
- // 3. Return the ReadableStream directly with the threadId in the headers
- // to be used on the client side to mainain a single chat thread.
- return new Response(stream, {
- status: 200,
- headers: {
- 'lb-thread-id': threadId ?? '',
- },
- });
- } catch (error: any) {
- return new Response(
- JSON.stringify({
- error,
- }),
- {
- status: error.status || 500,
- headers: {
- 'Content-Type': 'application/json',
- },
- },
- );
- }
+ const {stream, threadId} = await pipe.run(runOptions);
+
+ // 3. Return the ReadableStream directly with the threadId in the headers
+ // to be used on the client side to mainain a single chat thread.
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ 'lb-thread-id': threadId ?? '',
+ },
+ });
}
diff --git a/examples/nextjs/app/api/langbase/pipes/run-tool-stream/route.ts b/examples/nextjs/app/api/langbase/pipes/run-tool-stream/route.ts
deleted file mode 100644
index 16d72972..00000000
--- a/examples/nextjs/app/api/langbase/pipes/run-tool-stream/route.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import pipeWithToolsStream from '@/baseai/pipes/pipe-with-tool-stream';
-import {Pipe, RunResponseStream} from '@baseai/core';
-import {NextRequest} from 'next/server';
-
-export async function POST(req: NextRequest) {
- const runOptions = await req.json();
-
- // 1. Initiate the Pipe.
- const pipe = new Pipe(pipeWithToolsStream());
-
- // 2. Run the pipe with user messages and other run options.
- let {stream, threadId} = (await pipe.run({
- ...runOptions,
- stream: true,
- })) as unknown as RunResponseStream;
-
- // 3. Stream the response.
- return new Response(stream, {
- status: 200,
- headers: {
- 'lb-thread-id': threadId ?? '',
- },
- });
-}
diff --git a/examples/nextjs/app/api/langbase/pipes/run/route.ts b/examples/nextjs/app/api/langbase/pipes/run/route.ts
index 342d100a..3bac1b6e 100644
--- a/examples/nextjs/app/api/langbase/pipes/run/route.ts
+++ b/examples/nextjs/app/api/langbase/pipes/run/route.ts
@@ -9,24 +9,8 @@ export async function POST(req: NextRequest) {
const pipe = new Pipe(pipeSummary());
// 2. Run the pipe
- try {
- const result = await pipe.run(runOptions);
+ const result = await pipe.run(runOptions);
- // 3. Return the response stringified.
- return new Response(JSON.stringify(result));
- } catch (error: any) {
- // 4. Return the error response
-
- return new Response(
- JSON.stringify({
- error,
- }),
- {
- status: error.status || 500,
- headers: {
- 'Content-Type': 'application/json',
- },
- },
- );
- }
+ // 3. Return the response stringified.
+ return new Response(JSON.stringify(result));
}
diff --git a/examples/nextjs/app/demo/tool-calling-stream/page.tsx b/examples/nextjs/app/demo/tool-calling-stream/page.tsx
deleted file mode 100644
index 6194ca9a..00000000
--- a/examples/nextjs/app/demo/tool-calling-stream/page.tsx
+++ /dev/null
@@ -1,18 +0,0 @@
-import PipeRunToolStreamExample from '@/components/pipe-run-with-tool-stream';
-import GoHome from '@/components/ui/go-home';
-
-export default function Page() {
- return (
-
-
-
-
- AI Agent Pipes: Tool Calling
-
-
- Run a pipe with tool calling.
-
-
-
- );
-}
diff --git a/examples/nextjs/app/page.tsx b/examples/nextjs/app/page.tsx
index 8e52d8c7..521cbe12 100644
--- a/examples/nextjs/app/page.tsx
+++ b/examples/nextjs/app/page.tsx
@@ -2,17 +2,16 @@ import Link from 'next/link';
export default function Page() {
const examples = [
- { title: 'Pipe Run', href: '/demo/pipe-run' },
- { title: 'Pipe Run Stream', href: '/demo/pipe-run-stream' },
- { title: 'Chat Simple', href: '/demo/chat-simple' },
- { title: 'Chat Advanced', href: '/demo/chat-advanced' },
- { title: 'Tool Calling', href: '/demo/tool-calling' },
- { title: 'Tool Calling Stream', href: '/demo/tool-calling-stream' },
+ {title: 'Pipe Run', href: '/demo/pipe-run'},
+ {title: 'Pipe Run Stream', href: '/demo/pipe-run-stream'},
+ {title: 'Chat Simple', href: '/demo/chat-simple'},
+ {title: 'Chat Advanced', href: '/demo/chat-advanced'},
+ {title: 'Tool Calling', href: '/demo/tool-calling'},
{
title: 'Tool Calling: Pipes as Tools',
href: '/demo/pipe-run-pipes-as-tools',
},
- { title: 'Memory', href: '/demo/memory' },
+ {title: 'Memory', href: '/demo/memory'},
];
return (
diff --git a/examples/nextjs/baseai/memory/chat-with-docs/index.ts b/examples/nextjs/baseai/memory/chat-with-docs/index.ts
index a2140495..58f08569 100644
--- a/examples/nextjs/baseai/memory/chat-with-docs/index.ts
+++ b/examples/nextjs/baseai/memory/chat-with-docs/index.ts
@@ -1,14 +1,13 @@
import {MemoryI} from '@baseai/core';
+import path from 'path';
const buidMemory = (): MemoryI => ({
name: 'chat-with-docs',
description: 'Chat with given docs',
- git: {
- enabled: false,
- include: ['documents/**/*'],
- gitignore: true,
- deployedAt: '',
- embeddedAt: '',
+ config: {
+ useGitRepo: false,
+ dirToTrack: path.posix.join(''),
+ extToTrack: ['*'],
},
});
diff --git a/examples/nextjs/baseai/pipes/pipe-with-tool-stream.ts b/examples/nextjs/baseai/pipes/pipe-with-tool-stream.ts
deleted file mode 100644
index 2d082f64..00000000
--- a/examples/nextjs/baseai/pipes/pipe-with-tool-stream.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import {PipeI} from '@baseai/core';
-import toolCalculator from '../tools/calculator';
-import toolGetWeather from '../tools/weather';
-
-const pipeWithToolsStream = (): PipeI => ({
- apiKey: process.env.LANGBASE_API_KEY!,
- name: 'pipe-with-tool',
- description: 'An AI agent pipe that can call tools',
- status: 'public',
- model: 'openai:gpt-4o-mini',
- stream: true,
- json: false,
- store: true,
- moderate: true,
- top_p: 1,
- max_tokens: 1000,
- temperature: 0.7,
- presence_penalty: 1,
- frequency_penalty: 1,
- stop: [],
- tool_choice: 'auto',
- parallel_tool_calls: true,
- messages: [{role: 'system', content: `You are a helpful AI assistant.`}],
- variables: [],
- memory: [],
- tools: [toolGetWeather(), toolCalculator()],
-});
-export default pipeWithToolsStream;
diff --git a/examples/nextjs/baseai/pipes/pipe-with-tool.ts b/examples/nextjs/baseai/pipes/pipe-with-tool.ts
index 6abb7c69..b208ac3c 100644
--- a/examples/nextjs/baseai/pipes/pipe-with-tool.ts
+++ b/examples/nextjs/baseai/pipes/pipe-with-tool.ts
@@ -8,7 +8,7 @@ const pipeWithTools = (): PipeI => ({
description: 'An AI agent pipe that can call tools',
status: 'public',
model: 'openai:gpt-4o-mini',
- stream: false,
+ stream: true,
json: false,
store: true,
moderate: true,
diff --git a/examples/nextjs/components/pipe-run-with-tool-stream.tsx b/examples/nextjs/components/pipe-run-with-tool-stream.tsx
deleted file mode 100644
index 171a0377..00000000
--- a/examples/nextjs/components/pipe-run-with-tool-stream.tsx
+++ /dev/null
@@ -1,75 +0,0 @@
-'use client';
-
-import { Button } from '@/components/ui/button';
-import { Input } from '@/components/ui/input';
-import { getRunner, getTextContent } from '@baseai/core';
-import { useState } from 'react';
-
-export default function PipeRunToolStreamExample() {
- const [prompt, setPrompt] = useState(
- 'What is the weather in SF. Square root of 9 and then add 7?',
- );
- const [completion, setCompletion] = useState('');
- const [loading, setLoading] = useState(false);
-
- const handleSubmit = async (e: any) => {
- e.preventDefault();
- if (!prompt.trim()) return;
-
- setLoading(true);
- try {
- const response = await fetch('/api/langbase/pipes/run-tool-stream', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
- messages: [{ role: 'user', content: prompt }],
- }),
- });
-
- if (!response.ok) {
- throw new Error('Network response was not ok');
- }
-
- const runner = getRunner(response.body as ReadableStream);
-
- let localCompletion = '';
- for await (const chunk of runner) {
- const textPart = getTextContent(chunk);
- localCompletion += textPart;
- setCompletion(localCompletion);
- }
- } catch (error) {
- console.error('Error:', error);
- setCompletion('An error occurred while generating the completion.');
- } finally {
- setLoading(false);
- }
- };
-
- return (
-
-
-
- {!loading && completion && (
-
- AI: {completion}
-
- )}
-
- );
-}
diff --git a/examples/nextjs/components/pipe-run.tsx b/examples/nextjs/components/pipe-run.tsx
index 16b7f3c3..239a92db 100644
--- a/examples/nextjs/components/pipe-run.tsx
+++ b/examples/nextjs/components/pipe-run.tsx
@@ -25,19 +25,15 @@ export default function PipeRunExample() {
});
if (!response.ok) {
- const res = await response.json();
- throw new Error(res.error.error.message);
+ throw new Error('Network response was not ok');
}
// Parse the JSON response.
const data = await response.json();
setCompletion(data.completion);
- } catch (error: any) {
- if (error.message) setCompletion(error.message);
- else
- setCompletion(
- 'An error occurred while generating the completion.',
- );
+ } catch (error) {
+ console.error('Error:', error);
+ setCompletion('An error occurred while generating the completion.');
} finally {
setLoading(false);
}
diff --git a/examples/nextjs/package.json b/examples/nextjs/package.json
index ca90550b..7c609b74 100644
--- a/examples/nextjs/package.json
+++ b/examples/nextjs/package.json
@@ -11,14 +11,14 @@
"baseai": "baseai"
},
"dependencies": {
- "@baseai/core": "^0.9.43",
+ "@baseai/core": "^0.9.19",
"@radix-ui/react-slot": "^1.1.0",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"lucide-react": "^0.416.0",
"mathjs": "^13.1.1",
"mxcn": "^2.0.0",
- "next": "14.2.35",
+ "next": "14.2.5",
"openai": "^4.53.0",
"react": "^18",
"react-dom": "^18",
@@ -29,7 +29,7 @@
"@types/node": "^20",
"@types/react": "^18",
"@types/react-dom": "^18",
- "baseai": "^0.9.44",
+ "baseai": "^0.9.19",
"eslint": "^8",
"eslint-config-next": "14.2.5",
"mini-css-extract-plugin": "^2.9.0",
diff --git a/examples/nodejs/baseai/memory/ai-agent-memory/index.ts b/examples/nodejs/baseai/memory/ai-agent-memory/index.ts
deleted file mode 100644
index 312ef85e..00000000
--- a/examples/nodejs/baseai/memory/ai-agent-memory/index.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import {MemoryI} from '@baseai/core';
-
-const memoryAiAgentMemory = (): MemoryI => ({
- name: 'ai-agent-memory',
- description: 'My list of docs as memory for an AI agent pipe',
- git: {
- enabled: true,
- include: ['**/*'],
- gitignore: true,
- deployedAt: '',
- embeddedAt: '',
- },
- documents: {
- meta: doc => {
- // generate a URL for each document
- const url = `https://example.com/${doc.path}`;
- return {
- url,
- name: doc.name,
- };
- },
- },
-});
-
-export default memoryAiAgentMemory;
diff --git a/examples/nodejs/baseai/memory/chat-with-docs/index.ts b/examples/nodejs/baseai/memory/chat-with-docs/index.ts
index 8f013d10..78711b34 100644
--- a/examples/nodejs/baseai/memory/chat-with-docs/index.ts
+++ b/examples/nodejs/baseai/memory/chat-with-docs/index.ts
@@ -1,14 +1,13 @@
import {MemoryI} from '@baseai/core';
+import path from 'path';
const buildMemory = (): MemoryI => ({
name: 'chat-with-docs',
description: 'Chat with docs',
- git: {
- enabled: false,
- include: ['documents/**/*'],
- gitignore: true,
- deployedAt: '',
- embeddedAt: '',
+ config: {
+ useGitRepo: false,
+ dirToTrack: path.posix.join(''),
+ extToTrack: ['*'],
},
});
diff --git a/examples/nodejs/baseai/memory/chat-with-repo/index.ts b/examples/nodejs/baseai/memory/chat-with-repo/index.ts
index 60baccc4..c9faa3e7 100644
--- a/examples/nodejs/baseai/memory/chat-with-repo/index.ts
+++ b/examples/nodejs/baseai/memory/chat-with-repo/index.ts
@@ -1,15 +1,14 @@
-import {MemoryI} from '@baseai/core';
+import { MemoryI } from '@baseai/core';
+import path from 'path';
const memoryChatWithRepo = (): MemoryI => ({
- name: 'chat-with-repo',
- description: '',
- git: {
- enabled: true,
- include: ['examples/**/*'],
- gitignore: true,
- deployedAt: '',
- embeddedAt: '',
- },
+ name: 'chat-with-repo',
+ description: '',
+ config: {
+ useGitRepo: true,
+ dirToTrack: path.posix.join('examples'),
+ extToTrack: ["*"]
+ }
});
export default memoryChatWithRepo;
diff --git a/examples/nodejs/package.json b/examples/nodejs/package.json
index 5be76560..c13c4c3e 100644
--- a/examples/nodejs/package.json
+++ b/examples/nodejs/package.json
@@ -17,11 +17,11 @@
"author": "Ahmad Awais (https://twitter.com/MrAhmadAwais)",
"license": "UNLICENSED",
"dependencies": {
- "@baseai/core": "^0.9.43",
+ "@baseai/core": "^0.9.19",
"dotenv": "^16.4.5"
},
"devDependencies": {
- "baseai": "^0.9.44",
+ "baseai": "^0.9.19",
"tsx": "^4.19.0"
}
}
diff --git a/examples/remix/baseai/memory/chat-with-docs/index.ts b/examples/remix/baseai/memory/chat-with-docs/index.ts
index 925380b1..58f08569 100644
--- a/examples/remix/baseai/memory/chat-with-docs/index.ts
+++ b/examples/remix/baseai/memory/chat-with-docs/index.ts
@@ -1,10 +1,14 @@
import {MemoryI} from '@baseai/core';
+import path from 'path';
const buidMemory = (): MemoryI => ({
name: 'chat-with-docs',
description: 'Chat with given docs',
- useGit: false,
- include: ['documents/**/*'],
+ config: {
+ useGitRepo: false,
+ dirToTrack: path.posix.join(''),
+ extToTrack: ['*'],
+ },
});
export default buidMemory;
diff --git a/examples/remix/package.json b/examples/remix/package.json
index 494b703c..8b3a78b1 100644
--- a/examples/remix/package.json
+++ b/examples/remix/package.json
@@ -13,7 +13,7 @@
"baseai": "baseai"
},
"dependencies": {
- "@baseai/core": "^0.9.43",
+ "@baseai/core": "^0.9.19",
"@radix-ui/react-slot": "^1.1.0",
"@remix-run/node": "2.12.0",
"@remix-run/react": "2.12.0",
@@ -35,7 +35,7 @@
"@typescript-eslint/parser": "^6.7.4",
"@vercel/remix": "2.12.0",
"autoprefixer": "^10.4.20",
- "baseai": "^0.9.44",
+ "baseai": "^0.9.19",
"eslint": "^8.38.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.28.1",
diff --git a/packages/baseai/CHANGELOG.md b/packages/baseai/CHANGELOG.md
index d1b20baa..506c4c24 100644
--- a/packages/baseai/CHANGELOG.md
+++ b/packages/baseai/CHANGELOG.md
@@ -1,157 +1,5 @@
# baseai
-## 0.9.44
-
-### Patch Changes
-
-- 👌 IMPROVE: Pinned chalk version
-
-## 0.9.43
-
-### Patch Changes
-
-- Fix moderation
-
-## 0.9.42
-
-### Patch Changes
-
-- 📦 NEW: LB-LLM-Key header support
-
-## 0.9.41
-
-### Patch Changes
-
-- 🐛 FIX: Google stream
-
-## 0.9.40
-
-### Patch Changes
-
-- 📦 NEW: meta-llama/Llama-3.3-70B-Instruct-Turbo model
-
-## 0.9.39
-
-### Patch Changes
-
-- 📦 NEW: tools support in pipe.run()
-
-## 0.9.38
-
-### Patch Changes
-
-- 📦 NEW: .env file based BaseAI auth
-
-## 0.9.37
-
-### Patch Changes
-
-- 👌 IMPROVE: Remove unused type
-
-## 0.9.36
-
-### Patch Changes
-
-- 📦 NEW: Dynamically set document metadata
-
-## 0.9.35
-
-### Patch Changes
-
-- 📦 NEW: Pipe API key support in pipe.run()
-
-## 0.9.34
-
-### Patch Changes
-
-- 👌 IMPROVE: Memory config with new features and better UX
-
-## 0.9.33
-
-### Patch Changes
-
-- 📦 NEW: Params for pipe.run() sdk support
-
-## 0.9.32
-
-### Patch Changes
-
-- 👌 IMPROVE: Error handling in usePipe
-
-## 0.9.31
-
-### Patch Changes
-
-- 98f2d7c: 🐛 FIX: Local development server
-- 👌 IMPROVE: Local development server
-
-## 0.9.30
-
-### Patch Changes
-
-- 📦 NEW: Request production AI agent pipe
-
-## 0.9.29
-
-### Patch Changes
-
-- 🐛 FIX: execAsync breaking paths in Windows
-
-## 0.9.28
-
-### Patch Changes
-
-- 📦 NEW: Pipe v1 support
-
-## 0.9.27
-
-### Patch Changes
-
-- 🐛 FIX: Broken pipes and tools build paths in Windows
-
-## 0.9.26
-
-### Patch Changes
-
-- 📦 NEW: Allow empty submit with no message
-
-## 0.9.25
-
-### Patch Changes
-
-- 🐛 FIX: Request timeout and special characters in description
-
-## 0.9.24
-
-### Patch Changes
-
-- 📦 NEW: claude 3.5 Haiku
-
-## 0.9.23
-
-### Patch Changes
-
-- 📦 NEW: setThreadId function in usePipe
-
-## 0.9.22
-
-### Patch Changes
-
-- 🐛 FIX: Anthropic streaming
-- 84d789c: 🐛 FIX: Anthropic streaming
-
-## 0.9.21
-
-### Patch Changes
-
-- 👌 IMPROVE: Redact LLM API key
-
-## 0.9.20
-
-### Patch Changes
-
-- 👌 IMPROVE: logs
-
## 0.9.19
### Patch Changes
diff --git a/packages/baseai/package.json b/packages/baseai/package.json
index fb30c983..4647b7f8 100644
--- a/packages/baseai/package.json
+++ b/packages/baseai/package.json
@@ -1,7 +1,7 @@
{
"name": "baseai",
"description": "The Web AI Framework Dev - BaseAI.dev",
- "version": "0.9.44",
+ "version": "0.9.19",
"license": "UNLICENSED",
"type": "module",
"main": "./dist/index.js",
@@ -52,7 +52,7 @@
"@hono/zod-openapi": "^0.16.0",
"@sindresorhus/slugify": "^2.2.1",
"camelcase": "^8.0.0",
- "chalk": "5.6.0",
+ "chalk": "^5.3.0",
"cli-alerts": "^2.0.0",
"cli-handle-error": "^4.4.0",
"cli-handle-unhandled": "^1.1.1",
@@ -60,11 +60,11 @@
"cli-table3": "^0.6.5",
"cli-welcome": "^3.0.0",
"compute-cosine-similarity": "^1.1.0",
+ "conf": "^13.0.1",
"cosmiconfig": "^9.0.0",
"cosmiconfig-typescript-loader": "^5.0.0",
"dotenv": "^16.4.5",
"execa": "^9.4.0",
- "fast-glob": "^3.3.2",
"figures": "^6.1.0",
"get-package-json-file": "^2.0.0",
"hono": "^4.5.11",
diff --git a/packages/baseai/src/add/index.ts b/packages/baseai/src/add/index.ts
index 27720abb..46fd3fb3 100644
--- a/packages/baseai/src/add/index.ts
+++ b/packages/baseai/src/add/index.ts
@@ -1,10 +1,10 @@
+import { getStoredAuth } from '@/auth';
import { dim, dimItalic } from '@/utils/formatting';
import { getAvailablePipes } from '@/utils/get-available-pipes';
import { getAvailableTools } from '@/utils/get-available-tools';
import { heading } from '@/utils/heading';
import icons from '@/utils/icons';
import { isToolPresent } from '@/utils/is-tool-present';
-import { retrieveAuthentication } from '@/utils/retrieve-credentials';
import { formatCode } from '@/utils/ts-format-code';
import * as p from '@clack/prompts';
import slugify from '@sindresorhus/slugify';
@@ -43,6 +43,37 @@ function extractLoginName(loginAndPipe: string) {
};
}
+/**
+ * Represents an account with login credentials and an API key.
+ */
+interface Account {
+ login: string;
+ apiKey: string;
+}
+
+/**
+ * Retrieves the stored authentication account.
+ *
+ * This function attempts to retrieve the stored authentication account
+ * asynchronously. If the account is found, it is returned. If no account
+ * is found or an error occurs during retrieval, `null` is returned.
+ *
+ * @returns {Promise} A promise that resolves to the stored
+ * authentication account, or `null` if no account is found or an error occurs.
+ */
+async function retrieveAuthentication(): Promise {
+ try {
+ const account = await getStoredAuth();
+ if (!account) return null;
+
+ return account;
+ } catch (error) {
+ p.log.error(
+ `Error retrieving stored auth: ${(error as Error).message}`
+ );
+ return null;
+ }
+}
/**
* Fetches a pipe from Langbase using the provided login and name.
@@ -62,17 +93,9 @@ async function getPipe({
name: string;
spinner: Spinner;
}) {
+ spinner.start('Fetching pipe from Langbase');
try {
- const account = await retrieveAuthentication({ spinner });
- if (!account) {
- p.log.error(
- 'Authentication failed. Please run "npx baseai auth" to authenticate.'
- );
- return;
- }
-
- spinner.start('Fetching pipe from Langbase');
-
+ const account = await retrieveAuthentication();
const API_URL = `https://api.langbase.com/v1/pipes/${login}/${name}`;
const createResponse = await fetch(API_URL, {
diff --git a/packages/baseai/src/auth/index.ts b/packages/baseai/src/auth/index.ts
index d042df4f..1309548b 100644
--- a/packages/baseai/src/auth/index.ts
+++ b/packages/baseai/src/auth/index.ts
@@ -9,11 +9,21 @@ import {
outro,
password
} from '@clack/prompts';
-import fs from 'fs/promises';
+import Conf from 'conf';
+import fs from 'fs';
import open from 'open';
import path from 'path';
import color from 'picocolors';
+const config = new Conf({
+ projectName: 'baseai'
+});
+
+interface Account {
+ login: string;
+ apiKey: string;
+}
+
export async function auth() {
p.intro(
heading({
@@ -62,6 +72,17 @@ export async function auth() {
process.exit(1);
}
+ // Store in Conf (old functionality)
+ const newAccount: Account = { login, apiKey };
+ const existingAccounts = (config.get('accounts') as Account[]) || [];
+ const updatedAccounts = [...existingAccounts, newAccount];
+ config.set('accounts', updatedAccounts);
+
+ // Store in .env file (new functionality)
+ // const envKeyName = apiKey.startsWith('user_')
+ // ? 'LANGBASE_USER_API_KEY'
+ // : 'LANGBASE_ORG_API_KEY';
+
const envKeyName = 'LANGBASE_API_KEY';
const envContent = `\n# Langbase API key for https://langbase.com/${login}\n${envKeyName}=${apiKey}\n\n`;
@@ -78,45 +99,37 @@ export async function auth() {
const baiConfig = await loadConfig();
let envFile = baiConfig.envFilePath || '.env';
- const envFileContent = await fs.readFile(envFile, 'utf-8');
-
- const oldKey = envFileContent
- .split('\n')
- .reverse() // Reverse to get the latest key if there are multiple
- .find(line => line.includes('LANGBASE_API_KEY'))
- ?.split('=')[1];
-
- if (oldKey) {
- const shouldOverwrite = await confirm({
- message: `API key found in ${envFile}. Overwrite?`
- });
-
- if (isCancel(shouldOverwrite)) {
- cancel('Operation cancelled.');
- process.exit(0);
- }
-
- if (!shouldOverwrite) {
- outro(
- color.yellow('API key is not overwritten.')
- );
- process.exit(0);
- }
-
- const newEnvContent = envFileContent.replace(
- new RegExp(`LANGBASE_API_KEY=${oldKey}`),
- envContent.trim()
- );
-
- await fs.writeFile(path.join(process.cwd(), envFile), newEnvContent);
- } else {
- await fs.appendFile(path.join(process.cwd(), envFile), envContent);
- }
+ fs.appendFileSync(path.join(process.cwd(), envFile), envContent);
outro(
color.green(
- `Authentication successful. API key is stored in ${envFile}`
+ `Authentication successful. Credentials stored in config and ${envFile}`
)
);
+ console.log(color.dim(`Config file location: ${config.path}`));
process.exit(0);
}
+
+export function getStoredAuth(): Account | undefined {
+ const accounts = (config.get('accounts') as Account[]) || [];
+ const currentLogin = config.get('currentAccount') as string | undefined;
+
+ if (currentLogin) {
+ return accounts.find(account => account.login === currentLogin);
+ }
+
+ return accounts[0]; // Return the first account if no current account is set
+}
+
+export function getStoredAccounts(): Account[] {
+ return (config.get('accounts') as Account[]) || [];
+}
+
+export function setCurrentAccount(login: string): boolean {
+ const accounts = getStoredAccounts();
+ if (accounts.some(account => account.login === login)) {
+ config.set('currentAccount', login);
+ return true;
+ }
+ return false;
+}
diff --git a/packages/baseai/src/build/index.ts b/packages/baseai/src/build/index.ts
index 4db3337b..e6f213ae 100644
--- a/packages/baseai/src/build/index.ts
+++ b/packages/baseai/src/build/index.ts
@@ -32,7 +32,7 @@ const buildPipes = async () => {
p.intro(heading({ text: 'PIPES', sub: '', dim: true }));
const sourcePath = path.join(process.cwd(), 'baseai', 'pipes');
- const outputPath = path.join(process.cwd(), '.baseai', 'pipes');
+ const outputPath = path.join(process.cwd(), '.baseai/pipes');
const builtPipes = await buildTypeScriptFiles(
sourcePath,
@@ -47,7 +47,7 @@ const buildTools = async () => {
p.intro(heading({ text: 'TOOLS', sub: '', dim: true }));
const sourcePath = path.join(process.cwd(), 'baseai', 'tools');
- const outputPath = path.join(process.cwd(), '.baseai', 'tools');
+ const outputPath = path.join(process.cwd(), '.baseai/tools');
const builtTools = await buildTypeScriptFiles(
sourcePath,
@@ -109,7 +109,7 @@ export const buildMemory = async ({
const displayName = path.dirname(file); // This is the last directory name
try {
const { stdout } = await execAsync(
- `npx tsx -e "import memoryConfig from '${JSON.stringify(inputFile)}'; console.log(JSON.stringify(memoryConfig()))"`
+ `npx tsx -e "import memoryConfig from '${inputFile}'; console.log(JSON.stringify(memoryConfig()))"`
);
await fs.writeFile(outputFile, stdout);
@@ -160,7 +160,7 @@ const buildTypeScriptFiles = async (
try {
const { stdout } = await execAsync(
- `npx tsx -e "import config from '${JSON.stringify(inputFile)}'; console.log(JSON.stringify(config()))"`
+ `npx tsx -e "import config from '${inputFile}'; console.log(JSON.stringify(config()))"`
);
// Parse the JSON output
diff --git a/packages/baseai/src/data/models.ts b/packages/baseai/src/data/models.ts
index d8c4e055..41759877 100644
--- a/packages/baseai/src/data/models.ts
+++ b/packages/baseai/src/data/models.ts
@@ -253,12 +253,6 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
}
],
[TOGETHER_AI]: [
- {
- id: 'meta-llama/Llama-3.3-70B-Instruct-Turbo',
- provider: TOGETHER_AI,
- promptCost: 0.88,
- completionCost: 0.88,
- },
{
id: 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
provider: TOGETHER_AI,
@@ -420,25 +414,9 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
toolChoice: true,
parallelToolCalls: true
}
- },
- {
- id: 'claude-3-5-haiku-20241022',
- provider: ANTHROPIC,
- promptCost: 1,
- completionCost: 5,
- toolSupport: {
- toolChoice: true,
- parallelToolCalls: true
- }
}
],
[GROQ]: [
- {
- id: 'llama-3.3-70b-versatile',
- provider: GROQ,
- promptCost: 0.59,
- completionCost: 0.79,
- },
{
id: 'llama-3.1-70b-versatile',
provider: GROQ,
@@ -539,12 +517,6 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
}
],
[FIREWORKS_AI]: [
- {
- id: 'llama-v3p3-70b-instruct',
- provider: FIREWORKS_AI,
- promptCost: 0.88,
- completionCost: 0.88,
- },
{
id: 'llama-v3p1-405b-instruct',
provider: FIREWORKS_AI,
diff --git a/packages/baseai/src/deploy/document.ts b/packages/baseai/src/deploy/document.ts
index 484adbca..6692d3c9 100644
--- a/packages/baseai/src/deploy/document.ts
+++ b/packages/baseai/src/deploy/document.ts
@@ -7,7 +7,9 @@ import {
handleError,
handleInvalidConfig,
listMemoryDocuments,
+ retrieveAuthentication,
uploadDocumentsToMemory,
+ type Account
} from '.';
import path from 'path';
import fs from 'fs/promises';
@@ -19,7 +21,6 @@ import {
} from '@/utils/memory/load-memory-files';
import type { MemoryI } from 'types/memory';
import { compareDocumentLists } from '@/utils/memory/compare-docs-list';
-import { retrieveAuthentication, type Account } from '@/utils/retrieve-credentials';
type Spinner = ReturnType;
diff --git a/packages/baseai/src/deploy/index.ts b/packages/baseai/src/deploy/index.ts
index 8e2a64ba..186bc71a 100644
--- a/packages/baseai/src/deploy/index.ts
+++ b/packages/baseai/src/deploy/index.ts
@@ -18,19 +18,17 @@ import path from 'path';
import color from 'picocolors';
import { type MemoryI } from 'types/memory';
import type { Pipe, PipeOld } from 'types/pipe';
+import { getStoredAuth } from './../auth/index';
import {
handleGitSyncMemories,
updateDeployedCommitHash
} from '@/utils/memory/git-sync/handle-git-sync-memories';
import { handleSingleDocDeploy } from './document';
-import {
- generateUpgradeInstructions,
- isOldMemoryConfigFormat
-} from '@/utils/memory/handle-old-memory-config';
-import {
- retrieveAuthentication,
- type Account
-} from '@/utils/retrieve-credentials';
+
+export interface Account {
+ login: string;
+ apiKey: string;
+}
interface ErrorResponse {
error?: { message: string };
@@ -159,6 +157,26 @@ async function readToolsDirectory({
}
}
+export async function retrieveAuthentication({
+ spinner
+}: {
+ spinner: Spinner;
+}): Promise {
+ spinner.start('Retrieving stored authentication');
+ try {
+ const account = await getStoredAuth();
+ if (!account) {
+ handleNoAccountFound({ spinner });
+ return null;
+ }
+ spinner.stop(`Deploying as ${color.cyan(account.login)}`);
+ return account;
+ } catch (error) {
+ handleAuthError({ spinner, error });
+ return null;
+ }
+}
+
async function deployPipes({
spinner,
pipes,
@@ -335,6 +353,23 @@ function handleDirectoryReadError({
}
}
+function handleNoAccountFound({ spinner }: { spinner: Spinner }): void {
+ spinner.stop('No account found');
+ p.log.warn('No account found. Please authenticate first.');
+ p.log.info(`Run: ${color.green('npx baseai auth')}`);
+}
+
+function handleAuthError({
+ spinner,
+ error
+}: {
+ spinner: Spinner;
+ error: unknown;
+}): void {
+ spinner.stop('Failed to retrieve authentication');
+ p.log.error(`Error retrieving stored auth: ${(error as Error).message}`);
+}
+
export function handleInvalidConfig({
spinner,
name,
@@ -447,27 +482,19 @@ export async function deployMemory({
p.log.step(`Processing documents for memory: ${memoryNameWithoutExt}`);
- if (isOldMemoryConfigFormat(memoryObject)) {
- p.note(generateUpgradeInstructions(memoryObject));
- p.cancel(
- 'Deployment cancelled. Please update your memory config file to the new format.'
- );
- process.exit(1);
- }
-
let filesToDeploy: string[] = [];
let filesToDelete: string[] = [];
let memoryDocs: MemoryDocumentI[] = [];
// Git sync memories
- if (memoryObject.git.enabled) {
+ if (memoryObject.config?.useGitRepo) {
// Get names of files to deploy, i.e., changed or new files
const {
filesToDeploy: gitFilesToDeploy,
filesToDelete: gitFilesToDelete
} = await handleGitSyncMemories({
memoryName: memoryNameWithoutExt,
- config: memoryObject,
+ config: memoryObject.config,
account
});
@@ -503,7 +530,7 @@ export async function deployMemory({
documents: memoryDocs,
account,
overwrite,
- isGitSync: memoryObject.git.enabled,
+ isGitSync: memoryObject.config?.useGitRepo,
docsToDelete: filesToDelete
});
spinner.stop(`Deployment finished memory: ${memoryObject.name}`);
@@ -539,6 +566,7 @@ export async function upsertMemory({
docsToDelete?: string[];
}): Promise {
const { createMemory } = getMemoryApiUrls({
+ account,
memoryName: memory.name
});
@@ -650,8 +678,7 @@ export async function uploadDocumentsToMemory({
const signedUrl = await getSignedUploadUrl({
documentName: doc.name,
memoryName: name,
- account,
- meta: doc.meta
+ account
});
const uploadResponse = await uploadDocument(signedUrl, doc.blob);
@@ -845,6 +872,7 @@ export async function listMemoryDocuments({
memoryName: string;
}) {
const { listDocuments } = getMemoryApiUrls({
+ account,
memoryName: memoryName
});
@@ -873,26 +901,30 @@ export async function listMemoryDocuments({
);
}
- const res = (await listResponse.json()) as { name: string }[];
- const documents = res.map((doc: { name: string }) => doc.name);
+ const res = (await listResponse.json()) as { docs: { name: string }[] };
+ const documents = res.docs.map((doc: { name: string }) => doc.name);
return documents;
}
async function getSignedUploadUrl({
documentName,
memoryName,
- account,
- meta
+ account
}: {
documentName: string;
memoryName: string;
account: Account;
- meta: Record;
}): Promise {
const { uploadDocument } = getMemoryApiUrls({
+ account,
memoryName
});
+ const isOrgAccount = account.apiKey.includes(':');
+
+ const ownerLogin = isOrgAccount
+ ? account.apiKey.split(':')[0]
+ : account.login;
try {
const response = await fetch(uploadDocument, {
method: 'POST',
@@ -901,8 +933,8 @@ async function getSignedUploadUrl({
Authorization: `Bearer ${account.apiKey}`
},
body: JSON.stringify({
- meta,
memoryName,
+ ownerLogin,
fileName: documentName
})
});
@@ -936,6 +968,7 @@ async function deleteDocument({
account: Account;
}) {
const { deleteDocument } = getMemoryApiUrls({
+ account,
memoryName,
documentName
});
@@ -1000,36 +1033,44 @@ async function uploadDocument(signedUrl: string, document: Blob) {
}
export function getMemoryApiUrls({
+ account,
memoryName,
documentName
}: {
+ account: Account;
memoryName: string;
documentName?: string;
}) {
- // Base URL
- const baseUrl = `https://api.langbase.com/v1`;
+ const isOrgAccount = account.apiKey.includes(':');
+ const ownerLogin = isOrgAccount
+ ? account.apiKey.split(':')[0]
+ : account.login;
+ const baseUrl = `https://api.langbase.com/beta`;
+ const baseUrlV1 = `https://api.langbase.com/v1`;
// Create memory URL
- const createMemory = `${baseUrl}/memory`;
-
- // Delete memory URL
- const deleteMemory = `${baseUrl}/memory/${memoryName}`;
+ const createUrlOrg = `${baseUrl}/org/${ownerLogin}/memorysets`;
+ const createUrlUser = `${baseUrl}/user/memorysets`;
// Upload document URL
- const uploadDocument = `${baseUrl}/memory/documents`;
+ const uploadDocumentOrg = `${baseUrl}/org/${ownerLogin}/memorysets/documents`;
+ const uploadDocumentUser = `${baseUrl}/user/memorysets/documents`;
// List documents URL
- const listDocuments = `${baseUrl}/memory/${memoryName}/documents`;
+ const listDocuments = `${baseUrl}/memorysets/${ownerLogin}/${memoryName}/documents`;
+
+ // Delete memory URL
+ const deleteMemory = `${baseUrl}/memorysets/${ownerLogin}/${memoryName}`;
// Delete document URL
- const deleteDocument = `${baseUrl}/memory/${memoryName}/documents/${documentName}`;
+ const deleteDocument = `${baseUrlV1}/memory/${memoryName}/documents/${documentName}`;
return {
listDocuments,
deleteMemory,
deleteDocument,
- createMemory,
- uploadDocument
+ createMemory: isOrgAccount ? createUrlOrg : createUrlUser,
+ uploadDocument: isOrgAccount ? uploadDocumentOrg : uploadDocumentUser
};
}
@@ -1049,6 +1090,7 @@ async function overwriteMemory({
// Delete old memory.
dlog(`Deleting old memory: ${memory.name}`);
const { deleteMemory } = getMemoryApiUrls({
+ account,
memoryName: memory.name
});
@@ -1139,10 +1181,10 @@ export async function deploySingleMemory({
// Retrieve authentication
const account = await retrieveAuthentication({ spinner });
if (!account) {
- p.outro(
- `No account found. Skipping deployment. \n Run: ${cyan('npx baseai@latest auth')}`
+ p.log.error(
+ 'Authentication failed. Please run "npx baseai auth" to authenticate.'
);
- process.exit(1);
+ return;
}
// Call deployMemory function
@@ -1155,7 +1197,7 @@ export async function deploySingleMemory({
});
p.outro(`Successfully deployed memory: ${memoryName}`);
- process.exit(0);
+ process.exit(1);
} catch (error) {
if (error instanceof Error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
diff --git a/packages/baseai/src/dev/data/models.ts b/packages/baseai/src/dev/data/models.ts
index b82060c5..89eed1ab 100644
--- a/packages/baseai/src/dev/data/models.ts
+++ b/packages/baseai/src/dev/data/models.ts
@@ -173,12 +173,6 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
}
],
[TOGETHER_AI]: [
- {
- id: 'meta-llama/Llama-3.3-70B-Instruct-Turbo',
- provider: TOGETHER_AI,
- promptCost: 0.88,
- completionCost: 0.88,
- },
{
id: 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
provider: TOGETHER_AI,
@@ -340,25 +334,9 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
toolChoice: true,
parallelToolCalls: true
}
- },
- {
- id: 'claude-3-5-haiku-20241022',
- provider: ANTHROPIC,
- promptCost: 1,
- completionCost: 5,
- toolSupport: {
- toolChoice: true,
- parallelToolCalls: true
- }
}
],
[GROQ]: [
- {
- id: 'llama-3.3-70b-versatile',
- provider: GROQ,
- promptCost: 0.59,
- completionCost: 0.79,
- },
{
id: 'llama-3.1-70b-versatile',
provider: GROQ,
@@ -459,12 +437,6 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
}
],
[FIREWORKS_AI]: [
- {
- id: 'llama-v3p3-70b-instruct',
- provider: FIREWORKS_AI,
- promptCost: 0.88,
- completionCost: 0.88,
- },
{
id: 'llama-v3p1-405b-instruct',
provider: FIREWORKS_AI,
diff --git a/packages/baseai/src/dev/index.ts b/packages/baseai/src/dev/index.ts
index eb2bee78..3c7b0433 100644
--- a/packages/baseai/src/dev/index.ts
+++ b/packages/baseai/src/dev/index.ts
@@ -13,7 +13,7 @@ import { customCors } from './middleware/custom-cors';
import { poweredBy } from './middleware/powered-by';
import { preFlight } from './middleware/pre-flight';
import { registerRoot } from './routes/base';
-import { registerV1PipesRun } from './routes/v1/pipes/run';
+import { registerBetaPipesRun } from './routes/beta/pipes/run';
export async function runBaseServer() {
const app = new Hono();
@@ -29,7 +29,7 @@ export async function runBaseServer() {
// Routes.
registerRoot(app);
- registerV1PipesRun(app);
+ registerBetaPipesRun(app);
const port = 9000;
diff --git a/packages/baseai/src/dev/llms/call-anthropic.ts b/packages/baseai/src/dev/llms/call-anthropic.ts
index 432b78d9..62b10c3b 100644
--- a/packages/baseai/src/dev/llms/call-anthropic.ts
+++ b/packages/baseai/src/dev/llms/call-anthropic.ts
@@ -4,26 +4,23 @@ import { handleProviderRequest } from '../utils/provider-handlers/provider-reque
import { ANTHROPIC } from '../data/models';
import { handleLlmError } from './utils';
import type { ModelParams } from 'types/providers';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import { addToolsToParams } from '../utils/add-tools-to-params';
-import type { PipeTool } from 'types/tools';
export async function callAnthropic({
pipe,
messages,
llmApiKey,
- stream,
- paramsTools
+ stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
stream: boolean;
messages: Message[];
- paramsTools: PipeTool[] | undefined;
}) {
try {
const modelParams = buildModelParams(pipe, stream, messages);
- addToolsToParams(modelParams, pipe, paramsTools);
+ addToolsToParams(modelParams, pipe);
// Transform params according to provider's format
const transformedRequestParams = transformToProviderRequest({
@@ -47,28 +44,14 @@ export async function callAnthropic({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-cohere.ts b/packages/baseai/src/dev/llms/call-cohere.ts
index feded453..9b639aaa 100644
--- a/packages/baseai/src/dev/llms/call-cohere.ts
+++ b/packages/baseai/src/dev/llms/call-cohere.ts
@@ -4,7 +4,7 @@ import { dlog } from '../utils/dlog';
import { COHERE } from '../data/models';
import { handleLlmError } from './utils';
import type { ModelParams } from 'types/providers';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
export async function callCohere({
pipe,
@@ -12,7 +12,7 @@ export async function callCohere({
llmApiKey,
stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
messages: Message[];
stream: boolean;
@@ -42,28 +42,14 @@ export async function callCohere({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-fireworks.ts b/packages/baseai/src/dev/llms/call-fireworks.ts
index e7f1f457..85dc920e 100644
--- a/packages/baseai/src/dev/llms/call-fireworks.ts
+++ b/packages/baseai/src/dev/llms/call-fireworks.ts
@@ -5,7 +5,7 @@ import { FIREWORKS_AI } from '../data/models';
import { handleLlmError } from './utils';
import type { ModelParams } from 'types/providers';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
export async function callFireworks({
pipe,
@@ -13,7 +13,7 @@ export async function callFireworks({
llmApiKey,
stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
stream: boolean;
messages: Message[];
@@ -30,7 +30,7 @@ export async function callFireworks({
dlog('Fireworks request params', transformedRequestParams);
// Fireworks llama-3.1 405b behaves weirdly with stop value. Bug on their side. Omitting it.
- if (modelParams?.model === 'llama-v3p1-405b-instruct')
+ if (pipe.model.name === 'llama-v3p1-405b-instruct')
delete transformedRequestParams['stop'];
const providerOptions = { provider: FIREWORKS_AI, llmApiKey };
@@ -47,33 +47,19 @@ export async function callFireworks({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
// Create model strings for Fireworks AI
- const pipeModel = pipe.model.split(':')[1];
- const model =
- pipeModel === 'yi-large'
+ const modelString =
+ pipe.model.name === 'yi-large'
? 'accounts/yi-01-ai/models/yi-large'
- : `accounts/fireworks/models/${pipeModel}`;
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
+ : `accounts/fireworks/models/${pipe.model.name}`;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: modelString,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-google.ts b/packages/baseai/src/dev/llms/call-google.ts
index 19ba6792..4188e295 100644
--- a/packages/baseai/src/dev/llms/call-google.ts
+++ b/packages/baseai/src/dev/llms/call-google.ts
@@ -4,26 +4,23 @@ import { handleProviderRequest } from '../utils/provider-handlers/provider-reque
import { GOOGLE } from '../data/models';
import { applyJsonModeIfEnabledForGoogle, handleLlmError } from './utils';
import type { ModelParams } from 'types/providers';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import { addToolsToParams } from '../utils/add-tools-to-params';
-import type { PipeTool } from 'types/tools';
export async function callGoogle({
pipe,
messages,
llmApiKey,
- stream,
- paramsTools
+ stream
}: {
- pipe: Pipe;
+ pipe: any;
stream: boolean;
llmApiKey: string;
messages: Message[];
- paramsTools: PipeTool[] | undefined;
}) {
try {
const modelParams = buildModelParams(pipe, stream, messages);
- addToolsToParams(modelParams, pipe, paramsTools);
+ addToolsToParams(modelParams, pipe);
// Transform params according to provider's format
const transformedRequestParams = transformToProviderRequest({
@@ -51,29 +48,15 @@ export async function callGoogle({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-groq.ts b/packages/baseai/src/dev/llms/call-groq.ts
index 9f967402..78089721 100644
--- a/packages/baseai/src/dev/llms/call-groq.ts
+++ b/packages/baseai/src/dev/llms/call-groq.ts
@@ -4,7 +4,7 @@ import { GROQ } from '../data/models';
import transformToProviderRequest from '../utils/provider-handlers/transfrom-to-provider-request';
import { applyJsonModeIfEnabled, handleLlmError } from './utils';
import type { ModelParams } from 'types/providers';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
export async function callGroq({
pipe,
@@ -12,7 +12,7 @@ export async function callGroq({
llmApiKey,
stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
stream: boolean;
messages: Message[];
@@ -42,28 +42,14 @@ export async function callGroq({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-llm.ts b/packages/baseai/src/dev/llms/call-llm.ts
index dcbf9b74..1d6cc00f 100644
--- a/packages/baseai/src/dev/llms/call-llm.ts
+++ b/packages/baseai/src/dev/llms/call-llm.ts
@@ -12,8 +12,9 @@ import {
} from '@/dev/data/models';
import { addContextFromMemory } from '@/utils/memory/lib';
-import type { Message, Pipe, VariablesI } from 'types/pipe';
+import type { Message, VariablesI } from 'types/pipe';
import { ApiError } from '../hono/errors';
+import type { Pipe } from '../routes/beta/pipes/run';
import { dlog } from '../utils/dlog';
import { getRunThread } from '../utils/thread/get-run-thread';
import { callAnthropic } from './call-anthropic';
@@ -26,34 +27,28 @@ import { callOpenAI } from './call-openai';
import { callPerplexity } from './call-perplexity';
import { callTogether } from './call-together';
import { callXAI } from './call-xai';
-import { getProvider } from '../utils/get-provider';
-import type { PipeTool } from 'types/tools';
export async function callLLM({
pipe,
stream,
messages,
llmApiKey,
- variables,
- paramsTools
+ variables
}: {
pipe: Pipe;
stream: boolean;
llmApiKey: string;
messages: Message[];
variables?: VariablesI;
- paramsTools: PipeTool[] | undefined;
}) {
try {
- // Get the model provider from the pipe.
- const providerString = pipe.model.split(':')[0];
- const modelProvider = getProvider(providerString);
-
- const memoryNames = pipe.memory.map(memory => memory.name);
+ // Get the model provider from the pipe config.
+ const modelProvider = pipe.model.provider;
const similarChunks = await addContextFromMemory({
+ pipe,
messages,
- memoryNames
+ memoryNames: pipe.memorysets
});
// Process the messages to be sent to the model provider.
@@ -73,8 +68,7 @@ export async function callLLM({
pipe,
stream,
messages,
- llmApiKey,
- paramsTools
+ llmApiKey
});
}
@@ -82,10 +76,9 @@ export async function callLLM({
dlog('ANTHROPIC', '✅');
return await callAnthropic({
pipe,
- stream,
messages,
llmApiKey,
- paramsTools
+ stream
});
}
@@ -93,10 +86,9 @@ export async function callLLM({
dlog('TOGETHER_AI', '✅');
return await callTogether({
pipe,
- stream,
messages,
llmApiKey,
- paramsTools,
+ stream
});
}
@@ -116,8 +108,7 @@ export async function callLLM({
pipe,
messages,
llmApiKey,
- stream,
- paramsTools
+ stream
});
}
@@ -127,8 +118,7 @@ export async function callLLM({
pipe,
messages,
llmApiKey,
- stream,
- paramsTools
+ stream
});
}
diff --git a/packages/baseai/src/dev/llms/call-ollama.ts b/packages/baseai/src/dev/llms/call-ollama.ts
index 1eb0c098..972a9cc2 100644
--- a/packages/baseai/src/dev/llms/call-ollama.ts
+++ b/packages/baseai/src/dev/llms/call-ollama.ts
@@ -4,7 +4,7 @@ import { handleProviderRequest } from '../utils/provider-handlers/provider-reque
import { OLLAMA } from '../data/models';
import { handleLlmError } from './utils';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import type { ModelParams } from 'types/providers';
export async function callOllama({
@@ -13,7 +13,7 @@ export async function callOllama({
llmApiKey,
stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
stream: boolean;
messages: Message[];
@@ -42,28 +42,14 @@ export async function callOllama({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-openai.ts b/packages/baseai/src/dev/llms/call-openai.ts
index 00afca25..be1eba9e 100644
--- a/packages/baseai/src/dev/llms/call-openai.ts
+++ b/packages/baseai/src/dev/llms/call-openai.ts
@@ -4,31 +4,28 @@ import { dlog } from '../utils/dlog';
import { moderate } from '../utils/moderate';
import { OPEN_AI } from '../data/models';
import { applyJsonModeIfEnabled, handleLlmError } from './utils';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import type { ModelParams } from 'types/providers';
import { addToolsToParams } from '../utils/add-tools-to-params';
-import type { PipeTool } from 'types/tools';
export async function callOpenAI({
pipe,
stream,
llmApiKey,
- messages,
- paramsTools
+ messages
}: {
- pipe: Pipe;
+ pipe: any;
stream: boolean;
llmApiKey: string;
messages: Message[];
- paramsTools: PipeTool[] | undefined;
}) {
try {
validateInput(pipe, messages);
const openai = new OpenAI({ apiKey: llmApiKey });
- await moderateContent(openai, messages, pipe.moderate);
+ await moderateContent(openai, messages, pipe.meta.moderate);
const modelParams = buildModelParams(pipe, stream, messages);
- addToolsToParams(modelParams, pipe, paramsTools);
+ addToolsToParams(modelParams, pipe);
applyJsonModeIfEnabled(modelParams, pipe);
dlog('modelParams', modelParams);
@@ -38,7 +35,7 @@ export async function callOpenAI({
}
}
-function validateInput(pipe: Pipe, messages: Message[]) {
+function validateInput(pipe: any, messages: Message[]) {
if (!pipe || !pipe.model || !messages || messages.length === 0) {
throw new ApiError({
code: 'BAD_REQUEST',
@@ -68,28 +65,14 @@ async function moderateContent(
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model: model || 'gpt-4o-mini',
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name || 'gpt-4o-mini',
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-perplexity.ts b/packages/baseai/src/dev/llms/call-perplexity.ts
index 5be492d7..3596ed9e 100644
--- a/packages/baseai/src/dev/llms/call-perplexity.ts
+++ b/packages/baseai/src/dev/llms/call-perplexity.ts
@@ -4,7 +4,7 @@ import { handleProviderRequest } from '../utils/provider-handlers/provider-reque
import { PERPLEXITY } from '../data/models';
import { handleLlmError } from './utils';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import type { ModelParams } from 'types/providers';
export async function callPerplexity({
@@ -13,7 +13,7 @@ export async function callPerplexity({
llmApiKey,
stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
stream: boolean;
messages: Message[];
@@ -42,28 +42,14 @@ export async function callPerplexity({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-together.ts b/packages/baseai/src/dev/llms/call-together.ts
index 76baeb0d..77880db7 100644
--- a/packages/baseai/src/dev/llms/call-together.ts
+++ b/packages/baseai/src/dev/llms/call-together.ts
@@ -2,23 +2,20 @@ import OpenAI from 'openai';
import { dlog } from '../utils/dlog';
import { GROQ } from '../data/models';
import { applyJsonModeIfEnabled, handleLlmError } from './utils';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import type { ModelParams } from 'types/providers';
import { addToolsToParams } from '../utils/add-tools-to-params';
-import type { PipeTool } from 'types/tools';
export async function callTogether({
pipe,
messages,
llmApiKey,
- stream,
- paramsTools
+ stream
}: {
- pipe: Pipe;
+ pipe: any;
llmApiKey: string;
stream: boolean;
messages: Message[];
- paramsTools: PipeTool[] | undefined;
}) {
try {
const modelParams = buildModelParams(pipe, stream, messages);
@@ -32,7 +29,7 @@ export async function callTogether({
// Together behaves weirdly with stop value. Omitting it.
delete modelParams['stop'];
applyJsonModeIfEnabled(modelParams, pipe);
- addToolsToParams(modelParams, pipe, paramsTools);
+ addToolsToParams(modelParams, pipe);
dlog('modelParams', modelParams);
return await together.chat.completions.create(modelParams as any);
@@ -42,28 +39,14 @@ export async function callTogether({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/call-xai.ts b/packages/baseai/src/dev/llms/call-xai.ts
index d10e605d..eb4f5510 100644
--- a/packages/baseai/src/dev/llms/call-xai.ts
+++ b/packages/baseai/src/dev/llms/call-xai.ts
@@ -2,23 +2,20 @@ import OpenAI from 'openai';
import { dlog } from '../utils/dlog';
import { X_AI } from '../data/models';
import { handleLlmError } from './utils';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import type { ModelParams } from 'types/providers';
import { addToolsToParams } from '../utils/add-tools-to-params';
-import type { PipeTool } from 'types/tools';
export async function callXAI({
pipe,
stream,
llmApiKey,
- messages,
- paramsTools
+ messages
}: {
- pipe: Pipe;
+ pipe: any;
stream: boolean;
llmApiKey: string;
messages: Message[];
- paramsTools: PipeTool[] | undefined;
}) {
try {
const modelParams = buildModelParams(pipe, stream, messages);
@@ -30,7 +27,7 @@ export async function callXAI({
});
// Add tools (functions) to modelParams
- addToolsToParams(modelParams, pipe, paramsTools);
+ addToolsToParams(modelParams, pipe);
dlog('modelParams', modelParams);
return await groq.chat.completions.create(modelParams as any);
@@ -40,28 +37,14 @@ export async function callXAI({
}
function buildModelParams(
- pipe: Pipe,
+ pipe: any,
stream: boolean,
messages: Message[]
): ModelParams {
- const model = pipe.model.split(':')[1];
- const {
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
- } = pipe;
return {
messages,
stream,
- model,
- top_p,
- max_tokens,
- temperature,
- presence_penalty,
- frequency_penalty,
- stop
+ model: pipe.model.name,
+ ...pipe.model.params
};
}
diff --git a/packages/baseai/src/dev/llms/utils.ts b/packages/baseai/src/dev/llms/utils.ts
index 7870afd1..ce28d054 100644
--- a/packages/baseai/src/dev/llms/utils.ts
+++ b/packages/baseai/src/dev/llms/utils.ts
@@ -2,7 +2,6 @@ import type { ModelParams } from 'types/providers';
import { ApiError } from '../hono/errors';
import { dlog } from '../utils/dlog';
import { isJsonModeOn } from '../utils/is-json-mode';
-import type { Pipe } from 'types/pipe';
export function handleLlmError({
error,
@@ -18,10 +17,10 @@ export function handleLlmError({
});
}
-export function applyJsonModeIfEnabled(modelParams: ModelParams, pipe: Pipe) {
+export function applyJsonModeIfEnabled(modelParams: ModelParams, pipe: any) {
const hasJsonMode = isJsonModeOn({
- currentModel: modelParams.model as string,
- jsonMode: pipe.json || false
+ currentModel: pipe.model.name,
+ jsonMode: pipe.meta.json || false
});
if (hasJsonMode) {
@@ -31,12 +30,11 @@ export function applyJsonModeIfEnabled(modelParams: ModelParams, pipe: Pipe) {
export function applyJsonModeIfEnabledForGoogle(
transformedRequestParams: any,
- pipe: Pipe
+ pipe: any
) {
- const currentModel = pipe.model.split(':')[1];
const hasJsonMode = isJsonModeOn({
- currentModel,
- jsonMode: pipe.json || false
+ currentModel: pipe.model.name,
+ jsonMode: pipe.meta.json || false
});
if (hasJsonMode) {
diff --git a/packages/baseai/src/dev/providers/anthropic/chatComplete.ts b/packages/baseai/src/dev/providers/anthropic/chatComplete.ts
index c0173ee6..86b4439f 100644
--- a/packages/baseai/src/dev/providers/anthropic/chatComplete.ts
+++ b/packages/baseai/src/dev/providers/anthropic/chatComplete.ts
@@ -465,7 +465,6 @@ export const AnthropicChatCompleteStreamChunkTransform: (
choices: [
{
delta: {
- role: 'assistant',
content: ''
},
index: 0,
diff --git a/packages/baseai/src/dev/providers/google/chatComplete.ts b/packages/baseai/src/dev/providers/google/chatComplete.ts
index d5b45b34..3b90512e 100644
--- a/packages/baseai/src/dev/providers/google/chatComplete.ts
+++ b/packages/baseai/src/dev/providers/google/chatComplete.ts
@@ -439,7 +439,7 @@ export const GoogleChatCompleteStreamChunkTransform: (
model: '',
provider: 'google',
choices:
- parsedChunk.candidates?.map((generation, index) => {
+ parsedChunk.candidates?.map(generation => {
let message: ProviderMessage = {
role: 'assistant',
content: ''
@@ -473,7 +473,7 @@ export const GoogleChatCompleteStreamChunkTransform: (
}
return {
delta: message,
- index: generation.index ?? index,
+ index: generation.index,
finish_reason: generation.finishReason
};
}) ?? []
diff --git a/packages/baseai/src/dev/routes/beta/pipes/run.ts b/packages/baseai/src/dev/routes/beta/pipes/run.ts
index ddb88673..5f4b3cc4 100644
--- a/packages/baseai/src/dev/routes/beta/pipes/run.ts
+++ b/packages/baseai/src/dev/routes/beta/pipes/run.ts
@@ -115,15 +115,7 @@ const handleGenerateError = (c: any, error: unknown) => {
const handleRun = async (c: any) => {
try {
const body = await c.req.json();
-
- const llmKey = (body.llmApiKey as string) || '';
- const hiddenChars = new Array(45).fill('*').join('');
- const redactedKey = llmKey.length
- ? llmKey.slice(0, 8) + hiddenChars
- : '';
-
- const logData = { ...body, llmApiKey: redactedKey };
- logger('pipe.request', logData, 'Pipe Request Body');
+ logger('pipe.request', { ...body, llmApiKey: body.llmApiKey ? String(body.llmApiKey).slice(0, 8) + '*'.repeat(45) : '' }, 'Pipe Request Body');
const validatedBody = validateRequestBody(body);
diff --git a/packages/baseai/src/dev/routes/v1/pipes/run.ts b/packages/baseai/src/dev/routes/v1/pipes/run.ts
deleted file mode 100644
index c58d0db2..00000000
--- a/packages/baseai/src/dev/routes/v1/pipes/run.ts
+++ /dev/null
@@ -1,155 +0,0 @@
-import { ApiError, ApiErrorZod } from '@/dev/hono/errors';
-import { callLLM } from '@/dev/llms/call-llm';
-import { dlog } from '@/dev/utils/dlog';
-import { handleStreamingResponse } from '@/dev/utils/provider-handlers/streaming-response-handler';
-import { logger } from '@/utils/logger-utils';
-import { Hono } from 'hono';
-import {
- schemaMessage,
- toolChoiceSchema,
- VariablesSchema,
- type PipeModelT
-} from 'types/pipe';
-import { pipeToolSchema } from 'types/tools';
-import { z } from 'zod';
-
-// Schema definitions
-const PipeSchema = z.object({
- name: z.string(),
- description: z.string(),
- status: z.enum(['public', 'private']),
- model: z.string(),
- stream: z.boolean(),
- json: z.boolean(),
- store: z.boolean(),
- moderate: z.boolean(),
- top_p: z.number(),
- max_tokens: z.number(),
- temperature: z.number(),
- presence_penalty: z.number(),
- frequency_penalty: z.number(),
- stop: z.array(z.string()),
- tool_choice: z
- .union([z.enum(['auto', 'required', 'none']), toolChoiceSchema])
- .default('auto'),
- parallel_tool_calls: z.boolean(),
- messages: z.array(schemaMessage),
- variables: VariablesSchema,
- tools: z.array(pipeToolSchema).default([]),
- memory: z.array(z.object({ name: z.string().trim().min(1) })).default([])
-});
-
-const RequestBodySchema = z.object({
- pipe: PipeSchema,
- stream: z.boolean(),
- messages: z.array(schemaMessage),
- llmApiKey: z.string(),
- tools: z.array(pipeToolSchema).optional(),
- variables: VariablesSchema.optional()
-});
-
-type RequestBody = z.infer;
-
-// Helper functions
-const validateRequestBody = (body: unknown): RequestBody => {
- const result = RequestBodySchema.safeParse(body);
- if (!result.success) {
- throw new ApiErrorZod({
- code: 'BAD_REQUEST',
- validationResult: result,
- customMessage: 'Invalid request body'
- });
- }
- return result.data;
-};
-
-const processLlmResponse = (c: any, body: RequestBody, rawLlmResponse: any) => {
- const isStreaming = body.stream;
-
- // Non-streaming
- if (!isStreaming && rawLlmResponse?.choices?.length > 0) {
- const completion = rawLlmResponse.choices[0]?.message?.content ?? '';
- const toolCalls = rawLlmResponse.choices[0]?.message?.tool_calls ?? [];
- const isToolCall = toolCalls.length > 0;
-
- logger('tool', isToolCall, 'Tool calls found');
- logger('tool.calls', toolCalls);
- logger('pipe.completion', completion, 'Pipe completion');
- logger('pipe.response', rawLlmResponse, 'type: (non-streaming)');
-
- return c.json({ completion, ...rawLlmResponse });
- }
-
- // Streaming
- if (isStreaming) {
- logger('pipe.response', rawLlmResponse, 'type: (streaming)');
- return handleStreamingResponse({
- response: rawLlmResponse,
- headers: {},
- c
- });
- }
- return c.json({ body });
-};
-
-const handleGenerateError = (c: any, error: unknown) => {
- if (error instanceof ApiErrorZod) {
- throw error;
- }
-
- const errorMessage =
- error instanceof Error
- ? error.message
- : 'Unexpected error occurred in /pipe/v1/run';
-
- dlog('Error /pipe/v1/run.ts:', error);
-
- throw new ApiError({
- status: error instanceof ApiError ? error.status : 500,
- code: error instanceof ApiError ? error.code : 'INTERNAL_SERVER_ERROR',
- message: errorMessage,
- docs: error instanceof ApiError ? error.docs : undefined
- });
-};
-
-// Main endpoint handler
-const handleRun = async (c: any) => {
- try {
- const body = await c.req.json();
-
- const llmKey = (body.llmApiKey as string) || '';
- const hiddenChars = new Array(45).fill('*').join('');
- const redactedKey = llmKey.length
- ? llmKey.slice(0, 8) + hiddenChars
- : '';
-
- const logData = { ...body, llmApiKey: redactedKey };
- logger('pipe.request', logData, 'Pipe Request Body');
-
- const validatedBody = validateRequestBody(body);
-
- const { pipe, messages, llmApiKey, stream, variables } = validatedBody;
- const model = pipe.model as PipeModelT;
-
- const rawLlmResponse = await callLLM({
- pipe: {
- ...pipe,
- model
- },
- messages,
- llmApiKey,
- stream,
- variables,
- paramsTools: validatedBody.tools
- });
-
- return processLlmResponse(c, validatedBody, rawLlmResponse);
- } catch (error: unknown) {
- return handleGenerateError(c, error);
- }
-};
-
-// Register the endpoint
-export const registerV1PipesRun = (app: Hono) => {
- app.post('/v1/pipes/run', handleRun);
-};
diff --git a/packages/baseai/src/dev/utils/add-tools-to-params.ts b/packages/baseai/src/dev/utils/add-tools-to-params.ts
index 83816e61..6cbef242 100644
--- a/packages/baseai/src/dev/utils/add-tools-to-params.ts
+++ b/packages/baseai/src/dev/utils/add-tools-to-params.ts
@@ -1,53 +1,30 @@
-import type { Pipe, ToolCall } from 'types/pipe';
-import { getProvider } from './get-provider';
import { getSupportedToolSettings, hasToolSupport } from './has-tool-support';
import type { ModelParams } from 'types/providers';
-import type { PipeTool } from 'types/tools';
-export function addToolsToParams(
- modelParams: ModelParams,
- pipe: Pipe,
- paramsTools: PipeTool[] | undefined
-) {
- const pipeTools = pipe.tools as unknown as string[];
- const hasParamsTools = paramsTools && paramsTools.length > 0;
-
- // 1. If no tools are provided, return the modelParams as is
- if (!hasParamsTools && !pipeTools.length) return modelParams;
-
- const [providerString, modelName] = pipe.model.split(':');
- const provider = getProvider(providerString);
+export function addToolsToParams(modelParams: ModelParams, pipe: any) {
+ if (!pipe.functions.length) return;
// Check if the model supports tool calls
const hasToolCallSupport = hasToolSupport({
- modelName,
- provider
+ modelName: pipe.model.name,
+ provider: pipe.model.provider
});
- // 2. If the model does not support tool calls, return the modelParams as is
- if (!hasToolCallSupport) return modelParams;
+ if (hasToolCallSupport) {
+ const { hasParallelToolCallSupport, hasToolChoiceSupport } =
+ getSupportedToolSettings({
+ modelName: pipe.model.name,
+ provider: pipe.model.provider
+ });
- // If tools are provided in request param, prioritize and use them
- if (hasParamsTools) {
- modelParams.tools = paramsTools as ToolCall[];
- }
+ if (hasParallelToolCallSupport) {
+ modelParams.parallel_tool_calls = pipe.model.parallel_tool_calls;
+ }
- // If tools are not provided in request param, use the tools from the pipe config
- if (!hasParamsTools && pipeTools.length) {
- modelParams.tools = pipe.tools as ToolCall[];
- }
-
- const { hasParallelToolCallSupport, hasToolChoiceSupport } =
- getSupportedToolSettings({
- modelName,
- provider
- });
-
- if (hasParallelToolCallSupport) {
- modelParams.parallel_tool_calls = pipe.parallel_tool_calls;
- }
+ if (hasToolChoiceSupport) {
+ modelParams.tool_choice = pipe.model.tool_choice;
+ }
- if (hasToolChoiceSupport) {
- modelParams.tool_choice = pipe.tool_choice;
+ modelParams.tools = pipe.functions;
}
}
diff --git a/packages/baseai/src/dev/utils/get-provider.ts b/packages/baseai/src/dev/utils/get-provider.ts
deleted file mode 100644
index d434b7e0..00000000
--- a/packages/baseai/src/dev/utils/get-provider.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import {
- ANTHROPIC,
- COHERE,
- FIREWORKS_AI,
- GOOGLE,
- GROQ,
- OLLAMA,
- OPEN_AI,
- PERPLEXITY,
- TOGETHER_AI,
- X_AI
-} from '@/data/models';
-
-type Provider =
- | typeof OPEN_AI
- | typeof ANTHROPIC
- | typeof TOGETHER_AI
- | typeof GOOGLE
- | typeof GROQ
- | typeof COHERE
- | typeof FIREWORKS_AI
- | typeof PERPLEXITY;
-
-/**
- * Retrieves the provider based on the given provider string.
- *
- * @param providerString - The provider string.
- * @returns The corresponding provider object.
- * @throws Error if the provider is unknown.
- */
-export function getProvider(providerString: string): Provider {
- const providerMap: { [key: string]: Provider } = {
- openai: OPEN_AI,
- anthropic: ANTHROPIC,
- together: TOGETHER_AI,
- google: GOOGLE,
- groq: GROQ,
- cohere: COHERE,
- fireworks: FIREWORKS_AI,
- perplexity: PERPLEXITY,
- ollama: OLLAMA,
- xai: X_AI
- };
-
- const provider = providerMap[providerString.toLowerCase()];
- if (!provider) {
- throw new Error(`Unknown provider: ${providerString}`);
- }
- return provider;
-}
diff --git a/packages/baseai/src/dev/utils/moderate.ts b/packages/baseai/src/dev/utils/moderate.ts
index 739cdde7..36b0dc0d 100644
--- a/packages/baseai/src/dev/utils/moderate.ts
+++ b/packages/baseai/src/dev/utils/moderate.ts
@@ -29,10 +29,7 @@ export async function moderate({
}
// Perform moderation on the constructed prompt text
- const moderation = await openai.moderations.create({
- model: 'omni-moderation-latest',
- input: promptText
- });
+ const moderation = await openai.moderations.create({ input: promptText });
const result = moderation?.results[0];
// dlog('moderation:', result);
diff --git a/packages/baseai/src/dev/utils/thread/add-json-mode.ts b/packages/baseai/src/dev/utils/thread/add-json-mode.ts
index 2740e351..9bd6b86e 100644
--- a/packages/baseai/src/dev/utils/thread/add-json-mode.ts
+++ b/packages/baseai/src/dev/utils/thread/add-json-mode.ts
@@ -8,7 +8,7 @@
import { jsonModeModels } from '@/data/models';
import { defaultJsonPrompt } from '@/dev/data/globals';
-import type { Pipe } from 'types/pipe';
+import type { Pipe } from '@/dev/routes/beta/pipes/run';
export function addJsonMode({
pipe,
@@ -18,11 +18,9 @@ export function addJsonMode({
systemPrompt: string;
}) {
// Return the system prompt if JSON mode is not enabled
- if (!pipe?.json) return systemPrompt;
-
- const modelName = pipe.model.split(':')[1];
+ if (!pipe.meta?.json) return systemPrompt;
// Return the system prompt if JSON mode is not supported by the current model
- if (!jsonModeModels.includes(modelName)) return systemPrompt;
+ if (!jsonModeModels.includes(pipe.model.name)) return systemPrompt;
const jsonModePrompt = getJsonPrompt(pipe);
diff --git a/packages/baseai/src/dev/utils/thread/get-few-shot-messages.ts b/packages/baseai/src/dev/utils/thread/get-few-shot-messages.ts
index f7adfcfb..aec9773b 100644
--- a/packages/baseai/src/dev/utils/thread/get-few-shot-messages.ts
+++ b/packages/baseai/src/dev/utils/thread/get-few-shot-messages.ts
@@ -1,4 +1,5 @@
-import type { Message, Pipe } from 'types/pipe';
+import type { Pipe } from '@/dev/routes/beta/pipes/run';
+import type { Message } from 'types/pipe';
export function getPipeFewShotsMessages(pipe: Pipe): Message[] {
const fewShotMessages: Message[] = pipe.messages.filter(
diff --git a/packages/baseai/src/dev/utils/thread/get-run-thread.ts b/packages/baseai/src/dev/utils/thread/get-run-thread.ts
index d4c224f5..0a335a7e 100644
--- a/packages/baseai/src/dev/utils/thread/get-run-thread.ts
+++ b/packages/baseai/src/dev/utils/thread/get-run-thread.ts
@@ -1,6 +1,7 @@
import { ApiError } from '@/dev/hono/errors';
+import type { Pipe } from '@/dev/routes/beta/pipes/run';
import type { SimilarChunk } from '@/utils/memory/db/lib';
-import type { Message, Pipe, VariablesI } from 'types/pipe';
+import type { Message, VariablesI } from 'types/pipe';
import { dlog } from '../dlog';
import { getPipeFewShotsMessages } from './get-few-shot-messages';
import { getSystemPromptMessage } from './get-system-prompt';
diff --git a/packages/baseai/src/dev/utils/thread/get-system-prompt.ts b/packages/baseai/src/dev/utils/thread/get-system-prompt.ts
index f8a5aa6b..f2e0ee7d 100644
--- a/packages/baseai/src/dev/utils/thread/get-system-prompt.ts
+++ b/packages/baseai/src/dev/utils/thread/get-system-prompt.ts
@@ -1,6 +1,7 @@
+import type { Pipe } from '@/dev/routes/beta/pipes/run';
import { defaultRagPrompt } from '@/utils/memory/constants';
import type { SimilarChunk } from '@/utils/memory/db/lib';
-import type { Message, Pipe } from 'types/pipe';
+import type { Message } from 'types/pipe';
import { addJsonMode } from './add-json-mode';
export function getSystemPromptMessage({
diff --git a/packages/baseai/src/dev/utils/thread/process-messages.ts b/packages/baseai/src/dev/utils/thread/process-messages.ts
index 4bd0fc7b..11686743 100644
--- a/packages/baseai/src/dev/utils/thread/process-messages.ts
+++ b/packages/baseai/src/dev/utils/thread/process-messages.ts
@@ -1,4 +1,5 @@
-import type { Message, Pipe, VariableI, VariablesI } from 'types/pipe';
+import type { Pipe } from '@/dev/routes/beta/pipes/run';
+import type { Message, VariableI, VariablesI } from 'types/pipe';
import { dlog } from '../dlog';
/**
diff --git a/packages/baseai/src/memory/create.ts b/packages/baseai/src/memory/create.ts
index 68131a6a..c45d3416 100644
--- a/packages/baseai/src/memory/create.ts
+++ b/packages/baseai/src/memory/create.ts
@@ -20,7 +20,7 @@ const defaultConfig = {
};
const MEMORY_CONSTANTS = {
- documentsDir: 'documents'
+ documentsDir: 'documents' // Path to store documents
};
export async function createMemory() {
@@ -48,7 +48,7 @@ export async function createMemory() {
message: 'Description of the memory',
placeholder: defaultConfig.description
}),
- useGit: () =>
+ useGitRepo: () =>
p.confirm({
message:
'Do you want to create memory from current project git repository?',
@@ -63,72 +63,115 @@ export async function createMemory() {
}
);
- const memoryNameSlugified = slugify(memoryInfo.name);
- const memoryNameCamelCase = camelCase('memory-' + memoryNameSlugified);
- const baseDir = path.join(process.cwd(), 'baseai', 'memory');
- const memoryDir = path.join(baseDir, memoryNameSlugified);
- const filePath = path.join(memoryDir, 'index.ts');
- const dbDir = path.join(process.cwd(), '.baseai', 'db');
+ let memoryFilesDir = '.';
+ let fileExtensions: string[] = ['*'];
- if (memoryInfo.useGit) {
+ if (memoryInfo.useGitRepo) {
+ // Check if the current directory is a Git repository
try {
await execAsync('git rev-parse --is-inside-work-tree');
} catch (error) {
p.cancel('The current directory is not a Git repository.');
process.exit(1);
}
+
+ memoryFilesDir = (await p.text({
+ message:
+ 'Enter the path to the directory to track (relative to current directory):',
+ initialValue: '.',
+ validate: value => {
+ if (!value.trim()) {
+ return 'The path cannot be empty.';
+ }
+ const fullPath = path.resolve(process.cwd(), value);
+ if (!fs.existsSync(fullPath)) {
+ return 'The specified path does not exist.';
+ }
+ if (!fs.lstatSync(fullPath).isDirectory()) {
+ return 'The specified path is not a directory.';
+ }
+ return;
+ }
+ })) as string;
+
+ const extensionsInput = (await p.text({
+ message:
+ 'Enter file extensions to track (use * for all, or comma-separated list, e.g., .md,.mdx):',
+ validate: value => {
+ if (value.trim() === '') {
+ return 'Please enter at least one file extension or *';
+ }
+ if (value !== '*') {
+ const extensions = value.split(',').map(ext => ext.trim());
+ const invalidExtensions = extensions.filter(
+ ext => !/^\.\w+$/.test(ext)
+ );
+ if (invalidExtensions.length > 0) {
+ return `Invalid extension(s): ${invalidExtensions.join(', ')}. Extensions should start with a dot followed by alphanumeric characters.`;
+ }
+ }
+ return;
+ }
+ })) as string;
+
+ fileExtensions =
+ extensionsInput === '*'
+ ? ['*']
+ : extensionsInput.split(',').map(ext => ext.trim());
}
- const memoryContent = `import {MemoryI} from '@baseai/core';
+ const memoryNameSlugified = slugify(memoryInfo.name);
+ const memoryNameCamelCase = camelCase('memory-' + memoryNameSlugified);
+
+ const baseDir = path.join(process.cwd(), 'baseai', 'memory');
+ const memoryDir = path.join(baseDir, memoryNameSlugified);
+ const filePath = path.join(memoryDir, 'index.ts');
+ const memoryDocumentsPath = path.join(
+ memoryDir,
+ MEMORY_CONSTANTS.documentsDir
+ );
+ const dbDir = path.join(process.cwd(), '.baseai', 'db');
+
+ const memoryContent = `import { MemoryI } from '@baseai/core';
+import path from 'path';
const ${memoryNameCamelCase} = (): MemoryI => ({
- name: '${memoryNameSlugified}',
- description: ${JSON.stringify(memoryInfo.description || '')},
- git: {
- enabled: ${memoryInfo.useGit},${
- memoryInfo.useGit
- ? `
- include: ['**/*'],
- gitignore: true,`
- : `
- include: ['${MEMORY_CONSTANTS.documentsDir}/**/*'],
- gitignore: false,`
- }
- deployedAt: '',
- embeddedAt: ''
- }
+ name: '${memoryNameSlugified}',
+ description: ${JSON.stringify(memoryInfo.description || '')},
+ config: {
+ useGitRepo: ${memoryInfo.useGitRepo},
+ dirToTrack: path.posix.join(${memoryFilesDir
+ .split(path.sep)
+ .map(segment => `'${segment}'`)
+ .join(', ')}),
+ extToTrack: ${JSON.stringify(fileExtensions)}
+ }
});
-export default ${memoryNameCamelCase};`;
+export default ${memoryNameCamelCase};
+`;
try {
await fs.promises.mkdir(baseDir, { recursive: true });
await fs.promises.mkdir(memoryDir, { recursive: true });
await fs.promises.writeFile(filePath, memoryContent);
await fs.promises.mkdir(dbDir, { recursive: true });
+ await createDb(memoryNameSlugified);
- if (!memoryInfo.useGit) {
- const memoryDocumentsPath = path.join(
- memoryDir,
- MEMORY_CONSTANTS.documentsDir
- );
+ if (!memoryInfo.useGitRepo) {
await fs.promises.mkdir(memoryDocumentsPath, { recursive: true });
p.note(
`Add documents in baseai/memory/${memoryNameSlugified}/${cyan(`documents`)} to use them in the memory.`
);
} else {
+ const extensionsMsg = fileExtensions.includes('*')
+ ? 'all file types'
+ : `files with extensions: ${cyan(fileExtensions.join(', '))}`;
p.note(
- [
- 'All files in this Git repository will be tracked by default.',
- '',
- `To modify which files are being tracked, update the config at:`,
- cyan(filePath)
- ].join('\n')
+ `All ${extensionsMsg} under ${cyan(memoryFilesDir)} will be tracked and used in the memory.`
);
}
- await createDb(memoryNameSlugified);
-
p.outro(
heading({
text: memoryNameCamelCase,
diff --git a/packages/baseai/src/memory/embed.ts b/packages/baseai/src/memory/embed.ts
index 7fa4aa6b..73a35bd0 100644
--- a/packages/baseai/src/memory/embed.ts
+++ b/packages/baseai/src/memory/embed.ts
@@ -58,7 +58,7 @@ export async function embedMemory({
let filesToEmbed: string[] = [];
let filesToDelete: string[] = [];
- if (memoryConfig.git.enabled) {
+ if (memoryConfig?.useGitRepo) {
const { filesToDeploy, filesToDelete: gitFilesToDelete } =
await handleGitSyncMemories({
memoryName: memoryName,
@@ -78,7 +78,7 @@ export async function embedMemory({
let embedResult = 'Embeddings updated.';
if (memoryFiles && memoryFiles.length > 0) {
s.message('Generating embeddings...');
- const shouldOverwrite = memoryConfig.git.enabled ? true : overwrite;
+ const shouldOverwrite = memoryConfig?.useGitRepo ? true : overwrite;
embedResult = await generateEmbeddings({
memoryFiles,
memoryName,
@@ -87,7 +87,7 @@ export async function embedMemory({
});
}
- if (memoryConfig.git.enabled) {
+ if (memoryConfig?.useGitRepo) {
if (filesToDelete.length > 0) {
await deleteDocumentsFromDB({
memoryName,
diff --git a/packages/baseai/src/pipe/index.ts b/packages/baseai/src/pipe/index.ts
index 48b3ec81..0ba6cb93 100644
--- a/packages/baseai/src/pipe/index.ts
+++ b/packages/baseai/src/pipe/index.ts
@@ -38,7 +38,7 @@ export async function createPipe() {
name: () =>
p.text({
message: 'Name of the pipe',
- placeholder: 'ai-agent-pipe',
+ placeholder: 'AI Pipe Agent',
validate: value => {
const result = pipeNameSchema.safeParse(value);
if (!result.success) {
@@ -135,7 +135,7 @@ const ${pipeNameCamelCase} = (): PipeI => ({
// Replace with your API key https://langbase.com/docs/api-reference/api-keys
apiKey: process.env.LANGBASE_API_KEY!,
name: '${pipeNameSlugified}',
- description: ${JSON.stringify(pipeInfo.description) || ''},
+ description: ${JSON.stringify(pipeInfo.description || '')},
status: '${pipeInfo.status}',
model: 'openai:gpt-4o-mini',
stream: true,
diff --git a/packages/baseai/src/tool/index.ts b/packages/baseai/src/tool/index.ts
index d461bd8f..4239a9ee 100644
--- a/packages/baseai/src/tool/index.ts
+++ b/packages/baseai/src/tool/index.ts
@@ -110,7 +110,7 @@ const ${camelCaseNameToolName} = (): ToolI => ({
type: 'function' as const,
function: {
name: '${camelCaseNameToolName}',
- description: ${JSON.stringify(description) || ''},
+ description: ${JSON.stringify(description || '')},
parameters: {},
},
});
diff --git a/packages/baseai/src/utils/memory/git-sync/get-changed-files-between-commits.ts b/packages/baseai/src/utils/memory/git-sync/get-changed-files-between-commits.ts
index a128f386..f8d582bf 100644
--- a/packages/baseai/src/utils/memory/git-sync/get-changed-files-between-commits.ts
+++ b/packages/baseai/src/utils/memory/git-sync/get-changed-files-between-commits.ts
@@ -1,23 +1,23 @@
import { execSync } from 'child_process';
/**
- * Retrieves the list of changed and deleted files between two Git commits matching specified glob patterns.
+ * Retrieves the list of changed and deleted files between two Git commits within a specified directory.
*
* @param {Object} params - The parameters for the function.
* @param {string} params.oldCommit - The old commit reference to compare from.
* @param {string} [params.latestCommit='HEAD'] - The latest commit reference to compare to. Defaults to 'HEAD'.
- * @param {string[]} params.include - Array of glob patterns to track for changes.
+ * @param {string} params.dirToTrack - The directory to track for changes.
* @returns {Promise<{ changedFiles: string[]; deletedFiles: string[] }>} - A promise that resolves to an object containing arrays of changed and deleted files.
* @throws {Error} - Throws an error if the Git command execution fails or if the commit references are invalid.
*/
export async function getChangedAndDeletedFilesBetweenCommits({
oldCommit,
latestCommit = 'HEAD',
- include
+ dirToTrack
}: {
oldCommit: string;
latestCommit: string;
- include: string[];
+ dirToTrack: string;
}): Promise<{ changedFiles: string[]; deletedFiles: string[] }> {
try {
// Validate inputs
@@ -25,53 +25,30 @@ export async function getChangedAndDeletedFilesBetweenCommits({
throw new Error('Invalid commit references');
}
- if (!Array.isArray(include) || include.length === 0) {
- throw new Error('Include patterns must be a non-empty array');
- }
-
const repoPath = process.cwd();
- // Execute the Git commands for changed and deleted files
- const changedResult = execSync(
- constructGitCommand({
- include,
- oldCommit,
- diffFilter: 'ACMRT',
- latestCommit
- }),
- {
- encoding: 'utf-8',
- cwd: repoPath
- }
- ).trim();
+ // Construct the Git commands to get changed and deleted files in the specific directory
+ const changedCommand = `git diff --diff-filter=ACMRT --name-only ${oldCommit} ${latestCommit} -- ${dirToTrack}`;
+ const deletedCommand = `git diff --diff-filter=D --name-only ${oldCommit} ${latestCommit} -- ${dirToTrack}`;
+
+ // Execute the Git commands
+ const changedResult = execSync(changedCommand, {
+ encoding: 'utf-8',
+ cwd: repoPath
+ }).trim();
- const deletedResult = execSync(
- constructGitCommand({
- include,
- oldCommit,
- diffFilter: 'D',
- latestCommit
- }),
- {
- encoding: 'utf-8',
- cwd: repoPath
- }
- ).trim();
+ const deletedResult = execSync(deletedCommand, {
+ encoding: 'utf-8',
+ cwd: repoPath
+ }).trim();
// Process the results
- const changedFiles = changedResult
- ? changedResult
- .split('\n')
- .filter(Boolean)
- .map(file => file.replace(/\//g, '-'))
- : [];
+ let changedFiles = changedResult.split('\n').filter(Boolean);
+ let deletedFiles = deletedResult.split('\n').filter(Boolean);
- const deletedFiles = deletedResult
- ? deletedResult
- .split('\n')
- .filter(Boolean)
- .map(file => file.replace(/\//g, '-'))
- : [];
+ // Resolve full paths
+ changedFiles = changedFiles.map(file => file.replace(/\//g, '-'));
+ deletedFiles = deletedFiles.map(file => file.replace(/\//g, '-'));
return { changedFiles, deletedFiles };
} catch (error) {
@@ -79,27 +56,3 @@ export async function getChangedAndDeletedFilesBetweenCommits({
throw error;
}
}
-
-// Helper function to construct the Git command for changed files
-const constructGitCommand = ({
- include,
- oldCommit,
- diffFilter,
- latestCommit
-}: {
- include: string[];
- oldCommit: string;
- diffFilter: 'ACMRT' | 'D';
- latestCommit: string;
-}) => {
- const baseCommand = `git diff --diff-filter=${diffFilter} --name-only ${oldCommit} ${latestCommit}`;
-
- // If there's only one pattern, use it directly
- if (include.length === 1) {
- return `${baseCommand} -- "${include[0]}"`;
- }
-
- // For multiple patterns, use brace expansion
- const patterns = include.map(pattern => `"${pattern}"`).join(' ');
- return `${baseCommand} -- ${patterns}`;
-};
diff --git a/packages/baseai/src/utils/memory/git-sync/handle-git-sync-memories.ts b/packages/baseai/src/utils/memory/git-sync/handle-git-sync-memories.ts
index 5bb638ee..207b5bb9 100644
--- a/packages/baseai/src/utils/memory/git-sync/handle-git-sync-memories.ts
+++ b/packages/baseai/src/utils/memory/git-sync/handle-git-sync-memories.ts
@@ -72,13 +72,13 @@ export async function handleGitSyncMemories({
// If there's no deployedCommitHash, user is deploying for the first time
// Deploy all files in the directory
const lastHashUsed = isEmbed
- ? config.git?.embeddedAt
- : config.git?.deployedAt;
+ ? config.embeddedCommitHash
+ : config.deployedCommitHash;
if (!lastHashUsed) {
filesToDeploy = allFiles;
p.log.info(
- `Found no previous ${isEmbed ? 'deployed' : 'embedded'} commit. ${isEmbed ? 'Deploying' : 'Embedding'} all ${filesToDeploy.length} files in memory "${memoryName}":`
+ `Found no previous deployed commit. Deploying all ${filesToDeploy.length} files in memory "${memoryName}":`
);
}
// Step 2.2: Otherwise, get changed files between commits
@@ -87,7 +87,7 @@ export async function handleGitSyncMemories({
await getChangedAndDeletedFilesBetweenCommits({
oldCommit: lastHashUsed,
latestCommit: 'HEAD',
- include: config.git.include
+ dirToTrack: config.dirToTrack
});
filesToDeploy = changedFiles;
diff --git a/packages/baseai/src/utils/memory/git-sync/save-deployed-commit-in-config.ts b/packages/baseai/src/utils/memory/git-sync/save-deployed-commit-in-config.ts
index 6c344f44..a8ddff82 100644
--- a/packages/baseai/src/utils/memory/git-sync/save-deployed-commit-in-config.ts
+++ b/packages/baseai/src/utils/memory/git-sync/save-deployed-commit-in-config.ts
@@ -19,108 +19,27 @@ export async function saveDeployedCommitHashInMemoryConfig({
const indexFilePath = path.join(memoryDir, 'index.ts');
let fileContents = await fs.readFile(indexFilePath, 'utf-8');
- // Check if the git block exists
- if (fileContents.includes('git:')) {
- // Find the git block including its indentation
- const gitBlockMatch = fileContents.match(/(\t*)git:\s*{[^}]*?}/);
- if (gitBlockMatch) {
- const [fullMatch, outerIndent] = gitBlockMatch;
- const innerIndent = outerIndent + '\t';
-
- // Parse existing content
- const contentMatch = fullMatch.match(
- /{\s*\n?\s*(.*?)\s*\n?\s*}/s
- );
- let existingContent = contentMatch ? contentMatch[1] : '';
-
- let contentLines = existingContent
- .split('\n')
- .map(line => line.trim().replace(/,\s*$/, '')) // Remove trailing commas
- .filter(Boolean);
-
- let newGitContent: string;
-
- // If deployedAt exists, update it while preserving formatting
- if (existingContent.includes('deployedAt:')) {
- contentLines = contentLines.map(line => {
- if (line.includes('deployedAt:')) {
- return `deployedAt: '${deployedCommitHash}'`;
- }
- return line;
- });
- } else {
- // Add deployedAt to existing content
- contentLines.push(`deployedAt: '${deployedCommitHash}'`);
- }
-
- // Add commas between lines but not after the last line
- newGitContent = contentLines
- .map((line, index) => {
- const isLast = index === contentLines.length - 1;
- return `${innerIndent}${line}${isLast ? '' : ','}`;
- })
- .join('\n');
-
- // Replace the old git block with the new one
- fileContents = fileContents.replace(
- /(\t*)git:\s*{[^}]*?}/,
- `${outerIndent}git: {\n${newGitContent}\n${outerIndent}}`
- );
- }
+ // Check if the deployedCommitHash already exists in the config
+ if (fileContents.includes('deployedCommitHash:')) {
+ // Update the existing deployedCommitHash
+ fileContents = fileContents.replace(
+ /deployedCommitHash:\s*['"].*['"]/,
+ `deployedCommitHash: '${deployedCommitHash}'`
+ );
} else {
- // Add new git config block
- const match = fileContents.match(
- /(?:const\s+\w+\s*=\s*\(\s*\)\s*(?::\s*\w+)?\s*=>\s*\({[\s\S]*?)(}\))/
+ // Add the deployedCommitHash to the config
+ fileContents = fileContents.replace(
+ /config:\s*{/,
+ `config: {\n deployedCommitHash: '${deployedCommitHash}',`
);
-
- if (match) {
- // Insert before the closing parenthesis
- const insertPosition =
- match.index! + match[0].length - match[1].length;
- const prefix = fileContents.slice(0, insertPosition);
- const suffix = fileContents.slice(insertPosition);
-
- // Match the indentation of nearby properties
- const indentMatch = prefix.match(/\n(\t+)[^\n]+\n\s*$/);
- const baseIndent = indentMatch ? indentMatch[1] : '\t';
- const innerIndent = baseIndent + '\t';
-
- const lines = [
- 'enabled: false',
- "include: ['**/*']",
- 'gitignore: false',
- `deployedAt: '${deployedCommitHash}'`
- ];
-
- const gitConfig = lines
- .map((line, index) => {
- const isLast = index === lines.length - 1;
- return `${innerIndent}${line}${isLast ? '' : ','}`;
- })
- .join('\n');
-
- fileContents = `${prefix},\n${baseIndent}git: {\n${gitConfig}\n${baseIndent}}${suffix}`;
- } else {
- throw new Error(
- 'Could not find appropriate location to insert git config'
- );
- }
}
// Write the updated contents back to the file
await fs.writeFile(indexFilePath, fileContents, 'utf-8');
- p.log.success(`Updated deployedAt hash for memory '${memoryName}'.`);
+ p.log.success(`Updated deployedCommitHash for memory '${memoryName}'.`);
} catch (error) {
- if (error instanceof Error) {
- p.cancel(
- `Failed to save deployedAt hash for memory '${memoryName}': ${error.message}`
- );
- } else {
- p.cancel(
- `Failed to save deployedAt hash for memory '${memoryName}': Unknown error`
- );
- }
+ console.error(`Error saving latest commit hash: ${error}`);
throw error;
}
}
diff --git a/packages/baseai/src/utils/memory/git-sync/save-embedded-commit-in-config.ts b/packages/baseai/src/utils/memory/git-sync/save-embedded-commit-in-config.ts
index 52051b12..edef22d2 100644
--- a/packages/baseai/src/utils/memory/git-sync/save-embedded-commit-in-config.ts
+++ b/packages/baseai/src/utils/memory/git-sync/save-embedded-commit-in-config.ts
@@ -19,108 +19,27 @@ export async function saveEmbeddedCommitHashInMemoryConfig({
const indexFilePath = path.join(memoryDir, 'index.ts');
let fileContents = await fs.readFile(indexFilePath, 'utf-8');
- // Check if the git block exists
- if (fileContents.includes('git:')) {
- // Find the git block including its indentation
- const gitBlockMatch = fileContents.match(/(\t*)git:\s*{[^}]*?}/);
- if (gitBlockMatch) {
- const [fullMatch, outerIndent] = gitBlockMatch;
- const innerIndent = outerIndent + '\t';
-
- // Parse existing content
- const contentMatch = fullMatch.match(
- /{\s*\n?\s*(.*?)\s*\n?\s*}/s
- );
- let existingContent = contentMatch ? contentMatch[1] : '';
-
- let contentLines = existingContent
- .split('\n')
- .map(line => line.trim().replace(/,\s*$/, '')) // Remove trailing commas
- .filter(Boolean);
-
- let newGitContent: string;
-
- // If embeddedAt exists, update it while preserving formatting
- if (existingContent.includes('embeddedAt:')) {
- contentLines = contentLines.map(line => {
- if (line.includes('embeddedAt:')) {
- return `embeddedAt: '${embeddedCommitHash}'`;
- }
- return line;
- });
- } else {
- // Add embeddedAt to existing content
- contentLines.push(`embeddedAt: '${embeddedCommitHash}'`);
- }
-
- // Add commas between lines but not after the last line
- newGitContent = contentLines
- .map((line, index) => {
- const isLast = index === contentLines.length - 1;
- return `${innerIndent}${line}${isLast ? '' : ','}`;
- })
- .join('\n');
-
- // Replace the old git block with the new one
- fileContents = fileContents.replace(
- /(\t*)git:\s*{[^}]*?}/,
- `${outerIndent}git: {\n${newGitContent}\n${outerIndent}}`
- );
- }
+ // Check if the embeddedCommitHash already exists in the config
+ if (fileContents.includes('embeddedCommitHash:')) {
+ // Update the existing embeddedCommitHash
+ fileContents = fileContents.replace(
+ /embeddedCommitHash:\s*['"].*['"]/,
+ `embeddedCommitHash: '${embeddedCommitHash}'`
+ );
} else {
- // Add new git config block
- const match = fileContents.match(
- /(?:const\s+\w+\s*=\s*\(\s*\)\s*(?::\s*\w+)?\s*=>\s*\({[\s\S]*?)(}\))/
+ // Add the embeddedCommitHash to the config
+ fileContents = fileContents.replace(
+ /config:\s*{/,
+ `config: {\n embeddedCommitHash: '${embeddedCommitHash}',`
);
-
- if (match) {
- // Insert before the closing parenthesis
- const insertPosition =
- match.index! + match[0].length - match[1].length;
- const prefix = fileContents.slice(0, insertPosition);
- const suffix = fileContents.slice(insertPosition);
-
- // Match the indentation of nearby properties
- const indentMatch = prefix.match(/\n(\t+)[^\n]+\n\s*$/);
- const baseIndent = indentMatch ? indentMatch[1] : '\t';
- const innerIndent = baseIndent + '\t';
-
- const lines = [
- 'enabled: false',
- "include: ['**/*']",
- 'gitignore: false',
- `embeddedAt: '${embeddedCommitHash}'`
- ];
-
- const gitConfig = lines
- .map((line, index) => {
- const isLast = index === lines.length - 1;
- return `${innerIndent}${line}${isLast ? '' : ','}`;
- })
- .join('\n');
-
- fileContents = `${prefix},\n${baseIndent}git: {\n${gitConfig}\n${baseIndent}}${suffix}`;
- } else {
- throw new Error(
- 'Could not find appropriate location to insert git config'
- );
- }
}
// Write the updated contents back to the file
await fs.writeFile(indexFilePath, fileContents, 'utf-8');
- p.log.success(`Updated embeddedAt hash for memory '${memoryName}'.`);
+ p.log.success(`Updated embeddedCommitHash for memory '${memoryName}'.`);
} catch (error) {
- if (error instanceof Error) {
- p.cancel(
- `Failed to save embeddedAt hash for memory '${memoryName}': ${error.message}`
- );
- } else {
- p.cancel(
- `Failed to save embeddedAt hash for memory '${memoryName}': Unknown error`
- );
- }
+ console.error(`Error saving latest commit hash: ${error}`);
throw error;
}
}
diff --git a/packages/baseai/src/utils/memory/handle-old-memory-config.ts b/packages/baseai/src/utils/memory/handle-old-memory-config.ts
deleted file mode 100644
index e9bd9b4d..00000000
--- a/packages/baseai/src/utils/memory/handle-old-memory-config.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Represents the old memory configuration format for backward compatibility.
- */
-export interface OldMemoryConfig {
- name: string;
- description?: string;
- config?: OldConfigObject;
-}
-
-interface OldConfigObject {
- useGitRepo: boolean;
- dirToTrack: string;
- extToTrack: string[] | ['*'];
- deployedCommitHash?: string;
- embeddedCommitHash?: string;
-}
-
-/**
- * Type guard to check if an object is of type `OldConfigObject`.
- *
- * @param obj - The object to check.
- * @returns `true` if the object is an `OldConfigObject`, otherwise `false`.
- */
-function isOldConfigObject(obj: unknown): obj is OldConfigObject {
- return (
- typeof obj === 'object' &&
- obj !== null &&
- 'useGitRepo' in obj &&
- typeof (obj as OldConfigObject).useGitRepo === 'boolean' &&
- 'dirToTrack' in obj &&
- typeof (obj as OldConfigObject).dirToTrack === 'string' &&
- 'extToTrack' in obj &&
- Array.isArray((obj as OldConfigObject).extToTrack)
- );
-}
-
-/**
- * Checks if an object conforms to the old memory configuration format.
- *
- * @param obj - The object to check.
- * @returns `true` if the object is in the old memory configuration format, otherwise `false`.
- */
-export function isOldMemoryConfigFormat(obj: unknown): boolean {
- if (
- typeof obj !== 'object' ||
- obj === null ||
- !('name' in obj) ||
- !('config' in obj)
- ) {
- return false;
- }
-
- const typedObj = obj as { name: unknown; config: unknown };
-
- return (
- typeof typedObj.name === 'string' &&
- (typedObj.config === undefined || isOldConfigObject(typedObj.config))
- );
-}
-
-/**
- * Generates upgrade instructions for converting an old memory configuration to the new format.
- *
- * @param oldConfig - The old memory configuration.
- * @returns A string containing the upgrade instructions.
- */
-export function generateUpgradeInstructions(
- oldConfig: OldMemoryConfig
-): string {
- if (!oldConfig.config) {
- return 'Invalid memory config.';
- }
-
- const newConfigExample = {
- name: oldConfig.name,
- description: oldConfig.description || 'Your memory description',
- git: {
- enabled: oldConfig.config.useGitRepo,
- include:
- oldConfig.config.extToTrack[0] === '*'
- ? [`${oldConfig.config.dirToTrack}/**/*`]
- : oldConfig.config.extToTrack.map(
- ext => `${oldConfig.config?.dirToTrack}/**/*${ext}`
- ),
- gitignore: true,
- deployedAt: oldConfig.config.deployedCommitHash || '',
- embeddedAt: oldConfig.config.embeddedCommitHash || ''
- }
- };
-
- return `
-Your memory config is using an outdated format in baseai/memory/${oldConfig.name}/index.ts. Please update the file to this new format:
-
-${JSON.stringify(newConfigExample, null, 2)}
-
-Key changes:
-- Removed nested 'config' object structure
-- Git-related fields are now grouped under a 'git' object
-- 'useGitRepo' is now 'git.enabled'
-- 'dirToTrack' and 'extToTrack' are combined into 'git.include' glob patterns
-- 'deployedCommitHash' is now 'git.deployedAt'
-- 'embeddedCommitHash' is now 'git.embeddedAt'
-- Added new 'git.gitignore' field (defaults to true)
-
-For more information, refer to the documentation: https://baseai.dev/docs/guides/memory-from-git
-`;
-}
diff --git a/packages/baseai/src/utils/memory/lib.ts b/packages/baseai/src/utils/memory/lib.ts
index a567ceb0..516878c0 100644
--- a/packages/baseai/src/utils/memory/lib.ts
+++ b/packages/baseai/src/utils/memory/lib.ts
@@ -7,6 +7,7 @@ import * as p from '@clack/prompts';
import fs from 'fs';
import type { Message } from 'types/pipe';
import { fromZodError } from 'zod-validation-error';
+import type { Pipe } from '../../dev/routes/beta/pipes/run';
import { defaultRagPrompt, MEMORYSETS } from './constants';
import {
cosineSimilaritySearch,
@@ -134,9 +135,11 @@ export const getAugmentedContext = ({
};
export const addContextFromMemory = async ({
+ pipe,
messages,
memoryNames
}: {
+ pipe: Pipe;
messages: Message[];
memoryNames: string[];
}) => {
diff --git a/packages/baseai/src/utils/memory/load-memory-config.ts b/packages/baseai/src/utils/memory/load-memory-config.ts
index 2022d83c..8c29d07e 100644
--- a/packages/baseai/src/utils/memory/load-memory-config.ts
+++ b/packages/baseai/src/utils/memory/load-memory-config.ts
@@ -2,60 +2,77 @@ import fs from 'fs/promises';
import path from 'path';
import * as p from '@clack/prompts';
import { memoryConfigSchema, type MemoryConfigI } from 'types/memory';
-import {
- generateUpgradeInstructions,
- isOldMemoryConfigFormat,
- type OldMemoryConfig
-} from './handle-old-memory-config';
-function extractConfigObject(fileContents: string): unknown {
- try {
- // Remove import statements and exports
- const cleanedContent = fileContents
- .replace(/import\s+.*?['"];?\s*/g, '')
- .replace(/export\s+default\s+/, '');
-
- // First try to match a function that returns an object directly with parentheses
- let match = cleanedContent.match(
- /(?:const\s+)?(\w+)\s*=\s*\(\s*\)\s*(?::\s*\w+)?\s*=>\s*\(({[\s\S]*?})\)/
- );
+function parsePathJoin(joinArgs: string): string {
+ // Remove any quotes, split by comma, and trim each argument
+ const args = joinArgs
+ .split(',')
+ .map(arg => arg.trim().replace(/['"]/g, ''));
+ // Join all arguments to preserve the complete path
+ return path.join(...args);
+}
- // If no direct parentheses match, try to match function with return statement
- if (!match) {
- match = cleanedContent.match(
- /(?:const\s+)?(\w+)\s*=\s*\(\s*\)\s*(?::\s*\w+)?\s*=>\s*\{[\s\S]*?return\s+({[\s\S]*?})\s*;\s*\}/
- );
- }
+function parseConfig(configString: string): MemoryConfigI {
+ // Remove all whitespace that's not inside quotes
+ const cleanConfig = configString.replace(
+ /\s+(?=(?:(?:[^"]*"){2})*[^"]*$)/g,
+ ''
+ );
- // If still no match, try to match direct object assignment
- if (!match) {
- match = cleanedContent.match(
- /(?:const\s+)?(?:memory|\w+)\s*=\s*({[\s\S]*?});?$/m
- );
- }
+ const useGitRepoMatch = cleanConfig.match(/useGitRepo:(true|false)/);
+ const dirToTrackMatch = cleanConfig.match(
+ /dirToTrack:(?:path\.(?:posix\.)?join\((.*?)\)|['"](.+?)['"])/
+ );
+ const extToTrackMatch = cleanConfig.match(/extToTrack:(\[.*?\])/);
+ const deployedCommitHashMatch = cleanConfig.match(
+ /deployedCommitHash:['"](.+?)['"]/
+ );
+ const embeddedCommitHashMatch = cleanConfig.match(
+ /embeddedCommitHash:['"](.+?)['"]/
+ );
- if (!match) {
- throw new Error('Unable to find memory object definition');
- }
+ if (!useGitRepoMatch || !dirToTrackMatch || !extToTrackMatch) {
+ throw new Error('Unable to parse config structure');
+ }
- // The object literal will be in the last capture group
- const memoryObjStr = match[match.length - 1];
+ const useGitRepo = useGitRepoMatch[1] === 'true';
+ const dirToTrack = dirToTrackMatch[2]
+ ? dirToTrackMatch[2]
+ : parsePathJoin(dirToTrackMatch[1]);
+ const extToTrack = JSON.parse(extToTrackMatch[1].replace(/'/g, '"'));
+ const deployedCommitHash = deployedCommitHashMatch
+ ? deployedCommitHashMatch[1]
+ : undefined;
+ const embeddedCommitHash = embeddedCommitHashMatch
+ ? embeddedCommitHashMatch[1]
+ : undefined;
- // Create a new Function that returns the object literal
- const fn = new Function(`return ${memoryObjStr}`);
- return fn();
- } catch (error) {
- console.error('Parsing error:', error);
- console.error('File contents:', fileContents);
- throw new Error(
- `Failed to extract config: ${error instanceof Error ? error.message : 'Unknown error'}`
- );
+ const config: MemoryConfigI = {
+ useGitRepo,
+ dirToTrack,
+ extToTrack
+ };
+
+ if (deployedCommitHash) {
+ config.deployedCommitHash = deployedCommitHash;
}
+
+ if (embeddedCommitHash) {
+ config.embeddedCommitHash = embeddedCommitHash;
+ }
+
+ // Validate the parsed config against the schema
+ const result = memoryConfigSchema.safeParse(config);
+ if (!result.success) {
+ throw new Error(`Invalid config: ${result.error.message}`);
+ }
+
+ return config;
}
export default async function loadMemoryConfig(
memoryName: string
-): Promise {
+): Promise {
try {
const memoryDir = path.join(
process.cwd(),
@@ -65,35 +82,46 @@ export default async function loadMemoryConfig(
);
const indexFilePath = path.join(memoryDir, 'index.ts');
+ // Check if the directory exists
+ await fs.access(memoryDir);
+
+ // Check if the index.ts file exists
await fs.access(indexFilePath);
+
+ // Read the file contents
const fileContents = await fs.readFile(indexFilePath, 'utf-8');
- const configObj = extractConfigObject(fileContents);
- // Try to parse with new schema first
+ // Extract the config object, allowing for any amount of whitespace
+ const configMatch = fileContents.match(/config\s*:\s*({[\s\S]*?})/);
+ if (!configMatch) {
+ return null;
+ }
+
+ // Parse the config
try {
- return memoryConfigSchema.parse(configObj);
- } catch (parseError) {
- if (!configObj) throw parseError;
-
- // If parsing fails, check if it's an old format
- if (isOldMemoryConfigFormat(configObj)) {
- p.note(
- generateUpgradeInstructions(configObj as OldMemoryConfig)
+ const config = parseConfig(configMatch[1]);
+ return config;
+ } catch (error) {
+ if (error instanceof Error) {
+ p.cancel(
+ `Unable to read config in '${memoryName}/index.ts': ${error.message}`
);
+ } else {
p.cancel(
- 'Deployment cancelled. Please update your memory config file to the new format.'
+ `Unable to read config in '${memoryName}/index.ts': Unknown error occurred`
);
- process.exit(1);
}
-
- // If it's neither new nor old format, throw the original error
- throw parseError;
+ process.exit(1);
}
} catch (error) {
if (error instanceof Error) {
- p.cancel(`Failed to load memory '${memoryName}': ${error.message}`);
+ p.cancel(
+ `Memory '${memoryName}' does not exist or could not be loaded: ${error.message}`
+ );
} else {
- p.cancel(`Failed to load memory '${memoryName}': Unknown error`);
+ p.cancel(
+ `Memory '${memoryName}' does not exist or could not be loaded: Unknown error occurred`
+ );
}
process.exit(1);
}
diff --git a/packages/baseai/src/utils/memory/load-memory-files.ts b/packages/baseai/src/utils/memory/load-memory-files.ts
index e643309e..ebaafe24 100644
--- a/packages/baseai/src/utils/memory/load-memory-files.ts
+++ b/packages/baseai/src/utils/memory/load-memory-files.ts
@@ -5,21 +5,14 @@ import { allSupportedExtensions, MEMORYSETS } from './constants';
import { getDocumentContent } from './get-document-content';
import { formatDocSize } from './lib';
import loadMemoryConfig from './load-memory-config';
-import {
- memoryConfigSchema,
- type DocumentConfigI,
- type MemoryConfigI
-} from 'types/memory';
+import { memoryConfigSchema, type MemoryConfigI } from 'types/memory';
import { execSync } from 'child_process';
-import fg from 'fast-glob';
export interface MemoryDocumentI {
name: string;
size: string;
content: string;
blob: Blob;
- path: string;
- meta: Record;
}
export const loadMemoryFiles = async (
@@ -29,12 +22,11 @@ export const loadMemoryFiles = async (
const memoryConfig = await checkMemoryConfig(memoryName);
// useDocumentsDir
- const useDocumentsDir = !memoryConfig || !memoryConfig.git.enabled;
- const documentConfig = memoryConfig?.documents;
+ const useDocumentsDir = !memoryConfig || !memoryConfig?.useGitRepo;
// Load files from documents directory.
if (useDocumentsDir) {
- return await loadMemoryFilesFromDocsDir({ memoryName, documentConfig });
+ return await loadMemoryFilesFromDocsDir(memoryName);
}
// Load files from the repo.
@@ -58,52 +50,46 @@ export const loadMemoryFilesFromCustomDir = async ({
memoryName: string;
memoryConfig: MemoryConfigI;
}): Promise => {
- const includePatterns = memoryConfig.git.include;
+ const memoryFilesPath = memoryConfig.dirToTrack;
- if (!Array.isArray(includePatterns) || includePatterns.length === 0) {
- p.cancel(`No include patterns specified for memory '${memoryName}'`);
+ try {
+ await fs.access(memoryFilesPath);
+ } catch (error) {
+ p.cancel(
+ `Documents directory for memory '${memoryName}' does not exist.`
+ );
process.exit(1);
}
console.log('Reading documents in memory...');
- // Get all files that match the glob patterns and are tracked by git
let allFiles: string[];
try {
- // First get all git tracked files
- const gitFiles = new Set([
- ...execSync('git ls-files', { encoding: 'utf-8' })
- .split('\n')
- .filter(Boolean),
- ...execSync('git ls-files --others --exclude-standard', {
- encoding: 'utf-8'
- })
- .split('\n')
- .filter(Boolean),
- ...execSync('git diff --name-only', { encoding: 'utf-8' })
- .split('\n')
- .filter(Boolean)
- ]);
-
- // Then match against glob patterns
- const matchedFiles = await fg(includePatterns, {
- ignore: ['node_modules/**'],
- dot: true,
- gitignore: memoryConfig.git.gitignore || true
- });
-
- // Only keep files that are both tracked by git and match the patterns
- allFiles = matchedFiles.filter((file: string) => gitFiles.has(file));
+ allFiles = execSync(`git ls-files ${memoryFilesPath}`, {
+ encoding: 'utf-8'
+ })
+ .split('\n')
+ .filter(Boolean);
} catch (error) {
p.cancel(`Failed to read documents in memory '${memoryName}'.`);
process.exit(1);
}
+ // Check if all extensions are allowed.
+ const allExtensionsAllowed = memoryConfig.extToTrack[0] === '*';
+
+ // Filter files based on allowed extensions.
+ const extensionsToUse = allExtensionsAllowed
+ ? allSupportedExtensions
+ : memoryConfig.extToTrack.filter(ext =>
+ allSupportedExtensions.includes(ext)
+ );
+
const memoryFilesContent = await Promise.all(
allFiles.map(async filePath => {
- // Check if the file is allowed
- const isSupportedExtension = allSupportedExtensions.some(
- extension => filePath.endsWith(extension)
+ // Check if the file is allowed.
+ const isSupportedExtension = extensionsToUse.some(extension =>
+ filePath.endsWith(extension)
);
if (!isSupportedExtension) {
@@ -128,21 +114,12 @@ export const loadMemoryFilesFromCustomDir = async ({
return null;
}
- const memoryFile = {
- path: filePath,
+ return {
name: path.basename(filePath.replace(/\//g, '-')),
size: formatDocSize(fileContentBlob.size),
content: await getDocumentContent(fileContentBlob),
blob: fileContentBlob
};
-
- let meta = {};
-
- if (memoryConfig?.documents?.meta) {
- meta = memoryConfig.documents.meta(memoryFile) || {};
- }
-
- return { ...memoryFile, meta };
})
);
@@ -175,13 +152,9 @@ export const loadMemoryFilesFromCustomDir = async ({
* - Have unsupported file extensions.
* 5. Returns an array of `MemoryDocumentI` objects representing the valid memory files.
*/
-export const loadMemoryFilesFromDocsDir = async ({
- memoryName,
- documentConfig
-}: {
- memoryName: string;
- documentConfig?: DocumentConfigI;
-}): Promise => {
+export const loadMemoryFilesFromDocsDir = async (
+ memoryName: string
+): Promise => {
const memoryDir = path.join(process.cwd(), 'baseai', 'memory', memoryName);
const memoryFilesPath = path.join(memoryDir, 'documents');
@@ -234,21 +207,12 @@ export const loadMemoryFilesFromDocsDir = async ({
return null;
}
- const memoryFile = {
+ return {
name: file,
- path: filePath,
size: formatDocSize(fileContentBlob.size),
content: await getDocumentContent(fileContentBlob),
blob: fileContentBlob
};
-
- let meta = {};
-
- if (documentConfig?.meta) {
- meta = documentConfig.meta(memoryFile) || {};
- }
-
- return { ...memoryFile, meta };
})
);
@@ -299,6 +263,28 @@ async function checkMemoryConfig(
};
}
+/**
+ * Recursively traverses a directory and returns a list of all file paths.
+ *
+ * @param dir - The directory to traverse.
+ * @returns A promise that resolves to an array of file paths.
+ */
+const traverseDirectory = async (dir: string): Promise => {
+ const files: string[] = [];
+ const entries = await fs.readdir(dir, { withFileTypes: true });
+
+ for (const entry of entries) {
+ const fullPath = path.join(dir, entry.name);
+ if (entry.isDirectory()) {
+ files.push(...(await traverseDirectory(fullPath)));
+ } else {
+ files.push(fullPath);
+ }
+ }
+
+ return files;
+};
+
export const getMemoryFileNames = async (
memoryName: string
): Promise => {
diff --git a/packages/baseai/src/utils/retrieve-credentials.ts b/packages/baseai/src/utils/retrieve-credentials.ts
deleted file mode 100644
index accd30c7..00000000
--- a/packages/baseai/src/utils/retrieve-credentials.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-import { loadConfig } from './config/config-handler';
-import fs from 'fs/promises';
-import * as p from '@clack/prompts';
-import color from 'picocolors';
-
-export interface Account {
- apiKey: string;
-}
-
-type Spinner = ReturnType;
-
-function handleNoAccountFound({ spinner }: { spinner: Spinner }): void {
- spinner.stop('No account found');
- p.log.warn('No account found. Please authenticate first.');
- p.log.info(`Run: ${color.green('npx baseai auth')}`);
-}
-function handleAuthError({
- spinner,
- error
-}: {
- spinner: Spinner;
- error: unknown;
-}): void {
- spinner.stop('Failed to retrieve authentication');
- p.log.error(`Error retrieving stored auth: ${(error as Error).message}`);
-}
-
-export async function retrieveAuthentication({
- spinner
-}: {
- spinner: Spinner;
-}): Promise {
- spinner.start('Retrieving stored authentication');
- try {
- const baiConfig = await loadConfig();
- let envFile = baiConfig.envFilePath || '.env';
- const envFileContent = await fs.readFile(envFile, 'utf-8');
-
- const apiKey = envFileContent
- .split('\n')
- .reverse()
- .find(line => line.includes('LANGBASE_API_KEY='))
- ?.split('=')[1];
-
- if (!apiKey) {
- handleNoAccountFound({ spinner });
- return null;
- }
-
- spinner.stop('Retrieved stored authentication');
-
- return {
- apiKey
- };
- } catch (error) {
- handleAuthError({ spinner, error });
- return null;
- }
-}
diff --git a/packages/baseai/types/memory.ts b/packages/baseai/types/memory.ts
index 2716493b..cf7936f8 100644
--- a/packages/baseai/types/memory.ts
+++ b/packages/baseai/types/memory.ts
@@ -1,5 +1,11 @@
import { z } from 'zod';
+export interface MemoryI {
+ name: string;
+ description?: string;
+ config?: MemoryConfigI;
+}
+
export const memoryNameSchema = z
.string()
.min(3, 'Memory name must be at least 3 characters long')
@@ -16,43 +22,28 @@ export const memoryDocSchema = z.object({
documentName: docNameSchema
});
-export const gitConfigSchema = z.object({
- enabled: z.boolean(),
- include: z
- .array(z.string().trim().min(1, 'Include pattern must not be empty'))
- .min(1, 'At least one include pattern must be specified')
- .describe('Glob patterns to include files in the memory'),
- gitignore: z.boolean().optional().default(true),
- deployedAt: z.string().trim().optional().default(''),
- embeddedAt: z.string().trim().optional().default('')
-});
-
-export const documentSchema = z.object({
- meta: z
- .function()
- .args(
- z.object({
- name: z.string(),
- size: z.string(),
- content: z.string(),
- blob: z.instanceof(Blob),
- path: z.string(),
- })
- )
- .returns(z.record(z.string()))
- .optional()
-});
-
export const memoryConfigSchema = z.object({
- name: z.string(),
- description: z.string().optional(),
- git: gitConfigSchema,
- documents: documentSchema.optional()
+ useGitRepo: z.boolean(),
+ dirToTrack: z
+ .string()
+ .trim()
+ .min(1, 'Directory to track must not be empty'),
+ extToTrack: z.union([
+ z.tuple([z.literal('*')]),
+ z
+ .array(
+ z
+ .string()
+ .trim()
+ .regex(
+ /^\.\w+$/,
+ 'File extension must start with a dot followed by alphanumeric characters'
+ )
+ )
+ .min(1, 'At least one file extension must be specified')
+ ]),
+ deployedCommitHash: z.string().optional(),
+ embeddedCommitHash: z.string().optional()
});
-export type GitConfigI = z.infer;
-
export type MemoryConfigI = z.infer;
-export type DocumentConfigI = z.infer;
-
-export type MemoryI = MemoryConfigI;
diff --git a/packages/baseai/types/model.ts b/packages/baseai/types/model.ts
index 4ec94b0f..c5242a65 100644
--- a/packages/baseai/types/model.ts
+++ b/packages/baseai/types/model.ts
@@ -29,16 +29,14 @@ export type TogetherModels =
| 'together:mistralai/Mistral-7B-Instruct-v0.2'
| 'together:mistralai/Mixtral-8x7B-Instruct-v0.1'
| 'together:mistralai/Mixtral-8x22B-Instruct-v0.1'
- | 'together:databricks/dbrx-instruct'
- | 'together:meta-llama/Llama-3.3-70B-Instruct-Turbo';
+ | 'together:databricks/dbrx-instruct';
export type AnthropicModels =
| 'anthropic:claude-3-5-sonnet-latest'
| 'anthropic:claude-3-5-sonnet-20240620'
| 'anthropic:claude-3-opus-20240229'
| 'anthropic:claude-3-sonnet-20240229'
- | 'anthropic:claude-3-haiku-20240307'
- | 'anthropic:claude-3-5-haiku-20241022';
+ | 'anthropic:claude-3-haiku-20240307';
export type GroqModels =
| 'groq:llama-3.1-70b-versatile'
@@ -47,8 +45,7 @@ export type GroqModels =
| 'groq:llama3-8b-8192'
| 'groq:mixtral-8x7b-32768'
| 'groq:gemma2-9b-it'
- | 'groq:gemma-7b-it'
- | 'groq:llama-3.3-70b-versatile';
+ | 'groq:gemma-7b-it';
export type GoogleModels =
| 'google:gemini-1.5-pro-latest'
@@ -63,8 +60,7 @@ export type FireworksAIModels =
| 'fireworks:llama-v3p1-8b-instruct'
| 'fireworks:llama-v3p1-70b-instruct'
| 'fireworks:llama-v3-70b-instruct'
- | 'fireworks:yi-large'
- | 'fireworks:llama-v3p3-70b-instruct';
+ | 'fireworks:yi-large';
export type PerplexityModels =
| 'perplexity:llama-3.1-sonar-huge-128k-online'
@@ -78,6 +74,4 @@ export type MistralAIModels =
| 'mistral:open-mistral-nemo'
| 'mistral:codestral-latest';
-export type XAIModels = 'xai:grok-beta';
-
export type OllamaModels = `ollama:${string}`;
diff --git a/packages/baseai/types/pipe.ts b/packages/baseai/types/pipe.ts
index bc18a65a..b51e37a0 100644
--- a/packages/baseai/types/pipe.ts
+++ b/packages/baseai/types/pipe.ts
@@ -153,14 +153,3 @@ export interface ToolCall {
arguments: string;
};
}
-
-const functionNameRegex = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
-
-export const toolChoiceSchema = z
- .object({
- type: z.enum(['function']).default('function'),
- function: z.object({
- name: z.string().refine(value => functionNameRegex.test(value))
- })
- })
- .optional();
diff --git a/packages/baseai/types/tools.ts b/packages/baseai/types/tools.ts
index 7536bc86..eee73a84 100644
--- a/packages/baseai/types/tools.ts
+++ b/packages/baseai/types/tools.ts
@@ -1,5 +1,3 @@
-import { z } from 'zod';
-
export interface Tool {
run: (...args: any[]) => Promise<any> | any;
type: 'function';
@@ -10,13 +8,11 @@ export interface Tool {
};
}
-export const pipeToolSchema = z.object({
- type: z.literal('function'),
- function: z.object({
- name: z.string(),
- description: z.string().optional(),
- parameters: z.record(z.any()).optional()
- })
-});
-
-export type PipeTool = z.infer<typeof pipeToolSchema>;
+export interface PipeTool {
+ type: 'function';
+ function: {
+ name: string;
+ description?: string;
+ parameters?: Record<string, any>;
+ };
+}
diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md
index 7d6cc9b1..6f807631 100644
--- a/packages/core/CHANGELOG.md
+++ b/packages/core/CHANGELOG.md
@@ -1,151 +1,5 @@
# `baseai` SDK
-## 0.9.43
-
-### Patch Changes
-
-- Fix moderation
-
-## 0.9.42
-
-### Patch Changes
-
-- 📦 NEW: LB-LLM-Key header support
-
-## 0.9.41
-
-### Patch Changes
-
-- 🐛 FIX: Google stream
-
-## 0.9.40
-
-### Patch Changes
-
-- 📦 NEW: meta-llama/Llama-3.3-70B-Instruct-Turbo model
-
-## 0.9.39
-
-### Patch Changes
-
-- 📦 NEW: tools support in pipe.run()
-
-## 0.9.38
-
-### Patch Changes
-
-- 📦 NEW: .env file based BaseAI auth
-
-## 0.9.37
-
-### Patch Changes
-
-- 👌 IMPROVE: Remove unused type
-
-## 0.9.36
-
-### Patch Changes
-
-- 📦 NEW: Dynamically set document metadata
-
-## 0.9.35
-
-### Patch Changes
-
-- 📦 NEW: Pipe API key support in pipe.run()
-
-## 0.9.34
-
-### Patch Changes
-
-- 👌 IMPROVE: Memory config with new features and better UX
-
-## 0.9.33
-
-### Patch Changes
-
-- 📦 NEW: Params for pipe.run() sdk support
-
-## 0.9.32
-
-### Patch Changes
-
-- 👌 IMPROVE: Error handling in usePipe
-
-## 0.9.31
-
-### Patch Changes
-
-- 98f2d7c: 🐛 FIX: Local development server
-- 👌 IMPROVE: Local development server
-
-## 0.9.30
-
-### Patch Changes
-
-- 📦 NEW: Request production AI agent pipe
-
-## 0.9.29
-
-### Patch Changes
-
-- 🐛 FIX: execAsync breaking paths in Windows
-
-## 0.9.28
-
-### Patch Changes
-
-- 📦 NEW: Pipe v1 support
-
-## 0.9.27
-
-### Patch Changes
-
-- 🐛 FIX: Broken pipes and tools build paths in Windows
-
-## 0.9.26
-
-### Patch Changes
-
-- 📦 NEW: Allow empty submit with no message
-
-## 0.9.25
-
-### Patch Changes
-
-- 🐛 FIX: Request timeout and special characters in description
-
-## 0.9.24
-
-### Patch Changes
-
-- 📦 NEW: claude 3.5 Haiku
-
-## 0.9.23
-
-### Patch Changes
-
-- 📦 NEW: setThreadId function in usePipe
-
-## 0.9.22
-
-### Patch Changes
-
-- 🐛 FIX: Anthropic streaming
-- 84d789c: 🐛 FIX: Anthropic streaming
-
-## 0.9.21
-
-### Patch Changes
-
-- 👌 IMPROVE: Redact LLM API key
-
-## 0.9.20
-
-### Patch Changes
-
-- 👌 IMPROVE: logs
-
## 0.9.19
### Patch Changes
diff --git a/packages/core/package.json b/packages/core/package.json
index 6d986085..9c020551 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,7 +1,7 @@
{
"name": "@baseai/core",
"description": "The Web AI Framework's core - BaseAI.dev",
- "version": "0.9.43",
+ "version": "0.9.19",
"license": "Apache-2.0",
"sideEffects": false,
"main": "./dist/index.js",
@@ -119,4 +119,4 @@
"langbase.com",
"generative AI"
]
-}
\ No newline at end of file
+}
diff --git a/packages/core/src/common/request.ts b/packages/core/src/common/request.ts
index 9b0edff5..f6c60e0a 100644
--- a/packages/core/src/common/request.ts
+++ b/packages/core/src/common/request.ts
@@ -14,7 +14,6 @@ interface RequestConfig {
apiKey?: string;
baseUrl: string;
timeout?: number;
- llmKey?: string;
}
interface SendOptions extends RequestOptions {
@@ -91,7 +90,6 @@ export class Request {
return {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.config.apiKey}`,
- 'LB-LLM-Key': this.config.llmKey ?? '',
...headers,
};
}
@@ -101,13 +99,12 @@ export class Request {
options,
headers,
}: MakeRequestParams): Promise<Response> {
+ // console.log(' =================== REQUEST ===================');
const resp = await fetch(url, {
method: options.method,
headers,
body: JSON.stringify(options.body),
- ...(this.config.timeout && {
- signal: AbortSignal.timeout(this.config.timeout),
- }),
+ signal: AbortSignal.timeout(this.config.timeout || 30000),
});
return resp;
}
@@ -161,8 +158,8 @@ export class Request {
}
async post<T>(options: Omit<SendOptions, 'method'>): Promise<T> {
- // logger('Request.post.options');
- // logger(options, {depth: null, colors: true});
+ console.log('Request.post.options');
+ console.dir(options, {depth: null, colors: true});
return this.send({...options, method: 'POST'});
}
diff --git a/packages/core/src/data/models.ts b/packages/core/src/data/models.ts
index be39bcd2..7170fc6d 100644
--- a/packages/core/src/data/models.ts
+++ b/packages/core/src/data/models.ts
@@ -113,12 +113,6 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
},
],
[TOGETHER_AI]: [
- {
- id: 'meta-llama/Llama-3.3-70B-Instruct-Turbo',
- provider: TOGETHER_AI,
- promptCost: 0.88,
- completionCost: 0.88,
- },
{
id: 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
provider: TOGETHER_AI,
@@ -241,20 +235,8 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
promptCost: 0.25,
completionCost: 1.25,
},
- {
- id: 'claude-3-5-haiku-20241022',
- provider: ANTHROPIC,
- promptCost: 1,
- completionCost: 5,
- },
],
[GROQ]: [
- {
- id: 'llama-3.3-70b-versatile',
- provider: GROQ,
- promptCost: 0.59,
- completionCost: 0.79,
- },
{
id: 'llama-3.1-70b-versatile',
provider: GROQ,
@@ -339,12 +321,6 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
},
],
[FIREWORKS_AI]: [
- {
- id: 'llama-v3p3-70b-instruct',
- provider: FIREWORKS_AI,
- promptCost: 0.88,
- completionCost: 0.88,
- },
{
id: 'llama-v3p1-405b-instruct',
provider: FIREWORKS_AI,
diff --git a/packages/core/src/helpers/stream.ts b/packages/core/src/helpers/stream.ts
index 64593011..9061a166 100644
--- a/packages/core/src/helpers/stream.ts
+++ b/packages/core/src/helpers/stream.ts
@@ -1,7 +1,6 @@
import {ChatCompletionStream} from 'openai/lib/ChatCompletionStream';
import {ChunkStream} from 'src/pipes';
import {Stream} from 'openai/streaming';
-import {ToolCallResult} from 'types/pipes';
export interface Runner extends ChatCompletionStream {}
@@ -82,17 +81,3 @@ export function handleResponseStream({
}
return result;
}
-
-/**
- * Retrieves tool calls from a given readable stream.
- *
- * @param stream - The readable stream from which to extract tool calls.
- * @returns A promise that resolves to an array of `ToolCall` objects.
- */
-export async function getToolsFromStream(
- stream: ReadableStream,
-): Promise<ToolCallResult[]> {
- let run = getRunner(stream);
- const {choices} = await run.finalChatCompletion();
- return choices[0].message.tool_calls;
-}
diff --git a/packages/core/src/pipes/pipes.ts b/packages/core/src/pipes/pipes.ts
index 410d74d4..f5a501e4 100644
--- a/packages/core/src/pipes/pipes.ts
+++ b/packages/core/src/pipes/pipes.ts
@@ -1,18 +1,10 @@
import type {Runner} from 'src/helpers';
-import {
- Message,
- MessageRole,
- Pipe as PipeI,
- ToolCallResult,
- Tools,
-} from '../../types/pipes';
+import {Message, MessageRole, Pipe as PipeI, ToolCall} from '../../types/pipes';
import {Request} from '../common/request';
import {getLLMApiKey} from '../utils/get-llm-api-key';
import {getApiUrl, isProd} from '../utils/is-prod';
+import {toOldPipeFormat} from '../utils/to-old-pipe-format';
import {isLocalServerRunning} from 'src/utils/local-server-running';
-import {getToolsFromStream} from 'src/helpers';
-import {ANTHROPIC} from 'src/data/models';
-import {getProvider} from 'src/utils/get-provider';
export interface Variable {
name: string;
@@ -24,15 +16,10 @@ export interface RunOptions {
variables?: Variable[];
threadId?: string;
rawResponse?: boolean;
- runTools?: boolean;
- tools?: Tools[];
- name?: string; // Pipe name for SDK,
- apiKey?: string; // pipe level key for SDK
- llmKey?: string; // LLM API key
}
export interface RunOptionsStream extends RunOptions {
- stream: boolean;
+ stream: true;
}
export interface Usage {
@@ -66,7 +53,6 @@ export interface RunResponseStream {
export interface PipeOptions extends PipeI {
maxCalls?: number;
- prod?: boolean;
}
interface ChoiceGenerate {
@@ -91,22 +77,11 @@ export class Pipe {
private tools: Record<string, (toolArgs: any) => Promise<any>>;
private maxCalls: number;
private hasTools: boolean;
- private prod: boolean;
- private baseUrl: string;
- private entityApiKey?: string;
constructor(options: PipeOptions) {
- this.prod = options.prod ?? isProd();
- this.baseUrl = getApiUrl(this.prod);
-
- this.request = new Request({
- apiKey: options.apiKey,
- baseUrl: this.baseUrl,
- });
- this.pipe = options;
- this.entityApiKey = options.apiKey;
-
- delete this.pipe.prod;
+ const baseUrl = getApiUrl();
+ this.request = new Request({apiKey: options.apiKey, baseUrl});
+ this.pipe = toOldPipeFormat(options);
delete this.pipe.apiKey;
this.tools = this.getToolsFromPipe(this.pipe);
@@ -115,7 +90,7 @@ export class Pipe {
}
private getToolsFromPipe(
- pipe: Pipe,
+ pipe: any,
): Record<string, (toolArgs: any) => Promise<any>> {
const tools: Record<string, (toolArgs: any) => Promise<any>> = {};
if (pipe.tools && Array.isArray(pipe.tools)) {
@@ -126,16 +101,14 @@ export class Pipe {
return tools;
}
- private async runTools(toolCalls: ToolCallResult[]): Promise<Message[]> {
- const toolPromises = toolCalls.map(async (toolCall: ToolCallResult) => {
+ private async runTools(toolCalls: ToolCall[]): Promise<Message[]> {
+ const toolPromises = toolCalls.map(async (toolCall: ToolCall) => {
const toolName = toolCall.function.name;
const toolParameters = JSON.parse(toolCall.function.arguments);
const toolFunction = this.tools[toolName];
if (!toolFunction) {
- throw new Error(
- `Tool ${toolName} not found. If this is intentional, please set runTools to false to disable tool execution by default.`,
- );
+ throw new Error(`Tool '${toolName}' not found`);
}
const toolResponse = await toolFunction(toolParameters);
@@ -160,7 +133,7 @@ export class Pipe {
responseMessage: Message,
toolResults: Message[],
): Message[] {
- return this.prod
+ return isProd()
? toolResults
: [...messages, responseMessage, ...toolResults];
}
@@ -172,137 +145,26 @@ export class Pipe {
private warnIfToolsWithStream(requestedStream: boolean): void {
if (this.hasTools && requestedStream) {
console.warn(
- 'Warning: Streaming is not yet supported in Anthropic models when tools are present in the pipe. Falling back to non-streaming mode.',
+ 'Warning: Streaming is not yet supported when tools are present in the pipe. Falling back to non-streaming mode.',
);
}
}
- private async handleStreamResponse(
- options: RunOptionsStream,
- response: RunResponseStream,
- ): Promise<RunResponseStream> {
- const endpoint = '/v1/pipes/run';
- const stream = this.isStreamRequested(options);
- const body = {...options, stream};
-
- const [streamForToolCall, streamForReturn] = response.stream.tee();
- const tools = await getToolsFromStream(streamForToolCall);
-
- if (tools.length) {
- let messages = options.messages || [];
-
- let currentResponse: RunResponseStream = {
- stream: streamForReturn,
- threadId: response.threadId,
- rawResponse: response.rawResponse,
- };
-
- let callCount = 0;
-
- while (callCount < this.maxCalls) {
- const [streamForToolCall, streamForReturn] =
- currentResponse.stream.tee();
-
- const tools = await getToolsFromStream(streamForToolCall);
-
- if (tools.length === 0) {
- return {
- stream: streamForReturn,
- threadId: currentResponse.threadId,
- rawResponse: response.rawResponse,
- };
- }
-
- const toolResults = await this.runTools(tools);
-
- const responseMessage = {
- role: 'assistant',
- content: null,
- tool_calls: tools,
- } as Message;
-
- messages = this.getMessagesToSend(
- messages,
- responseMessage,
- toolResults,
- );
-
- currentResponse = await this.createRequest(
- endpoint,
- {
- ...body,
- messages,
- threadId: currentResponse.threadId,
- },
- );
-
- callCount++;
- }
- }
-
- return {
- ...response,
- stream: streamForReturn,
- } as RunResponseStream;
- }
-
public async run(options: RunOptionsStream): Promise<RunResponseStream>;
public async run(options: RunOptions): Promise<RunResponse>;
public async run(
options: RunOptions | RunOptionsStream,
): Promise<RunResponse | RunResponseStream> {
- // logger('pipe.run', this.pipe.name, 'RUN');
-
- const endpoint = '/v1/pipes/run';
- // logger('pipe.run.baseUrl.endpoint', getApiUrl() + endpoint);
- // logger('pipe.run.options');
- // logger(options, {depth: null, colors: true});
-
- const providerString = this.pipe.model.split(':')[0];
- const modelProvider = getProvider(providerString);
- const isAnthropic = modelProvider === ANTHROPIC;
- const hasTools = this.pipe.tools.length > 0;
-
- // For SDK
- // Run the given pipe name
- if (options.name) {
- this.pipe = {...this.pipe, name: options.name};
- }
+ console.log('pipe.run', this.pipe.name, 'RUN');
- // For SDK
- // Run the pipe against the given Pipe API key
- if (options.apiKey) {
- this.request = new Request({
- apiKey: options.apiKey,
- baseUrl: this.baseUrl,
- ...((options.llmKey && {llmKey: options.llmKey}) || {}),
- });
- }
-
- if (options.llmKey && !options.apiKey) {
- this.request = new Request({
- apiKey: this.entityApiKey,
- baseUrl: this.baseUrl,
- llmKey: options.llmKey,
- });
- }
-
- let stream = this.isStreamRequested(options);
-
- // Anthropic models don't support streaming with tools.
- if (isAnthropic && hasTools && stream) {
- this.warnIfToolsWithStream(stream);
- stream = false;
- }
+ const endpoint = '/beta/pipes/run';
+ console.log('pipe.run.baseUrl.endpoint', getApiUrl() + endpoint);
+ console.log('pipe.run.options');
+ console.dir(options, {depth: null, colors: true});
- let runTools = options.runTools ?? true;
-
- // Do not run tools if they are explicitly provided in the options.
- if (options.tools && options.tools?.length) {
- runTools = false;
- }
-
- delete options.runTools;
+ const requestedStream = this.isStreamRequested(options);
+ const stream = this.hasTools ? false : requestedStream;
+ this.warnIfToolsWithStream(requestedStream);
const body = {...options, stream};
@@ -313,22 +175,13 @@ export class Pipe {
return {} as RunResponse | RunResponseStream;
}
- if (!runTools) {
- if (!stream) {
- return response as RunResponse;
- }
-
- return response as RunResponseStream;
- }
+ console.log('pipe.run.response');
+ console.dir(response, {depth: null, colors: true});
if (stream) {
- return await this.handleStreamResponse(
- options as RunOptionsStream,
- response as RunResponseStream,
- );
+ return response as RunResponseStream;
}
- // STREAM IS OFF
let messages = options.messages || [];
let currentResponse = response as RunResponse;
let callCount = 0;
@@ -337,21 +190,21 @@ export class Pipe {
const responseMessage = currentResponse.choices[0].message;
if (this.hasNoToolCalls(responseMessage)) {
- // logger('No more tool calls. Returning final response.');
+ console.log('No more tool calls. Returning final response.');
return currentResponse;
}
- // logger('\npipe.run.response.toolCalls');
- // logger(responseMessage.tool_calls, {
- // depth: null,
- // colors: true,
- // });
+ console.log('\npipe.run.response.toolCalls');
+ console.dir(responseMessage.tool_calls, {
+ depth: null,
+ colors: true,
+ });
const toolResults = await this.runTools(
- responseMessage.tool_calls as ToolCallResult[],
+ responseMessage.tool_calls as ToolCall[],
);
- // logger('\npipe.run.toolResults');
- // logger(toolResults, {depth: null, colors: true});
+ console.log('\npipe.run.toolResults');
+ console.dir(toolResults, {depth: null, colors: true});
messages = this.getMessagesToSend(
messages,
@@ -373,9 +226,9 @@ export class Pipe {
// Explicitly check if the new response has no tool calls
if (this.hasNoToolCalls(currentResponse.choices[0].message)) {
- // logger(
- // 'New response has no tool calls. Returning final response.',
- // );
+ console.log(
+ 'New response has no tool calls. Returning final response.',
+ );
return currentResponse;
}
}
@@ -387,7 +240,6 @@ export class Pipe {
}
private async createRequest<T>(endpoint: string, body: any): Promise<T> {
- const isProdEnv = this.prod;
const prodOptions = {
endpoint,
body: {
@@ -395,21 +247,17 @@ export class Pipe {
name: this.pipe.name,
},
};
+ const localOptions = {
+ endpoint,
+ body: {
+ ...body,
+ pipe: this.pipe,
+ llmApiKey: getLLMApiKey(this.pipe.model.provider),
+ },
+ };
- let localOptions = {} as any;
-
+ const isProdEnv = isProd();
if (!isProdEnv) {
- const providerString = this.pipe.model.split(':')[0];
- const modelProvider = getProvider(providerString);
- localOptions = {
- endpoint,
- body: {
- ...body,
- pipe: this.pipe,
- llmApiKey: getLLMApiKey(modelProvider),
- },
- };
-
const isServerRunning = await isLocalServerRunning();
if (!isServerRunning) return {} as T;
}
@@ -449,7 +297,7 @@ interface ContentChunk {
interface ToolCallChunk {
type: 'toolCall';
- toolCall: ToolCallResult;
+ toolCall: ToolCall;
}
interface ChoiceStream {
@@ -462,7 +310,7 @@ interface ChoiceStream {
interface Delta {
role?: MessageRole;
content?: string;
- tool_calls?: ToolCallResult[];
+ tool_calls?: ToolCall[];
}
interface UnknownChunk {
@@ -481,7 +329,7 @@ export interface ChunkStream {
export interface Chunk {
type: 'content' | 'toolCall' | 'unknown';
content?: string;
- toolCall?: ToolCallResult;
+ toolCall?: ToolCall;
rawChunk?: ChunkStream;
}
diff --git a/packages/core/src/react/use-pipe.ts b/packages/core/src/react/use-pipe.ts
index 2e3864ae..dc0b2d2a 100644
--- a/packages/core/src/react/use-pipe.ts
+++ b/packages/core/src/react/use-pipe.ts
@@ -24,7 +24,6 @@ interface UsePipeOptions {
}
const uuidSchema = z.string().uuid();
-const externalThreadIdSchema = uuidSchema.optional();
export function usePipe({
apiRoute = '/langbase/pipes/run-stream',
@@ -42,9 +41,7 @@ export function usePipe({
const [error, setError] = useState<Error | null>(null);
const abortControllerRef = useRef<AbortController | null>(null);
- const threadIdRef = useRef<string | undefined>(
- initialThreadId || undefined,
- );
+ const threadIdRef = useRef<string | null>(initialThreadId || null);
const messagesRef = useRef(initialMessages);
const isFirstRequestRef = useRef(true);
@@ -90,17 +87,6 @@ export function usePipe({
[updateMessages, onResponse, onFinish],
);
- const setThreadId = useCallback((newThreadId: string | undefined) => {
- const isValidThreadId =
- externalThreadIdSchema.safeParse(newThreadId).success;
-
- if (isValidThreadId) {
- threadIdRef.current = newThreadId;
- } else {
- throw new Error('Invalid thread ID');
- }
- }, []);
-
const getMessagesToSend = useCallback(
(updatedMessages: Message[]): [Message[], boolean] => {
const isInitialRequest = isFirstRequestRef.current;
@@ -155,8 +141,8 @@ export function usePipe({
const [messagesToSend, lastMessageOnly] =
getMessagesToSend(updatedMessages);
- // Ensure there's at least one message to send if not allowing empty submit
- if (messagesToSend.length === 0 && !options.allowEmptySubmit) {
+ // Ensure there's at least one message to send
+ if (messagesToSend.length === 0) {
throw new Error(
'At least one message or initial message is required',
);
@@ -186,7 +172,7 @@ export function usePipe({
signal,
});
- if (!response.ok) await processErrorResponse(response);
+ if (!response.ok) throw new Error('Failed to send message');
const newThreadId = response.headers.get('lb-thread-id');
if (newThreadId) threadIdRef.current = newThreadId;
@@ -197,12 +183,10 @@ export function usePipe({
const result: RunResponse = await response.json();
processNonStreamResponse(result);
}
- } catch (err: any) {
+ } catch (err) {
if (err instanceof Error && err.name !== 'AbortError') {
setError(err);
onError?.(err);
- } else if (err.name !== 'AbortError') {
- throw new Error('Failed to send message');
}
} finally {
setIsLoading(false);
@@ -266,16 +250,6 @@ export function usePipe({
setIsLoading(false);
}, []);
- const processErrorResponse = async (response: Response) => {
- const res = await response.json();
- if (res.error.error) {
- // Throw error object if it exists
- throw new Error(res.error.error.message);
- } else {
- throw new Error('Failed to send message');
- }
- };
-
return useMemo(
() => ({
messages,
@@ -290,7 +264,6 @@ export function usePipe({
threadId: threadIdRef.current,
sendMessage,
setInput,
- setThreadId,
}),
[
messages,
diff --git a/packages/core/src/utils/get-provider.ts b/packages/core/src/utils/get-provider.ts
deleted file mode 100644
index 5ffb2209..00000000
--- a/packages/core/src/utils/get-provider.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-import {
- ANTHROPIC,
- COHERE,
- FIREWORKS_AI,
- GOOGLE,
- GROQ,
- MISTRAL_AI,
- OLLAMA,
- OPEN_AI,
- PERPLEXITY,
- TOGETHER_AI,
- X_AI,
-} from '../data/models';
-
-type Provider =
- | typeof OPEN_AI
- | typeof ANTHROPIC
- | typeof TOGETHER_AI
- | typeof GOOGLE
- | typeof GROQ
- | typeof COHERE
- | typeof FIREWORKS_AI
- | typeof PERPLEXITY;
-
-/**
- * Retrieves the provider based on the given provider string.
- *
- * @param providerString - The provider string.
- * @returns The corresponding provider object.
- * @throws Error if the provider is unknown.
- */
-export function getProvider(providerString: string): Provider {
- const providerMap: {[key: string]: Provider} = {
- openai: OPEN_AI,
- anthropic: ANTHROPIC,
- together: TOGETHER_AI,
- google: GOOGLE,
- groq: GROQ,
- cohere: COHERE,
- fireworks: FIREWORKS_AI,
- perplexity: PERPLEXITY,
- ollama: OLLAMA,
- xai: X_AI,
- mistral: MISTRAL_AI,
- };
-
- const provider = providerMap[providerString.toLowerCase()];
- if (!provider) {
- throw new Error(`Unknown provider: ${providerString}`);
- }
- return provider;
-}
diff --git a/packages/core/src/utils/is-prod.ts b/packages/core/src/utils/is-prod.ts
index 179331bf..226bd5bd 100644
--- a/packages/core/src/utils/is-prod.ts
+++ b/packages/core/src/utils/is-prod.ts
@@ -10,11 +10,8 @@ export function isLocal() {
return process.env.NODE_ENV !== 'production';
}
-export function getApiUrl(prod?: boolean) {
- if (prod) return 'https://api.langbase.com';
- else return 'http://localhost:9000';
-
+export function getApiUrl() {
// TODO: Make local port configurable.
- // return isProd() ? 'https://api.langbase.com' : 'http://localhost:9000';
+ return isProd() ? 'https://api.langbase.com' : 'http://localhost:9000';
// return isProd() ? 'http://localhost:8787' : 'http://localhost:9000';
}
diff --git a/packages/core/src/utils/local-server-running.ts b/packages/core/src/utils/local-server-running.ts
index 210724fd..1dfcf960 100644
--- a/packages/core/src/utils/local-server-running.ts
+++ b/packages/core/src/utils/local-server-running.ts
@@ -1,9 +1,8 @@
-import {getApiUrl, isProd} from './is-prod';
+import {getApiUrl} from './is-prod';
export async function isLocalServerRunning(): Promise<boolean> {
try {
- const prod = isProd();
- const endpoint = getApiUrl(prod);
+ const endpoint = getApiUrl();
const response = await fetch(endpoint, {
mode: 'no-cors',
diff --git a/packages/core/src/utils/to-old-pipe-format.ts b/packages/core/src/utils/to-old-pipe-format.ts
new file mode 100644
index 00000000..5949a74e
--- /dev/null
+++ b/packages/core/src/utils/to-old-pipe-format.ts
@@ -0,0 +1,93 @@
+import type {Pipe, PipeOld} from '../../types/pipes';
+
+import {
+ ANTHROPIC,
+ COHERE,
+ FIREWORKS_AI,
+ GOOGLE,
+ GROQ,
+ OLLAMA,
+ OPEN_AI,
+ PERPLEXITY,
+ TOGETHER_AI,
+ X_AI,
+} from '../data/models';
+
+type Provider =
+ | typeof OPEN_AI
+ | typeof ANTHROPIC
+ | typeof TOGETHER_AI
+ | typeof GOOGLE
+ | typeof GROQ
+ | typeof COHERE
+ | typeof FIREWORKS_AI
+ | typeof PERPLEXITY;
+
+/**
+ * Converts a new pipe format to an old pipe format.
+ *
+ * @param newFormat - The new pipe format to convert.
+ * @returns The converted old pipe format.
+ */
+export function toOldPipeFormat(newFormat: Pipe): PipeOld {
+ const [providerString, modelName] = newFormat.model.split(':');
+
+ return {
+ name: newFormat.name,
+ description: newFormat.description || '',
+ status: newFormat.status,
+ meta: {
+ stream: newFormat.stream,
+ json: newFormat.json,
+ store: newFormat.store,
+ moderate: newFormat.moderate,
+ },
+ model: {
+ name: modelName,
+ provider: getProvider(providerString),
+ params: {
+ top_p: newFormat.top_p,
+ max_tokens: newFormat.max_tokens,
+ temperature: newFormat.temperature,
+ presence_penalty: newFormat.presence_penalty,
+ frequency_penalty: newFormat.frequency_penalty,
+ stop: newFormat.stop,
+ },
+ tool_choice: newFormat.tool_choice,
+ parallel_tool_calls: newFormat.parallel_tool_calls,
+ },
+ messages: newFormat.messages,
+ variables: newFormat.variables,
+ tools: newFormat.tools,
+ functions: newFormat.tools,
+ memorysets: newFormat.memory.map(memory => memory.name),
+ };
+}
+
+/**
+ * Retrieves the provider based on the given provider string.
+ *
+ * @param providerString - The provider string.
+ * @returns The corresponding provider object.
+ * @throws Error if the provider is unknown.
+ */
+function getProvider(providerString: string): Provider {
+ const providerMap: {[key: string]: Provider} = {
+ openai: OPEN_AI,
+ anthropic: ANTHROPIC,
+ together: TOGETHER_AI,
+ google: GOOGLE,
+ groq: GROQ,
+ cohere: COHERE,
+ fireworks: FIREWORKS_AI,
+ perplexity: PERPLEXITY,
+ ollama: OLLAMA,
+ xai: X_AI,
+ };
+
+ const provider = providerMap[providerString.toLowerCase()];
+ if (!provider) {
+ throw new Error(`Unknown provider: ${providerString}`);
+ }
+ return provider;
+}
diff --git a/packages/core/types/memory.ts b/packages/core/types/memory.ts
index aca7fbb8..e5d8205a 100644
--- a/packages/core/types/memory.ts
+++ b/packages/core/types/memory.ts
@@ -1,26 +1,13 @@
-export interface GitConfig {
- enabled: boolean;
- include: string[];
- gitignore?: boolean;
- deployedAt?: string;
- embeddedAt?: string;
-}
-
-export interface MemoryDocumentI {
- name: string;
- size: string;
- content: string;
- blob: Blob;
- path: string;
-}
-
-export interface Document {
- meta?: (doc: MemoryDocumentI) => Record<string, string>;
-}
-
export interface Memory {
name: string;
description?: string;
- git: GitConfig;
- documents?: Document;
+ config?: MemoryConfig;
+}
+
+interface MemoryConfig {
+ useGitRepo: boolean;
+ dirToTrack: string;
+ extToTrack: string[];
+ deployedCommitHash?: string;
+ embeddedCommitHash?: string;
}
diff --git a/packages/core/types/model.ts b/packages/core/types/model.ts
index 960cdaad..524958f1 100644
--- a/packages/core/types/model.ts
+++ b/packages/core/types/model.ts
@@ -29,16 +29,14 @@ export type TogetherModels =
| 'together:mistralai/Mistral-7B-Instruct-v0.2'
| 'together:mistralai/Mixtral-8x7B-Instruct-v0.1'
| 'together:mistralai/Mixtral-8x22B-Instruct-v0.1'
- | 'together:databricks/dbrx-instruct'
- | 'together:meta-llama/Llama-3.3-70B-Instruct-Turbo';
+ | 'together:databricks/dbrx-instruct';
export type AnthropicModels =
| 'anthropic:claude-3-5-sonnet-latest'
| 'anthropic:claude-3-5-sonnet-20240620'
| 'anthropic:claude-3-opus-20240229'
| 'anthropic:claude-3-sonnet-20240229'
- | 'anthropic:claude-3-haiku-20240307'
- | 'anthropic:claude-3-5-haiku-20241022';
+ | 'anthropic:claude-3-haiku-20240307';
export type GroqModels =
| 'groq:llama-3.1-70b-versatile'
@@ -47,8 +45,7 @@ export type GroqModels =
| 'groq:llama3-8b-8192'
| 'groq:mixtral-8x7b-32768'
| 'groq:gemma2-9b-it'
- | 'groq:gemma-7b-it'
- | 'groq:llama-3.3-70b-versatile';
+ | 'groq:gemma-7b-it';
export type GoogleModels =
| 'google:gemini-1.5-pro-latest'
@@ -63,8 +60,7 @@ export type FireworksAIModels =
| 'fireworks:llama-v3p1-8b-instruct'
| 'fireworks:llama-v3p1-70b-instruct'
| 'fireworks:llama-v3-70b-instruct'
- | 'fireworks:yi-large'
- | 'fireworks:llama-v3p3-70b-instruct';
+ | 'fireworks:yi-large';
export type PerplexityModels =
| 'perplexity:llama-3.1-sonar-huge-128k-online'
diff --git a/packages/core/types/pipes.ts b/packages/core/types/pipes.ts
index d4390e86..21acd841 100644
--- a/packages/core/types/pipes.ts
+++ b/packages/core/types/pipes.ts
@@ -19,7 +19,7 @@ export interface Function {
arguments: string;
}
-export interface ToolCallResult {
+export interface ToolCall {
id: string;
type: 'function';
function: Function;
@@ -30,7 +30,7 @@ export interface Message {
content: string | null;
name?: string;
tool_call_id?: string;
- tool_calls?: ToolCallResult[];
+ tool_calls?: ToolCall[];
}
interface ToolFunction {
@@ -44,15 +44,6 @@ interface ToolChoiceFunction {
type ToolChoice = 'auto' | 'required' | ToolChoiceFunction;
-export interface Tools {
- type: 'function';
- function: {
- name: string;
- description?: string;
- parameters?: Record<string, any>;
- };
-}
-
export type Model =
| OpenAIModels
| TogetherModels
diff --git a/packages/testing/index.ts b/packages/testing/index.ts
new file mode 100644
index 00000000..25ec8c70
--- /dev/null
+++ b/packages/testing/index.ts
@@ -0,0 +1,20 @@
+// import {Pipe} from '@baseai/core';
+// import pipeSummary from '../baseai/pipes/summary';
+
+// const pipe = new Pipe(pipeSummary());
+
+// async function main() {
+// const userMsg = 'Who is an AI Engineer?';
+
+// const response = await pipe.run({
+// messages: [
+// {
+// role: 'user',
+// content: userMsg,
+// },
+// ],
+// });
+// console.log('response: ', response);
+// }
+
+// main();
diff --git a/packages/testing/package.json b/packages/testing/package.json
new file mode 100644
index 00000000..1ae66a2c
--- /dev/null
+++ b/packages/testing/package.json
@@ -0,0 +1,15 @@
+{
+ "private": true,
+ "name": "testing",
+ "main": "index.js",
+ "scripts": {
+ "baseai": "baseai"
+ },
+ "license": "UNLICENSED",
+ "dependencies": {
+ "@baseai/core": "workspace:*"
+ },
+ "devDependencies": {
+ "baseai": "workspace:*"
+ }
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 8c662e8e..f5dfbb7c 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -154,14 +154,14 @@ importers:
specifier: ^2.0.0
version: 2.0.0
next:
- specifier: 14.2.35
- version: 14.2.35(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ specifier: ^14.0.4
+ version: 14.2.5(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next-mdx-remote:
specifier: ^5.0.0
version: 5.0.0(@types/react@18.3.11)(react@18.3.1)
next-themes:
specifier: ^0.2.1
- version: 0.2.1(next@14.2.35(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ version: 0.2.1(next@14.2.5(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react:
specifier: ^18.2.0
version: 18.3.1
@@ -264,10 +264,10 @@ importers:
version: 5.1.1(astro@4.15.10(@types/node@22.7.4)(rollup@4.24.0)(terser@5.34.1)(typescript@5.6.2))(tailwindcss@3.4.13(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2)))(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))
'@astrojs/vercel':
specifier: ^7.8.1
- version: 7.8.1(astro@4.15.10(@types/node@22.7.4)(rollup@4.24.0)(terser@5.34.1)(typescript@5.6.2))(next@14.2.35(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
+ version: 7.8.1(astro@4.15.10(@types/node@22.7.4)(rollup@4.24.0)(terser@5.34.1)(typescript@5.6.2))(next@14.2.5(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
'@baseai/core':
- specifier: ^0.9.43
- version: 0.9.43(react@18.3.1)(zod@3.23.8)
+ specifier: ^0.9.19
+ version: 0.9.19(react@18.3.1)(zod@3.23.8)
'@radix-ui/react-slot':
specifier: ^1.1.0
version: 1.1.0(@types/react@18.3.11)(react@18.3.1)
@@ -309,14 +309,14 @@ importers:
version: 5.6.2
devDependencies:
baseai:
- specifier: ^0.9.44
- version: 0.9.44(@types/node@22.7.4)(typescript@5.6.2)
+ specifier: ^0.9.19
+ version: 0.9.19(@types/node@22.7.4)(typescript@5.6.2)
examples/nextjs:
dependencies:
'@baseai/core':
- specifier: ^0.9.43
- version: 0.9.43(react@18.3.1)(zod@3.23.8)
+ specifier: ^0.9.19
+ version: 0.9.19(react@18.3.1)(zod@3.23.8)
'@radix-ui/react-slot':
specifier: ^1.1.0
version: 1.1.0(@types/react@18.3.11)(react@18.3.1)
@@ -336,8 +336,8 @@ importers:
specifier: ^2.0.0
version: 2.0.0
next:
- specifier: 14.2.35
- version: 14.2.35(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ specifier: 14.2.5
+ version: 14.2.5(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
openai:
specifier: ^4.53.0
version: 4.67.1(zod@3.23.8)
@@ -364,8 +364,8 @@ importers:
specifier: ^18
version: 18.3.0
baseai:
- specifier: ^0.9.44
- version: 0.9.44(@types/node@20.16.10)(typescript@5.6.2)
+ specifier: ^0.9.19
+ version: 0.9.19(@types/node@20.16.10)(typescript@5.6.2)
eslint:
specifier: ^8
version: 8.57.1
@@ -388,15 +388,15 @@ importers:
examples/nodejs:
dependencies:
'@baseai/core':
- specifier: ^0.9.43
- version: 0.9.43(react@18.3.1)(zod@3.23.8)
+ specifier: ^0.9.19
+ version: 0.9.19(react@18.3.1)(zod@3.23.8)
dotenv:
specifier: ^16.4.5
version: 16.4.5
devDependencies:
baseai:
- specifier: ^0.9.44
- version: 0.9.44(@types/node@22.7.4)(typescript@5.6.2)
+ specifier: ^0.9.19
+ version: 0.9.19(@types/node@22.7.4)(typescript@5.6.2)
tsx:
specifier: ^4.19.0
version: 4.19.1
@@ -404,8 +404,8 @@ importers:
examples/remix:
dependencies:
'@baseai/core':
- specifier: ^0.9.43
- version: 0.9.43(react@18.3.1)(zod@3.23.8)
+ specifier: ^0.9.19
+ version: 0.9.19(react@18.3.1)(zod@3.23.8)
'@radix-ui/react-slot':
specifier: ^1.1.0
version: 1.1.0(@types/react@18.3.11)(react@18.3.1)
@@ -445,7 +445,7 @@ importers:
devDependencies:
'@remix-run/dev':
specifier: 2.12.0
- version: 2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.79.0)
+ version: 2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.78.12)
'@types/react':
specifier: ^18.2.20
version: 18.3.11
@@ -460,13 +460,13 @@ importers:
version: 6.21.0(eslint@8.57.1)(typescript@5.6.2)
'@vercel/remix':
specifier: 2.12.0
- version: 2.12.0(@remix-run/dev@2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.79.0))(@remix-run/node@2.12.0(typescript@5.6.2))(@remix-run/server-runtime@2.12.0(typescript@5.6.2))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ version: 2.12.0(@remix-run/dev@2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.78.12))(@remix-run/node@2.12.0(typescript@5.6.2))(@remix-run/server-runtime@2.12.1(typescript@5.6.2))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
autoprefixer:
specifier: ^10.4.20
version: 10.4.20(postcss@8.4.47)
baseai:
- specifier: ^0.9.44
- version: 0.9.44(@types/node@22.7.4)(typescript@5.6.2)
+ specifier: ^0.9.19
+ version: 0.9.19(@types/node@22.7.4)(typescript@5.6.2)
eslint:
specifier: ^8.38.0
version: 8.57.1
@@ -525,8 +525,8 @@ importers:
specifier: ^8.0.0
version: 8.0.0
chalk:
- specifier: 5.6.0
- version: 5.6.0
+ specifier: ^5.3.0
+ version: 5.3.0
cli-alerts:
specifier: ^2.0.0
version: 2.0.0
@@ -548,6 +548,9 @@ importers:
compute-cosine-similarity:
specifier: ^1.1.0
version: 1.1.0
+ conf:
+ specifier: ^13.0.1
+ version: 13.0.1
cosmiconfig:
specifier: ^9.0.0
version: 9.0.0(typescript@5.6.2)
@@ -560,9 +563,6 @@ importers:
execa:
specifier: ^9.4.0
version: 9.4.0
- fast-glob:
- specifier: ^3.3.2
- version: 3.3.2
figures:
specifier: ^6.1.0
version: 6.1.0
@@ -704,6 +704,16 @@ importers:
specifier: 1.6.0
version: 1.6.0(@edge-runtime/vm@4.0.3)(@types/node@22.7.4)(jsdom@25.0.1(canvas@2.11.2))(terser@5.34.1)
+ packages/testing:
+ dependencies:
+ '@baseai/core':
+ specifier: workspace:*
+ version: link:../core
+ devDependencies:
+ baseai:
+ specifier: workspace:*
+ version: link:../baseai
+
tools/eslint-config:
devDependencies:
'@next/eslint-plugin-next':
@@ -1033,8 +1043,8 @@ packages:
resolution: {integrity: sha512-vwIVdXG+j+FOpkwqHRcBgHLYNL7XMkufrlaFvL9o6Ai9sJn9+PdyIL5qa0XzTZw084c+u9LOls53eoZWP/W5WQ==}
engines: {node: '>=6.9.0'}
- '@baseai/core@0.9.43':
- resolution: {integrity: sha512-jb0EUJjWqvvqq6Kh7xKTdCwgez4/hoJH3B8VaoWYLXYquOpCsGmVe7hgtt22bgJ2r6LPucglLShGz5uv9DbbKA==}
+ '@baseai/core@0.9.19':
+ resolution: {integrity: sha512-V/nfeDzLcw7RoB9I8retACth1hZXjihyw6Ym7eoEPwpsyq0wmS9lbUUzKVPFBYSU63PNuG+iPTPzflW2SvoxWQ==}
engines: {node: '>=18'}
peerDependencies:
react: ^18 || ^19
@@ -2173,8 +2183,8 @@ packages:
'@microsoft/tsdoc@0.14.2':
resolution: {integrity: sha512-9b8mPpKrfeGRuhFH5iO1iwCLeIIsV6+H1sRfxbkoGXIyQE2BTsPd9zqSqQJ+pv5sJ/hT5M1zvOFL02MnEezFug==}
- '@next/env@14.2.35':
- resolution: {integrity: sha512-DuhvCtj4t9Gwrx80dmz2F4t/zKQ4ktN8WrMwOuVzkJfBilwAwGr6v16M5eI8yCuZ63H9TTuEU09Iu2HqkzFPVQ==}
+ '@next/env@14.2.5':
+ resolution: {integrity: sha512-/zZGkrTOsraVfYjGP8uM0p6r0BDT6xWpkjdVbcz66PJVSpwXX3yNiRycxAuDfBKGWBrZBXRuK/YVlkNgxHGwmA==}
'@next/eslint-plugin-next@14.2.14':
resolution: {integrity: sha512-kV+OsZ56xhj0rnTn6HegyTGkoa16Mxjrpk7pjWumyB2P8JVQb8S9qtkjy/ye0GnTr4JWtWG4x/2qN40lKZ3iVQ==}
@@ -2193,56 +2203,56 @@ packages:
'@mdx-js/react':
optional: true
- '@next/swc-darwin-arm64@14.2.33':
- resolution: {integrity: sha512-HqYnb6pxlsshoSTubdXKu15g3iivcbsMXg4bYpjL2iS/V6aQot+iyF4BUc2qA/J/n55YtvE4PHMKWBKGCF/+wA==}
+ '@next/swc-darwin-arm64@14.2.5':
+ resolution: {integrity: sha512-/9zVxJ+K9lrzSGli1///ujyRfon/ZneeZ+v4ptpiPoOU+GKZnm8Wj8ELWU1Pm7GHltYRBklmXMTUqM/DqQ99FQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
- '@next/swc-darwin-x64@14.2.33':
- resolution: {integrity: sha512-8HGBeAE5rX3jzKvF593XTTFg3gxeU4f+UWnswa6JPhzaR6+zblO5+fjltJWIZc4aUalqTclvN2QtTC37LxvZAA==}
+ '@next/swc-darwin-x64@14.2.5':
+ resolution: {integrity: sha512-vXHOPCwfDe9qLDuq7U1OYM2wUY+KQ4Ex6ozwsKxp26BlJ6XXbHleOUldenM67JRyBfVjv371oneEvYd3H2gNSA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
- '@next/swc-linux-arm64-gnu@14.2.33':
- resolution: {integrity: sha512-JXMBka6lNNmqbkvcTtaX8Gu5by9547bukHQvPoLe9VRBx1gHwzf5tdt4AaezW85HAB3pikcvyqBToRTDA4DeLw==}
+ '@next/swc-linux-arm64-gnu@14.2.5':
+ resolution: {integrity: sha512-vlhB8wI+lj8q1ExFW8lbWutA4M2ZazQNvMWuEDqZcuJJc78iUnLdPPunBPX8rC4IgT6lIx/adB+Cwrl99MzNaA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
- '@next/swc-linux-arm64-musl@14.2.33':
- resolution: {integrity: sha512-Bm+QulsAItD/x6Ih8wGIMfRJy4G73tu1HJsrccPW6AfqdZd0Sfm5Imhgkgq2+kly065rYMnCOxTBvmvFY1BKfg==}
+ '@next/swc-linux-arm64-musl@14.2.5':
+ resolution: {integrity: sha512-NpDB9NUR2t0hXzJJwQSGu1IAOYybsfeB+LxpGsXrRIb7QOrYmidJz3shzY8cM6+rO4Aojuef0N/PEaX18pi9OA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
- '@next/swc-linux-x64-gnu@14.2.33':
- resolution: {integrity: sha512-FnFn+ZBgsVMbGDsTqo8zsnRzydvsGV8vfiWwUo1LD8FTmPTdV+otGSWKc4LJec0oSexFnCYVO4hX8P8qQKaSlg==}
+ '@next/swc-linux-x64-gnu@14.2.5':
+ resolution: {integrity: sha512-8XFikMSxWleYNryWIjiCX+gU201YS+erTUidKdyOVYi5qUQo/gRxv/3N1oZFCgqpesN6FPeqGM72Zve+nReVXQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
- '@next/swc-linux-x64-musl@14.2.33':
- resolution: {integrity: sha512-345tsIWMzoXaQndUTDv1qypDRiebFxGYx9pYkhwY4hBRaOLt8UGfiWKr9FSSHs25dFIf8ZqIFaPdy5MljdoawA==}
+ '@next/swc-linux-x64-musl@14.2.5':
+ resolution: {integrity: sha512-6QLwi7RaYiQDcRDSU/os40r5o06b5ue7Jsk5JgdRBGGp8l37RZEh9JsLSM8QF0YDsgcosSeHjglgqi25+m04IQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
- '@next/swc-win32-arm64-msvc@14.2.33':
- resolution: {integrity: sha512-nscpt0G6UCTkrT2ppnJnFsYbPDQwmum4GNXYTeoTIdsmMydSKFz9Iny2jpaRupTb+Wl298+Rh82WKzt9LCcqSQ==}
+ '@next/swc-win32-arm64-msvc@14.2.5':
+ resolution: {integrity: sha512-1GpG2VhbspO+aYoMOQPQiqc/tG3LzmsdBH0LhnDS3JrtDx2QmzXe0B6mSZZiN3Bq7IOMXxv1nlsjzoS1+9mzZw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
- '@next/swc-win32-ia32-msvc@14.2.33':
- resolution: {integrity: sha512-pc9LpGNKhJ0dXQhZ5QMmYxtARwwmWLpeocFmVG5Z0DzWq5Uf0izcI8tLc+qOpqxO1PWqZ5A7J1blrUIKrIFc7Q==}
+ '@next/swc-win32-ia32-msvc@14.2.5':
+ resolution: {integrity: sha512-Igh9ZlxwvCDsu6438FXlQTHlRno4gFpJzqPjSIBZooD22tKeI4fE/YMRoHVJHmrQ2P5YL1DoZ0qaOKkbeFWeMg==}
engines: {node: '>= 10'}
cpu: [ia32]
os: [win32]
- '@next/swc-win32-x64-msvc@14.2.33':
- resolution: {integrity: sha512-nOjfZMy8B94MdisuzZo9/57xuFVLHJaDj5e/xrduJp9CV2/HrfxTRH2fbyLe+K9QT41WBLUd4iXX3R7jBp0EUg==}
+ '@next/swc-win32-x64-msvc@14.2.5':
+ resolution: {integrity: sha512-tEQ7oinq1/CjSG9uSTerca3v4AZ+dFa+4Yu6ihaG8Ud8ddqLQgFGcnwYls13H5X5CPDPZJdYxyeMui6muOLd4g==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
@@ -3466,6 +3476,14 @@ packages:
ajv:
optional: true
+ ajv-formats@3.0.1:
+ resolution: {integrity: sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==}
+ peerDependencies:
+ ajv: ^8.0.0
+ peerDependenciesMeta:
+ ajv:
+ optional: true
+
ajv-keywords@3.5.2:
resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==}
peerDependencies:
@@ -3652,6 +3670,9 @@ packages:
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
+ atomically@2.0.3:
+ resolution: {integrity: sha512-kU6FmrwZ3Lx7/7y3hPS5QnbJfaohcIul5fGqf7ok+4KklIEk9tJ0C2IQPdacSbVUWv6zVHXEBWoWd6NrVMT7Cw==}
+
autoprefixer@10.4.20:
resolution: {integrity: sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==}
engines: {node: ^10 || ^12 || >=14}
@@ -3687,8 +3708,8 @@ packages:
base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
- baseai@0.9.44:
- resolution: {integrity: sha512-NhRxhWEBW/pmaODcCMFAXNZgM0XVQHrUsEeUkmXuHV4UXSfiJPg/W1r6yDwGq9bCz/pqPNXdHlU9vy0+0GsOTw==}
+ baseai@0.9.19:
+ resolution: {integrity: sha512-3V1HQYXNJIzsogCOYYvZXwx4QnL9PbhDqiArflxkaD4zHNdEUNsgYZ00XuT+73a4WushkqnnToYSzQss99nGLQ==}
hasBin: true
basic-auth@2.0.1:
@@ -3845,10 +3866,6 @@ packages:
resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==}
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
- chalk@5.6.0:
- resolution: {integrity: sha512-46QrSQFyVSEyYAgQ22hQ+zDa60YHA4fBstHmtSApj1Y5vKtG27fWowW03jCk5KcbXEWPZUIR894aARCA/G1kfQ==}
- engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
-
character-entities-html4@2.1.0:
resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==}
@@ -4057,6 +4074,10 @@ packages:
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
+ conf@13.0.1:
+ resolution: {integrity: sha512-l9Uwc9eOnz39oADzGO2cSBDi7siv8lwO+31ocQ2nOJijnDiW3pxqm9VV10DPYUO28wW83DjABoUqY1nfHRR2hQ==}
+ engines: {node: '>=18'}
+
confbox@0.1.7:
resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==}
@@ -4226,6 +4247,10 @@ packages:
dayjs@1.11.13:
resolution: {integrity: sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==}
+ debounce-fn@6.0.0:
+ resolution: {integrity: sha512-rBMW+F2TXryBwB54Q0d8drNEI+TfoS9JpNTAoVpukbWEhjXQq4rySFYLaqXMFXwdv61Zb2OHtj5bviSoimqxRQ==}
+ engines: {node: '>=18'}
+
debug@2.6.9:
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
peerDependencies:
@@ -4407,6 +4432,10 @@ packages:
dom-accessibility-api@0.5.16:
resolution: {integrity: sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==}
+ dot-prop@9.0.0:
+ resolution: {integrity: sha512-1gxPBJpI/pcjQhKgIU91II6Wkay+dLcN3M6rf2uwP8hRur3HtQXjVrdAK3sjC0piaEuxzMwjXChcETiJl47lAQ==}
+ engines: {node: '>=18'}
+
dotenv@16.0.3:
resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==}
engines: {node: '>=12'}
@@ -4482,6 +4511,10 @@ packages:
resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
engines: {node: '>=6'}
+ env-paths@3.0.0:
+ resolution: {integrity: sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+
environment@1.1.0:
resolution: {integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==}
engines: {node: '>=18'}
@@ -6056,6 +6089,9 @@ packages:
json-schema-traverse@1.0.0:
resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
+ json-schema-typed@8.0.1:
+ resolution: {integrity: sha512-XQmWYj2Sm4kn4WeTYvmpKEbyPsL7nBsb647c7pMe6l02/yx2+Jfc4dT6UZkEXnIUb5LhD55r2HPsJ1milQ4rDg==}
+
json-stable-stringify-without-jsonify@1.0.1:
resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
@@ -6799,8 +6835,8 @@ packages:
react: '*'
react-dom: '*'
- next@14.2.35:
- resolution: {integrity: sha512-KhYd2Hjt/O1/1aZVX3dCwGXM1QmOV4eNM2UTacK5gipDdPN/oHHK/4oVGy7X8GMfPMsUTUEmGlsy0EY1YGAkig==}
+ next@14.2.5:
+ resolution: {integrity: sha512-0f8aRfBVL+mpzfBjYfQuLWh2WyAwtJXCRfkPF4UJ5qd2YwrHczsrSzXU4tRMV0OAxR8ZJZWPFn6uhSC56UTsLA==}
engines: {node: '>=18.17.0'}
hasBin: true
peerDependencies:
@@ -8077,7 +8113,6 @@ packages:
source-map@0.8.0-beta.0:
resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==}
engines: {node: '>= 8'}
- deprecated: The work that was done in this beta branch won't be included in future versions
sourcemap-codec@1.4.8:
resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==}
@@ -8252,6 +8287,9 @@ packages:
strip-literal@2.1.0:
resolution: {integrity: sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==}
+ stubborn-fs@1.2.5:
+ resolution: {integrity: sha512-H2N9c26eXjzL/S/K+i/RHHcFanE74dptvvjM8iwzwbVcWY/zjBbgRqF3K0DY4+OD+uTTASTBvDoxPDaPN02D7g==}
+
style-to-object@0.4.4:
resolution: {integrity: sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==}
@@ -8650,6 +8688,10 @@ packages:
uid-promise@1.0.0:
resolution: {integrity: sha512-R8375j0qwXyIu/7R0tjdF06/sElHqbmdmWC9M2qQHpEVbvE4I5+38KJI7LUUmQMp7NVq4tKHiBMkT0NFM453Ig==}
+ uint8array-extras@1.4.0:
+ resolution: {integrity: sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ==}
+ engines: {node: '>=18'}
+
ultrahtml@1.5.3:
resolution: {integrity: sha512-GykOvZwgDWZlTQMtp5jrD4BVL+gNn2NVlVafjcFUJ7taY20tqYdwdoWBFy6GBJsNTZe1GkGPkSl5knQAjtgceg==}
@@ -9144,6 +9186,9 @@ packages:
whatwg-url@7.1.0:
resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==}
+ when-exit@2.1.3:
+ resolution: {integrity: sha512-uVieSTccFIr/SFQdFWN/fFaQYmV37OKtuaGphMAzi4DmmUlrvRBJW5WSLkHyjNQY/ePJMz3LoiX9R3yy1Su6Hw==}
+
which-boxed-primitive@1.0.2:
resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==}
@@ -9589,10 +9634,10 @@ snapshots:
transitivePeerDependencies:
- supports-color
- '@astrojs/vercel@7.8.1(astro@4.15.10(@types/node@22.7.4)(rollup@4.24.0)(terser@5.34.1)(typescript@5.6.2))(next@14.2.35(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
+ '@astrojs/vercel@7.8.1(astro@4.15.10(@types/node@22.7.4)(rollup@4.24.0)(terser@5.34.1)(typescript@5.6.2))(next@14.2.5(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
dependencies:
'@astrojs/internal-helpers': 0.4.1
- '@vercel/analytics': 1.3.1(next@14.2.35(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
+ '@vercel/analytics': 1.3.1(next@14.2.5(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
'@vercel/edge': 1.1.2
'@vercel/nft': 0.27.4
astro: 4.15.10(@types/node@22.7.4)(rollup@4.24.0)(terser@5.34.1)(typescript@5.6.2)
@@ -9846,7 +9891,7 @@ snapshots:
'@babel/helper-validator-identifier': 7.25.7
to-fast-properties: 2.0.0
- '@baseai/core@0.9.43(react@18.3.1)(zod@3.23.8)':
+ '@baseai/core@0.9.19(react@18.3.1)(zod@3.23.8)':
dependencies:
openai: 4.67.1(zod@3.23.8)
optionalDependencies:
@@ -10797,7 +10842,7 @@ snapshots:
'@microsoft/tsdoc@0.14.2': {}
- '@next/env@14.2.35': {}
+ '@next/env@14.2.5': {}
'@next/eslint-plugin-next@14.2.14':
dependencies:
@@ -10814,31 +10859,31 @@ snapshots:
'@mdx-js/loader': 3.0.1(webpack@5.95.0(esbuild@0.17.19))
'@mdx-js/react': 3.0.1(@types/react@18.3.11)(react@18.3.1)
- '@next/swc-darwin-arm64@14.2.33':
+ '@next/swc-darwin-arm64@14.2.5':
optional: true
- '@next/swc-darwin-x64@14.2.33':
+ '@next/swc-darwin-x64@14.2.5':
optional: true
- '@next/swc-linux-arm64-gnu@14.2.33':
+ '@next/swc-linux-arm64-gnu@14.2.5':
optional: true
- '@next/swc-linux-arm64-musl@14.2.33':
+ '@next/swc-linux-arm64-musl@14.2.5':
optional: true
- '@next/swc-linux-x64-gnu@14.2.33':
+ '@next/swc-linux-x64-gnu@14.2.5':
optional: true
- '@next/swc-linux-x64-musl@14.2.33':
+ '@next/swc-linux-x64-musl@14.2.5':
optional: true
- '@next/swc-win32-arm64-msvc@14.2.33':
+ '@next/swc-win32-arm64-msvc@14.2.5':
optional: true
- '@next/swc-win32-ia32-msvc@14.2.33':
+ '@next/swc-win32-ia32-msvc@14.2.5':
optional: true
- '@next/swc-win32-x64-msvc@14.2.33':
+ '@next/swc-win32-x64-msvc@14.2.5':
optional: true
'@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1':
@@ -11167,7 +11212,7 @@ snapshots:
'@radix-ui/rect@1.1.0': {}
- '@remix-run/dev@2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.79.0)':
+ '@remix-run/dev@2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.78.12)':
dependencies:
'@babel/core': 7.25.7
'@babel/generator': 7.25.7
@@ -11593,7 +11638,7 @@ snapshots:
'@types/node-fetch@2.6.11':
dependencies:
- '@types/node': 22.7.4
+ '@types/node': 20.16.10
form-data: 4.0.0
'@types/node-forge@1.3.11':
@@ -11956,11 +12001,11 @@ snapshots:
'@vanilla-extract/private@1.0.6': {}
- '@vercel/analytics@1.3.1(next@14.2.35(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
+ '@vercel/analytics@1.3.1(next@14.2.5(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
dependencies:
server-only: 0.0.1
optionalDependencies:
- next: 14.2.35(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ next: 14.2.5(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react: 18.3.1
'@vercel/build-utils@8.4.5': {}
@@ -12109,9 +12154,9 @@ snapshots:
- encoding
- supports-color
- '@vercel/remix@2.12.0(@remix-run/dev@2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.79.0))(@remix-run/node@2.12.0(typescript@5.6.2))(@remix-run/server-runtime@2.12.0(typescript@5.6.2))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
+ '@vercel/remix@2.12.0(@remix-run/dev@2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.78.12))(@remix-run/node@2.12.0(typescript@5.6.2))(@remix-run/server-runtime@2.12.1(typescript@5.6.2))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
dependencies:
- '@remix-run/dev': 2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.79.0)
+ '@remix-run/dev': 2.12.0(@remix-run/react@2.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.6.2))(@remix-run/serve@2.12.0(typescript@5.6.2))(@types/node@22.7.4)(terser@5.34.1)(ts-node@10.9.1(@types/node@22.7.4)(typescript@5.6.2))(typescript@5.6.2)(vite@5.4.8(@types/node@22.7.4)(terser@5.34.1))(wrangler@3.78.12)
'@remix-run/node': 2.12.0(typescript@5.6.2)
'@remix-run/server-runtime': 2.12.0(typescript@5.6.2)
'@vercel/static-config': 3.0.0
@@ -12424,6 +12469,10 @@ snapshots:
optionalDependencies:
ajv: 8.17.1
+ ajv-formats@3.0.1(ajv@8.17.1):
+ optionalDependencies:
+ ajv: 8.17.1
+
ajv-keywords@3.5.2(ajv@6.12.6):
dependencies:
ajv: 6.12.6
@@ -12711,6 +12760,11 @@ snapshots:
asynckit@0.4.0: {}
+ atomically@2.0.3:
+ dependencies:
+ stubborn-fs: 1.2.5
+ when-exit: 2.1.3
+
autoprefixer@10.4.20(postcss@8.4.47):
dependencies:
browserslist: 4.24.0
@@ -12739,7 +12793,7 @@ snapshots:
base64-js@1.5.1: {}
- baseai@0.9.44(@types/node@20.16.10)(typescript@5.6.2):
+ baseai@0.9.19(@types/node@20.16.10)(typescript@5.6.2):
dependencies:
'@antfu/ni': 0.23.0
'@clack/core': 0.3.4
@@ -12748,7 +12802,7 @@ snapshots:
'@hono/zod-openapi': 0.16.2(hono@4.6.3)(zod@3.23.8)
'@sindresorhus/slugify': 2.2.1
camelcase: 8.0.0
- chalk: 5.6.0
+ chalk: 5.3.0
cli-alerts: 2.0.0
cli-handle-error: 4.4.0
cli-handle-unhandled: 1.1.1
@@ -12756,11 +12810,11 @@ snapshots:
cli-table3: 0.6.5
cli-welcome: 3.0.0
compute-cosine-similarity: 1.1.0
+ conf: 13.0.1
cosmiconfig: 9.0.0(typescript@5.6.2)
cosmiconfig-typescript-loader: 5.0.0(@types/node@20.16.10)(cosmiconfig@9.0.0(typescript@5.6.2))(typescript@5.6.2)
dotenv: 16.4.5
execa: 9.4.0
- fast-glob: 3.3.2
figures: 6.1.0
get-package-json-file: 2.0.0
hono: 4.6.3
@@ -12787,7 +12841,7 @@ snapshots:
- supports-color
- typescript
- baseai@0.9.44(@types/node@22.7.4)(typescript@5.6.2):
+ baseai@0.9.19(@types/node@22.7.4)(typescript@5.6.2):
dependencies:
'@antfu/ni': 0.23.0
'@clack/core': 0.3.4
@@ -12796,7 +12850,7 @@ snapshots:
'@hono/zod-openapi': 0.16.2(hono@4.6.3)(zod@3.23.8)
'@sindresorhus/slugify': 2.2.1
camelcase: 8.0.0
- chalk: 5.6.0
+ chalk: 5.3.0
cli-alerts: 2.0.0
cli-handle-error: 4.4.0
cli-handle-unhandled: 1.1.1
@@ -12804,11 +12858,11 @@ snapshots:
cli-table3: 0.6.5
cli-welcome: 3.0.0
compute-cosine-similarity: 1.1.0
+ conf: 13.0.1
cosmiconfig: 9.0.0(typescript@5.6.2)
cosmiconfig-typescript-loader: 5.0.0(@types/node@22.7.4)(cosmiconfig@9.0.0(typescript@5.6.2))(typescript@5.6.2)
dotenv: 16.4.5
execa: 9.4.0
- fast-glob: 3.3.2
figures: 6.1.0
get-package-json-file: 2.0.0
hono: 4.6.3
@@ -12880,7 +12934,7 @@ snapshots:
dependencies:
ansi-align: 3.0.1
camelcase: 8.0.0
- chalk: 5.6.0
+ chalk: 5.3.0
cli-boxes: 3.0.0
string-width: 7.2.0
type-fest: 4.26.1
@@ -13010,7 +13064,7 @@ snapshots:
chalk-template@1.1.0:
dependencies:
- chalk: 5.6.0
+ chalk: 5.3.0
chalk@2.4.2:
dependencies:
@@ -13030,8 +13084,6 @@ snapshots:
chalk@5.3.0: {}
- chalk@5.6.0: {}
-
character-entities-html4@2.1.0: {}
character-entities-legacy@3.0.0: {}
@@ -13122,7 +13174,7 @@ snapshots:
cli-meow-help@4.0.0:
dependencies:
- chalk: 5.6.0
+ chalk: 5.3.0
chalk-template: 1.1.0
cli-table3: 0.6.5
@@ -13141,7 +13193,7 @@ snapshots:
cli-welcome@3.0.0:
dependencies:
- chalk: 5.6.0
+ chalk: 5.3.0
clear-any-console: 1.16.2
client-only@0.0.1: {}
@@ -13243,6 +13295,18 @@ snapshots:
concat-map@0.0.1: {}
+ conf@13.0.1:
+ dependencies:
+ ajv: 8.17.1
+ ajv-formats: 3.0.1(ajv@8.17.1)
+ atomically: 2.0.3
+ debounce-fn: 6.0.0
+ dot-prop: 9.0.0
+ env-paths: 3.0.0
+ json-schema-typed: 8.0.1
+ semver: 7.6.3
+ uint8array-extras: 1.4.0
+
confbox@0.1.7: {}
consola@3.2.3: {}
@@ -13393,6 +13457,10 @@ snapshots:
dayjs@1.11.13: {}
+ debounce-fn@6.0.0:
+ dependencies:
+ mimic-function: 5.0.1
+
debug@2.6.9:
dependencies:
ms: 2.0.0
@@ -13538,6 +13606,10 @@ snapshots:
dom-accessibility-api@0.5.16: {}
+ dot-prop@9.0.0:
+ dependencies:
+ type-fest: 4.26.1
+
dotenv@16.0.3: {}
dotenv@16.4.5: {}
@@ -13608,6 +13680,8 @@ snapshots:
env-paths@2.2.1: {}
+ env-paths@3.0.0: {}
+
environment@1.1.0: {}
err-code@2.0.3: {}
@@ -15568,6 +15642,8 @@ snapshots:
json-schema-traverse@1.0.0: {}
+ json-schema-typed@8.0.1: {}
+
json-stable-stringify-without-jsonify@1.0.1: {}
json5@1.0.2:
@@ -15707,7 +15783,7 @@ snapshots:
log-symbols@6.0.0:
dependencies:
- chalk: 5.6.0
+ chalk: 5.3.0
is-unicode-supported: 1.3.0
log-symbols@7.0.0:
@@ -16773,15 +16849,15 @@ snapshots:
- '@types/react'
- supports-color
- next-themes@0.2.1(next@14.2.35(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+ next-themes@0.2.1(next@14.2.5(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
- next: 14.2.35(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ next: 14.2.5(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react: 18.3.1
react-dom: 18.3.1(react@18.3.1)
- next@14.2.35(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+ next@14.2.5(@babel/core@7.25.7)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
- '@next/env': 14.2.35
+ '@next/env': 14.2.5
'@swc/helpers': 0.5.5
busboy: 1.6.0
caniuse-lite: 1.0.30001666
@@ -16791,23 +16867,23 @@ snapshots:
react-dom: 18.3.1(react@18.3.1)
styled-jsx: 5.1.1(@babel/core@7.25.7)(react@18.3.1)
optionalDependencies:
- '@next/swc-darwin-arm64': 14.2.33
- '@next/swc-darwin-x64': 14.2.33
- '@next/swc-linux-arm64-gnu': 14.2.33
- '@next/swc-linux-arm64-musl': 14.2.33
- '@next/swc-linux-x64-gnu': 14.2.33
- '@next/swc-linux-x64-musl': 14.2.33
- '@next/swc-win32-arm64-msvc': 14.2.33
- '@next/swc-win32-ia32-msvc': 14.2.33
- '@next/swc-win32-x64-msvc': 14.2.33
+ '@next/swc-darwin-arm64': 14.2.5
+ '@next/swc-darwin-x64': 14.2.5
+ '@next/swc-linux-arm64-gnu': 14.2.5
+ '@next/swc-linux-arm64-musl': 14.2.5
+ '@next/swc-linux-x64-gnu': 14.2.5
+ '@next/swc-linux-x64-musl': 14.2.5
+ '@next/swc-win32-arm64-msvc': 14.2.5
+ '@next/swc-win32-ia32-msvc': 14.2.5
+ '@next/swc-win32-x64-msvc': 14.2.5
transitivePeerDependencies:
- '@babel/core'
- babel-plugin-macros
optional: true
- next@14.2.35(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+ next@14.2.5(@playwright/test@1.47.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
- '@next/env': 14.2.35
+ '@next/env': 14.2.5
'@swc/helpers': 0.5.5
busboy: 1.6.0
caniuse-lite: 1.0.30001666
@@ -16817,15 +16893,15 @@ snapshots:
react-dom: 18.3.1(react@18.3.1)
styled-jsx: 5.1.1(@babel/core@7.25.7)(react@18.3.1)
optionalDependencies:
- '@next/swc-darwin-arm64': 14.2.33
- '@next/swc-darwin-x64': 14.2.33
- '@next/swc-linux-arm64-gnu': 14.2.33
- '@next/swc-linux-arm64-musl': 14.2.33
- '@next/swc-linux-x64-gnu': 14.2.33
- '@next/swc-linux-x64-musl': 14.2.33
- '@next/swc-win32-arm64-msvc': 14.2.33
- '@next/swc-win32-ia32-msvc': 14.2.33
- '@next/swc-win32-x64-msvc': 14.2.33
+ '@next/swc-darwin-arm64': 14.2.5
+ '@next/swc-darwin-x64': 14.2.5
+ '@next/swc-linux-arm64-gnu': 14.2.5
+ '@next/swc-linux-arm64-musl': 14.2.5
+ '@next/swc-linux-x64-gnu': 14.2.5
+ '@next/swc-linux-x64-musl': 14.2.5
+ '@next/swc-win32-arm64-msvc': 14.2.5
+ '@next/swc-win32-ia32-msvc': 14.2.5
+ '@next/swc-win32-x64-msvc': 14.2.5
'@playwright/test': 1.47.2
transitivePeerDependencies:
- '@babel/core'
@@ -17080,7 +17156,7 @@ snapshots:
ora@8.1.0:
dependencies:
- chalk: 5.6.0
+ chalk: 5.3.0
cli-cursor: 5.0.0
cli-spinners: 2.9.2
is-interactive: 2.0.0
@@ -18398,6 +18474,8 @@ snapshots:
dependencies:
js-tokens: 9.0.0
+ stubborn-fs@1.2.5: {}
+
style-to-object@0.4.4:
dependencies:
inline-style-parser: 0.1.1
@@ -18884,6 +18962,8 @@ snapshots:
uid-promise@1.0.0: {}
+ uint8array-extras@1.4.0: {}
+
ultrahtml@1.5.3: {}
unbox-primitive@1.0.2:
@@ -19493,6 +19573,8 @@ snapshots:
tr46: 1.0.1
webidl-conversions: 4.0.2
+ when-exit@2.1.3: {}
+
which-boxed-primitive@1.0.2:
dependencies:
is-bigint: 1.0.4