Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
390 changes: 381 additions & 9 deletions Cargo.lock

Large diffs are not rendered by default.

13 changes: 9 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
[workspace]
resolver = "2"
members = [
"rig-core", "rig-lancedb",
"rig-mongodb", "rig-neo4j",
"rig-qdrant", "rig-core/rig-core-derive",
"rig-sqlite", "rig-eternalai"
"rig-core",
"rig-lancedb",
"rig-mongodb",
"rig-neo4j",
"rig-postgres",
"rig-qdrant",
"rig-core/rig-core-derive",
"rig-sqlite",
"rig-eternalai",
]
36 changes: 36 additions & 0 deletions rig-postgres/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Crate manifest for rig-postgres: a PostgreSQL (pgvector) backed vector
# store implementation for the rig framework.
[package]
name = "rig-postgres"
version = "0.1.0"
edition = "2021"
description = "PostgreSQL-based vector store implementation for the rig framework"
license = "MIT"
readme = "README.md"
repository = "https://github.com/0xPlaygrounds/rig"

[dependencies]
# Core rig framework; the "derive" feature provides #[derive(Embed)].
rig-core = { path = "../rig-core", version = "0.6.0", features = ["derive"] }
serde = { version = "1.0.215", features = ["derive"] }
serde_json = "1.0.133"

tracing = "0.1.40"
# Async Postgres driver; uuid/json features match the example schema
# (uuid ids, jsonb document column).
sqlx = { version = "0.8.3", features = [
    "runtime-tokio",
    "postgres",
    "uuid",
    "json",
] }
# pgvector type support wired into sqlx.
pgvector = { version = "0.4", features = ["sqlx"] }
uuid = { version = "1.11.0", features = ["v4", "serde"] }

[dev-dependencies]
anyhow = "1.0.94"
log = "0.4.22"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tokio-test = "0.4.4"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread"] }


# Integration tests spin up a throwaway Postgres container.
testcontainers = "0.23.1"
httpmock = "0.7.0"

# Loads DATABASE_URL / API keys from a local .env file in examples/tests.
dotenvy = "0.15.7"
7 changes: 7 additions & 0 deletions rig-postgres/LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
Copyright (c) 2024, Playgrounds Analytics Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
36 changes: 36 additions & 0 deletions rig-postgres/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Makefile
#
# Developer convenience targets for the rig-postgres crate.

# Allow up to two recipe jobs to run in parallel.
MAKEFLAGS += -j2
# Load local environment overrides (if a .env file exists) and export all
# make variables to recipe sub-shells.
-include .env
export

# Current branch, working directory, and the default branch of the
# `upstream` remote (parsed from `git remote show`); used by gitRebase.
CURRENT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
CURRENT_PATH := $(shell pwd)
DEFAULT_BRANCH := $(shell git remote show upstream | sed -n '/HEAD branch/s/.*: //p')
# NOTE(review): AMM is defined but not used by any target below — confirm
# it is still needed.
AMM := ${HOME}/amm

# Sync the default branch with upstream, rebase the current branch on top
# of it, then force-push the rebased branch.
.PHONY: gitRebase
gitRebase:
	git checkout $(DEFAULT_BRANCH) && \
	git pull upstream $(DEFAULT_BRANCH) && \
	git push origin $(DEFAULT_BRANCH) && \
	git checkout $(CURRENT_BRANCH) && \
	git rebase $(DEFAULT_BRANCH)
	git push --force origin $(CURRENT_BRANCH)

# Amend the last commit with all local changes and force-push.
# NOTE(review): target name is misspelled ("Ammend" -> "Amend"); kept
# unchanged to avoid breaking existing invocations.
.PHONY: gitAmmend
gitAmmend:
	git add . && git commit --amend --no-edit && git push --force origin $(CURRENT_BRANCH)


# Re-run the test suite on file changes (requires cargo-watch).
.PHONY: test
test:
	cargo watch -qcx 'test'

# Apply clippy's auto-fixable lint suggestions.
.PHONY: fix
fix:
	cargo clippy --fix

# Run the vector search example (needs DATABASE_URL and an OpenAI key).
.PHONY: run
run:
	cargo run --example vector_search_postgres
134 changes: 134 additions & 0 deletions rig-postgres/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
<div style="display: flex; align-items: center; justify-content: center;">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="../img/rig_logo_dark.svg">
<source media="(prefers-color-scheme: light)" srcset="../img/rig_logo.svg">
<img src="../img/rig_logo.svg" width="200" alt="Rig logo">
</picture>
<span style="font-size: 48px; margin: 0 20px; font-weight: regular; font-family: Open Sans, sans-serif;"> + </span>
<picture>
<source srcset="https://www.postgresql.org/media/img/about/press/elephant.png">
<img src="https://www.postgresql.org/media/img/about/press/elephant.png" width="200" alt="Postgres logo">
</picture>
</div>

<br><br>

## Rig-postgres

This companion crate implements a Rig vector store based on PostgreSQL.

## Usage

Add the companion crate to your `Cargo.toml`, along with the rig-core crate:

```toml
[dependencies]
rig-core = "0.6.0"
rig-postgres = "0.1.0"
```

You can also run `cargo add rig-core rig-postgres` to add the most recent versions of the dependencies to your project.

## PostgreSQL setup

The crate utilizes [pgvector](https://github.com/pgvector/pgvector) extension, which is available for PostgreSQL version 13 and later. Use any of the [official](https://www.postgresql.org/download/) or alternative methods to install psql.

You can install Postgres using Docker:

```sh
docker pull pgvector/pgvector:pg17

docker run -e POSTGRES_USER=myuser \
-e POSTGRES_PASSWORD=mypassword \
-e POSTGRES_DB=mydatabase \
--name my_postgres \
-p 5432:5432 \
-d pgvector/pgvector:pg17
```

Now you can configure Postgres. The recommended way is to use sqlx with migrations (you can find an example inside the integration tests folder).

Example sql:

```sql
-- ensure PgVector extension is installed
CREATE EXTENSION IF NOT EXISTS vector;

-- create table with embeddings using 1536 dimensions (based on OpenAI model text-embedding-3-small)
CREATE TABLE documents (
id uuid DEFAULT gen_random_uuid(), -- we can have repeated entries
document jsonb NOT NULL,
embedded_text text NOT NULL,
embedding vector(1536)
);

-- create index on embeddings
CREATE INDEX IF NOT EXISTS document_embeddings_idx ON documents
USING hnsw(embedding vector_cosine_ops); -- recommended for text embeddings

```

You can change the table name and the number of dimensions but keep the same fields schema.

You can use different indexes depending on the type of distance method you want to use; check the [PgVector documentation](https://github.com/pgvector/pgvector?tab=readme-ov-file#querying).

## Usage

Declare the database URL:

```sh
export DATABASE_URL="postgres://myuser:mypassword@localhost:5432/mydatabase"
```

Define the document you want to index, it has to implement `Embed`, `Serialize` and `Deserialize`.

> Note: you can index different types of documents in the same table.

Example:

```rust
#[derive(Embed, Clone, Serialize, Deserialize, Debug)]
pub struct Product {
name: String,
category: String,
#[embed]
description: String,
price: f32
}
```

Example usage

```rust
// Create OpenAI client
let openai_client = rig::providers::openai::Client::from_env();
let model = openai_client.embedding_model(rig::providers::openai::TEXT_EMBEDDING_3_SMALL);

// connect to Postgres
let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL not set");
let pool = PgPoolOptions::new().connect(&database_url).await?;

// run migrations (optional but recommended)
sqlx::migrate!("./migrations").run(&pool).await?;

// init documents
let products: Vec<Product> = ...;

let documents = EmbeddingsBuilder::new(model.clone())
.documents(products)
.unwrap()
.build()
.await?;

// Create your index
let vector_store = PostgresVectorStore::with_defaults(model, pool);

// store documents
vector_store.insert_documents(documents).await?;

// retrieve embeddings
let results = vector_store.top_n::<Product>("Which phones have more than 16Gb and support 5G", 50).await?;

...

```
14 changes: 14 additions & 0 deletions rig-postgres/examples/migrations/001_setup.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- Migration 001: base schema for the rig-postgres example.

-- ensure extension is installed (pgvector provides the `vector` type)
CREATE EXTENSION IF NOT EXISTS vector;

-- create table with embeddings using 1536 dimensions (text-embedding-3-small)
-- NOTE: `id` deliberately has no PRIMARY KEY / UNIQUE constraint, so one
-- source document may produce several embedding rows.
CREATE TABLE documents (
    id uuid DEFAULT gen_random_uuid(), -- we can have repeated entries
    document jsonb NOT NULL,           -- the serialized source document
    embedded_text text NOT NULL,       -- the text the embedding was computed from
    embedding vector(1536)
);

-- create index on embeddings (HNSW + cosine distance, a good default for
-- text embeddings)
CREATE INDEX IF NOT EXISTS document_embeddings_idx ON documents
USING hnsw(embedding vector_cosine_ops);
101 changes: 101 additions & 0 deletions rig-postgres/examples/vector_search_postgres.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
use rig::{embeddings::EmbeddingsBuilder, vector_store::VectorStoreIndex, Embed};
use rig_postgres::PostgresVectorStore;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPoolOptions;

// A vector search needs to be performed on the `definitions` field, so we
// derive the `Embed` trait for `WordDefinition` and tag that field with
// `#[embed]`.
// We do not store the definitions in the database: the struct still derives
// `Serialize`/`Deserialize`, but the field is excluded from (de)serialization
// with `#[serde(skip)]` — it exists only to create the embeddings.
#[derive(Embed, Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Default)]
struct WordDefinition {
    id: String,
    word: String,
    #[serde(skip)] // not serialized; used only to create the embeddings
    #[embed]
    definitions: Vec<String>,
}

/// Renders a `WordDefinition` as just its `word`, which keeps the example's
/// search-result output compact.
impl std::fmt::Display for WordDefinition {
    fn fmt(&self, out: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        out.write_str(&self.word)
    }
}

/// Example: embed a set of word definitions with OpenAI and run a vector
/// similarity search against a Postgres (pgvector) store.
///
/// Requires `DATABASE_URL` (and the OpenAI credentials read by
/// `Client::from_env`) in the environment or in a `.env` file.
#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // load environment variables from .env file (ignored if absent)
    dotenvy::dotenv().ok();

    // Create OpenAI client and the embedding model used for both indexing
    // and querying (text-embedding-3-small -> 1536-dimensional vectors,
    // matching the migration's `vector(1536)` column).
    let openai_client = rig::providers::openai::Client::from_env();
    let model = openai_client.embedding_model(rig::providers::openai::TEXT_EMBEDDING_3_SMALL);

    // setup Postgres connection pool; DATABASE_URL must be set
    let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL not set");
    let pool = PgPoolOptions::new()
        .max_connections(50)
        .idle_timeout(std::time::Duration::from_secs(5))
        .connect(&database_url)
        .await
        .expect("Failed to create postgres pool");

    // make sure the database schema exists (creates the `documents` table
    // and its pgvector index — see examples/migrations)
    sqlx::migrate!("./examples/migrations").run(&pool).await?;

    // Build the test documents. Their `definitions` are embedded below via
    // the real OpenAI embedding model (a network call, not mocked).
    let words = vec![
        WordDefinition {
            id: "doc0".to_string(),
            word: "flurbo".to_string(),
            definitions: vec![
                "1. *flurbo* (name): A flurbo is a green alien that lives on cold planets.".to_string(),
                "2. *flurbo* (name): A fictional digital currency that originated in the animated series Rick and Morty.".to_string()
            ]
        },
        WordDefinition {
            id: "doc1".to_string(),
            word: "glarb-glarb".to_string(),
            definitions: vec![
                "1. *glarb-glarb* (noun): A glarb-glarb is a ancient tool used by the ancestors of the inhabitants of planet Jiro to farm the land.".to_string(),
                "2. *glarb-glarb* (noun): A fictional creature found in the distant, swampy marshlands of the planet Glibbo in the Andromeda galaxy.".to_string()
            ]
        },
        WordDefinition {
            id: "doc2".to_string(),
            word: "linglingdong".to_string(),
            definitions: vec![
                "1. *linglingdong* (noun): A term used by inhabitants of the far side of the moon to describe humans.".to_string(),
                "2. *linglingdong* (noun): A rare, mystical instrument crafted by the ancient monks of the Nebulon Mountain Ranges on the planet Quarm.".to_string()
            ]
        }];

    // Compute one embedding per definition for every document.
    let documents = EmbeddingsBuilder::new(model.clone())
        .documents(words)
        .unwrap()
        .build()
        .await
        .expect("Failed to create embeddings");

    // Delete existing rows so the example starts from a clean table
    // (optional, and not recommended for production data).
    sqlx::query("TRUNCATE documents").execute(&pool).await?;

    // init vector store and persist the embedded documents
    let vector_store = PostgresVectorStore::with_defaults(model, pool);
    vector_store.insert_documents(documents).await?;

    // run a similarity search for the 2 closest documents
    let query = "What does \"glarb-glarb\" mean?";

    let results = vector_store.top_n::<WordDefinition>(query, 2).await?;

    println!("#{} results for query: {}", results.len(), query);
    for (distance, _id, doc) in results.iter() {
        println!("Result distance {} for word: {}", distance, doc);

        // Expected output (even though glarb-glarb has two definition
        // entries, the index yields one row per document):
        //   Result distance 0.2988549857990437 for word: glarb-glarb
        //   Result distance 0.7072261746390949 for word: linglingdong
    }

    Ok(())
}
Loading
Loading