cleanup: fix database name and change to chunk count
drew-harris authored and densumesh committed Jun 28, 2024
1 parent 0a0f2c2 commit a0c4f7b
Showing 3 changed files with 3 additions and 3 deletions.
server/src/data/models.rs: 1 addition & 1 deletion
@@ -1394,7 +1394,7 @@ pub enum EventType {
     #[display(fmt = "chunk_updated")]
     ChunkUpdated { chunk_id: uuid::Uuid },
     #[display(fmt = "bulk_chunks_deleted")]
-    BulkChunksDeleted { chunk_ids: Vec<uuid::Uuid> },
+    BulkChunksDeleted { message: String },
     #[display(fmt = "dataset_delete_failed")]
     DatasetDeleteFailed { error: String },
     #[display(fmt = "qdrant_index_failed")]
server/src/operators/dataset_operator.rs: 1 addition & 1 deletion
@@ -311,7 +311,7 @@ pub async fn delete_chunks_in_dataset(
         Event::from_details(
             id,
             EventType::BulkChunksDeleted {
-                chunk_ids: chunk_ids.clone(),
+                message: format!("Deleted {} chunks", chunk_ids.len()),
             },
         ),
         clickhouse_client.clone(),
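
Taken together, the first two hunks swap the per-ID payload for a summary string: the event now records how many chunks were deleted rather than every UUID. Below is a minimal, self-contained sketch of the new shape, not the actual Trieve code; it assumes derive_more 0.99 (which uses the same #[display(fmt = ...)] syntax as the diff) and the uuid crate with the v4 feature, and trims the enum to the one variant this commit touches.

// Illustrative only: a cut-down EventType showing the variant changed in this commit.
use derive_more::Display;

#[derive(Debug, Display)]
pub enum EventType {
    #[display(fmt = "bulk_chunks_deleted")]
    // Before: BulkChunksDeleted { chunk_ids: Vec<uuid::Uuid> }
    // After: a human-readable count, so a bulk delete no longer embeds every UUID.
    BulkChunksDeleted { message: String },
}

fn main() {
    // Stand-in for the IDs gathered by delete_chunks_in_dataset.
    let chunk_ids: Vec<uuid::Uuid> = (0..3).map(|_| uuid::Uuid::new_v4()).collect();

    let event = EventType::BulkChunksDeleted {
        message: format!("Deleted {} chunks", chunk_ids.len()),
    };

    // The Display impl yields the event type tag; the field carries the summary.
    println!("{} -> {:?}", event, event);
}

Presumably the motivation is that the event row only needs a count for display, and storing thousands of UUIDs per bulk-delete event would bloat the event_data column.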
server/src/operators/event_operator.rs: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ pub async fn create_event_query(
         .unwrap_or(false)
     {
         client
-            .query("INSERT INTO trieve.dataset_events (id, dataset_id, event_type, event_data, created_at) VALUES (?, ?, ?, ?, now())")
+            .query("INSERT INTO default.dataset_events (id, dataset_id, event_type, event_data, created_at) VALUES (?, ?, ?, ?, now())")
             .bind(event.id)
             .bind(event.dataset_id)
             .bind(event.event_type)
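
The last hunk only corrects the target database in the INSERT, from trieve.dataset_events to default.dataset_events. As a hedged sketch of that write path with the clickhouse crate's query/bind API: the function name, parameter types, and client setup below are assumptions for illustration, not the real create_event_query, which takes an Event and a configured client and likely binds UUID values directly.

// Illustrative sketch: write one row into default.dataset_events via the `clickhouse` crate.
use clickhouse::Client;

pub async fn insert_dataset_event(
    client: &Client,
    id: String,          // the real code probably binds uuid::Uuid values here
    dataset_id: String,
    event_type: String,
    event_data: String,
) -> Result<(), clickhouse::error::Error> {
    client
        // Table is now referenced under the `default` database instead of `trieve`.
        .query(
            "INSERT INTO default.dataset_events \
             (id, dataset_id, event_type, event_data, created_at) \
             VALUES (?, ?, ?, ?, now())",
        )
        .bind(id)
        .bind(dataset_id)
        .bind(event_type)
        .bind(event_data)
        .execute()
        .await
}

A caller would await this with a Client pointed at the same ClickHouse instance, e.g. insert_dataset_event(&client, id, dataset_id, event_type, event_data).await?.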
