Image search
Image search uses an image as a search input to perform vector similarity search.
Additional information
Configure image search
To use images as search inputs, configure an image vectorizer module for your collection.
For details, see the modules reference page:
Named vectors
v1.24
Any vector-based search on collections with named vectors configured must include a target
vector name in the query. This allows Weaviate to find the correct vector to compare with the query vector.
- Python (v4)
- Python (v3)
- JS/TS (Beta)
- JS/TS
- GraphQL
# Vector search on a collection with named vectors: the query must name
# the target vector so Weaviate knows which vector space to search.
from weaviate.classes.query import MetadataQuery

wine_reviews = client.collections.get("WineReviewNV")

response = wine_reviews.query.near_text(
    query="a sweet German white wine",
    limit=2,
    target_vector="title_country",  # which named vector to compare the query against
    return_metadata=MetadataQuery(distance=True),
)

for found in response.objects:
    print(found.properties)
    print(found.metadata.distance)
# Unfortunately, named vectors are not supported in the v3 API / Python client.
# Please upgrade to the v4 API / Python client to use named vectors.
// Semantic text search on a named-vector collection: `targetVector`
// selects which named vector the query is compared against.
let result;
const wineReviews = client.collections.get('WineReviewNV');

result = await wineReviews.query.nearText(['a sweet German white wine'], {
  limit: 2,
  targetVector: 'title_country',
  returnMetadata: ['distance'],
});

for (const obj of result.objects) {
  console.log(JSON.stringify(obj.properties, null, 2));
  console.log(JSON.stringify(obj.metadata?.distance, null, 2));
}
// GraphQL `Get` query via the v2 client's builder API.
result = await client.graphql
.get()
.withClassName('WineReviewNV')
// Named-vector collections require targetVectors in the nearText clause.
.withNearText({
concepts: ['a sweet German white wine'],
targetVectors: ['title_country'],
})
.withLimit(2)
.withFields('title review_body country')
.do();
console.log(JSON.stringify(result, null, 2));
# Raw GraphQL equivalent: nearText search with an explicit target vector.
{
Get {
WineReviewNV(
limit: 2
nearText: {
# Required on named-vector collections: which vector space to search.
targetVectors: ["title_country"]
concepts: ["a sweet German white wine"]
}
) {
title
review_body
country
}
}
}
By local image path
Use the Near Image operator to execute image search.
If your query image is stored in a file, you can use the client library to search by its filename.
- Python (v4)
- Python (v3)
- JS/TS (Beta)
- JS/TS
# Image search from a local file: passing a `Path` lets the client
# read and base64-encode the image for you.
from pathlib import Path

dog_collection = client.collections.get("Dog")

query_image = Path("./images/search-image.jpg")
response = dog_collection.query.near_image(
    near_image=query_image,
    return_properties=["breed"],
    limit=1,
)

print(response.objects[0])

client.close()
# v3 client: `with_near_image` reads and base64-encodes the file
# by default (encode=True), so a plain filename works here.
query = (
    client.query
    .get("Dog", "breed")
    .with_near_image({"image": "image.jpg"})
    .with_limit(1)
)
response = query.do()
Not available yet. Vote for the feature request. DIY code below.
// Load the query image from disk as a base64 string, then search with it.
const imageBase64 = await fs.promises.readFile('image.jpg', { encoding: 'base64' });

const dogCollection = client.collections.get('Dog');
const result = await dogCollection.query.nearImage(imageBase64, {
  returnProperties: ['breed'],
  limit: 1,
});

console.log(JSON.stringify(result.objects, null, 2));
Not available yet. Vote for the feature request. DIY code below.
// Read the file into a base-64 encoded string
const contentsBase64 = await fs.promises.readFile('image.jpg', { encoding: 'base64' });
// Query based on the base64-encoded image via the v2 GraphQL builder
result = await client.graphql
.get()
.withClassName('Dog')
.withNearImage({
image: contentsBase64,
})
.withLimit(1)
.withFields('breed')
.do();
console.log(JSON.stringify(result, null, 2));
Example response
{
"data": {
"Get": {
"Dog": [
{
"breed": "Corgi"
}
]
}
}
}
client.close()
By the base64 representation
You can search by a base64 representation of an image:
- Python (v4)
- Python (v3)
- JS/TS (Beta)
- JS/TS
# Image similarity search from a string that is already base64-encoded.
base64_string = "SOME_BASE_64_REPRESENTATION"

dog_collection = client.collections.get("Dog")

response = dog_collection.query.near_image(
    near_image=base64_string,
    return_properties=["breed"],
    limit=1,
)

print(response.objects[0])

client.close()
base64_string = "SOME_BASE_64_REPRESENTATION"

# v3 client: encode=False tells the client the image is already
# base64-encoded, so it is passed through untouched.
query = (
    client.query
    .get("Dog", "breed")
    .with_near_image({"image": base64_string}, encode=False)
    .with_limit(1)
)
response = query.do()

print(json.dumps(response, indent=2))
const base64String = 'SOME_BASE_64_REPRESENTATION';

// Search using an image string that is already base64-encoded.
const dogCollection = client.collections.get('Dog');
const result = await dogCollection.query.nearImage(base64String, {
  returnProperties: ['breed'],
  limit: 1,
});

console.log(JSON.stringify(result.objects, null, 2));
const base64String = 'SOME_BASE_64_REPRESENTATION';
// Perform the query with an already base64-encoded image (v2 GraphQL builder)
let result = await client.graphql
.get()
.withClassName('Dog')
.withNearImage({
image: base64String,
})
.withLimit(1)
.withFields('breed')
.do();
console.log(JSON.stringify(result, null, 2));
Example response
{
"data": {
"Get": {
"Dog": [
{
"breed": "Corgi"
}
]
}
}
}
client.close()
Create a base64 representation of an online image.
You can create a base64 representation of an online image, and use it as input for similarity search as shown above.
- Python
- JS/TS
import base64, requests


def url_to_base64(url):
    """Download the image at *url* and return it as a base64 string."""
    raw_bytes = requests.get(url).content
    return base64.b64encode(raw_bytes).decode("utf-8")


base64_img = url_to_base64("https://upload.wikimedia.org/wikipedia/commons/thumb/1/14/Deutsches_Museum_Portrait_4.jpg/500px-Deutsches_Museum_Portrait_4.jpg")

client.close()
import { readFileSync } from 'fs'

// Download an image and return it as a base64-encoded string.
// NOTE: the original used `response.buffer()`, which only exists on
// node-fetch v2 (and is deprecated there); it is not part of the native
// fetch() Response. `arrayBuffer()` works with both native fetch and node-fetch.
const urlToBase64 = async (imageUrl) => {
  const response = await fetch(imageUrl);
  const content = await response.arrayBuffer();
  return Buffer.from(content).toString('base64');
}

urlToBase64('https://upload.wikimedia.org/wikipedia/commons/thumb/1/14/Deutsches_Museum_Portrait_4.jpg/500px-Deutsches_Museum_Portrait_4.jpg')
.then(base64 => { console.log(base64) });
Combination with other operators
A Near Image
search can be combined with any other operators (like filter, limit, etc.), just as other similarity search operators.
See the similarity search
page for more details.
Related pages
Questions and feedback
If you have any questions or feedback, let us know in our user forum.