Previous
Query in the app
Pull captured data into your own programs using the Viam data client API. You can run the same SQL and MQL queries available in the app’s query editor from Python or Go code.
Not sure what fields to query? Run SELECT data FROM readings WHERE component_name = 'YOUR-COMPONENT' LIMIT 1 in the query editor first. Switch to table view to see nested fields as dot-notation column headers. Use those paths in your code. See the readings table schema for the full reference.
To get the API key, API key ID, and organization ID that the code below needs:
Find your organization ID in the Viam app by clicking your organization name and selecting Settings.
# Install the Viam Python SDK
pip install viam-sdk
import asyncio
from viam.rpc.dial import DialOptions
from viam.app.viam_client import ViamClient

# Credentials from your organization settings page in the Viam app.
API_KEY = "YOUR-API-KEY"
API_KEY_ID = "YOUR-API-KEY-ID"
ORG_ID = "YOUR-ORGANIZATION-ID"


async def main():
    """Connect to the Viam app and obtain a data client for SQL/MQL queries."""
    # Build dial options from the API key. This is DialOptions.with_api_key —
    # ViamClient has no Options.with_api_key constructor.
    opts = DialOptions.with_api_key(
        api_key=API_KEY,
        api_key_id=API_KEY_ID,
    )
    client = await ViamClient.create_from_dial_options(opts)
    data_client = client.data_client
    # ... your queries here ...
    client.close()


if __name__ == "__main__":
    asyncio.run(main())
# Create a module for the example and fetch the Viam Go SDK (RDK).
mkdir query-data && cd query-data
go mod init query-data
go get go.viam.com/rdk
package main
import (
"context"
"fmt"
"go.viam.com/rdk/app"
"go.viam.com/rdk/logging"
)
func main() {
ctx := context.Background()
logger := logging.NewDebugLogger("query-data")
viamClient, err := app.CreateViamClientWithAPIKey(
ctx, app.Options{}, "YOUR-API-KEY", "YOUR-API-KEY-ID", logger)
if err != nil {
logger.Fatal(err)
}
defer viamClient.Close()
dataClient := viamClient.DataClient()
// ... your queries here ...
}
Use tabular_data_by_sql (Python) or TabularDataBySQL (Go) to run SQL queries. Results come back as a list of rows, one dictionary or map per row.
# Returns a list of dictionaries, one per row
results = await data_client.tabular_data_by_sql(
organization_id=ORG_ID,
sql_query=(
"SELECT time_received, "
" data.readings.temperature AS temperature "
"FROM readings "
"WHERE component_name = 'my-sensor' "
"ORDER BY time_received DESC "
"LIMIT 5"
),
)
for row in results:
print(row)
// Returns a slice of maps, one per row
results, err := dataClient.TabularDataBySQL(ctx, orgID,
"SELECT time_received, "+
"data.readings.temperature AS temperature "+
"FROM readings "+
"WHERE component_name = 'my-sensor' "+
"ORDER BY time_received DESC LIMIT 5")
if err != nil {
logger.Fatal(err)
}
for _, row := range results {
fmt.Printf("%v\n", row)
}
Use tabular_data_by_mql (Python) or TabularDataByMQL (Go) to run MongoDB aggregation (MQL) pipelines. MQL is more powerful than SQL for grouping, computing averages, and reshaping nested data.
# Returns a list of dictionaries from the aggregation result
results = await data_client.tabular_data_by_mql(
organization_id=ORG_ID,
query=[
{"$match": {"component_name": "my-sensor"}},
{"$group": {
"_id": "$component_name",
"avg_temp": {"$avg": "$data.readings.temperature"},
"count": {"$sum": 1},
}},
],
)
for entry in results:
print(entry)
// Returns a slice of maps from the aggregation result
results, err := dataClient.TabularDataByMQL(ctx, orgID,
[]map[string]interface{}{
{"$match": map[string]interface{}{
"component_name": "my-sensor",
}},
{"$group": map[string]interface{}{
"_id": "$component_name",
"avg_temp": map[string]interface{}{"$avg": "$data.readings.temperature"},
"count": map[string]interface{}{"$sum": 1},
}},
}, nil)
if err != nil {
logger.Fatal(err)
}
for _, entry := range results {
fmt.Printf("%v\n", entry)
}
Was this page helpful?
Glad to hear it! If you have any other feedback please let us know:
We're sorry about that. To help us improve, please tell us what we can do better:
Thank you!