Manage high-volume, high-velocity data without sacrificing performance.
Single Datastore
Run analytics across multiple workload types with a single purpose-built platform.
Columnar Design
Scale without limits with built-in storage and query performance optimization.
Native SQL
Query InfluxDB directly using standard SQL.
Real-Time Query
Sub-second query response on leading-edge data.
Unlimited Cardinality
Analyze billions of time series data points per second without limits or caps.
Superior Data Compression
Maximize data compression to store more data at a fraction of the cost.
Developers choose InfluxDB
More downloads, more open source users, and a larger community than any other time series database in the world.
1B+
Downloads of InfluxDB via Docker
1M+
Open source instances live today
2,800+
Contributors
5B+
Downloads of InfluxData’s Telegraf
#1
Time series database (source: DB-Engines)
Code in the languages you love
No need to conform to a new language or technology. InfluxDB supports multiple programming and query languages, with client libraries and integrations to make things simple, all powered by a RESTful API.
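Every client library below ultimately talks to InfluxDB over its HTTP API, so a write can also be issued with nothing more than a plain HTTP client. The following is a minimal, illustrative sketch in Python using the requests package, assuming a v2-compatible /api/v2/write endpoint and placeholder environment variables (INFLUX_HOST, INFLUX_ORG, INFLUX_DATABASE, INFLUX_TOKEN); it is not a substitute for the official clients shown afterward.
# Minimal sketch: write one line-protocol record over the raw HTTP API with
# the `requests` package. The /api/v2/write endpoint is the v2-compatible
# write path; the host, org, database, and token values here are placeholders
# read from environment variables.
import os
import requests

host = os.getenv('INFLUX_HOST', 'https://us-east-1-1.aws.cloud2.influxdata.com')
org = os.getenv('INFLUX_ORG')
database = os.getenv('INFLUX_DATABASE')
token = os.getenv('INFLUX_TOKEN')

# One line-protocol record: measurement "home", tag "room", three fields.
record = "home,room=Living\\ Room temp=22.2,hum=36.4,co=17i"

response = requests.post(
    f"{host}/api/v2/write",
    params={"org": org, "bucket": database, "precision": "ns"},
    headers={
        "Authorization": f"Token {token}",
        "Content-Type": "text/plain; charset=utf-8",
    },
    data=record,
)
response.raise_for_status()  # a 204 No Content response indicates a successful write
In practice the client libraries below add conveniences such as typed query results and error handling, so they are generally the easier path; the raw API simply makes clear that nothing proprietary sits in between.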
from influxdb_client_3 import InfluxDBClient3
import pandas
import os

database = os.getenv('INFLUX_DATABASE')
token = os.getenv('INFLUX_TOKEN')
host = "https://us-east-1-1.aws.cloud2.influxdata.com"

def querySQL():
    client = InfluxDBClient3(host, database=database, token=token)
    table = client.query(
        '''SELECT
               room,
               DATE_BIN(INTERVAL '1 day', time) AS _time,
               AVG(temp) AS temp,
               AVG(hum) AS hum,
               AVG(co) AS co
           FROM home
           WHERE time >= now() - INTERVAL '90 days'
           GROUP BY room, _time
           ORDER BY _time'''
    )
    print(table.to_pandas().to_markdown())
    client.close()

querySQL()
from influxdb_client_3 import InfluxDBClient3
import os

database = os.getenv('INFLUX_DATABASE')
token = os.getenv('INFLUX_TOKEN')
host = "https://us-east-1-1.aws.cloud2.influxdata.com"

def write_line_protocol():
    client = InfluxDBClient3(host, database=database, token=token)
    record = "home,room=Living\\ Room temp=22.2,hum=36.4,co=17i"
    print("Writing record:", record)
    client.write(record)
    client.close()

write_line_protocol()
import ArgumentParser
import Foundation
import InfluxDBSwift

@main
struct QueryCpuData: AsyncParsableCommand {
    @Option(name: .shortAndLong, help: "The name or id of the bucket destination.")
    private var bucket: String

    @Option(name: .shortAndLong, help: "The name or id of the organization destination.")
    private var org: String

    @Option(name: .shortAndLong, help: "Authentication token.")
    private var token: String

    @Option(name: .shortAndLong, help: "HTTP address of InfluxDB.")
    private var url: String
}

extension QueryCpuData {
    mutating func run() async throws {
        //
        // Initialize Client with default Bucket and Organization
        //
        let client = InfluxDBClient(
            url: url,
            token: token,
            options: InfluxDBClient.InfluxDBOptions(bucket: bucket, org: org))

        // Flux query
        let query = """
                    from(bucket: "\(self.bucket)")
                        |> range(start: -10m)
                        |> filter(fn: (r) => r["_measurement"] == "cpu")
                        |> filter(fn: (r) => r["cpu"] == "cpu-total")
                        |> filter(fn: (r) => r["_field"] == "usage_user" or r["_field"] == "usage_system")
                        |> last()
                    """

        print("\nQuery to execute:\n\(query)\n")

        let response = try await client.queryAPI.queryRaw(query: query)
        let csv = String(decoding: response, as: UTF8.self)
        print("InfluxDB response: \(csv)")

        client.close()
    }
}
import ArgumentParser
import Foundation
import InfluxDBSwift
import InfluxDBSwiftApis

@main
struct WriteData: AsyncParsableCommand {
    @Option(name: .shortAndLong, help: "The name or id of the bucket destination.")
    private var bucket: String

    @Option(name: .shortAndLong, help: "The name or id of the organization destination.")
    private var org: String

    @Option(name: .shortAndLong, help: "Authentication token.")
    private var token: String

    @Option(name: .shortAndLong, help: "HTTP address of InfluxDB.")
    private var url: String
}

extension WriteData {
    mutating func run() async throws {
        //
        // Initialize Client with default Bucket and Organization
        //
        let client = InfluxDBClient(
            url: url,
            token: token,
            options: InfluxDBClient.InfluxDBOptions(bucket: bucket, org: org))

        //
        // Record defined as Data Point
        //
        let recordPoint = InfluxDBClient
            .Point("demo")
            .addTag(key: "type", value: "point")
            .addField(key: "value", value: .int(2))

        //
        // Record defined as Data Point with Timestamp
        //
        let recordPointDate = InfluxDBClient
            .Point("demo")
            .addTag(key: "type", value: "point-timestamp")
            .addField(key: "value", value: .int(2))
            .time(time: .date(Date()))

        try await client.makeWriteAPI().write(points: [recordPoint, recordPointDate])
        print("Written data:\n\n\([recordPoint, recordPointDate].map { "\t- \($0)" }.joined(separator: "\n"))")
        print("\nSuccess!")

        client.close()
    }
}
import {InfluxDBClient} from '@influxdata/influxdb3-client'
import {tableFromArrays} from 'apache-arrow';

const database = process.env.INFLUX_DATABASE;
const token = process.env.INFLUX_TOKEN;
const host = "https://us-east-1-1.aws.cloud2.influxdata.com";

async function main() {
    const client = new InfluxDBClient({host, token})
    const query = `
        SELECT
            room,
            DATE_BIN(INTERVAL '1 day', time) AS _time,
            AVG(temp) AS temp,
            AVG(hum) AS hum,
            AVG(co) AS co
        FROM home
        WHERE time >= now() - INTERVAL '90 days'
        GROUP BY room, _time
        ORDER BY _time
    `
    const result = await client.query(query, database)
    const data = {room: [], day: [], temp: []}
    for await (const row of result) {
        data.day.push(new Date(row._time).toISOString())
        data.room.push(row.room)
        data.temp.push(row.temp)
    }
    console.table([...tableFromArrays(data)])
    client.close()
}

main()
import {InfluxDBClient} from '@influxdata/influxdb3-client'

const database = process.env.INFLUX_DATABASE;
const token = process.env.INFLUX_TOKEN;
const host = "https://us-east-1-1.aws.cloud2.influxdata.com";

async function main() {
    const client = new InfluxDBClient({host, token})
    const record = "home,room=Living\\ Room temp=22.2,hum=36.4,co=17i"
    await client.write(record, database)
    client.close()
}

main()
package com.influxdb3.examples;

import com.influxdb.v3.client.InfluxDBClient;

import java.util.stream.Stream;

public final class Query {
    private Query() {
        //not called
    }

    /**
     * @throws Exception if the connection or query fails
     */
    public static void main(final String[] args) throws Exception {
        final String hostUrl = "https://us-east-1-1.aws.cloud2.influxdata.com";
        final char[] authToken = (System.getenv("INFLUX_TOKEN")).toCharArray();
        final String database = System.getenv("INFLUX_DATABASE");

        try (InfluxDBClient client = InfluxDBClient.getInstance(hostUrl, authToken, database)) {
            String sql = """
                    SELECT
                      room,
                      DATE_BIN(INTERVAL '1 day', time) AS _time,
                      AVG(temp) AS temp, AVG(hum) AS hum, AVG(co) AS co
                    FROM home
                    WHERE time >= now() - INTERVAL '90 days'
                    GROUP BY room, _time
                    ORDER BY _time""";

            String layoutHeading = "| %-16s | %-12s | %-6s |%n";
            System.out.printf("--------------------------------------------------------%n");
            System.out.printf(layoutHeading, "day", "room", "temp");
            System.out.printf("--------------------------------------------------------%n");

            String layout = "| %-16s | %-12s | %.2f |%n";
            // Stream the query results and print one formatted row per record.
            // Column order follows the SELECT list: room, _time, temp, hum, co.
            try (Stream<Object[]> stream = client.query(sql)) {
                stream.forEach(row -> System.out.printf(layout, row[1], row[0], row[2]));
            }
        }
    }
}
package com.influxdb3.examples;

import com.influxdb.v3.client.InfluxDBClient;

public final class Write {
    public static void main(final String[] args) throws Exception {
        final String hostUrl = "https://us-east-1-1.aws.cloud2.influxdata.com";
        final char[] authToken = (System.getenv("INFLUX_TOKEN")).toCharArray();
        final String database = System.getenv("INFLUX_DATABASE");

        try (InfluxDBClient client = InfluxDBClient.getInstance(hostUrl, authToken, database)) {
            String record = "home,room=Living\\ Room temp=22.2,hum=36.4,co=17i";
            System.out.printf("Write record: %s%n", record);
            client.writeRecord(record);
        }
    }
}
InfluxDB2::Client.use('https://localhost:8086', 'my-token', org: 'my-org') do |client|
  result = client
    .create_query_api
    .query_raw(query: 'from(bucket:"my-bucket") |> range(start: 1970-01-01) |> last()')
  puts result
end
InfluxDB2::Client.use('https://localhost:8086', 'my-token',
                      bucket: 'my-bucket',
                      org: 'my-org',
                      precision: InfluxDB2::WritePrecision::NANOSECOND) do |client|
  write_api = client.create_write_api
  write_api.write(data: 'h2o,location=west value=33i 15')
end
package example

import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.stream.scaladsl.Sink
import com.influxdb.client.scala.InfluxDBClientScalaFactory
import com.influxdb.query.FluxRecord

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object InfluxDB2ScalaExample {

  implicit val system: ActorSystem = ActorSystem("it-tests")

  def main(args: Array[String]): Unit = {

    val influxDBClient = InfluxDBClientScalaFactory
      .create("http://localhost:8086", "my-token".toCharArray, "my-org")

    val fluxQuery = ("from(bucket: \"my-bucket\")\n"
      + " |> range(start: -1d)"
      + " |> filter(fn: (r) => (r[\"_measurement\"] == \"cpu\" and r[\"_field\"] == \"usage_system\"))")

    // Result is returned as a stream
    val results = influxDBClient.getQueryScalaApi().query(fluxQuery)

    // Example of additional result stream processing on client side
    val sink = results
      // filter on client side using `filter` built-in operator
      .filter(it => "cpu0" == it.getValueByKey("cpu"))
      // take first 20 records
      .take(20)
      // print results
      .runWith(Sink.foreach[FluxRecord](it => println(s"Measurement: ${it.getMeasurement}, value: ${it.getValue}")))

    // wait to finish
    Await.result(sink, Duration.Inf)

    influxDBClient.close()
    system.terminate()
  }
}
using System;
using System.Threading.Tasks;
using InfluxDB3.Client;
using InfluxDB3.Client.Query;

namespace InfluxDBv3;

public class Query
{
    static async Task QuerySQL()
    {
        const string hostUrl = "https://us-east-1-1.aws.cloud2.influxdata.com";
        string? database = System.Environment.GetEnvironmentVariable("INFLUX_DATABASE");
        string? authToken = System.Environment.GetEnvironmentVariable("INFLUX_TOKEN");

        using var client = new InfluxDBClient(hostUrl, authToken: authToken, database: database);

        const string sql = @"
            SELECT
                room,
                DATE_BIN(INTERVAL '1 day', time) AS _time,
                AVG(temp) AS temp,
                AVG(hum) AS hum,
                AVG(co) AS co
            FROM home
            WHERE time >= now() - INTERVAL '90 days'
            GROUP BY room, _time
            ORDER BY _time
        ";

        Console.WriteLine("{0,-30}{1,-15}{2,-15}", "day", "room", "temp");
        await foreach (var row in client.Query(query: sql))
        {
            Console.WriteLine("{0,-30}{1,-15}{2,-15}", row[1], row[0], row[2]);
        }
        Console.WriteLine();
    }
}
using System;
using System.Threading.Tasks;
using InfluxDB3.Client;
using InfluxDB3.Client.Query;

namespace InfluxDBv3;

public class Write
{
    public static async Task WriteLineProtocol()
    {
        const string hostUrl = "https://us-east-1-1.aws.cloud2.influxdata.com";
        string? database = System.Environment.GetEnvironmentVariable("INFLUX_DATABASE");
        string? authToken = System.Environment.GetEnvironmentVariable("INFLUX_TOKEN");

        using var client = new InfluxDBClient(hostUrl, authToken: authToken, database: database);

        const string record = "home,room=Living\\ Room temp=22.2,hum=36.4,co=17i";
        Console.WriteLine("Write record: {0,-30}", record);
        await client.WriteRecordAsync(record: record);
    }
}
"InfluxDB is a high-speed read and write database. The data is written in real-time, you can read it in real-time, and while reading, you can apply your machine learning model. So, in real-time, you can forecast and detect anomalies."
“We decided, from a monitoring perspective, that we are... going with a best of breed setup. So, we put the best tools in place, like InfluxDB for metrics monitoring.”
"I was blown away with how easy it was to install and configure InfluxDB. The clustering was easy. The documentation was great, and the support has been second to none."
Dylan Shorter Engineer III, Software and Product Integration Engineering
“With InfluxDB Cloud Dedicated, the great thing is that we don't need to think about data storage costs or usage anymore because data storage gets way cheaper.”
Ricardo Kissinger Head of IT Infrastructure and IT Security
Today, InfluxDB deployments span multiple industries, with customers running at scale in any environment—public and private cloud, on-premises, and at the edge.
Manufacturing
Analyze production data streams in real time to identify bottlenecks, prevent downtime, and power predictive maintenance for your industrial equipment.
Energy and Utilities
Monitor, optimize, and manage renewable energy and traditional power systems to achieve smart grid balancing and optimization. Forecast and predict maintenance needs for renewable energy sources, such as wind turbines and solar farms.
Telecommunications
Analyze network performance and usage patterns in telecommunications infrastructure. Improve quality of service, optimize infrastructure resources, and reduce operational costs.
Consumer IoT
Industrial IoT
Aerospace
Get real-time insights from satellites, networks, and every stage of the launch operations process. Reduce errors and accelerate time to market in this mission-critical space.