!ps
PID TTY TIME CMD
2048646 pts/18 00:00:00 ps
Inside WSL
systemd─┬─2*[agetty]
├─cron
├─dbus-daemon
├─grafana───20*[{grafana}]
├─influxd───53*[{influxd}]
├─init-systemd(Ub─┬─SessionLeader───Relay(400)─┬─bash───tmux: client
│ │ ├─python───22*[{python}]
│ │ └─tmux: server─┬─bash───+
│ │ ├─6*[bash+
│ │ ├─bash───+
│ │ └─bash───+
│ ├─SessionLeader───Relay(707)───bash
│ ├─SessionLeader───Relay(8620)───bash───tmux: client
│ ├─SessionLeader───Relay(1997513)───bash───tmux: clien+
│ ├─init───{init}
│ ├─login───bash
│ └─{init-systemd(Ub}
├─networkd-dispat
├─packagekitd───2*[{packagekitd}]
├─polkitd───2*[{polkitd}]
├─rsyslogd───3*[{rsyslogd}]
├─smtpd───6*[smtpd]
├─snapd───18*[{snapd}]
├─16*[snapfuse]
├─sshd
├─subiquity-serve───python3.10─┬─python3
│ └─5*[{python3.10}]
├─systemd───(sd-pam)
├─systemd-journal
├─systemd-logind
├─systemd-resolve
├─systemd-udevd───2*[systemd-udevd]
└─unattended-upgr───{unattended-upgr}
First step, must do: create a config.ini file with your InfluxDB connection details:
[APP]
INFLUX_URL = <INFLUX_URL>
INFLUX_TOKEN = <INFLUX_TOKEN>
INFLUX_ORG = <INFLUX_ORG_ID>
INFLUX_BUCKET = iot_center
INFLUX_BUCKET_AUTH = iot_center_devices
# Import the dependencies.
import configparser
from datetime import datetime
from uuid import uuid4
# Import client library classes.
from influxdb_client import Authorization, InfluxDBClient, Permission, PermissionResource, Point, WriteOptions
from influxdb_client.client.write_api import SYNCHRONOUS
import influxdb_client
# Get the configuration key-value pairs.
config = configparser.ConfigParser()
config.read('config.ini')
['config.ini']
# List the existing authorizations and print their tokens
# (client is the InfluxDBClient instantiated from config.ini below).
authorization_api = client.authorizations_api()
auth_list = authorization_api.find_authorizations()
for auth in auth_list:
    print(auth.token)
p_5lZramPcqYC4EPNRVpRhAFs4RvWc8cPXtt9exsIPH8ybJP3lKlf5KMKjLf2ueHFlcmo45ZBxeeU3ZtKQ-LDQ==
CeAAU2CprLUJoammpG9G7JuGTZ2qosK_shCBdXZvnIai46GFf7VB8jfd8XV6yeAeRjAc8nwAYkyhYL5BhDji2g==
U1HdRByFCwZJhoNaKqVg9tja0zHw23TO7-pUxSSFhzDxt740WZ1DRUZoRWx6Brs5lDRBJqU-bB11kkVGBxtNfg==
IPPdYDQH2s43p7jCro1SI_oJ8aGhFjkDEpgMSFuGs-vXuKnOMy1mvQzVLBoth2tKmvJGpy6ePo5c93sO8xTUkg==
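The Authorization, Permission, and PermissionResource classes imported above are not used in this excerpt. A minimal sketch of creating a read/write token scoped to a single bucket, assuming placeholder <ORG_ID> and <BUCKET_ID> values (not taken from this notebook):

# Sketch only: the IDs below are placeholders, not values from this setup.
resource = PermissionResource(org_id="<ORG_ID>", id="<BUCKET_ID>", type="buckets")
read = Permission(action="read", resource=resource)
write = Permission(action="write", resource=resource)
new_auth = Authorization(org_id="<ORG_ID>",
                         permissions=[read, write],
                         description="iot_center device token")
created = authorization_api.create_authorization(authorization=new_auth)
print(created.token)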
Existing buckets in the organization:

_tasks
sample-bucket
_monitoring
energy_data
power_data
test
Weather
# Instantiate the client from config.ini and list the existing buckets.
client = InfluxDBClient(url=config.get('APP', 'INFLUX_URL'),
                        token=config.get('APP', 'INFLUX_TOKEN'),
                        org=config.get('APP', 'INFLUX_ORG'))
buckets_api = client.buckets_api()
bucket_list = buckets_api.find_buckets()
bucket_name = 'solar_forecast'
if bucket_name not in [bucket.name for bucket in bucket_list.buckets]:
    buckets_api.create_bucket(bucket_name=bucket_name)
else:
    print('bucket already exists')
bucket already exists
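create_bucket also accepts retention rules. A minimal sketch of creating a bucket whose data expires after 30 days; the bucket name and retention period here are illustrative, not from this notebook:

from influxdb_client import BucketRetentionRules

# Sketch only: illustrative bucket name and 30-day retention.
retention = BucketRetentionRules(type="expire", every_seconds=30 * 24 * 60 * 60)
buckets_api.create_bucket(bucket_name="solar_forecast_30d", retention_rules=retention)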
from influxdb_client.rest import ApiException

# Specify the name of the bucket you want to delete.
bucket_name = "your_bucket_name"
try:
    # delete_bucket() takes a Bucket object (or its ID), so look the bucket up by name first.
    bucket = client.buckets_api().find_bucket_by_name(bucket_name)
    client.buckets_api().delete_bucket(bucket)
    print(f"Bucket '{bucket_name}' deleted successfully.")
except ApiException as e:
    print(f"Failed to delete bucket '{bucket_name}': {e}")
query_api = client.query_api()

query = '''from(bucket: "sample-bucket")
  |> range(start: 2024-02-28T03:50:00.000Z, stop: 2024-02-28T04:50:00.000Z)
  |> filter(fn: (r) => r["_measurement"] == "airSensors")
  |> filter(fn: (r) => r["_field"] == "humidity")
  |> filter(fn: (r) => r["sensor_id"] == "TLM0101" or r["sensor_id"] == "TLM0102")
  |> aggregateWindow(every: 1s, fn: mean, createEmpty: false)
  |> yield(name: "mean")
  |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")'''
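The tabular result below looks like a pandas DataFrame, so the query was presumably executed with query_data_frame; a minimal sketch under that assumption (the variable name df is mine):

# Assumption: the table below is the DataFrame returned by query_data_frame().
df = query_api.query_data_frame(query)
df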
|     | result | table | _start | _stop | _time | _value | _field | _measurement | sensor_id |
|-----|--------|-------|--------|-------|-------|--------|--------|--------------|-----------|
| 0 | mean | 0 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 03:50:05+00:00 | 34.889344 | humidity | airSensors | TLM0101 |
| 1 | mean | 0 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 03:50:15+00:00 | 34.919323 | humidity | airSensors | TLM0101 |
| 2 | mean | 0 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 03:50:25+00:00 | 34.929411 | humidity | airSensors | TLM0101 |
| 3 | mean | 0 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 03:50:35+00:00 | 34.920287 | humidity | airSensors | TLM0101 |
| 4 | mean | 0 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 03:50:45+00:00 | 34.969323 | humidity | airSensors | TLM0101 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 705 | mean | 1 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 04:48:25+00:00 | 35.477672 | humidity | airSensors | TLM0102 |
| 706 | mean | 1 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 04:48:35+00:00 | 35.434914 | humidity | airSensors | TLM0102 |
| 707 | mean | 1 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 04:48:45+00:00 | 35.466550 | humidity | airSensors | TLM0102 |
| 708 | mean | 1 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 04:48:55+00:00 | 35.433109 | humidity | airSensors | TLM0102 |
| 709 | mean | 1 | 2024-02-28 03:50:00+00:00 | 2024-02-28 04:50:00+00:00 | 2024-02-28 04:49:05+00:00 | 35.482699 | humidity | airSensors | TLM0102 |

710 rows × 9 columns
'2024-03-01T05:20:09Z'
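The RFC3339 string above is the value of a variable date that the dictionary below uses as its timestamp; the cell that produced it is not in this excerpt. A minimal sketch of one way to build it (an assumption, not the author's code):

# Assumption: date holds the current UTC time formatted as an RFC3339 string.
from datetime import timezone
date = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
date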
dictionary = {"measurement": "h2o_feet",
              "tags": {"location": "us-west"},
              "fields": {"level": 150},
              "time": date}
dictionary
{'measurement': 'h2o_feet',
'tags': {'location': 'us-west'},
'fields': {'level': 150},
'time': '2024-03-01T05:20:09Z'}
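Point, WriteOptions, and SYNCHRONOUS are imported above but not used in this excerpt; presumably the dictionary is written as a point through a write API. A minimal sketch, assuming the target bucket is sample-bucket (any bucket you own works):

# Sketch: write the dictionary-style point synchronously.
# The bucket name "sample-bucket" is an assumption; substitute your own bucket.
write_api = client.write_api(write_options=SYNCHRONOUS)
write_api.write(bucket="sample-bucket", record=dictionary)

# The same point expressed with the Point builder:
point = (Point("h2o_feet")
         .tag("location", "us-west")
         .field("level", 150)
         .time(date))
write_api.write(bucket="sample-bucket", record=point)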