Export site measurement data to file
The following code example shows how you can query all the electricity measurements
at a site for a given time period, and store those measurements in a local .csv
file.
In this particular example we store all the energy measurements at the specified site
measured over a 7-day period with a 1-day aggregation level.
- JavaScript
- Python
import fs from 'fs'
import axios from 'axios'
import csvWriter from 'csv-write-stream'
/**
 * Fetches all electricity measurements for a site and appends them to a CSV file.
 * Follows the API's paging links until every page has been written.
 *
 * @param {string} apiKey - API key sent in the `x-api-key` header.
 * @param {string} siteId - Site whose measurements are exported.
 * @param {string} type - Measurement type (e.g. 'energy').
 * @param {string} from - Start date (inclusive), e.g. '2023-10-01'.
 * @param {string} to - End date, e.g. '2023-10-08'.
 * @param {string} aggregation - Aggregation level, e.g. '1d'.
 * @param {string} filename - Path of the CSV file to append to.
 * @returns {Promise<void>} Resolves when every page has been written.
 */
const storeSiteElectricityMeasurements = async (
  apiKey,
  siteId,
  type,
  from,
  to,
  aggregation,
  filename,
) => {
  // Build the initial URL with properly encoded query parameters instead of
  // string concatenation, so special characters in the inputs cannot break it.
  const url = new URL(
    `https://api.sensorfact.nl/v1/sites/${siteId}/electricity/measurements/assets`,
  );
  url.searchParams.set('from', from);
  url.searchParams.set('to', to);
  url.searchParams.set('measurement_type', type);
  url.searchParams.set('aggregation', aggregation);

  let nextUrl = url.toString();
  while (nextUrl) {
    const response = await axios.get(nextUrl, { headers: { 'x-api-key': apiKey } });
    const { data } = response.data;
    await writeMeasurements(type, data, filename);
    // `paging.next` is absent/null on the last page, which ends the loop.
    nextUrl = response.data.paging?.next ?? null;
  }
};

// NOTE: the function must be defined before this call — invoking a `const`
// arrow function above its declaration throws a ReferenceError (temporal
// dead zone), which is what the original example did.
storeSiteElectricityMeasurements(
  '<INSERT_API_KEY>',
  '<INSERT_SITE_ID>',
  'energy',
  '2023-10-01',
  '2023-10-08',
  '1d',
  'energy_data_export.csv',
)
  .then(() => console.log('completed'))
  .catch((err) => console.error('export failed:', err)); // never leave a rejection unhandled
/**
 * Appends one page of asset measurements to a CSV file.
 *
 * @param {string} type - Measurement type, written into every row.
 * @param {Array<object>} assets - Assets as returned by the API; each has
 *   `id`, `unit`, and a `measurements` array of `{ time, value }`.
 * @param {string} filename - Path of the CSV file to append to.
 * @returns {Promise<void>} Resolves once the rows are flushed to disk.
 */
const writeMeasurements = async (type, assets, filename) => {
  // Emit the header row only when the file does not exist yet; subsequent
  // pages append rows without repeating the header.
  const writer = !fs.existsSync(filename)
    ? csvWriter({ headers: ['asset_id', 'type', 'unit', 'timestamp', 'value'] })
    : csvWriter({ sendHeaders: false });
  // Bug fix: the original passed the undefined identifier `file` here,
  // throwing a ReferenceError — the parameter is `filename`.
  const out = fs.createWriteStream(filename, { flags: 'a' });
  writer.pipe(out);

  // Flatten assets into one CSV row per measurement.
  const rows = assets.flatMap((asset) =>
    asset.measurements.map((measurement) => ({
      asset_id: asset.id,
      type,
      unit: asset.unit,
      timestamp: measurement.time,
      value: measurement.value,
    })),
  );

  for (const row of rows) {
    writer.write(row);
  }
  writer.end();

  // Resolve only after the file stream has flushed, so the caller can safely
  // append the next page (the original resolved before the write finished).
  await new Promise((resolve, reject) => {
    out.once('finish', resolve);
    out.once('error', reject);
  });
};
import requests
import csv
# Export all electricity measurements for a site to export.csv.
# The API is paginated: each response may carry a `paging.next` URL that
# points at the following page; we loop until it is absent.

key = "<INSERT_YOUR_API_KEY_HERE>"
site_id = "<INSERT_YOUR_SITE_ID_HERE>"
from_date = "2023-10-01"
to_date = "2023-10-08"
aggregation = "1d"
measurement_type = "energy"

next_url = (
    f"https://api.sensorfact.nl/v1/sites/{site_id}/electricity/measurements/assets"
    f"?from={from_date}&to={to_date}"
    f"&measurement_type={measurement_type}&aggregation={aggregation}"
)

rows = []
while next_url:
    r = requests.get(next_url, headers={"x-api-key": key})
    # Fail fast with a clear HTTP error instead of a confusing KeyError
    # on "data" when e.g. the API key is wrong.
    r.raise_for_status()
    response = r.json()
    for asset in response["data"]:
        print(asset["name"])  # progress indicator
        for measurement in asset["measurements"]:
            rows.append({
                "time": measurement["time"],
                "asset_id": asset["id"],
                "asset_name": asset["name"],
                "unit": asset["unit"],
                "value": measurement["value"],
            })
    # `.get()` avoids a KeyError when `paging` exists without a `next`
    # entry; a missing or null `next` ends the loop.
    next_url = response.get("paging", {}).get("next")

# csv field names
fields = ["time", "asset_id", "asset_name", "unit", "value"]
with open("export.csv", "w", newline="") as file:
    writer = csv.DictWriter(file, fieldnames=fields)
    writer.writeheader()
    writer.writerows(rows)
print("done")