Files
vc-kirby-app/app/src/main/java/com/logitech/vc/kirbytest/Database.kt
2024-04-12 12:13:02 +02:00

256 lines
9.0 KiB
Kotlin

package com.logitech.vc.kirbytest
import android.content.ContentValues
import android.content.Context
import android.database.sqlite.SQLiteDatabase
import android.database.sqlite.SQLiteOpenHelper
import android.net.Uri
import android.provider.BaseColumns
import android.util.Log
import com.google.gson.Gson
import com.google.gson.JsonSyntaxException
import com.google.gson.reflect.TypeToken
import java.io.File
import java.time.Instant
import java.time.ZoneId
import java.time.format.DateTimeFormatter
import java.util.SortedMap
object LoggerContract {
    // Table contents are grouped together in an anonymous object.
    object LogEntry : BaseColumns {
        const val TABLE_NAME = "measurements"
        const val COLUMN_NAME_TS = "ts"
        const val COLUMN_NAME_PAYLOAD = "payload"
    }

    // Schema: implicit rowid PK, ISO-8601 timestamp string, JSON payload string.
    private const val SQL_CREATE_ENTRIES =
        "CREATE TABLE ${LogEntry.TABLE_NAME} (" +
            "${BaseColumns._ID} INTEGER PRIMARY KEY," +
            "${LogEntry.COLUMN_NAME_TS} TEXT," +
            "${LogEntry.COLUMN_NAME_PAYLOAD} TEXT)"

    private const val SQL_DELETE_ENTRIES = "DROP TABLE IF EXISTS ${LogEntry.TABLE_NAME}"

    /**
     * Open-helper for the measurement log database.
     *
     * Upgrade AND downgrade policy is destructive: the data is a disposable
     * cache, so any version change drops the table and recreates it.
     */
    class LoggerDbHelper(context: Context) :
        SQLiteOpenHelper(context, DATABASE_NAME, null, DATABASE_VERSION) {

        override fun onCreate(db: SQLiteDatabase) {
            db.execSQL(SQL_CREATE_ENTRIES)
        }

        override fun onUpgrade(db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
            // This database is only a cache for online data, so its upgrade policy is
            // to simply discard the data and start over.
            db.execSQL(SQL_DELETE_ENTRIES)
            onCreate(db)
        }

        override fun onDowngrade(db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
            onUpgrade(db, oldVersion, newVersion)
        }

        companion object {
            // If you change the database schema, you must increment the database version.
            const val DATABASE_VERSION = 2
            const val DATABASE_NAME = "Logger.db"
        }
    }

    /**
     * Facade over the measurement log: appends JSON payloads and exports the
     * accumulated rows as CSV (single file, or one file per BLE device).
     */
    class LoggerDb(val context: Context) {
        private val dbHelper = LoggerDbHelper(context)
        private val dbWrite = dbHelper.writableDatabase
        // FIX: was dbHelper.writableDatabase — reads should go through the readable handle.
        private val dbRead = dbHelper.readableDatabase
        private val tag = "LoggerDb"

        /**
         * Serializes [payload] to JSON and inserts it with the current UTC
         * (ISO-8601) timestamp.
         *
         * @return the new row id, or -1 if the insert failed.
         */
        fun writeLog(payload: Any): Long? {
            val values = ContentValues().apply {
                put(LogEntry.COLUMN_NAME_TS, Instant.now().toString())
                put(LogEntry.COLUMN_NAME_PAYLOAD, Gson().toJson(payload))
            }
            // dbWrite is non-nullable; the original's `dbWrite?.insert` was redundant.
            return dbWrite.insert(LogEntry.TABLE_NAME, null, values)
        }

        /**
         * Returns a file:// Uri for the single-file export target in app-private
         * storage, creating the file if it does not exist yet.
         */
        fun getExportFileUri(): Uri? {
            val file = File(context.filesDir, "export.csv")
            if (!file.exists()) {
                file.createNewFile()
            }
            // World-readable so the file:// Uri is usable outside this process.
            file.setReadable(true, false)
            return Uri.fromFile(file)
        }

        /** Exports all rows to the default export.csv file (no-op if the Uri is unavailable). */
        fun exportToCsv() {
            getExportFileUri()?.let(::exportToUri)
        }

        /**
         * Streams every logged row to [uri] as CSV, ordered by insertion.
         *
         * Columns are: timestamp, local_time, then the payload keys of the first
         * decodable row in sorted order. NOTE(review): rows whose payloads carry a
         * different key set will be misaligned with that header — pre-existing
         * limitation, unchanged here.
         */
        fun exportToUri(uri: Uri) {
            val projection =
                arrayOf(BaseColumns._ID, LogEntry.COLUMN_NAME_PAYLOAD, LogEntry.COLUMN_NAME_TS)
            val cursor = dbRead.query(
                LogEntry.TABLE_NAME,       // The table to query
                projection,                // The array of columns to return (pass null to get all)
                null,                      // The columns for the WHERE clause
                null,                      // The values for the WHERE clause
                null,                      // don't group the rows
                null,                      // don't filter by row groups
                "${BaseColumns._ID} ASC"   // The sort order
            )
            try {
                val sep = ","
                val newLine = '\n'
                var headerWritten = false
                // FIX: `use` guarantees the cursor is closed even if writing throws
                // (the original only closed it after the catch block).
                cursor.use { c ->
                    context.contentResolver.openOutputStream(uri)?.use { out ->
                        while (c.moveToNext()) {
                            val ts = c.getString(c.getColumnIndexOrThrow(LogEntry.COLUMN_NAME_TS))
                            val storedField =
                                c.getString(c.getColumnIndexOrThrow(LogEntry.COLUMN_NAME_PAYLOAD))
                            try {
                                // FIX: parse inside the per-row try so one malformed JSON row is
                                // skipped instead of aborting the whole export (and matching
                                // exportToMultipleCSV's structure).
                                val payload = parsePayload(storedField)
                                if (!headerWritten) {
                                    val headerRow =
                                        "timestamp" + sep + "local_time" + sep +
                                            payload.keys.joinToString(sep) + newLine
                                    out.write(headerRow.toByteArray())
                                    headerWritten = true
                                }
                                val row = ts + sep + convertIsoToLocalTime(ts) + sep +
                                    payload.values.joinToString(sep) + newLine
                                out.write(row.toByteArray())
                            } catch (exception: JsonSyntaxException) {
                                Log.e(tag, exception.toString())
                            }
                        }
                    }
                }
            } catch (e: Exception) {
                // Best-effort export: log instead of crashing, but keep it on the app log
                // rather than the original's printStackTrace().
                Log.e(tag, "CSV export failed", e)
            }
        }

        /**
         * Exports rows grouped by device: one CSV per distinct "bleAddress" payload
         * field (rows without it go to "unknown"), written to app-private storage as
         * kirby_export_<address-without-colons>.csv.
         */
        fun exportToMultipleCSV() {
            val projection =
                arrayOf(BaseColumns._ID, LogEntry.COLUMN_NAME_PAYLOAD, LogEntry.COLUMN_NAME_TS)
            val cursor = dbRead.query(
                LogEntry.TABLE_NAME,       // The table to query
                projection,                // The array of columns to return (pass null to get all)
                null,                      // The columns for the WHERE clause
                null,                      // The values for the WHERE clause
                null,                      // don't group the rows
                null,                      // don't filter by row groups
                "${BaseColumns._ID} ASC"   // The sort order
            )
            val files = HashMap<String, File>()
            try {
                val sep = ","
                val newLine = '\n'
                cursor.use { c ->
                    while (c.moveToNext()) {
                        val ts = c.getString(c.getColumnIndexOrThrow(LogEntry.COLUMN_NAME_TS))
                        val storedField =
                            c.getString(c.getColumnIndexOrThrow(LogEntry.COLUMN_NAME_PAYLOAD))
                        try {
                            val payload = parsePayload(storedField)
                            val deviceId = payload.getOrDefault("bleAddress", "unknown") as String
                            // FIX: getOrPut replaces the original's getOrElse + manual map store.
                            // First row for a device also writes that file's header line.
                            val f = files.getOrPut(deviceId) {
                                val fileName = "kirby_export_${deviceId.replace(":", "")}.csv"
                                val file = File(context.filesDir, fileName)
                                if (!file.exists()) {
                                    file.createNewFile()
                                }
                                val headerRow = "timestamp" + sep + "local_time" + sep +
                                    payload.keys.joinToString(sep) + newLine
                                file.writeText(headerRow)
                                Log.i(tag, file.absolutePath)
                                file.setReadable(true, false)
                                file
                            }
                            val row = ts + sep + convertIsoToLocalTime(ts) + sep +
                                payload.values.joinToString(sep) + newLine
                            f.appendText(row)
                        } catch (exception: JsonSyntaxException) {
                            // Skip undecodable rows; keep exporting the rest.
                            Log.e(tag, exception.toString())
                        }
                    }
                }
            } catch (e: Exception) {
                Log.e(tag, "multi-file CSV export failed", e)
            }
        }

        /** Deletes all logged rows and reclaims the file space. */
        fun reset() {
            dbWrite.execSQL("DELETE FROM ${LogEntry.TABLE_NAME}")
            dbWrite.execSQL("VACUUM")
        }

        /** Closes the underlying database; the instance must not be used afterwards. */
        fun close() {
            dbHelper.close()
        }
    }
}
/**
 * Decodes a JSON object string into a field-name -> value map, sorted by key
 * so downstream CSV columns come out in a stable order.
 *
 * NOTE(review): Gson with an `Any` value type maps JSON numbers to Double —
 * callers should not expect integer types back.
 *
 * @throws JsonSyntaxException if [payload] is not valid JSON.
 */
fun parsePayload(payload: String): SortedMap<String, Any> {
    val mapType = object : TypeToken<HashMap<String, Any>>() {}.type
    val decoded: HashMap<String, Any> = Gson().fromJson(payload, mapType)
    return decoded.toSortedMap()
}
/**
 * Re-renders an ISO-8601 instant (e.g. "2024-04-12T10:13:02Z") as
 * "yyyy-MM-dd HH:mm:ss" in the device's current default time zone.
 *
 * @throws java.time.format.DateTimeParseException if [isoDateTime] is not a valid instant.
 */
fun convertIsoToLocalTime(isoDateTime: String): String =
    Instant.parse(isoDateTime)
        .atZone(ZoneId.systemDefault())
        .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))