some improvements in logging and naming/structuring

This commit is contained in:
Timo Volkmann 2020-12-12 16:34:35 +01:00
parent 1ea6822202
commit 391020ec47
6 changed files with 329 additions and 308 deletions

View File

@ -1,44 +1,44 @@
package core package core
import ( import (
"errors" "errors"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
) )
// dispatcher fans published messages out to all subscribed listeners.
type dispatcher struct {
	listeners map[int16]chan string // subscriber id -> receiving channel
	counter   int16                 // next subscriber id to hand out
}

// NewDispatcher returns an empty, ready-to-use dispatcher.
func NewDispatcher() *dispatcher {
	d := dispatcher{listeners: map[int16]chan string{}}
	return &d
}
func (d *dispatcher) Publish(message string) { func (d *dispatcher) Publish(message string) {
logrus.Debugf("publish to %v listeners:\n%v\n", len(d.listeners)) logrus.Debugf("publish to %v listeners\n", len(d.listeners))
logrus.Debug(message) logrus.Debug(message)
for _, ch := range d.listeners { for _, ch := range d.listeners {
ch <- message ch <- message
} }
} }
func (d *dispatcher) Subscribe() (id int16, receiver <-chan string) { func (d *dispatcher) Subscribe() (id int16, receiver <-chan string) {
key := d.counter key := d.counter
d.counter++ d.counter++
rec := make(chan string) rec := make(chan string)
d.listeners[key] = rec d.listeners[key] = rec
return key, rec return key, rec
} }
func (d *dispatcher) Unsubscribe(id int16) error { func (d *dispatcher) Unsubscribe(id int16) error {
receiver, ok := d.listeners[id] receiver, ok := d.listeners[id]
if !ok { if !ok {
return errors.New("no subscription with id") return errors.New("no subscription with id")
} }
delete(d.listeners, id) delete(d.listeners, id)
close(receiver) close(receiver)
return nil return nil
} }

View File

@ -1,174 +1,164 @@
package core package core
import ( import (
"errors" "errors"
"git.timovolkmann.de/gyrogpsc/ublox" "git.timovolkmann.de/gyrogpsc/ublox"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/tidwall/gjson" "github.com/tidwall/gjson"
"math" "math"
"time" "time"
) )
// sourceId identifies which input channel produced a sensorData sample.
type sourceId string

const (
	SOURCE_TCP    sourceId = "SOURCE_TCP"    // phone sensor data arriving over TCP
	SOURCE_SERIAL sourceId = "SOURCE_SERIAL" // u-blox receiver on the serial port
)

// sensorData is one position/orientation sample from a single source.
type sensorData struct {
	itow        uint32     // GPS time-of-week in ms (u-blox epoch marker); 0 = unknown
	source      sourceId   // origin of this sample
	ServerTime  time.Time  // wall-clock time the server created the sample
	Timestamp   int64      // sensor-reported time, Unix nanoseconds
	Position    [3]float64 // lat (deg), lon (deg), altitude (m)
	Orientation [3]float64 // pitch, roll, heading/yaw in degrees
}
func (s sensorData) isSameEpoch(n sensorData) bool { func (s sensorData) isSameEpoch(n sensorData) bool {
if n.itow == 0 { if n.itow == 0 {
return false return false
} }
return s.itow == n.itow return s.itow == n.itow
} }
// Consolidates two sensordata elements if they are in the same epoch // Consolidates two sensordata elements if they are in the same epoch
func (s sensorData) ConsolidateEpochsOnly(n sensorData) sensorData { func (s sensorData) ConsolidateEpochsOnly(n sensorData) sensorData {
s.checkSources(&n) s.checkSources(&n)
if s.isSameEpoch(n) { if s.isSameEpoch(n) {
null := sensorData{} null := sensorData{}
if n.Timestamp == null.Timestamp { if n.Timestamp == null.Timestamp {
n.Timestamp = s.Timestamp n.Timestamp = s.Timestamp
} }
if n.Position == null.Position { if n.Position == null.Position {
n.Position = s.Position n.Position = s.Position
} }
if n.Orientation == null.Orientation { if n.Orientation == null.Orientation {
n.Orientation = s.Orientation n.Orientation = s.Orientation
} }
} }
return n return n
} }
// Consolidates two sensordata elements but ignores timestamps // Consolidates two sensordata elements but ignores timestamps
func (s sensorData) ConsolidateExTime(n sensorData) sensorData { func (s sensorData) ConsolidateExTime(n sensorData) sensorData {
s.checkSources(&n) s.checkSources(&n)
null := sensorData{} null := sensorData{}
if n.Position == null.Position { if n.Position == null.Position {
n.Position = s.Position n.Position = s.Position
} }
if n.Orientation == null.Orientation { if n.Orientation == null.Orientation {
n.Orientation = s.Orientation n.Orientation = s.Orientation
} }
return n return n
} }
func (s *sensorData) checkSources(n *sensorData) { func (s *sensorData) checkSources(n *sensorData) {
if (s.source != n.source && *s != sensorData{}) { if (s.source != n.source && *s != sensorData{}) {
logrus.Println(s) logrus.Println(s)
logrus.Println(n) logrus.Println(n)
logrus.Fatalln("Do not consolidate sensorData from different Sources") logrus.Fatalln("Do not consolidate sensorData from different Sources")
} }
} }
var (
	// errNotImplemented: the ublox message type has no conversion here.
	errNotImplemented = errors.New("message not implemented")
	// errRawMessage: marker for raw, unparsed frames. Not returned in
	// this chunk — presumably consumed by callers; verify before removal.
	errRawMessage = errors.New("raw message")
)
// ConvertUbxToSensorData maps a parsed ublox message onto a sensorData
// sample stamped with the current server time and SOURCE_SERIAL.
// Returns (nil, nil) for raw frames (deliberately ignored) and
// errNotImplemented for message types without a conversion.
func ConvertUbxToSensorData(msg interface{}) (*sensorData, error) {
	sd := &sensorData{
		ServerTime: time.Now(),
		source:     SOURCE_SERIAL,
	}
	switch v := msg.(type) {
	case *ublox.NavPvt:
		sd.itow = v.ITOW_ms
		// UTC calendar fields -> Unix nanoseconds.
		sd.Timestamp = time.Date(int(v.Year_y), time.Month(v.Month_month), int(v.Day_d), int(v.Hour_h), int(v.Min_min), int(v.Sec_s), int(v.Nano_ns), time.UTC).UnixNano()
		sd.Position[0] = float64(v.Lat_dege7) / 1e+7 // 1e-7 deg -> deg
		sd.Position[1] = float64(v.Lon_dege7) / 1e+7 // 1e-7 deg -> deg
		sd.Position[2] = float64(v.HMSL_mm) / 1e+3   // mm in m
	case *ublox.HnrPvt:
		// High-rate PVT; same field layout as NAV-PVT for our purposes.
		sd.itow = v.ITOW_ms
		sd.Timestamp = time.Date(int(v.Year_y), time.Month(v.Month_month), int(v.Day_d), int(v.Hour_h), int(v.Min_min), int(v.Sec_s), int(v.Nano_ns), time.UTC).UnixNano()
		sd.Position[0] = float64(v.Lat_dege7) / 1e+7 // 1e-7 deg -> deg
		sd.Position[1] = float64(v.Lon_dege7) / 1e+7 // 1e-7 deg -> deg
		sd.Position[2] = float64(v.HMSL_mm) / 1e+3   // mm in m
	case *ublox.NavAtt:
		// Attitude-only message: no timestamp/position, just angles.
		sd.itow = v.ITOW_ms
		sd.Orientation[0] = float64(v.Pitch_deg) * 1e-5   // 1e-5 deg -> deg
		sd.Orientation[1] = float64(v.Roll_deg) * 1e-5    // 1e-5 deg -> deg
		sd.Orientation[2] = float64(v.Heading_deg) * 1e-5 // 1e-5 deg -> deg
	case *ublox.RawMessage:
		// Raw frames carry no decoded data: neither sample nor error.
		return nil, nil
	default:
		return nil, errNotImplemented
	}
	return sd, nil
}
func ConvertSensorDataPhone(jsonData []byte) (*sensorData, error) { func ConvertSensorDataPhone(jsonData []byte) (*sensorData, error) {
if gjson.Get(string(jsonData), "os").String() == "hyperimu" { if gjson.Get(string(jsonData), "os").String() == "hyperimu" {
return convertAndroidHyperImu(jsonData) return convertAndroidHyperImu(jsonData)
} }
return convertIPhoneSensorLog(jsonData) return convertIPhoneSensorLog(jsonData)
} }
func convertIPhoneSensorLog(jsonData []byte) (*sensorData, error) { func convertIPhoneSensorLog(jsonData []byte) (*sensorData, error) {
timestamp := gjson.Get(string(jsonData), "locationTimestamp_since1970").Float() timestamp := gjson.Get(string(jsonData), "locationTimestamp_since1970").Float()
lat := gjson.Get(string(jsonData), "locationLatitude").Float() lat := gjson.Get(string(jsonData), "locationLatitude").Float()
lon := gjson.Get(string(jsonData), "locationLongitude").Float() lon := gjson.Get(string(jsonData), "locationLongitude").Float()
alt := gjson.Get(string(jsonData), "locationAltitude").Float() alt := gjson.Get(string(jsonData), "locationAltitude").Float()
pitch := gjson.Get(string(jsonData), "motionPitch").Float() * 180 / math.Pi pitch := gjson.Get(string(jsonData), "motionPitch").Float() * 180 / math.Pi
roll := gjson.Get(string(jsonData), "motionRoll").Float() * 180 / math.Pi roll := gjson.Get(string(jsonData), "motionRoll").Float() * 180 / math.Pi
yaw := gjson.Get(string(jsonData), "motionYaw").Float() * 180 / math.Pi yaw := gjson.Get(string(jsonData), "motionYaw").Float() * 180 / math.Pi
sd := &sensorData{ sd := &sensorData{
ServerTime: time.Now(), ServerTime: time.Now(),
source: SOURCE_TCP, source: SOURCE_TCP,
Timestamp: int64(timestamp * float64(time.Second)), Timestamp: int64(timestamp * float64(time.Second)),
Position: [3]float64{lat, lon, alt}, Position: [3]float64{lat, lon, alt},
Orientation: [3]float64{pitch, roll, yaw}, Orientation: [3]float64{pitch, roll, yaw},
//Timestamp: time.Unix(0, prep.Timestamp * int64(time.Millisecond)), //Timestamp: time.Unix(0, prep.Timestamp * int64(time.Millisecond)),
} }
//logrus.Println(string(pretty.Pretty(jsonData))) //logrus.Println(string(pretty.Pretty(jsonData)))
//logrus.Println(sd) //logrus.Println(sd)
return sd, nil return sd, nil
} }
func convertAndroidHyperImu(jsonData []byte) (*sensorData, error) { func convertAndroidHyperImu(jsonData []byte) (*sensorData, error) {
timestamp := gjson.Get(string(jsonData), "Timestamp").Int() timestamp := gjson.Get(string(jsonData), "Timestamp").Int()
lat := gjson.Get(string(jsonData), "GPS.0").Float() lat := gjson.Get(string(jsonData), "GPS.0").Float()
lon := gjson.Get(string(jsonData), "GPS.1").Float() lon := gjson.Get(string(jsonData), "GPS.1").Float()
alt := gjson.Get(string(jsonData), "GPS.2").Float() alt := gjson.Get(string(jsonData), "GPS.2").Float()
pitch := gjson.Get(string(jsonData), "orientation.0").Float() pitch := gjson.Get(string(jsonData), "orientation.0").Float()
roll := gjson.Get(string(jsonData), "orientation.1").Float() roll := gjson.Get(string(jsonData), "orientation.1").Float()
yaw := gjson.Get(string(jsonData), "orientation.2").Float() yaw := gjson.Get(string(jsonData), "orientation.2").Float()
sd := &sensorData{ sd := &sensorData{
ServerTime: time.Now(), ServerTime: time.Now(),
source: SOURCE_TCP, source: SOURCE_TCP,
Timestamp: timestamp * int64(time.Millisecond), Timestamp: timestamp * int64(time.Millisecond),
Position: [3]float64{lat, lon, alt}, Position: [3]float64{lat, lon, alt},
Orientation: [3]float64{pitch, roll, yaw}, Orientation: [3]float64{pitch, roll, yaw},
//Timestamp: time.Unix(0, prep.Timestamp * int64(time.Millisecond)), //Timestamp: time.Unix(0, prep.Timestamp * int64(time.Millisecond)),
} }
return sd, nil return sd, nil
} }

View File

@ -92,11 +92,11 @@ func (t *trackingService) StopRecord() {
t.opMode = LIVE t.opMode = LIVE
t.pipe.StopRecord() t.pipe.StopRecord()
m1.Lock() mRec.Lock()
m2.Lock() mRaw.Lock()
err := t.repo.Save(*t.current) err := t.repo.Save(*t.current)
m2.Unlock() mRaw.Unlock()
m1.Unlock() mRec.Unlock()
if err != nil { if err != nil {
logrus.Println(err) logrus.Println(err)
@ -140,9 +140,9 @@ func (t *trackingService) LoadTracking(trackingId uuid.UUID) {
} }
// safelyReplaceTracking swaps the service's current tracking for tr
// while holding both data mutexes, so no enqueue or save can observe a
// half-replaced tracking. The lock order (mRec before mRaw) must match
// every other site that takes both locks (e.g. StopRecord).
func (t *trackingService) safelyReplaceTracking(tr Tracking) {
	mRec.Lock()
	mRaw.Lock()
	*t.current = tr
	mRaw.Unlock()
	mRec.Unlock()
}

View File

@ -1,62 +1,75 @@
package core package core
import ( import (
"github.com/google/uuid" "github.com/google/uuid"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"sync" "sync"
"time" "time"
) )
var m1 sync.RWMutex
var m2 sync.RWMutex
// Tracking is one recording session: metadata plus the collected
// record pairs and raw samples.
type Tracking struct {
	TrackingMetadata
	Records []recordPair // consolidated tcp/serial sample pairs
	Rawdata []rawRecord  // unconsolidated single samples
}

// mRec and mRaw guard concurrent access to a Tracking's Records and
// Rawdata slices. Sites that need both take mRec before mRaw — keep
// that order to avoid deadlock.
var mRec sync.RWMutex
var mRaw sync.RWMutex

// TrackingMetadata describes a tracking independent of its samples.
type TrackingMetadata struct {
	UUID        uuid.UUID
	TimeCreated time.Time
	Collectors  []CollectorType
}

// persistence wrapper for sensordata
type recordPair struct {
	RecordTimeKey time.Time // uniqueness ensured through mutex
	DataPair      map[sourceId]sensorData
}

type rawRecord struct {
	RecordTimeKey time.Time // uniqueness ensured through mutex
	Data          sensorData
}

// END persistence wrapper for sensordata
func (s *Tracking) EnqueuePair(tcp sensorData, ser sensorData) { func (s *Tracking) EnqueuePair(tcp sensorData, ser sensorData) {
rp := recordPair{ rp := recordPair{
RecordTime: time.Now(), RecordTimeKey: time.Now(),
data: map[sourceId]sensorData{ DataPair: map[sourceId]sensorData{
tcp.source: tcp, tcp.source: tcp,
ser.source: ser, ser.source: ser,
}, },
} }
m1.Lock() mRec.Lock()
s.Records = append(s.Records, rp) s.Records = append(s.Records, rp)
logrus.Debugln("tracking Records: len->", len(s.Records)) logrus.Debugln("tracking Records: len->", len(s.Records))
m1.Unlock() mRec.Unlock()
} }
func (s *Tracking) EnqueueRaw(data sensorData) { func (s *Tracking) EnqueueRaw(data sensorData) {
sr := rawRecord{ sr := rawRecord{
time.Now(), time.Now(),
data, data,
} }
m1.Lock() mRec.Lock()
s.Rawdata = append(s.Rawdata, sr) s.Rawdata = append(s.Rawdata, sr)
logrus.Debugln("raw data points: len->", len(s.Rawdata)) logrus.Debugln("raw data points: len->", len(s.Rawdata))
m1.Unlock() mRec.Unlock()
} }
func emptyTracking() Tracking { func emptyTracking() Tracking {
return Tracking{ return Tracking{
TrackingMetadata: TrackingMetadata{ TrackingMetadata: TrackingMetadata{
UUID: uuid.New(), UUID: uuid.New(),
}, },
Records: []recordPair{}, Records: []recordPair{},
Rawdata: []rawRecord{}, Rawdata: []rawRecord{},
} }
} }
func (s *Tracking) isEmpty() bool { func (s *Tracking) isEmpty() bool {
return len(s.Rawdata)+len(s.Records) == 0 return len(s.Rawdata)+len(s.Records) == 0
} }

2
go.mod
View File

@ -12,7 +12,7 @@ require (
github.com/spf13/viper v1.7.1 github.com/spf13/viper v1.7.1
github.com/stretchr/testify v1.6.1 // indirect github.com/stretchr/testify v1.6.1 // indirect
github.com/tidwall/gjson v1.6.0 github.com/tidwall/gjson v1.6.0
github.com/tidwall/pretty v1.0.2 // indirect github.com/tidwall/pretty v1.0.2
go.bug.st/serial v1.1.1 go.bug.st/serial v1.1.1
golang.org/x/sync v0.0.0-20190423024810-112230192c58 golang.org/x/sync v0.0.0-20190423024810-112230192c58
) )

View File

@ -1,99 +1,116 @@
package storage package storage
import ( import (
"encoding/binary" "encoding/binary"
"encoding/json" "encoding/json"
"git.timovolkmann.de/gyrogpsc/core" "git.timovolkmann.de/gyrogpsc/core"
"github.com/dgraph-io/badger/v2" "github.com/dgraph-io/badger/v2"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"os" "github.com/tidwall/pretty"
"path/filepath" "os"
"path/filepath"
"strconv"
) )
// Must implement Repo // Must implement Repo
type badgerStore struct { type badgerStore struct {
trackings *badger.DB trackingsDb *badger.DB
records *badger.DB recordsDb *badger.DB
rawdata *badger.DB rawdataDb *badger.DB
} }
func NewRepository(c *core.Configuration) *badgerStore { func NewRepository(c *core.Configuration) *badgerStore {
dir, _ := os.Getwd() dir, _ := os.Getwd()
logrus.Debug(dir) logrus.Debug(dir)
if _, err := os.Stat(filepath.Join(dir,"_db")); os.IsNotExist(err) { if _, err := os.Stat(filepath.Join(dir, "_db")); os.IsNotExist(err) {
os.Mkdir(filepath.Join(dir,"_db"), os.ModePerm) os.Mkdir(filepath.Join(dir, "_db"), os.ModePerm)
} }
tr, err := badger.Open(badger.DefaultOptions("_db/trackings")) tr, err := badger.Open(badger.DefaultOptions("_db/trackings"))
dp, err := badger.Open(badger.DefaultOptions("_db/records")) dp, err := badger.Open(badger.DefaultOptions("_db/records"))
rd, err := badger.Open(badger.DefaultOptions("_db/raw")) rd, err := badger.Open(badger.DefaultOptions("_db/raw"))
if err != nil { if err != nil {
logrus.Error(err) logrus.Error(err)
} }
return &badgerStore{trackings: tr, records: dp, rawdata: rd} return &badgerStore{trackingsDb: tr, recordsDb: dp, rawdataDb: rd}
} }
// isDbAvailable reports whether ANY of the three databases is closed.
// NOTE(review): despite the name, true means "NOT available" — callers
// (Save) treat true as the error case. Consider renaming to isDbClosed.
func (r *badgerStore) isDbAvailable() bool {
	return r.trackingsDb.IsClosed() || r.recordsDb.IsClosed() || r.rawdataDb.IsClosed()
}
func (r *badgerStore) Save(tr core.Tracking) error { func (r *badgerStore) Save(tr core.Tracking) error {
if ok := r.isDbAvailable(); ok { if ok := r.isDbAvailable(); ok {
logrus.Error("unable to write to database. database closed!") logrus.Error("unable to write to database. database closed!")
return badger.ErrDBClosed return badger.ErrDBClosed
} }
ts, err := tr.TimeCreated.MarshalText() ts, err := tr.TimeCreated.MarshalText()
if err != nil { if err != nil {
logrus.Error(err, tr) logrus.Error(err, tr)
} }
logrus.Info("save tracking:", tr.TimeCreated) logrus.Info("save tracking:", tr.TimeCreated)
meta, err := json.Marshal(tr.TrackingMetadata) meta, err := json.Marshal(tr.TrackingMetadata)
if err != nil { if err != nil {
logrus.Error(err, tr) logrus.Error(err, tr)
return err return err
} }
err = r.records.Update(func(txn *badger.Txn) error { err = r.recordsDb.Update(func(txn *badger.Txn) error {
for _, v := range tr.Records { for _, v := range tr.Records {
k := createDataKey(tr.UUID, v.RecordTime.UnixNano()) k := createRecordKey(tr.UUID, v.RecordTimeKey.UnixNano())
j, err := json.Marshal(v) j, err := json.Marshal(v.DataPair)
if err != nil { logrus.Debugln("save record k/v:\n", tr.UUID.String(), strconv.FormatInt(v.RecordTimeKey.UnixNano(), 10))
return err logrus.Debugln(string(pretty.Pretty(j)))
} if err != nil {
txn.Set(k, j) return err
} }
return nil txn.Set(k, j)
}) }
if err != nil { return nil
logrus.Error(err, tr) })
return err if err != nil {
} logrus.Error(err, tr)
err = r.records.Update(func(txn *badger.Txn) error { return err
for _, v := range tr.Rawdata { }
k := createDataKey(tr.UUID, v.Timestamp) err = r.rawdataDb.Update(func(txn *badger.Txn) error {
j, err := json.Marshal(v) for _, v := range tr.Rawdata {
if err != nil { k := createRecordKey(tr.UUID, v.RecordTimeKey.UnixNano())
return err j, err := json.Marshal(v)
} logrus.Debugln("save raw k/v:\n", tr.UUID.String(), strconv.FormatInt(v.RecordTimeKey.UnixNano(), 10))
txn.Set(k, j) logrus.Debugln(string(pretty.Pretty(j)))
} if err != nil {
return nil return err
}) }
if err != nil { txn.Set(k, j)
logrus.Error(err, tr) }
return err return nil
} })
err = r.trackings.Update(func(txn *badger.Txn) error { if err != nil {
err := txn.Set(ts, meta) logrus.Error(err, tr)
return err return err
}) }
if err != nil { err = r.trackingsDb.Update(func(txn *badger.Txn) error {
logrus.Error(err, tr) logrus.Debug("save tracking meta k/v:\n", string(ts), string(meta))
return err err := txn.Set(ts, meta)
} return err
logrus.Info("sucessfully saved tracking") })
return nil if err != nil {
logrus.Error(err, tr)
return err
}
r.trackingsDb.PrintHistogram(nil)
dr := 0.5
err = r.trackingsDb.RunValueLogGC(dr)
logrus.Debug("DB GC:", err)
err = r.recordsDb.RunValueLogGC(dr)
logrus.Debug("DB GC:", err)
err = r.rawdataDb.RunValueLogGC(dr)
logrus.Debug("DB GC:", err)
r.trackingsDb.PrintHistogram(nil)
logrus.Info("sucessfully saved tracking")
return nil
} }
//func (r *badgerStore) Save(tracking *core.Tracking) error { //func (r *badgerStore) Save(tracking *core.Tracking) error {
@ -112,9 +129,9 @@ func (r *badgerStore) Save(tr core.Tracking) error {
// ch := make(chan error, 3) // ch := make(chan error, 3)
// go func() { // go func() {
// defer wg.Done() // defer wg.Done()
// err = r.records.Update(func(txn *badger.Txn) error { // err = r.recordsDb.Update(func(txn *badger.Txn) error {
// for _, v := range tracking.Records { // for _, v := range tracking.Records {
// k := createDataKey(tracking.UUID, v.RecordTime.UnixNano()) // k := createRecordKey(tracking.UUID, v.RecordTime.UnixNano())
// j, err := json.Marshal(v) // j, err := json.Marshal(v)
// if err != nil { // if err != nil {
// return err // return err
@ -127,9 +144,9 @@ func (r *badgerStore) Save(tr core.Tracking) error {
// }() // }()
// go func() { // go func() {
// defer wg.Done() // defer wg.Done()
// err = r.records.Update(func(txn *badger.Txn) error { // err = r.recordsDb.Update(func(txn *badger.Txn) error {
// for _, v := range tracking.Rawdata { // for _, v := range tracking.Rawdata {
// k := createDataKey(tracking.UUID, v.Timestamp) // k := createRecordKey(tracking.UUID, v.Timestamp)
// j, err := json.Marshal(v) // j, err := json.Marshal(v)
// if err != nil { // if err != nil {
// return err // return err
@ -142,7 +159,7 @@ func (r *badgerStore) Save(tr core.Tracking) error {
// }() // }()
// go func() { // go func() {
// defer wg.Done() // defer wg.Done()
// err = r.trackings.Update(func(txn *badger.Txn) error { // err = r.trackingsDb.Update(func(txn *badger.Txn) error {
// err := txn.Set(ts, meta) // err := txn.Set(ts, meta)
// return err // return err
// }) // })
@ -165,33 +182,34 @@ func (r *badgerStore) Save(tr core.Tracking) error {
//} //}
// LoadAll returns the metadata of all stored trackings.
// Not yet implemented.
func (r *badgerStore) LoadAll() ([]core.TrackingMetadata, error) {
	panic("implement me")
}

// Load reconstructs a full tracking (metadata plus samples) by UUID.
// Not yet implemented.
func (r *badgerStore) Load(id uuid.UUID) (core.Tracking, error) {
	panic("implement me")
}
// createRecordKey builds the badger key for one record of a tracking:
// the UUID's canonical text form (36 bytes) followed by the record
// timestamp as 8 big-endian bytes — big-endian so keys sort
// chronologically within a tracking.
func createRecordKey(uid uuid.UUID, timestamp int64) []byte {
	prefix, err := uid.MarshalText()
	if err != nil || timestamp < 0 {
		// No error return in the signature; the caller gets a
		// best-effort key. NOTE(review): consider propagating this.
		logrus.Error("unable to create key", err)
	}
	suffix := make([]byte, 8)
	binary.BigEndian.PutUint64(suffix, uint64(timestamp))
	return append(prefix, suffix...)
}
func unmarshalDataKey(key []byte) (uuid.UUID, int64) { func unmarshalDataKey(key []byte) (uuid.UUID, int64) {
if len(key) != 24 { if len(key) != 24 {
panic("corrupted key") panic("corrupted key")
} }
prefix := key[0:15] prefix := key[0:15]
suffix := key[15:24] suffix := key[15:24]
uid, err := uuid.FromBytes(prefix) uid, err := uuid.FromBytes(prefix)
if err != nil { if err != nil {
panic("corrupted key") panic("corrupted key")
} }
timestamp := int64(binary.LittleEndian.Uint64(suffix)) timestamp := int64(binary.BigEndian.Uint64(suffix))
return uid, timestamp return uid, timestamp
} }