diff --git a/src/cmd/open_speed_data/position.go b/src/cmd/open_speed_data/position.go
index c5e067f..20327e5 100644
--- a/src/cmd/open_speed_data/position.go
+++ b/src/cmd/open_speed_data/position.go
@@ -2,6 +2,8 @@ package main
 
 import (
 	"fmt"
+	"image"
+	"math"
 	"time"
 
 	"labelimg"
@@ -41,3 +43,62 @@ type FramePosition struct {
 	Time      time.Duration
 	Positions []labelimg.Label
 }
+
+type VehiclePosition struct {
+	Frame     int
+	Time      time.Duration
+	VehicleID int
+	Position  labelimg.Label
+}
+
+// TrackVehicles tracks detected objects and correlates them across frames,
+// based on the identifyvehicles logic from https://github.com/mbauman/TrafficSpeed/blob/master/TrafficSpeed.ipynb
+func TrackVehicles(frames []FramePosition) []VehiclePosition {
+	var vehicleCount int
+	var vehicles []VehiclePosition
+	var lastFrameVehicles []VehiclePosition
+
+	for _, frame := range frames {
+		var currentFrameVehicles []VehiclePosition
+		for _, position := range frame.Positions {
+			// does this position overlap a vehicle from the last frame?
+			var vehicleID int
+			// TODO: use median point not center
+			if closest := ClosestPosition(position.Center, lastFrameVehicles); position.Center.In(closest.Position.Bounds) {
+				vehicleID = closest.VehicleID
+			} else {
+				vehicleCount++
+				vehicleID = vehicleCount
+			}
+			currentFrameVehicles = append(currentFrameVehicles, VehiclePosition{
+				Frame:     frame.Frame,
+				Time:      frame.Time,
+				VehicleID: vehicleID,
+				Position:  position,
+			})
+		}
+		lastFrameVehicles = currentFrameVehicles
+		vehicles = append(vehicles, currentFrameVehicles...)
+	}
+	return vehicles
+}
+
+func ClosestPosition(point image.Point, v []VehiclePosition) VehiclePosition {
+	var closest VehiclePosition
+	var min float64 = -1
+	for _, p := range v {
+		// TODO: use median point not center
+		d := distance(point, p.Position.Center)
+		if min == -1 || d < min {
+			min = d
+			closest = p
+		}
+	}
+	return closest
+}
+
+func distance(a, b image.Point) float64 {
+	x := math.Abs(float64(a.X) - float64(b.X))
+	y := math.Abs(float64(a.Y) - float64(b.Y))
+	return math.Sqrt((x * x) + (y * y))
+}
diff --git a/src/cmd/open_speed_data/project.go b/src/cmd/open_speed_data/project.go
index 9b71e59..39120d6 100644
--- a/src/cmd/open_speed_data/project.go
+++ b/src/cmd/open_speed_data/project.go
@@ -67,12 +67,18 @@ type Response struct {
 	Step4MaskImg  template.URL    `json:"step_4_mask_img,omitempty"`
 	BackgroundImg template.URL    `json:"background_img,omitempty"`
 	FrameAnalysis []FrameAnalysis `json:"frame_analysis,omitempty"`
-	FramePositions []FramePosition
+	VehiclePositions []VehiclePosition
 	Step6Img    template.URL `json:"step_6_img,omitempty"`
 	DebugImages []template.URL
 }
 
+type frameImage struct {
+	Frame int
+	Time  time.Duration
+	Image *image.RGBA
+}
+
 func NewProject(f string) *Project {
 	// overview_gif
 	// overview_img
@@ -135,6 +141,8 @@ func (p *Project) Run() error {
 	bg := &avgimg.MedianRGBA{}
 	var bgavg *image.RGBA
 	var err error
+	var framePositions []FramePosition
+	var pendingAnalysis []frameImage
 	analyzer := &Analyzer{
 		BWCutoff:   p.Tolerance,
 		BlurRadius: p.Blur,
@@ -261,13 +269,28 @@ func (p *Project) Run() error {
 			log.Printf("saving frame %d for analysis later", frame)
 			analysis.images = append(analysis.images, rgbImg)
 		}
+		// queue frames for analysis until the averaged background is available
+		if p.Step == 6 && bgavg == nil {
+			pendingAnalysis = append(pendingAnalysis, frameImage{frame, pkt.Time, rgbImg})
+		}
 		if p.Step == 6 && bgavg != nil {
 			// process pending frames
+			log.Printf("extracting vehicle positions from %d pending frames", len(pendingAnalysis))
+			for _, pf := range pendingAnalysis {
+				if pf.Frame%50 == 0 && pf.Frame > 0 {
+					log.Printf("... frame %d", pf.Frame)
+				}
+				positions := analyzer.Positions(pf.Image)
+				if len(positions) > 0 {
+					framePositions = append(framePositions, FramePosition{pf.Frame, pf.Time, positions})
+				}
+			}
+			pendingAnalysis = nil
 			positions := analyzer.Positions(rgbImg)
 			if len(positions) > 0 {
-				p.Response.FramePositions = append(p.Response.FramePositions, FramePosition{frame, pkt.Time, positions})
+				framePositions = append(framePositions, FramePosition{frame, pkt.Time, positions})
 			}
 		}
 		if frame == 500 {
@@ -275,6 +298,10 @@ func (p *Project) Run() error {
 		}
 	}
 
+	if p.Step == 6 {
+		p.Response.VehiclePositions = TrackVehicles(framePositions)
+	}
+
 	if p.Step == 5 && bgavg != nil {
 		analysis.Calculate(bgavg, p.Blur, p.ContiguousPixels, p.MinMass, p.Tolerance)
 		p.Response.FrameAnalysis = append(p.Response.FrameAnalysis, *analysis)
diff --git a/src/cmd/open_speed_data/templates.go b/src/cmd/open_speed_data/templates.go
index 13e7e75..5fcea3f 100644
--- a/src/cmd/open_speed_data/templates.go
+++ b/src/cmd/open_speed_data/templates.go
@@ -277,40 +277,22 @@ const tpl = `
 {{ if eq .Step 6 }}

 Step 6: Position Detection
 
-	...
-	{{ if .Response.FramePositions }}
+	{{ if .Response.VehiclePositions }}
 	<table>
-		<tr><th>Frame</th><th>Time</th><th>Position</th></tr>
+		<tr><th>Frame</th><th>Time</th><th>Vehicle</th><th>Position</th><th>Mass</th><th>Size</th></tr>
-	{{ range .Response.FramePositions }}
+	{{ range .Response.VehiclePositions }}
 	<tr>
 		<td>{{.Frame}}</td>
 		<td>{{.Time}}</td>
-		<td>
-		{{ if .Positions }}
-			<table>
-			<tr><th>Mass</th><th>Position</th><th>Size</th></tr>
-			{{ range $i, $p := .Positions }}
-			<tr>
-				<td>{{$i}}</td>
-				<td>{{$p.Pixels }} pixels</td>
-				<td>{{$p.Center.X}}x{{$p.Center.Y}}</td>
-				<td>{{$p.Bounds.Dx}}x{{$p.Bounds.Dy}}</td>
-			</tr>
-			{{ end }}
-			</table>
-		{{ end }}
-		</td>
+		<td>{{.VehicleID}}</td>
+		<td>{{.Position.Center.X}}x{{.Position.Center.Y}}</td>
+		<td>{{.Position.Pixels }} pixels</td>
+		<td>{{.Position.Bounds.Dx}}x{{.Position.Bounds.Dy}}</td>
 	</tr>
 	{{ end }}
 	</table>
 	{{ end }}
diff --git a/src/labelimg/labelimg.go b/src/labelimg/labelimg.go
index 459d908..fa03cac 100644
--- a/src/labelimg/labelimg.go
+++ b/src/labelimg/labelimg.go
@@ -84,7 +84,7 @@ func New(g *image.Gray, contiguousPixels, minPixels int) *image.Paletted {
 			if i == 0 {
 				// add new color
 				if len(p.Palette) == 254 {
-					log.Printf("skipping detectino of x,y (%d,%d); over 255 max", x, y)
+					log.Printf("skipping detection of x,y (%d,%d); over 255 max", x, y)
 					continue
 				}
 				i = uint8(len(p.Palette))
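Note, not part of the patch: TrackVehicles ships without a test. Below is a minimal sketch of one, assuming it sits next to position.go in the same main package (a hypothetical position_test.go) and that labelimg.Label can be constructed with just its Center and Bounds fields, the only ones the tracker reads. The expected IDs follow the nearest-center / bounds-containment rule in TrackVehicles and would change once the "use median point not center" TODO is addressed.

```go
package main

import (
	"image"
	"testing"
	"time"

	"labelimg"
)

// Sketch only: one blob drifts right and keeps its ID because its new center
// still falls inside the previous frame's bounds; an unmatched blob gets a new ID.
func TestTrackVehicles(t *testing.T) {
	frames := []FramePosition{
		{Frame: 1, Time: 33 * time.Millisecond, Positions: []labelimg.Label{
			{Center: image.Pt(100, 50), Bounds: image.Rect(90, 40, 110, 60)},
		}},
		{Frame: 2, Time: 66 * time.Millisecond, Positions: []labelimg.Label{
			{Center: image.Pt(108, 50), Bounds: image.Rect(98, 40, 118, 60)},  // center still inside frame 1's bounds
			{Center: image.Pt(300, 80), Bounds: image.Rect(290, 70, 310, 90)}, // far from anything seen before
		}},
	}

	got := TrackVehicles(frames)
	want := []int{1, 1, 2} // blob in frame 1, same blob in frame 2, new blob in frame 2
	if len(got) != len(want) {
		t.Fatalf("got %d positions, want %d", len(got), len(want))
	}
	for i, vp := range got {
		if vp.VehicleID != want[i] {
			t.Errorf("position %d: VehicleID = %d, want %d", i, vp.VehicleID, want[i])
		}
	}
}
```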