clean up log output; apply mask to bitmask

This commit is contained in:
Jehiah Czebotar
2016-01-01 22:03:00 -05:00
parent 54f921de44
commit 82f6b92b70
6 changed files with 136 additions and 57 deletions

View File

@@ -10,7 +10,7 @@ read 10 random frames and generate a background from the averages
function avg_background(f::VideoIO.VideoReader, rrc::Function)
seekstart(f)
bg = float32(convert(Image{ColorTypes.RGB{Float32}}, rrc(f)))
println("rrc summary $(summary(bg))")
# println("rrc summary $(summary(bg))")
# bg = convert(Image{ColorTypes.RGB{Float32}}, frame)
step = duration(f)/20
@@ -19,7 +19,7 @@ function avg_background(f::VideoIO.VideoReader, rrc::Function)
while count < 20
total_pos += step
count+=1
println("generating background: seeking $step to $total_pos for frame $count")
println("background frame $count @ $total_pos seconds")
seek(f, total_pos)
frame = float32(rrc(f))
bg += frame
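# Sketch for illustration (not part of this commit): the loop above only accumulates the
# sampled frames, so the averaged background is presumably produced by dividing the sum
# by the number of samples once the loop finishes, roughly:
#     bg /= count    # element-wise mean of the sampled frames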

View File

@@ -2,6 +2,8 @@ using Images, Colors
import Base: .^
import FixedPointNumbers: UFixed8
include("./mask.jl")
# Absolute value is defined for RGB colors, but it's a little wonky -- it's the *sum* of the absolute values
# of the components. That happens to be exactly what we want here, but it's not defined for arrays of RGBs, so we add that definition here:
@vectorize_1arg AbstractRGB Base.abs
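# Illustration (not in this commit): on Julia 0.4, @vectorize_1arg roughly generates
# element-wise methods like the one below, which is what lets abs() be applied to the
# whole difference image (an Array of RGBs) when labeling against the background:
#     Base.abs{T<:AbstractRGB}(x::AbstractArray{T}) = reshape([abs(v) for v in x], size(x))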
@@ -18,10 +20,10 @@ function labelimg_base(img::Image, background::Image)
# grayim(map(ColorTypes.RGB{Float32}, i))
end
function label(img::Image, background::Image, blur=[3,3], tolerance=0.06)
function label(img::Image, background::Image, masks::Array, blur=[3,3], tolerance=0.06)
i = imfilter_gaussian(grayim(abs((convert(Image{RGB{Float32}}, img) - convert(Image{RGB{Float32}}, background)).^2)),blur) .> tolerance
i::BitMatrix
label_components(i) # This is like MATLAB's bwlabel
label_components(mask(i, masks)) # This is like MATLAB's bwlabel
end
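# Usage sketch (frame, background, and mask values are hypothetical): the masks are
# applied to the thresholded BitMatrix before connected components are labeled, so a
# masked region can never produce a label.
#     labels = label(frame, background, [Dict("start" => 13, "end" => 14)], [3,3], 0.06)
#     maximum(labels)    # number of detected regions (0 means background only)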
# """
@@ -35,8 +37,8 @@ end
"""
Show an image with labeled (by color) regions
"""
function labelimg_example(img::Image, background::Image, blur=[3,3], tolerance=0.06)
labels = label(img, background, blur, tolerance)
function labelimg_example(img::Image, background::Image, masks::Array, blur=[3,3], tolerance=0.06)
labels = label(img, background, masks, blur, tolerance)
# from https://github.com/JuliaGraphics/Colors.jl/blob/master/src/names_data.jl
colors = [colorant"black", # this is the background
colorant"red", colorant"yellow", colorant"green", colorant"blue",

View File

@@ -16,6 +16,7 @@ include("./base64img.jl")
include("./mask.jl")
include("./background.jl")
include("./labelimg.jl")
include("./positions.jl")
jsonContentType = Dict{AbstractString,AbstractString}([("Content-Type", "application/json")])
@@ -42,7 +43,7 @@ http = HttpHandler() do req::Request, res::Response
f = VideoIO.openvideo(io)
resp["frames"] = length(f)
resp["duration_seconds"] = duration(f)
println("$(resp["frames"])frames, duration: $(resp["duration_seconds"]) seconds")
video_summary(f)
img = read(f, Image)
resp["video_resolution"] = "$(size(img.data, 1))x$(size(img.data, 2))"
@@ -51,7 +52,6 @@ http = HttpHandler() do req::Request, res::Response
println("Generating overview image (step 2)")
resp["step_2_img"] = base64img("image/png", img)
# println("img is $(resp["step_two_size"])")
if haskey(job, "rotate") && job["rotate"] != 0.00001
println("Rotating $(job["rotate"]) radians")
@@ -62,11 +62,11 @@ http = HttpHandler() do req::Request, res::Response
resp["step_3_img"] = base64img("image/png", img)
if haskey(job, "bbox")
println("cropping to $(job["bbox"])")
println("cropping to [$(job["bbox"]["a"]["x"]):$(job["bbox"]["b"]["x"]), $(job["bbox"]["a"]["y"]):$(job["bbox"]["b"]["y"])]")
# println("before crop $(summary(img))")
job["bbox_region"] = (job["bbox"]["a"]["x"]:job["bbox"]["b"]["x"], job["bbox"]["a"]["y"]:job["bbox"]["b"]["y"])
img = crop(img, job["bbox_region"])
println("after crop $(summary(img))")
println("Cropped image is $(summary(img))")
else
# set crop region to no-op size
job["bbox_region"] = (1:size(img.data,1), 1:size(img.data, 2))
@@ -75,8 +75,8 @@ http = HttpHandler() do req::Request, res::Response
resp["step_4_img"] = base64img("image/png", img)
if haskey(job, "masks")
println("Applying masks: $(job["masks"])")
masked = mask(img, job["masks"])
# println("Applying masks: $(job["masks"])")
masked = mask_img(img, job["masks"])
resp["step_4_mask_img"] = base64img("image/png", masked)
end
@@ -105,28 +105,30 @@ http = HttpHandler() do req::Request, res::Response
background = avg_background(f, rrc)
resp["background_img"] = base64img("image/png", background)
if haskey(job, "masks")
background = mask(background, job["masks"])
end
# pick four frames spread across the video
frame_analysis = Array{Any, 1}()
i = 0
blur_arg=[job["blur"], job["blur"]]
if haskey(job, "masks")
mask_args = job["masks"]
else
mask_args = Array{Any,1}()
end
while i < 4
e = Dict{AbstractString,Any}()
pos = i * (duration(f)/5) # increment by a smaller fraction so we don't get the last frame
pos = floor(Int, i * (duration(f)/5)) # increment by a smaller fraction so we don't get the last frame
println("analyzing frame at $pos seconds")
e["ts"] = pos
seek(f, pos)
frame = rrc(f)
if haskey(job, "masks")
frame = mask(frame, job["masks"])
end
e["highlight"] = base64img("image/png", labelimg_base(frame, background))
e["colored"] = base64img("image/png", labelimg_example(frame, background, blur_arg, job["tolerance"]))
# e["labels"] = label(frame, background)
e["colored"] = base64img("image/png", labelimg_example(frame, background, mask_args, blur_arg, job["tolerance"]))
e["positions"] = positions(label(frame, background, mask_args, blur_arg, job["tolerance"]))
# println("$i positions json is $(JSON.json(e["positions"]))")
push!(frame_analysis, e)
i += 1
end
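# Illustrative shape of one frame_analysis entry (values hypothetical); this is the JSON
# that the Go FrameAnalysis struct further below unmarshals:
#     Dict("ts" => 12, "highlight" => "<base64 PNG>", "colored" => "<base64 PNG>",
#          "positions" => [Dict("x" => 4.5, "y" => 12.0, "mass" => 42,
#                               "xspan" => [3,4,5], "yspan" => [10,11,12])])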

View File

@@ -7,36 +7,35 @@ using Images
# NullMask bool `json:"null_mask,omitempty"`
# }
function mask(A::Image, masks)
println("$(summary(A))")
# a = convert(Image{ColorTypes.RGB{Float64}}, A)
# a = convert(Image{ColorTypes.RGB{UFixed8}}, A)
# T = eltype(A.data)
# a = convert(Image{ColorTypes.RGB{T}}, A)
# 1920x1080 Images.Image{ColorTypes.RGB{FixedPointNumbers.UFixed{UInt8,8}},2,Array{ColorTypes.RGB{FixedPointNumbers.UFixed{UInt8,8}},2}}
function mask_img(A::Image, masks::Array)
fill = zero(eltype(A.data))
# fill = zero(ColorTypes.RGB{UFixed8})
for m in masks
println("applying mask $m")
if haskey(m, "null_mask")
continue
end
if haskey(m, "start")
println("masking [:, $(m["start"]):$(m["end"])]")
A.data[:, m["start"]:m["end"]] = fill
else
bbox = m["bbox"]
println("masking [$(bbox["a"]["x"]):$(bbox["b"]["x"]), $(bbox["a"]["y"]):$(bbox["b"]["y"])]")
A.data[bbox["a"]["x"]:bbox["b"]["x"], bbox["a"]["y"]:bbox["b"]["y"]] = fill
end
end
A
end
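# Usage sketch (mask values hypothetical): each mask is either a row range or a bounding
# box, mirroring the JSON submitted by the frontend; matching pixels are filled with the
# zero color.
#     mask_img(img, [Dict("start" => 13, "end" => 14),
#                    Dict("bbox" => Dict("a" => Dict("x" => 10, "y" => 20),
#                                        "b" => Dict("x" => 20, "y" => 30)))])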
# function mask{T}(A::BitMatrix, maskData)
# mask = imfilter_gaussian(grayim(abs((convert(Image{RGB{Float32}}, img) - convert(Image{RGB{Float32}}, background)).^2)),[3,3]) .> .06
# mask[:,13:14] = false
# mask[:,29:33] = false
# mask[:,40:43] = false
# mask[:,53:66] = false
#
# grayim(map(UFixed8, mask))
function mask(A::BitMatrix, masks::Array)
for m in masks
if haskey(m, "null_mask")
continue
end
if haskey(m, "start")
A[:, m["start"]:m["end"]] = false
else
bbox = m["bbox"]
A[bbox["a"]["x"]:bbox["b"]["x"], bbox["a"]["y"]:bbox["b"]["y"]] = false
end
end
A
end

View File

@@ -59,7 +59,7 @@ const tpl = `
{{ if .Rotate }}
<h2>Step 2: Rotation</h2>
<p>Rotation Angle <code>{{.Rotate}} radians</code></p>
<input type="hidden" name="rotate" value="{{.Rotate}}" />
<input type="hidden" name="rotate" value="{{.Rotate | printf "%0.5f"}}" />
<div><img src="{{.Response.Step3Img}}" style="width: 25%; height: 25%;"></div>
{{ end }}
@@ -113,6 +113,9 @@ const tpl = `
<h2>Step 4: Mask Regions</h2>
<p>Masked regions: {{range .Masks }}<code>{{.}}</code> {{end}}</p>
<div><img src="{{.Response.Step4MaskImg}}" style="width: 40%; height: 40%;"></div>
{{ range .Masks }}
<input type="hidden" name="mask" value="{{.}}" />
{{ end }}
{{ end }}
{{ if eq .Step "step_four" }}
@@ -122,8 +125,8 @@ const tpl = `
Depending on the camera perspective, the masked rows should be closer to the wheel position to account for
tall vehicles in the lane.
</p>
<p>Instructions: Note the X and Y from the image, and enter masks as a row range <code>row:row</code>
or a bounding box pair of coordinates <code>10x20 20x30</code>. To continue without masks enter a mask of <code>-</code>.</p>
<p>Instructions: Note the X and Y from the image, and enter masks as a row range <kbd>row:row</kbd>
or a bounding box pair of coordinates <kbd>10x20 20x30</kbd>. To continue without masks enter a mask of <kbd>-</kbd>.</p>
<div class="form-group">
<label>Mask: <input name="mask" type="text" /></label>
@@ -150,27 +153,54 @@ const tpl = `
{{ if eq .Step "step_five" }}
<h2>Step 5: Object Detection</h2>
<p>The threshold must be set for what size triggers vehicle detection.</p>
<p>Background Image:</p>
<img src="{{.Response.BackgroundImg}}" style="width: 50%; height: 50%;">
<div class="form-group">
<label>Blur (pixels): <input name="blur" id="blur" type="text" value="{{.Blur}}" /></label>
</div>
<p>The tunables below adjust what is detected as "active" in an image, and what is treated as a vehicle.</p>
<div class="form-group">
<label>Tolerance: <input name="tolerance" id="tolerance" type="text" value="{{.Tolerance}}" /></label>
<span class="help-block">The required difference from the background.</span>
</div>
<div class="form-group">
<label>Blur (pixels): <input name="blur" id="blur" type="text" value="{{.Blur}}" /></label>
<span class="help-block">Bluring helps define features better and make a single blob for better detection.</span>
</div>
<div class="form-group">
<label>Min Mass: <input name="min_mass" id="min_mass" type="text" value="{{.MinMass}}" /></label>
<span class="help-block">Filters out small areas that are detected in the image (such as pedestrians).</span>
</div>
<button type="submit" class="btn">Check</button>
<button type="submit" class="btn btn-primary">Continue</button>
<p>Background Image:</p>
<img src="{{.Response.BackgroundImg}}" style="width: 50%; height: 50%;">
<div class="row">
{{ range .Response.FrameAnalysis }}
<div class="col-xs-12 col-md-8 col-lg-6">
<p>Time index <code>{{.Timestamp}} seconds</code></p>
<p>Active Image:</p>
<h4>Time index <code>{{.Timestamp}} seconds</code></h4>
<p>Active Image: (before masking)</p>
<img src="{{.Highlight}}" class="img-responsive">
<p>Detected Areas:</p>
<p>Detected Areas: (after masking)</p>
<img src="{{.Colored}}" class="img-responsive">
{{ if .Positions }}
<table class="table table-striped">
<thead>
<tr>
<th></th><th>Mass</th><th>Position</th><th>Size</th>
</tr>
</thead>
<tbody>
{{ range $i, $p := .Positions }}
<tr>
<th>{{$i}}</th>
<td>{{$p.Mass }} pixels</td>
<td>{{$p.X | printf "%0.f"}}x{{$p.Y | printf "%0.f"}}</td>
<td>{{$p.Size}}</td>
</tr>
{{ end }}
</tbody>
</table>
{{ end }}
</div>
{{ end }}
</div>
@@ -241,6 +271,7 @@ type Project struct {
Masks []Mask `json:"masks,omitempty"`
Tolerance float64 `json:"tolerance"`
Blur int64 `json:"blur"`
MinMass int64 `json:"min_mass"`
Step string `json:"step"`
Response Response `json:"response,omitempty"`
@@ -264,7 +295,36 @@ type FrameAnalysis struct {
Timestamp float64 `json:"ts"`
Highlight template.URL `json:"highlight,omitempty"`
Colored template.URL `json:"colored,omitempty"`
Labels interface{} `json:"labels,omitempty"`
Positions []Position `json:"positions,omitempty"`
}
// Position matches the Position type in positions.jl
type Position struct {
X float64 `json:"x"`
Y float64 `json:"y"`
Mass int `json:"mass"`
XSpan []int `json:"xspan"` // [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
YSpan []int `json:"yspan"`
}
func (p Position) Span() string {
mm := func(d []int) (min int, max int) {
for i, n := range d {
if n < min || i == 0 {
min = n
}
if n > max || i == 0 {
max = n
}
}
return
}
xmin, xmax := mm(p.XSpan)
ymin, ymax := mm(p.YSpan)
return fmt.Sprintf("x:%d-%d y:%d-%d", xmin, xmax, ymin, ymax)
}
func (p Position) Size() string {
return fmt.Sprintf("%dx%d", len(p.XSpan), len(p.YSpan))
}
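// Illustrative only (values hypothetical): for a detected region spanning columns 3-6
// and rows 10-14, Span and Size render as:
//     p := Position{X: 4.5, Y: 12.0, Mass: 18, XSpan: []int{3, 4, 5, 6}, YSpan: []int{10, 11, 12, 13, 14}}
//     p.Span() // "x:3-6 y:10-14"
//     p.Size() // "4x5"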
func (p *Project) getStep() string {
@@ -360,15 +420,18 @@ func (p *Project) Run(backend string) error {
func main() {
backend := flag.String("backend", "http://127.0.0.1:8000", "base path to backend processing service")
flag.Parse()
t := template.Must(template.New("webpage").Parse(tpl))
http.Handle("/data/", http.StripPrefix("/data/", http.FileServer(http.Dir("../data/"))))
http.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
t := template.Must(template.New("webpage").Parse(tpl))
req.ParseForm()
p := &Project{
Filename: req.Form.Get("filename"),
Blur: 3,
Tolerance: 0.06,
MinMass: 100,
}
if f, err := strconv.ParseFloat(req.Form.Get("rotate"), 64); err == nil {
@@ -384,6 +447,11 @@ func main() {
p.Blur = int64(i)
}
}
if v := req.Form.Get("min_mass"); v != "" {
if i, err := strconv.Atoi(v); err == nil {
p.MinMass = int64(i)
}
}
p.BBox = ParseBBox(req.Form.Get("bbox"))
p1, p2 := req.Form.Get("point1"), req.Form.Get("point2")

View File

@@ -20,8 +20,7 @@ function Base.seek(avin::VideoIO.AVInput, time, video_stream = 1)
base_per_frame = (c.time_base.num * c.ticks_per_frame * s.time_base.den / c.time_base.den * s.time_base.num)
avg_frame_rate = s.avg_frame_rate.num / s.avg_frame_rate.den
pos = floor(Int, time * base_per_frame * avg_frame_rate)
println("c.time_base $(c.time_base) s.time_base $(s.time_base)")
println("seeking ahead $(time) sec by increasing position $pos (frame rate $avg_frame_rate/sec)")
println("seeking to $(time) sec @ position $pos (frame rate $avg_frame_rate/sec)")
# Seek
# pos (aka Timestamp) is in AVStream.time_base units or, if no stream is specified, in AV_TIME_BASE units.
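# Worked example (time bases hypothetical): with c.time_base = 1/50, c.ticks_per_frame = 2
# and s.time_base = 1/90000, base_per_frame = 1 * 2 * 90000 / 50 * 1 = 3600 ticks per frame;
# at avg_frame_rate = 25 frames/sec, seeking to time = 10 sec gives
# pos = 10 * 3600 * 25 = 900000, i.e. 10 seconds expressed in 1/90000-second stream ticks.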
@@ -32,6 +31,15 @@ function Base.seek(avin::VideoIO.AVInput, time, video_stream = 1)
return avin
end
function video_summary(v::VideoIO.VideoReader, video_stream=1)
stream_info = v.avin.video_info[video_stream]
s = stream_info.stream
c = stream_info.codec_ctx
avg_frame_rate = s.avg_frame_rate.num / s.avg_frame_rate.den
println("duration: $(duration(v)) sec, $(length(v)) frames. frame rate: $avg_frame_rate/sec")
println("codec time_base $(c.time_base) stream time_base $(s.time_base)")
end
function duration(avin::VideoIO.AVInput, video_stream = 1)
stream_info = avin.video_info[video_stream]
return stream_info.stream.duration / stream_info.stream.time_base.den