demo save

This commit is contained in:
xingnaidong1
2022-01-06 17:28:07 +08:00
parent f66e081ea9
commit a538f53f13
43 changed files with 36551 additions and 1 deletions

201
LICENSE Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2017 Karl Kroening
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

185
README.md
View File

@@ -1 +1,184 @@
# ffmpeg-go
# ffmpeg-go
ffmpeg-go is golang port of https://github.com/kkroening/ffmpeg-python
check examples/example_test.go and ffmpeg_test.go for more examples.
# Examples
```go
split := Input(TestInputFile1).VFlip().Split()
split0, split1 := split.Get("0"), split.Get("1")
overlayFile := Input(TestOverlayFile).Crop(10, 10, 158, 112)
err := Concat([]*Stream{
split0.Trim(KwArgs{"start_frame": 10, "end_frame": 20}),
split1.Trim(KwArgs{"start_frame": 30, "end_frame": 40})}).
Overlay(overlayFile.HFlip(), "").
DrawBox(50, 50, 120, 120, "red", 5).
Output(TestOutputFile1).
OverWriteOutput().
Run()
```
## Transcoding From One Codec To Another
```go
err := ffmpeg.Input("./sample_data/in1.mp4").
Output("./sample_data/out1.mp4", ffmpeg.KwArgs{"c:v": "libx265"}).
OverWriteOutput().ErrorToStdOut().Run()
```
## Cut Video From Timestamp
```go
err := ffmpeg.Input("./sample_data/in1.mp4", ffmpeg.KwArgs{"ss": 1}).
Output("./sample_data/out1.mp4", ffmpeg.KwArgs{"t": 1}).OverWriteOutput().Run()
assert.Nil(t, err)
```
## Add Watermark For Video
```go
// show watermark with size 64:-1 in the top left corner after seconds 1
overlay := ffmpeg.Input("./sample_data/overlay.png").Filter("scale", ffmpeg.Args{"64:-1"})
err := ffmpeg.Filter(
[]*ffmpeg.Stream{
ffmpeg.Input("./sample_data/in1.mp4"),
overlay,
}, "overlay", ffmpeg.Args{"10:10"}, ffmpeg.KwArgs{"enable": "gte(t,1)"}).
Output("./sample_data/out1.mp4").OverWriteOutput().ErrorToStdOut().Run()
```
result:
![img.png](./docs/example_overlay.png)
## Cut Video For Gif
```go
err := ffmpeg.Input("./sample_data/in1.mp4", ffmpeg.KwArgs{"ss": "1"}).
Output("./sample_data/out1.gif", ffmpeg.KwArgs{"s": "320x240", "pix_fmt": "rgb24", "t": "3", "r": "3"}).
OverWriteOutput().ErrorToStdOut().Run()
```
result:
![img.png](./docs/example_gif.gif)
## Take Frame From Video
```go
func ExampleReadFrameAsJpeg(inFileName string, frameNum int) io.Reader {
buf := bytes.NewBuffer(nil)
err := ffmpeg.Input(inFileName).
Filter("select", ffmpeg.Args{fmt.Sprintf("gte(n,%d)", frameNum)}).
Output("pipe:", ffmpeg.KwArgs{"vframes": 1, "format": "image2", "vcodec": "mjpeg"}).
WithOutput(buf, os.Stdout).
Run()
if err != nil {
panic(err)
}
return buf
}
reader := ExampleReadFrameAsJpeg("./sample_data/in1.mp4", 5)
img, err := imaging.Decode(reader)
if err != nil {
t.Fatal(err)
}
err = imaging.Save(img, "./sample_data/out1.jpeg")
if err != nil {
t.Fatal(err)
}
```
result :
![image](./examples/sample_data/out1.jpeg)
## Get Multiple Output
```go
// get multiple output with different size/bitrate
input := ffmpeg.Input("./sample_data/in1.mp4").Split()
out1 := input.Get("0").Filter("scale", ffmpeg.Args{"1920:-1"}).
Output("./sample_data/1920.mp4", ffmpeg.KwArgs{"b:v": "5000k"})
out2 := input.Get("1").Filter("scale", ffmpeg.Args{"1280:-1"}).
Output("./sample_data/1280.mp4", ffmpeg.KwArgs{"b:v": "2800k"})
err := ffmpeg.MergeOutputs(out1, out2).OverWriteOutput().ErrorToStdOut().Run()
```
## Show FFmpeg Progress
see complete example at: [showProgress](./examples/showProgress.go)
```go
func ExampleShowProgress(inFileName, outFileName string) {
a, err := ffmpeg.Probe(inFileName)
if err != nil {
panic(err)
}
totalDuration := gjson.Get(a, "format.duration").Float()
err = ffmpeg.Input(inFileName).
Output(outFileName, ffmpeg.KwArgs{"c:v": "libx264", "preset": "veryslow"}).
GlobalArgs("-progress", "unix://"+TempSock(totalDuration)).
OverWriteOutput().
Run()
if err != nil {
panic(err)
}
}
ExampleShowProgress("./sample_data/in1.mp4", "./sample_data/out2.mp4")
```
result
```bash
progress: .0
progress: 0.72
progress: 1.00
progress: done
```
## Integrate FFmpeg-go With Open-CV (gocv) For Face-detect
see complete example at: [opencv](./examples/opencv.go)
result: ![image](./examples/sample_data/face-detect.jpg)
## Set Cpu limit/request For FFmpeg-go
```go
e := ComplexFilterExample("./sample_data/in1.mp4", "./sample_data/overlay.png", "./sample_data/out2.mp4")
err := e.RunWithResource(0.1, 0.5)
if err != nil {
assert.Nil(t, err)
}
```
result from command top: we will see ffmpeg used 0.5 core as expected.
```bash
> top
PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
1386105 root 20 0 2114152 273780 31672 R 50.2 1.7 0:16.79 ffmpeg
```
# View Progress Graph
The `View` function generates a [mermaid](https://mermaid-js.github.io/mermaid/#/) chart, which can be used in markdown or viewed [online](https://mermaid-js.github.io/mermaid-live-editor/).
```go
split := Input(TestInputFile1).VFlip().Split()
split0, split1 := split.Get("0"), split.Get("1")
overlayFile := Input(TestOverlayFile).Crop(10, 10, 158, 112)
b, err := Concat([]*Stream{
split0.Trim(KwArgs{"start_frame": 10, "end_frame": 20}),
split1.Trim(KwArgs{"start_frame": 30, "end_frame": 40})}).
Overlay(overlayFile.HFlip(), "").
DrawBox(50, 50, 120, 120, "red", 5).
Output(TestOutputFile1).
OverWriteOutput().View(ViewTypeFlowChart)
fmt.Println(b)
```
![image](./docs/flowchart2.png)

152
dag.go Normal file
View File

@@ -0,0 +1,152 @@
package ffmpeg_go
import (
"errors"
)
// Node in a directed-acyclic graph (DAG).
//
// Edges:
// DagNodes are connected by edges. An edge connects two nodes with a label for each side:
// - ``upstream_node``: upstream/parent node
// - ``upstream_label``: label on the outgoing side of the upstream node
// - ``downstream_node``: downstream/child node
// - ``downstream_label``: label on the incoming side of the downstream node
//
// For example, DagNode A may be connected to DagNode B with an edge labelled "foo" on A's side, and "bar" on B's
// side:
//
//	 _____               _____
//	|     |             |     |
//	|  A  >[foo]---[bar]>  B  |
//	|_____|             |_____|
//
// Edge labels may be integers or strings, and nodes cannot have more than one incoming edge with the same label.
//
// DagNodes may have any number of incoming edges and any number of outgoing edges. DagNodes keep track only of
// their incoming edges, but the entire graph structure can be inferred by looking at the furthest downstream
// nodes and working backwards.
//
// Hashing:
// DagNodes must be hashable, and two nodes are considered to be equivalent if they have the same hash value.
//
// Nodes are immutable, and the hash should remain constant as a result. If a node with new contents is required,
// create a new node and throw the old one away.
//
// String representation:
// In order for graph visualization tools to show useful information, nodes must be representable as strings. The
// ``String`` operator should provide a more or less "full" representation of the node, and the ``ShortRepr``
// property should be a shortened, concise representation.
//
// Again, because nodes are immutable, the string representations should remain constant.
type DagNode interface {
	// Hash returns the node's hash value; equal hashes imply equivalent nodes.
	Hash() int
	// Equal compares two nodes for equivalence.
	Equal(other DagNode) bool
	// String returns a full string representation of the node.
	String() string
	// ShortRepr returns a partial/concise representation of the node.
	ShortRepr() string
	// IncomingEdgeMap provides information about all incoming edges that connect to this node.
	//
	// The edge map is a dictionary that maps an ``incoming_label`` to ``(outgoing_node, outgoing_label)``. Note that
	// implicitly, ``incoming_node`` is the receiver itself. See "Edges" section above.
	IncomingEdgeMap() map[Label]NodeInfo
}
// Label names one side of an edge: the outgoing label on an upstream node
// or the incoming label on a downstream node.
type Label string

// NodeInfo describes the far end of an edge: the node it connects to, the
// label on that node's side, and the selector (if any).
type NodeInfo struct {
	Node     DagNode
	Label    Label
	Selector Selector
}

// Selector optionally narrows an edge to a sub-stream.
// NOTE(review): presumably this is the ffmpeg stream selector (e.g. audio
// vs. video) — confirm against the stream-handling code elsewhere in the package.
type Selector string
// DagEdge is a fully resolved edge between two nodes, carrying the node and
// label for both sides plus the upstream selector. See the "Edges" section
// of the DagNode documentation in this file.
type DagEdge struct {
	DownStreamNode   DagNode
	DownStreamLabel  Label
	UpStreamNode     DagNode
	UpStreamLabel    Label
	UpStreamSelector Selector
}
// GetInComingEdges builds the list of edges that arrive at downStreamNode,
// one per entry of inComingEdgeMap, visited in sorted label order so the
// result is deterministic.
func GetInComingEdges(downStreamNode DagNode, inComingEdgeMap map[Label]NodeInfo) []DagEdge {
	var edges []DagEdge
	for _, label := range _getAllLabelsInSorted(inComingEdgeMap) {
		info := inComingEdgeMap[label]
		edge := DagEdge{
			DownStreamNode:   downStreamNode,
			DownStreamLabel:  label,
			UpStreamNode:     info.Node,
			UpStreamLabel:    info.Label,
			UpStreamSelector: info.Selector,
		}
		edges = append(edges, edge)
	}
	return edges
}
// GetOutGoingEdges builds the list of edges that leave upStreamNode, walking
// labels in sorted order and expanding each label's downstream fan-out.
func GetOutGoingEdges(upStreamNode DagNode, outOutingEdgeMap map[Label][]NodeInfo) []DagEdge {
	var edges []DagEdge
	for _, label := range _getAllLabelsSorted(outOutingEdgeMap) {
		for _, info := range outOutingEdgeMap[label] {
			edge := DagEdge{
				DownStreamNode:   info.Node,
				DownStreamLabel:  info.Label,
				UpStreamNode:     upStreamNode,
				UpStreamLabel:    label,
				UpStreamSelector: info.Selector,
			}
			edges = append(edges, edge)
		}
	}
	return edges
}
// TopSort topologically sorts the graph reachable from downStreamNodes by
// depth-first search over each node's incoming edges, so every upstream node
// appears in sortedNodes before the nodes that consume it.
//
// While walking, it also builds outOutingEdgeMaps: for each node hash, the
// outgoing-edge map inferred from the incoming edges of its downstream nodes.
//
// An error is returned if a cycle is detected (the graph is not a DAG).
func TopSort(downStreamNodes []DagNode) (sortedNodes []DagNode, outOutingEdgeMaps map[int]map[Label][]NodeInfo, err error) {
	markedNodes := map[int]struct{}{}       // nodes on the current DFS path, for cycle detection
	markedSortedNodes := map[int]struct{}{} // nodes already emitted into sortedNodes
	outOutingEdgeMaps = map[int]map[Label][]NodeInfo{}
	var visit func(upStreamNode, downStreamNode DagNode, upStreamLabel, downStreamLabel Label, downStreamSelector Selector) error
	visit = func(upStreamNode, downStreamNode DagNode, upStreamLabel, downStreamLabel Label, downStreamSelector Selector) error {
		if _, ok := markedNodes[upStreamNode.Hash()]; ok {
			// Revisiting a node that is still on the DFS stack means a cycle.
			return errors.New("graph is not a DAG")
		}
		if downStreamNode != nil {
			// Record the edge upStreamNode --(upStreamLabel)--> downStreamNode.
			if outOutingEdgeMaps[upStreamNode.Hash()] == nil {
				outOutingEdgeMaps[upStreamNode.Hash()] = map[Label][]NodeInfo{}
			}
			outgoingEdgeMap := outOutingEdgeMaps[upStreamNode.Hash()]
			outgoingEdgeMap[upStreamLabel] = append(outgoingEdgeMap[upStreamLabel], NodeInfo{
				Node:     downStreamNode,
				Label:    downStreamLabel,
				Selector: downStreamSelector,
			})
		}
		if _, ok := markedSortedNodes[upStreamNode.Hash()]; !ok {
			markedNodes[upStreamNode.Hash()] = struct{}{}
			for _, edge := range GetInComingEdges(upStreamNode, upStreamNode.IncomingEdgeMap()) {
				if err := visit(edge.UpStreamNode, edge.DownStreamNode, edge.UpStreamLabel, edge.DownStreamLabel, edge.UpStreamSelector); err != nil {
					return err
				}
			}
			delete(markedNodes, upStreamNode.Hash())
			sortedNodes = append(sortedNodes, upStreamNode)
			markedSortedNodes[upStreamNode.Hash()] = struct{}{}
		}
		return nil
	}
	// Consume the roots back-to-front; each root is visited with no
	// downstream context (nil downstream node, empty labels).
	for len(downStreamNodes) > 0 {
		node := downStreamNodes[len(downStreamNodes)-1]
		downStreamNodes = downStreamNodes[:len(downStreamNodes)-1]
		if err = visit(node, nil, "", "", ""); err != nil {
			return
		}
	}
	return
}

43
debug.go Normal file
View File

@@ -0,0 +1,43 @@
// +build debug
package ffmpeg_go
import (
"fmt"
"log"
"strings"
)
// DebugNodes logs the full string representation of every node, one per line.
func DebugNodes(node []DagNode) {
	var sb strings.Builder
	for _, n := range node {
		sb.WriteString(n.String())
		sb.WriteByte('\n')
	}
	log.Println(sb.String())
}
// DebugOutGoingMap logs a human-readable dump of an outgoing-edge map,
// resolving node hashes back to the nodes' string representations.
// An empty label is printed as "None".
func DebugOutGoingMap(node []DagNode, m map[int]map[Label][]NodeInfo) {
	byHash := map[int]DagNode{}
	for _, n := range node {
		byHash[n.Hash()] = n
	}
	var sb strings.Builder
	for hash, labelMap := range m {
		sb.WriteString(fmt.Sprintf("[Key]: %s", byHash[hash].String()))
		sb.WriteString(" [Value]: {")
		for label, infos := range labelMap {
			if label == "" {
				label = "None"
			}
			sb.WriteString(fmt.Sprintf("%s: [", label))
			for _, info := range infos {
				sb.WriteString(info.Node.String())
				sb.WriteString(", ")
			}
			sb.WriteString("]")
		}
		sb.WriteString("}")
		sb.WriteString("\n")
	}
	log.Println(sb.String())
}

7
debugx.go Normal file
View File

@@ -0,0 +1,7 @@
// +build !debug
package ffmpeg_go
// DebugNodes is a no-op stub compiled when the `debug` build tag is absent;
// the real implementation lives in debug.go.
func DebugNodes(node []DagNode) {}

// DebugOutGoingMap is a no-op stub compiled when the `debug` build tag is
// absent; the real implementation lives in debug.go.
func DebugOutGoingMap(node []DagNode, m map[int]map[Label][]NodeInfo) {}

BIN
docs/example_gif.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 106 KiB

BIN
docs/example_overlay.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

BIN
docs/flowchart.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

BIN
docs/flowchart2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 65 KiB

101
examples/example_test.go Normal file
View File

@@ -0,0 +1,101 @@
package examples
import (
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/disintegration/imaging"
"github.com/stretchr/testify/assert"
ffmpeg "github.com/u2takey/ffmpeg-go"
)
//
// More simple examples please refer to ffmpeg_test.go
//
// TestExampleStream runs the pipe-based streaming example without DeepDream processing.
func TestExampleStream(t *testing.T) {
	const in, out = "./sample_data/in1.mp4", "./sample_data/out1.mp4"
	ExampleStream(in, out, false)
}
// TestExampleReadFrameAsJpeg grabs frame 5 as JPEG bytes, decodes it, and
// persists it as out1.jpeg.
func TestExampleReadFrameAsJpeg(t *testing.T) {
	reader := ExampleReadFrameAsJpeg("./sample_data/in1.mp4", 5)
	img, err := imaging.Decode(reader)
	if err != nil {
		t.Fatal(err)
	}
	if err = imaging.Save(img, "./sample_data/out1.jpeg"); err != nil {
		t.Fatal(err)
	}
}
// TestExampleShowProgress runs the unix-socket progress-reporting example.
func TestExampleShowProgress(t *testing.T) {
	const in, out = "./sample_data/in1.mp4", "./sample_data/out2.mp4"
	ExampleShowProgress(in, out)
}
// TestSimpleS3StreamExample streams the sample video directly to an S3 URL.
func TestSimpleS3StreamExample(t *testing.T) {
	// outputS3 use stream output, so you can only use supported format
	// if you want mp4 format for example, you can output it to a file, and then call s3 sdk to do upload
	kwargs := ffmpeg.KwArgs{
		"aws_config": &aws.Config{
			Credentials: credentials.NewStaticCredentials("xx", "yyy", ""),
			//Endpoint: aws.String("xx"),
			Region: aws.String("yyy"),
		},
		"format": "mpegts",
	}
	err := ffmpeg.Input("./sample_data/in1.mp4", nil).
		Output("s3://data-1251825869/test_out.ts", kwargs).
		Run()
	assert.Nil(t, err)
}
// TestExampleChangeCodec transcodes the sample video to H.265 (libx265).
func TestExampleChangeCodec(t *testing.T) {
	stream := ffmpeg.Input("./sample_data/in1.mp4").
		Output("./sample_data/out1.mp4", ffmpeg.KwArgs{"c:v": "libx265"}).
		OverWriteOutput().
		ErrorToStdOut()
	assert.Nil(t, stream.Run())
}
// TestExampleCutVideo seeks to second 1 of the input and keeps 1 second.
func TestExampleCutVideo(t *testing.T) {
	input := ffmpeg.Input("./sample_data/in1.mp4", ffmpeg.KwArgs{"ss": 1})
	output := input.Output("./sample_data/out1.mp4", ffmpeg.KwArgs{"t": 1}).OverWriteOutput()
	assert.Nil(t, output.Run())
}
// TestExampleScaleVideo rescales the sample video to 480x240 via the vf option.
func TestExampleScaleVideo(t *testing.T) {
	stream := ffmpeg.Input("./sample_data/in1.mp4").
		Output("./sample_data/out1.mp4", ffmpeg.KwArgs{"vf": "scale=w=480:h=240"}).
		OverWriteOutput().
		ErrorToStdOut()
	assert.Nil(t, stream.Run())
}
// TestExampleAddWatermark overlays a watermark on the sample video.
func TestExampleAddWatermark(t *testing.T) {
	// show watermark with size 64:-1 in the top left corner after seconds 1
	overlay := ffmpeg.Input("./sample_data/overlay.png").Filter("scale", ffmpeg.Args{"64:-1"})
	streams := []*ffmpeg.Stream{
		ffmpeg.Input("./sample_data/in1.mp4"),
		overlay,
	}
	err := ffmpeg.Filter(streams, "overlay", ffmpeg.Args{"10:10"}, ffmpeg.KwArgs{"enable": "gte(t,1)"}).
		Output("./sample_data/out1.mp4").
		OverWriteOutput().
		ErrorToStdOut().
		Run()
	assert.Nil(t, err)
}
// TestExampleCutVideoForGif converts 3 seconds of the sample video (from
// second 1) into a 320x240, 3 fps GIF.
func TestExampleCutVideoForGif(t *testing.T) {
	gifArgs := ffmpeg.KwArgs{"s": "320x240", "pix_fmt": "rgb24", "t": "3", "r": "3"}
	stream := ffmpeg.Input("./sample_data/in1.mp4", ffmpeg.KwArgs{"ss": "1"}).
		Output("./sample_data/out1.gif", gifArgs).
		OverWriteOutput().
		ErrorToStdOut()
	assert.Nil(t, stream.Run())
}
// TestExampleMultipleOutput splits the input once and encodes two renditions
// with different sizes and bitrates in a single ffmpeg run.
func TestExampleMultipleOutput(t *testing.T) {
	split := ffmpeg.Input("./sample_data/in1.mp4").Split()
	big := split.Get("0").Filter("scale", ffmpeg.Args{"1920:-1"}).
		Output("./sample_data/1920.mp4", ffmpeg.KwArgs{"b:v": "5000k"})
	small := split.Get("1").Filter("scale", ffmpeg.Args{"1280:-1"}).
		Output("./sample_data/1280.mp4", ffmpeg.KwArgs{"b:v": "2800k"})
	assert.Nil(t, ffmpeg.MergeOutputs(big, small).OverWriteOutput().ErrorToStdOut().Run())
}

226
examples/opencv_test.go Normal file
View File

@@ -0,0 +1,226 @@
// +build gocv
// uncomment line above for gocv examples
package examples
import (
"encoding/json"
"fmt"
"image"
"image/color"
"io"
"log"
"testing"
ffmpeg "github.com/u2takey/ffmpeg-go"
"gocv.io/x/gocv"
)
// getVideoSize probes fileName with ffprobe and returns the (width, height)
// of its first video stream, or (0, 0) when no video stream is present.
// It panics when probing or JSON decoding fails.
func getVideoSize(fileName string) (int, int) {
	log.Println("Getting video size for", fileName)
	data, err := ffmpeg.Probe(fileName)
	if err != nil {
		panic(err)
	}
	log.Println("got video info", data)
	type videoInfo struct {
		Streams []struct {
			CodecType string `json:"codec_type"`
			Width     int
			Height    int
		} `json:"streams"`
	}
	info := &videoInfo{}
	if err := json.Unmarshal([]byte(data), info); err != nil {
		panic(err)
	}
	for _, s := range info.Streams {
		if s.CodecType == "video" {
			return s.Width, s.Height
		}
	}
	return 0, 0
}
// TestExampleOpenCvFaceDetect will: take a video as input => use opencv for face detection => draw box and show a window
// This example depends on gocv and opencv, please refer: https://pkg.go.dev/gocv.io/x/gocv for installation.
// func TestExampleOpenCvFaceDetectWithVideo(t *testing.T) {
// inputFile := "./sample_data/head-pose-face-detection-male-short.mp4"
// xmlFile := "./sample_data/haarcascade_frontalface_default.xml"
// w, h := getVideoSize(inputFile)
// log.Println(w, h)
// pr1, pw1 := io.Pipe()
// readProcess(inputFile, pw1)
// openCvProcess(xmlFile, pr1, w, h)
// log.Println("Done")
// }
// readProcess starts a goroutine that decodes infileName into raw rgb24
// frames and streams them into writer. The writer is always closed when the
// ffmpeg process finishes, so the consumer sees EOF even on failure; the
// goroutine then panics if decoding failed.
func readProcess(infileName string, writer io.WriteCloser) {
	log.Println("Starting ffmpeg process1")
	go func() {
		err := ffmpeg.Input(infileName).
			Output("pipe:",
				ffmpeg.KwArgs{
					"format": "rawvideo", "pix_fmt": "rgb24",
				}).
			WithOutput(writer).
			ErrorToStdOut().
			Run()
		log.Println("ffmpeg process1 done")
		// Close before the error check so the reader side is unblocked first.
		_ = writer.Close()
		if err != nil {
			panic(err)
		}
	}()
	// (removed a redundant bare `return` at the end of the function)
}
// openCvProcess consumes raw rgb24 frames of size w x h from reader, runs
// Haar-cascade face detection (cascade loaded from xmlFile) on each frame,
// draws a labelled box around every detected face, and displays the result
// in a gocv window until a key is pressed or the stream ends.
func openCvProcess(xmlFile string, reader io.ReadCloser, w, h int) {
	// open display window
	window := gocv.NewWindow("Face Detect")
	defer window.Close()
	// color for the rect when faces detected
	blue := color.RGBA{B: 255}
	// load the Haar-cascade classifier used for detection
	classifier := gocv.NewCascadeClassifier()
	defer classifier.Close()
	if !classifier.Load(xmlFile) {
		fmt.Printf("Error reading cascade file: %v\n", xmlFile)
		return
	}
	// one rgb24 frame is w*h*3 bytes; the buffer is reused for every frame
	frameSize := w * h * 3
	buf := make([]byte, frameSize, frameSize)
	for {
		n, err := io.ReadFull(reader, buf)
		if n == 0 || err == io.EOF {
			// clean end of stream
			return
		} else if n != frameSize || err != nil {
			// short read mid-frame or transport error is fatal
			panic(fmt.Sprintf("read error: %d, %s", n, err))
		}
		img, err := gocv.NewMatFromBytes(h, w, gocv.MatTypeCV8UC3, buf)
		if err != nil {
			// NOTE(review): on decode failure img is still used below —
			// presumably NewMatFromBytes returns an empty Mat here so the
			// Empty() check skips the frame; confirm against gocv docs.
			fmt.Println("decode fail", err)
		}
		if img.Empty() {
			continue
		}
		img2 := gocv.NewMat()
		gocv.CvtColor(img, &img2, gocv.ColorBGRToRGB)
		// detect faces
		rects := classifier.DetectMultiScale(img2)
		fmt.Printf("found %d faces\n", len(rects))
		// draw a rectangle around each face on the original image, along with text identifying it as "Human"
		for _, r := range rects {
			gocv.Rectangle(&img2, r, blue, 3)
			size := gocv.GetTextSize("Human", gocv.FontHersheyPlain, 1.2, 2)
			pt := image.Pt(r.Min.X+(r.Min.X/2)-(size.X/2), r.Min.Y-2)
			gocv.PutText(&img2, "Human", pt, gocv.FontHersheyPlain, 1.2, blue, 2)
		}
		// show the image in the window, and wait up to 10 ms for a key press
		window.IMShow(img2)
		img.Close()
		img2.Close()
		if window.WaitKey(10) >= 0 {
			break
		}
	}
	return
}
// TestExampleOpenCvFaceDetectWithCamera will: take a stream from a webcam (or video file) => use opencv for face detection => output with ffmpeg
// This example depends on gocv and opencv, please refer: https://pkg.go.dev/gocv.io/x/gocv for installation.
func TestExampleOpenCvFaceDetectWithCamera(t *testing.T) {
	// deviceID := "0" // camera device id
	deviceID := "./sample_data/head-pose-face-detection-male-short.mp4"
	xmlFile := "./sample_data/haarcascade_frontalface_default.xml"
	webcam, err := gocv.OpenVideoCapture(deviceID)
	if err != nil {
		fmt.Printf("error opening video capture device: %v\n", deviceID)
		return
	}
	defer webcam.Close()
	// prepare image matrix
	img := gocv.NewMat()
	defer img.Close()
	// read one frame up-front so the frame dimensions are known for the encoder
	if ok := webcam.Read(&img); !ok {
		panic(fmt.Sprintf("Cannot read device %v", deviceID))
	}
	fmt.Printf("img: %vX%v\n", img.Cols(), img.Rows())
	// pipe raw frames to the ffmpeg encoder goroutine started by writeProcess
	pr1, pw1 := io.Pipe()
	writeProcess("./sample_data/face_detect.mp4", pr1, img.Cols(), img.Rows())
	// color for the rect when faces detected
	blue := color.RGBA{B: 255}
	// load classifier to recognize faces
	classifier := gocv.NewCascadeClassifier()
	defer classifier.Close()
	if !classifier.Load(xmlFile) {
		fmt.Printf("Error reading cascade file: %v\n", xmlFile)
		return
	}
	fmt.Printf("Start reading device: %v\n", deviceID)
	// process at most 200 frames
	for i := 0; i < 200; i++ {
		if ok := webcam.Read(&img); !ok {
			fmt.Printf("Device closed: %v\n", deviceID)
			return
		}
		if img.Empty() {
			continue
		}
		// detect faces
		rects := classifier.DetectMultiScale(img)
		fmt.Printf("found %d faces\n", len(rects))
		// draw a rectangle around each face on the original image, along with text identifying it as "Human"
		for _, r := range rects {
			gocv.Rectangle(&img, r, blue, 3)
			size := gocv.GetTextSize("Human", gocv.FontHersheyPlain, 1.2, 2)
			pt := image.Pt(r.Min.X+(r.Min.X/2)-(size.X/2), r.Min.Y-2)
			gocv.PutText(&img, "Human", pt, gocv.FontHersheyPlain, 1.2, blue, 2)
		}
		// NOTE(review): the Write error is ignored here; an encoder failure
		// on the other end of the pipe is silently dropped.
		pw1.Write(img.ToBytes())
	}
	pw1.Close()
	log.Println("Done")
}
// writeProcess starts a goroutine that reads raw bgr24 frames of size w x h
// from reader, overlays ./sample_data/overlay.png, and encodes the result
// into outputFile with ffmpeg. It returns immediately; the goroutine panics
// on encoding failure.
func writeProcess(outputFile string, reader io.ReadCloser, w, h int) {
	log.Println("Starting ffmpeg process1")
	go func() {
		// Always close the source, even when encoding fails. (Previously the
		// Close ran after the error check, so it was skipped on panic.)
		defer func() { _ = reader.Close() }()
		err := ffmpeg.Input("pipe:",
			ffmpeg.KwArgs{"format": "rawvideo",
				"pix_fmt": "bgr24", "s": fmt.Sprintf("%dx%d", w, h),
			}).
			Overlay(ffmpeg.Input("./sample_data/overlay.png"), "").
			Output(outputFile).
			WithInput(reader).
			ErrorToStdOut().
			OverWriteOutput().
			Run()
		log.Println("ffmpeg process1 done")
		if err != nil {
			panic(err)
		}
	}()
}

View File

@@ -0,0 +1,23 @@
package examples
import (
"bytes"
"fmt"
"io"
"os"
ffmpeg "github.com/u2takey/ffmpeg-go"
)
// ExampleReadFrameAsJpeg extracts frame number frameNum from inFileName and
// returns it as an in-memory JPEG image. It panics if the ffmpeg run fails.
func ExampleReadFrameAsJpeg(inFileName string, frameNum int) io.Reader {
	selectExpr := fmt.Sprintf("gte(n,%d)", frameNum)
	outArgs := ffmpeg.KwArgs{"vframes": 1, "format": "image2", "vcodec": "mjpeg"}
	buf := bytes.NewBuffer(nil)
	err := ffmpeg.Input(inFileName).
		Filter("select", ffmpeg.Args{selectExpr}).
		Output("pipe:", outArgs).
		WithOutput(buf, os.Stdout).
		Run()
	if err != nil {
		panic(err)
	}
	return buf
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

File diff suppressed because it is too large Load Diff

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 106 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

107
examples/showProgress.go Normal file
View File

@@ -0,0 +1,107 @@
package examples
import (
"encoding/json"
"fmt"
"log"
"math/rand"
"net"
"os"
"path"
"regexp"
"strconv"
"strings"
"time"
ffmpeg "github.com/u2takey/ffmpeg-go"
)
// ExampleShowProgress is an example of using the ffmpeg `-progress` option with a
// unix-domain socket to report progress while transcoding inFileName to
// outFileName with libx264. Panics on probe or transcode failure.
func ExampleShowProgress(inFileName, outFileName string) {
	a, err := ffmpeg.Probe(inFileName)
	if err != nil {
		panic(err)
	}
	// total media duration in seconds, used by TempSock to turn progress
	// timestamps into a 0..1 completion ratio
	totalDuration, err := probeDuration(a)
	if err != nil {
		panic(err)
	}
	err = ffmpeg.Input(inFileName).
		Output(outFileName, ffmpeg.KwArgs{"c:v": "libx264", "preset": "veryslow"}).
		GlobalArgs("-progress", "unix://"+TempSock(totalDuration)).
		OverWriteOutput().
		Run()
	if err != nil {
		panic(err)
	}
}
// TempSock creates a unix-domain socket in the system temp directory and
// spawns a goroutine that parses ffmpeg `-progress` key=value output sent to
// it, printing the completion ratio as it changes. It returns the socket
// path so the caller can pass it via `-progress unix://<path>`.
//
// totalDuration is the media duration in seconds.
func TempSock(totalDuration float64) string {
	// serve on a unique-ish socket name under the OS temp dir
	rand.Seed(time.Now().Unix())
	sockFileName := path.Join(os.TempDir(), fmt.Sprintf("%d_sock", rand.Int()))
	l, err := net.Listen("unix", sockFileName)
	if err != nil {
		panic(err)
	}
	go func() {
		re := regexp.MustCompile(`out_time_ms=(\d+)`)
		fd, err := l.Accept()
		if err != nil {
			log.Fatal("accept error:", err)
		}
		buf := make([]byte, 16)
		data := ""
		progress := ""
		for {
			n, err := fd.Read(buf)
			if err != nil {
				return
			}
			// BUG FIX: only append the bytes actually read; appending the
			// whole buffer re-added stale bytes from earlier reads whenever
			// Read returned fewer than 16 bytes.
			data += string(buf[:n])
			a := re.FindAllStringSubmatch(data, -1)
			cp := ""
			if len(a) > 0 && len(a[len(a)-1]) > 0 {
				c, _ := strconv.Atoi(a[len(a)-1][len(a[len(a)-1])-1])
				// NOTE(review): despite the name, ffmpeg's out_time_ms is
				// reported in microseconds, hence the 1e6 divisor — confirm
				// against the ffmpeg version in use.
				cp = fmt.Sprintf("%.2f", float64(c)/totalDuration/1000000)
			}
			if strings.Contains(data, "progress=end") {
				cp = "done"
			}
			if cp == "" {
				cp = ".0"
			}
			if cp != progress {
				progress = cp
				fmt.Println("progress: ", progress)
			}
		}
	}()
	return sockFileName
}
// probeFormat mirrors the "format" object of ffprobe's JSON output.
type probeFormat struct {
	Duration string `json:"duration"`
}

// probeData mirrors the top-level ffprobe JSON document.
type probeData struct {
	Format probeFormat `json:"format"`
}

// probeDuration extracts the container duration (seconds) from a raw
// ffprobe JSON string.
func probeDuration(a string) (float64, error) {
	var pd probeData
	if unmarshalErr := json.Unmarshal([]byte(a), &pd); unmarshalErr != nil {
		return 0, unmarshalErr
	}
	seconds, parseErr := strconv.ParseFloat(pd.Format.Duration, 64)
	if parseErr != nil {
		return 0, parseErr
	}
	return seconds, nil
}

137
examples/stream.go Normal file
View File

@@ -0,0 +1,137 @@
package examples
import (
"encoding/json"
"fmt"
"io"
"log"
ffmpeg "github.com/u2takey/ffmpeg-go"
)
// ExampleStream transcodes inFileName to outFileName while piping the raw
// frames through an in-process callback (see runExampleStream).
//
// inFileName: input filename (defaults to ./in1.mp4 when empty)
// outFileName: output filename (defaults to ./out.mp4 when empty)
// dream: Use DeepDream frame processing (requires tensorflow; unimplemented)
func ExampleStream(inFileName, outFileName string, dream bool) {
	if dream {
		panic("Use DeepDream With Tensorflow haven't been implemented")
	}
	if inFileName == "" {
		inFileName = "./in1.mp4"
	}
	if outFileName == "" {
		outFileName = "./out.mp4"
	}
	runExampleStream(inFileName, outFileName)
}
// getVideoSize probes fileName with ffprobe and returns the (width, height)
// of its first video stream, or (0, 0) when no video stream is present.
// Panics if probing or JSON decoding fails.
func getVideoSize(fileName string) (int, int) {
	log.Println("Getting video size for", fileName)
	data, err := ffmpeg.Probe(fileName)
	if err != nil {
		panic(err)
	}
	log.Println("got video info", data)
	// decode only the stream dimensions out of the ffprobe JSON
	type VideoInfo struct {
		Streams []struct {
			CodecType string `json:"codec_type"`
			Width     int
			Height    int
		} `json:"streams"`
	}
	info := &VideoInfo{}
	if err = json.Unmarshal([]byte(data), info); err != nil {
		panic(err)
	}
	for _, stream := range info.Streams {
		if stream.CodecType != "video" {
			continue
		}
		return stream.Width, stream.Height
	}
	return 0, 0
}
// startFFmpegProcess1 launches ffmpeg in a goroutine to decode infileName
// into raw rgb24 frames written to writer. The returned channel yields the
// ffmpeg error (or nil) exactly once and is then closed.
func startFFmpegProcess1(infileName string, writer io.WriteCloser) <-chan error {
	log.Println("Starting ffmpeg process1")
	done := make(chan error)
	go func() {
		err := ffmpeg.Input(infileName).
			Output("pipe:",
				ffmpeg.KwArgs{
					"format": "rawvideo", "pix_fmt": "rgb24",
				}).
			WithOutput(writer).
			Run()
		log.Println("ffmpeg process1 done")
		// close writer so the downstream reader observes EOF
		_ = writer.Close()
		done <- err
		close(done)
	}()
	return done
}
// startFFmpegProcess2 launches ffmpeg in a goroutine to encode raw rgb24
// frames of size width x height read from buf into outfileName (yuv420p).
// The returned channel yields the ffmpeg error (or nil) exactly once and is
// then closed.
func startFFmpegProcess2(outfileName string, buf io.Reader, width, height int) <-chan error {
	log.Println("Starting ffmpeg process2")
	done := make(chan error)
	go func() {
		err := ffmpeg.Input("pipe:",
			ffmpeg.KwArgs{"format": "rawvideo",
				"pix_fmt": "rgb24", "s": fmt.Sprintf("%dx%d", width, height),
			}).
			Output(outfileName, ffmpeg.KwArgs{"pix_fmt": "yuv420p"}).
			OverWriteOutput().
			WithInput(buf).
			Run()
		log.Println("ffmpeg process2 done")
		done <- err
		close(done)
	}()
	return done
}
func process(reader io.ReadCloser, writer io.WriteCloser, w, h int) {
go func() {
frameSize := w * h * 3
buf := make([]byte, frameSize, frameSize)
for {
n, err := io.ReadFull(reader, buf)
if n == 0 || err == io.EOF {
_ = writer.Close()
return
} else if n != frameSize || err != nil {
panic(fmt.Sprintf("read error: %d, %s", n, err))
}
for i := range buf {
buf[i] = buf[i] / 3
}
n, err = writer.Write(buf)
if n != frameSize || err != nil {
panic(fmt.Sprintf("write error: %d, %s", n, err))
}
}
}()
return
}
// runExampleStream wires decoder -> frame processor -> encoder with two
// in-process pipes and waits for both ffmpeg processes to finish. Panics if
// either process reports an error.
func runExampleStream(inFile, outFile string) {
	w, h := getVideoSize(inFile)
	log.Println(w, h)
	// pipe 1: decoder output -> processor; pipe 2: processor -> encoder
	pr1, pw1 := io.Pipe()
	pr2, pw2 := io.Pipe()
	done1 := startFFmpegProcess1(inFile, pw1)
	process(pr1, pw2, w, h)
	done2 := startFFmpegProcess2(outFile, pr2, w, h)
	err := <-done1
	if err != nil {
		panic(err)
	}
	err = <-done2
	if err != nil {
		panic(err)
	}
	log.Println("Done")
}

View File

@@ -0,0 +1,33 @@
//+build linux
package examples
import (
"testing"
"github.com/stretchr/testify/assert"
ffmpeg "github.com/u2takey/ffmpeg-go"
)
// ComplexFilterExample builds a vflip/trim/concat/overlay/drawbox pipeline
// over the given input and overlay files, writing to testOutputFile. The
// stream is returned unrun so the caller can attach run options.
func ComplexFilterExample(testInputFile, testOverlayFile, testOutputFile string) *ffmpeg.Stream {
	split := ffmpeg.Input(testInputFile).VFlip().Split()
	split0, split1 := split.Get("0"), split.Get("1")
	overlayFile := ffmpeg.Input(testOverlayFile).Crop(10, 10, 158, 112)
	return ffmpeg.Concat([]*ffmpeg.Stream{
		split0.Trim(ffmpeg.KwArgs{"start_frame": 10, "end_frame": 20}),
		split1.Trim(ffmpeg.KwArgs{"start_frame": 30, "end_frame": 40})}).
		Overlay(overlayFile.HFlip(), "").
		DrawBox(50, 50, 120, 120, "red", 5).
		Output(testOutputFile).
		OverWriteOutput()
}
// Sample `top` output while this test runs under a 0.5-core cgroup limit:
// PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
// 1386105 root 20 0 2114152 273780 31672 R 50.2 1.7 0:16.79 ffmpeg
//
// TestLimitCpu runs the complex-filter example with Linux cgroup CPU
// request/limit applied and expects it to succeed.
func TestLimitCpu(t *testing.T) {
	e := ComplexFilterExample("./sample_data/in1.mp4", "./sample_data/overlay.png", "./sample_data/out2.mp4")
	err := e.WithCpuCoreRequest(0.1).WithCpuCoreLimit(0.5).RunLinux()
	// BUG FIX: the assertion was wrapped in `if err != nil`, so it could
	// only ever run when it was guaranteed to fail; assert unconditionally.
	assert.Nil(t, err)
}

138
ffmpeg.go Normal file
View File

@@ -0,0 +1,138 @@
package ffmpeg_go
import (
"context"
"errors"
"io"
"log"
"os"
"strings"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
)
// Input file URL (ffmpeg ``-i`` option)
//
// Any supplied kwargs are passed to ffmpeg verbatim (e.g. ``t=20``,
// ``f='mp4'``, ``acodec='pcm'``, etc.).
//
// To tell ffmpeg to read from stdin, use ``pipe:`` as the filename.
//
// Official documentation: `Main options <https://ffmpeg.org/ffmpeg.html#Main-options>`__
func Input(filename string, kwargs ...KwArgs) *Stream {
	args := MergeKwArgs(kwargs)
	args["filename"] = filename
	// `f` is an alias for `format`: normalize it, rejecting conflicting use.
	if format := args.PopString("f"); format != "" {
		if args.HasKey("format") {
			panic(errors.New("can't specify both `format` and `f` options"))
		}
		args["format"] = format
	}
	return NewInputNode("input", nil, args).Stream("", "")
}
// GlobalArgs adds extra global command-line argument(s), e.g. ``-progress``.
// It may only be applied to an output stream.
func (s *Stream) GlobalArgs(args ...string) *Stream {
	if s.Type != "OutputStream" {
		// BUG FIX: the message previously read "cannot overwrite outputs on
		// non-OutputStream", copy-pasted from OverwriteOutput; report the
		// actual operation that failed.
		panic("cannot add global args on non-OutputStream")
	}
	return NewGlobalNode("global_args", []*Stream{s}, args, nil).Stream("", "")
}
// Overwrite output files without asking (ffmpeg ``-y`` option)
//
// Official documentation: `Main options <https://ffmpeg.org/ffmpeg.html#Main-options>`_
//
// NOTE(review): the receiver `s` is only used for the type check while the
// `-y` flag is attached to the `stream` argument — confirm callers always
// pass the same stream for both (compare the zero-argument OverWriteOutput
// used throughout the tests).
func (s *Stream) OverwriteOutput(stream *Stream) *Stream {
	if s.Type != "OutputStream" {
		panic("cannot overwrite outputs on non-OutputStream")
	}
	return NewGlobalNode("overwrite_output", []*Stream{stream}, []string{"-y"}, nil).Stream("", "")
}
// MergeOutputs includes all given outputs in one ffmpeg command line.
func MergeOutputs(streams ...*Stream) *Stream {
	return NewMergeOutputsNode("merge_output", streams).Stream("", "")
}
//Output file URL
//
// Syntax:
// `ffmpeg.Output([]*Stream{stream1, stream2, stream3...}, filename, kwargs)`
//
// Any supplied keyword arguments are passed to ffmpeg verbatim (e.g.
// ``t=20``, ``f='mp4'``, ``acodec='pcm'``, ``vcodec='rawvideo'``,
// etc.). Some keyword-arguments are handled specially, as shown below.
//
// Args:
// video_bitrate: parameter for ``-b:v``, e.g. ``video_bitrate=1000``.
// audio_bitrate: parameter for ``-b:a``, e.g. ``audio_bitrate=200``.
// format: alias for ``-f`` parameter, e.g. ``format='mp4'``
// (equivalent to ``f='mp4'``).
//
// If multiple streams are provided, they are mapped to the same
// output.
//
// To tell ffmpeg to write to stdout, use ``pipe:`` as the filename.
//
// Official documentation: `Synopsis <https://ffmpeg.org/ffmpeg.html#Synopsis>`__
// """
func Output(streams []*Stream, fileName string, kwargs ...KwArgs) *Stream {
	args := MergeKwArgs(kwargs)
	// an explicit "filename" kwarg takes precedence over the positional name
	if !args.HasKey("filename") {
		if fileName == "" {
			panic("filename must be provided")
		}
		args["filename"] = fileName
	}
	return NewOutputNode("output", streams, nil, args).Stream("", "")
}
// Output routes this filterable stream to fileName. Names beginning with
// "s3://" are streamed to S3 through a pipe; everything else is handed to
// ffmpeg directly.
func (s *Stream) Output(fileName string, kwargs ...KwArgs) *Stream {
	if s.Type != "FilterableStream" {
		log.Panic("cannot output on non-FilterableStream")
	}
	if !strings.HasPrefix(fileName, "s3://") {
		return Output([]*Stream{s}, fileName, kwargs...)
	}
	return s.outputS3Stream(fileName, kwargs...)
}
// outputS3Stream streams ffmpeg output through a pipe into an S3 upload.
// fileName must look like "s3://<bucket>/<key>". An optional `aws_config`
// kwarg (an *aws.Config) customizes the AWS session. The upload runs inside
// a RunHook so it starts and finishes alongside the ffmpeg process.
func (s *Stream) outputS3Stream(fileName string, kwargs ...KwArgs) *Stream {
	r, w := io.Pipe()
	fileL := strings.SplitN(strings.TrimPrefix(fileName, "s3://"), "/", 2)
	if len(fileL) != 2 {
		log.Panic("s3 file format not valid")
	}
	args := MergeKwArgs(kwargs)
	awsConfig := args.PopDefault("aws_config", &aws.Config{}).(*aws.Config)
	bucket, key := fileL[0], fileL[1]
	o := Output([]*Stream{s}, "pipe:", args).
		WithOutput(w, os.Stdout)
	done := make(chan struct{})
	runHook := RunHook{
		f: func() {
			defer func() {
				done <- struct{}{}
			}()
			sess, err := session.NewSession(awsConfig)
			// BUG FIX: the session error was silently overwritten by the
			// upload error below; bail out before using a nil session.
			if err != nil {
				log.Println("create aws session fail", err)
				return
			}
			uploader := s3manager.NewUploader(sess)
			_, err = uploader.Upload(&s3manager.UploadInput{
				Bucket: &bucket,
				Key:    &key,
				Body:   r,
			})
			if err != nil {
				log.Println("upload fail", err)
			}
		},
		done:   done,
		closer: w,
	}
	// NOTE(review): a string context key collides easily; readers elsewhere
	// look up "run_hook", so the key is left as is.
	o.Context = context.WithValue(o.Context, "run_hook", &runHook)
	return o
}

360
ffmpeg_test.go Normal file
View File

@@ -0,0 +1,360 @@
package ffmpeg_go
import (
"bytes"
"fmt"
"testing"
"github.com/stretchr/testify/assert"
"github.com/u2takey/go-utils/rand"
)
// Shared fixture paths used by the tests in this file.
const (
	TestInputFile1  = "./examples/sample_data/in1.mp4"
	TestOutputFile1 = "./examples/sample_data/out1.mp4"
	TestOverlayFile = "./examples/sample_data/overlay.png"
)
// TestFluentEquality checks that stream hashes depend on the input file and
// filter arguments, not on pointer identity.
func TestFluentEquality(t *testing.T) {
	base1 := Input("dummy1.mp4")
	base2 := Input("dummy1.mp4")
	base3 := Input("dummy2.mp4")
	t1 := base1.Trim(KwArgs{"start_frame": 10, "end_frame": 20})
	t2 := base1.Trim(KwArgs{"start_frame": 10, "end_frame": 20})
	t3 := base1.Trim(KwArgs{"start_frame": 10, "end_frame": 30})
	t4 := base2.Trim(KwArgs{"start_frame": 10, "end_frame": 20})
	t5 := base3.Trim(KwArgs{"start_frame": 10, "end_frame": 20})
	assert.Equal(t, t1.Hash(), t2.Hash())
	assert.Equal(t, t1.Hash(), t4.Hash())
	assert.NotEqual(t, t1.Hash(), t3.Hash())
	assert.NotEqual(t, t1.Hash(), t5.Hash())
}

// TestFluentConcat checks that concat hashes are order-sensitive.
func TestFluentConcat(t *testing.T) {
	base1 := Input("dummy1.mp4", nil)
	trim1 := base1.Trim(KwArgs{"start_frame": 10, "end_frame": 20})
	trim2 := base1.Trim(KwArgs{"start_frame": 30, "end_frame": 40})
	trim3 := base1.Trim(KwArgs{"start_frame": 50, "end_frame": 60})
	concat1 := Concat([]*Stream{trim1, trim2, trim3})
	concat2 := Concat([]*Stream{trim1, trim2, trim3})
	concat3 := Concat([]*Stream{trim1, trim3, trim2})
	assert.Equal(t, concat1.Hash(), concat2.Hash())
	assert.NotEqual(t, concat1.Hash(), concat3.Hash())
}
// TestRepeatArgs verifies that a slice-valued kwarg repeats its flag once
// per element.
func TestRepeatArgs(t *testing.T) {
	o := Input("dummy.mp4", nil).Output("dummy2.mp4",
		KwArgs{"streamid": []string{"0:0x101", "1:0x102"}})
	assert.Equal(t, o.GetArgs(), []string{"-i", "dummy.mp4", "-streamid", "0:0x101", "-streamid", "1:0x102", "dummy2.mp4"})
}

// TestGlobalArgs verifies that global args are appended after the output.
func TestGlobalArgs(t *testing.T) {
	o := Input("dummy.mp4", nil).Output("dummy2.mp4", nil).GlobalArgs("-progress", "someurl")
	assert.Equal(t, o.GetArgs(), []string{
		"-i",
		"dummy.mp4",
		"dummy2.mp4",
		"-progress",
		"someurl",
	})
}

// TestSimpleExample runs a plain transcode of the sample input (requires a
// local ffmpeg binary and the sample data).
func TestSimpleExample(t *testing.T) {
	err := Input(TestInputFile1, nil).
		Output(TestOutputFile1, nil).
		OverWriteOutput().
		Run()
	assert.Nil(t, err)
}

// TestSimpleOverLayExample overlays an image onto the sample video
// (requires ffmpeg and the sample data).
func TestSimpleOverLayExample(t *testing.T) {
	err := Input(TestInputFile1, nil).
		Overlay(Input(TestOverlayFile), "").
		Output(TestOutputFile1).OverWriteOutput().
		Run()
	assert.Nil(t, err)
}

// TestSimpleOutputArgs pins kwarg-to-flag rendering for an image-sequence
// output.
func TestSimpleOutputArgs(t *testing.T) {
	cmd := Input(TestInputFile1).Output("imageFromVideo_%d.jpg", KwArgs{"vf": "fps=3", "qscale:v": 2})
	assert.Equal(t, []string{
		"-i", "./examples/sample_data/in1.mp4", "-qscale:v",
		"2", "-vf", "fps=3", "imageFromVideo_%d.jpg"}, cmd.GetArgs())
}
// TestAutomaticStreamSelection mirrors the automatic stream-selection
// example from http://ffmpeg.org/ffmpeg-all.html and prints the generated
// args and graph (no assertions).
func TestAutomaticStreamSelection(t *testing.T) {
	// example from http://ffmpeg.org/ffmpeg-all.html
	input := []*Stream{Input("A.avi"), Input("B.mp4")}
	out1 := Output(input, "out1.mkv")
	out2 := Output(input, "out2.wav")
	out3 := Output(input, "out3.mov", KwArgs{"map": "1:a", "c:a": "copy"})
	cmd := MergeOutputs(out1, out2, out3)
	printArgs(cmd.GetArgs())
	printGraph(cmd)
}

// TestLabeledFiltergraph mirrors the labeled-filtergraph example from
// http://ffmpeg.org/ffmpeg-all.html and prints the generated args and graph
// (no assertions).
func TestLabeledFiltergraph(t *testing.T) {
	// example from http://ffmpeg.org/ffmpeg-all.html
	in1, in2, in3 := Input("A.avi"), Input("B.mp4"), Input("C.mkv")
	in2Split := in2.Get("v").Hue(KwArgs{"s": 0}).Split()
	overlay := Filter([]*Stream{in1, in2}, "overlay", nil)
	aresample := Filter([]*Stream{in1, in2, in3}, "aresample", nil)
	out1 := Output([]*Stream{in2Split.Get("outv1"), overlay, aresample}, "out1.mp4", KwArgs{"an": ""})
	out2 := Output([]*Stream{in1, in2, in3}, "out2.mkv")
	out3 := in2Split.Get("outv2").Output("out3.mkv", KwArgs{"map": "1:a:0"})
	cmd := MergeOutputs(out1, out2, out3)
	printArgs(cmd.GetArgs())
	printGraph(cmd)
}

// ComplexFilterExample builds a vflip/trim/concat/overlay/drawbox pipeline
// over the sample files; used by TestComplexFilterExample and TestView.
func ComplexFilterExample() *Stream {
	split := Input(TestInputFile1).VFlip().Split()
	split0, split1 := split.Get("0"), split.Get("1")
	overlayFile := Input(TestOverlayFile).Crop(10, 10, 158, 112)
	return Concat([]*Stream{
		split0.Trim(KwArgs{"start_frame": 10, "end_frame": 20}),
		split1.Trim(KwArgs{"start_frame": 30, "end_frame": 40})}).
		Overlay(overlayFile.HFlip(), "").
		DrawBox(50, 50, 120, 120, "red", 5).
		Output(TestOutputFile1).
		OverWriteOutput()
}
// TestComplexFilterExample pins the exact argument rendering of the complex
// filter graph built by ComplexFilterExample.
func TestComplexFilterExample(t *testing.T) {
	assert.Equal(t, []string{
		"-i",
		TestInputFile1,
		"-i",
		TestOverlayFile,
		"-filter_complex",
		"[0]vflip[s0];" +
			"[s0]split=2[s1][s2];" +
			"[s1]trim=end_frame=20:start_frame=10[s3];" +
			"[s2]trim=end_frame=40:start_frame=30[s4];" +
			"[s3][s4]concat=n=2[s5];" +
			"[1]crop=158:112:10:10[s6];" +
			"[s6]hflip[s7];" +
			"[s5][s7]overlay=eof_action=repeat[s8];" +
			"[s8]drawbox=50:50:120:120:red:t=5[s9]",
		"-map",
		"[s9]",
		TestOutputFile1,
		"-y",
	}, ComplexFilterExample().GetArgs())
}

// TestCombinedOutput checks that two inputs mapped to a single output
// produce explicit -map flags.
func TestCombinedOutput(t *testing.T) {
	i1 := Input(TestInputFile1)
	i2 := Input(TestOverlayFile)
	out := Output([]*Stream{i1, i2}, TestOutputFile1)
	assert.Equal(t, []string{
		"-i",
		TestInputFile1,
		"-i",
		TestOverlayFile,
		"-map",
		"0",
		"-map",
		"1",
		TestOutputFile1,
	}, out.GetArgs())
}

// TestFilterWithSelector checks audio/video selectors ([0:a], [0:v]) in the
// generated filtergraph.
func TestFilterWithSelector(t *testing.T) {
	i := Input(TestInputFile1)
	v1 := i.Video().HFlip()
	a1 := i.Audio().Filter("aecho", Args{"0.8", "0.9", "1000", "0.3"})
	out := Output([]*Stream{a1, v1}, TestOutputFile1)
	assert.Equal(t, []string{
		"-i",
		TestInputFile1,
		"-filter_complex",
		"[0:a]aecho=0.8:0.9:1000:0.3[s0];[0:v]hflip[s1]",
		"-map",
		"[s0]",
		"-map",
		"[s1]",
		TestOutputFile1}, out.GetArgs())
}
// ComplexFilterAsplitExample builds an asplit/atrim/concat pipeline over the
// sample input; used by TestFilterASplit and TestView.
func ComplexFilterAsplitExample() *Stream {
	split := Input(TestInputFile1).VFlip().ASplit()
	split0 := split.Get("0")
	split1 := split.Get("1")
	return Concat([]*Stream{
		split0.Filter("atrim", nil, KwArgs{"start": 10, "end": 20}),
		split1.Filter("atrim", nil, KwArgs{"start": 30, "end": 40}),
	}).Output(TestOutputFile1).OverWriteOutput()
}

// TestFilterConcatVideoOnly checks concat arg rendering with the default
// v=1/a=0 stream counts.
func TestFilterConcatVideoOnly(t *testing.T) {
	in1 := Input("in1.mp4")
	in2 := Input("in2.mp4")
	args := Concat([]*Stream{in1, in2}).Output("out.mp4").GetArgs()
	assert.Equal(t, []string{
		"-i",
		"in1.mp4",
		"-i",
		"in2.mp4",
		"-filter_complex",
		"[0][1]concat=n=2[s0]",
		"-map",
		"[s0]",
		"out.mp4",
	}, args)
}

// TestFilterConcatAudioOnly checks concat arg rendering with v=0/a=1.
func TestFilterConcatAudioOnly(t *testing.T) {
	in1 := Input("in1.mp4")
	in2 := Input("in2.mp4")
	args := Concat([]*Stream{in1, in2}, KwArgs{"v": 0, "a": 1}).Output("out.mp4").GetArgs()
	assert.Equal(t, []string{
		"-i",
		"in1.mp4",
		"-i",
		"in2.mp4",
		"-filter_complex",
		"[0][1]concat=a=1:n=2:v=0[s0]",
		"-map",
		"[s0]",
		"out.mp4",
	}, args)
}

// TestFilterConcatAudioVideo checks interleaved audio+video concat with the
// two output labels mapped separately.
func TestFilterConcatAudioVideo(t *testing.T) {
	in1 := Input("in1.mp4")
	in2 := Input("in2.mp4")
	joined := Concat([]*Stream{in1.Video(), in1.Audio(), in2.HFlip(), in2.Get("a")}, KwArgs{"v": 1, "a": 1}).Node
	args := Output([]*Stream{joined.Get("0"), joined.Get("1")}, "out.mp4").GetArgs()
	assert.Equal(t, []string{
		"-i",
		"in1.mp4",
		"-i",
		"in2.mp4",
		"-filter_complex",
		"[1]hflip[s0];[0:v][0:a][s0][1:a]concat=a=1:n=2:v=1[s1][s2]",
		"-map",
		"[s1]",
		"-map",
		"[s2]",
		"out.mp4",
	}, args)
}
// TestFilterASplit pins the filtergraph rendering of the asplit example.
func TestFilterASplit(t *testing.T) {
	out := ComplexFilterAsplitExample()
	args := out.GetArgs()
	assert.Equal(t, []string{
		"-i",
		TestInputFile1,
		"-filter_complex",
		"[0]vflip[s0];[s0]asplit=2[s1][s2];[s1]atrim=end=20:start=10[s3];[s2]atrim=end=40:start=30[s4];[s3][s4]concat=n=2[s5]",
		"-map",
		"[s5]",
		TestOutputFile1,
		"-y",
	}, args)
}

// TestOutputBitrate checks the video_bitrate/audio_bitrate aliases for
// -b:v / -b:a.
func TestOutputBitrate(t *testing.T) {
	args := Input("in").Output("out", KwArgs{"video_bitrate": 1000, "audio_bitrate": 200}).GetArgs()
	assert.Equal(t, []string{"-i", "in", "-b:v", "1000", "-b:a", "200", "out"}, args)
}

// TestOutputVideoSize checks pass-through of the video_size kwarg.
func TestOutputVideoSize(t *testing.T) {
	args := Input("in").Output("out", KwArgs{"video_size": "320x240"}).GetArgs()
	assert.Equal(t, []string{"-i", "in", "-video_size", "320x240", "out"}, args)
}

// TestCompile checks that Compile prepends the ffmpeg binary name.
func TestCompile(t *testing.T) {
	out := Input("dummy.mp4").Output("dummy2.mp4")
	assert.Equal(t, out.Compile().Args, []string{"ffmpeg", "-i", "dummy.mp4", "dummy2.mp4"})
}
// TestPipe feeds random raw frames through stdin, trims the first
// startFrame frames, and checks both the rendered args and the number of
// bytes coming out of stdout (requires a local ffmpeg binary).
func TestPipe(t *testing.T) {
	width, height := 32, 32
	frameSize := width * height * 3 // rgb24: 3 bytes per pixel
	frameCount, startFrame := 10, 2
	_, _ = frameCount, frameSize
	out := Input(
		"pipe:0",
		KwArgs{
			"format":       "rawvideo",
			"pixel_format": "rgb24",
			"video_size":   fmt.Sprintf("%dx%d", width, height),
			"framerate":    10}).
		Trim(KwArgs{"start_frame": startFrame}).
		Output("pipe:1", KwArgs{"format": "rawvideo"})
	args := out.GetArgs()
	assert.Equal(t, args, []string{
		"-f",
		"rawvideo",
		"-video_size",
		fmt.Sprintf("%dx%d", width, height),
		"-framerate",
		"10",
		"-pixel_format",
		"rgb24",
		"-i",
		"pipe:0",
		"-filter_complex",
		"[0]trim=start_frame=2[s0]",
		"-map",
		"[s0]",
		"-f",
		"rawvideo",
		"pipe:1",
	})
	inBuf := bytes.NewBuffer(nil)
	for i := 0; i < frameSize*frameCount; i++ {
		inBuf.WriteByte(byte(rand.IntnRange(0, 255)))
	}
	outBuf := bytes.NewBuffer(nil)
	err := out.WithInput(inBuf).WithOutput(outBuf).Run()
	assert.Nil(t, err)
	// output must contain exactly the frames after the trimmed prefix
	assert.Equal(t, outBuf.Len(), frameSize*(frameCount-startFrame))
}
// TestView renders both example graphs as flow-chart / state-diagram markup
// and logs the results.
func TestView(t *testing.T) {
	a, err := ComplexFilterExample().View(ViewTypeFlowChart)
	assert.Nil(t, err)
	b, err := ComplexFilterAsplitExample().View(ViewTypeStateDiagram)
	assert.Nil(t, err)
	t.Log(a)
	t.Log(b)
}

// printArgs writes the args on one space-separated line to stdout.
func printArgs(args []string) {
	for _, a := range args {
		fmt.Printf("%s ", a)
	}
	fmt.Println()
}

// printGraph prints the stream's flow-chart rendering, ignoring view errors.
func printGraph(s *Stream) {
	fmt.Println()
	v, _ := s.View(ViewTypeFlowChart)
	fmt.Println(v)
}
//func TestAvFoundation(t *testing.T) {
// out := Input("default:none", KwArgs{"f": "avfoundation", "framerate": "30"}).
// Output("output.mp4", KwArgs{"format": "mp4"}).
// OverWriteOutput()
// assert.Equal(t, []string{"-f", "avfoundation", "-framerate",
// "30", "-i", "default:none", "-f", "mp4", "output.mp4", "-y"}, out.GetArgs())
// err := out.Run()
// assert.Nil(t, err)
//}

135
filters.go Normal file
View File

@@ -0,0 +1,135 @@
package ffmpeg_go
import (
"fmt"
"strconv"
)
// AssertType panics when hasType differs from expectType; it guards stream
// operations that are only valid on a particular stream type.
func AssertType(hasType, expectType string, action string) {
	if hasType == expectType {
		return
	}
	panic(fmt.Sprintf("cannot %s on non-%s", action, expectType))
}
// FilterMultiOutput applies filterName to streamSpec and returns the raw
// node so callers can pull multiple labeled outputs from it.
func FilterMultiOutput(streamSpec []*Stream, filterName string, args Args, kwArgs ...KwArgs) *Node {
	return NewFilterNode(filterName, streamSpec, -1, args, MergeKwArgs(kwArgs))
}

// Filter applies filterName to streamSpec and returns its single output
// stream.
func Filter(streamSpec []*Stream, filterName string, args Args, kwArgs ...KwArgs) *Stream {
	return FilterMultiOutput(streamSpec, filterName, args, MergeKwArgs(kwArgs)).Stream("", "")
}

// Filter applies filterName to this stream; only valid on a
// FilterableStream (panics otherwise).
func (s *Stream) Filter(filterName string, args Args, kwArgs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "filter")
	return Filter([]*Stream{s}, filterName, args, MergeKwArgs(kwArgs))
}
// Split duplicates the video stream (ffmpeg `split` filter); address the
// copies via Node.Get("0"), Get("1"), ...
func (s *Stream) Split() *Node {
	AssertType(s.Type, "FilterableStream", "split")
	return NewFilterNode("split", []*Stream{s}, 1, nil, nil)
}

// ASplit duplicates the audio stream (ffmpeg `asplit` filter).
func (s *Stream) ASplit() *Node {
	AssertType(s.Type, "FilterableStream", "asplit")
	return NewFilterNode("asplit", []*Stream{s}, 1, nil, nil)
}

// SetPts rewrites presentation timestamps with expr (ffmpeg `setpts`
// filter).
func (s *Stream) SetPts(expr string) *Node {
	AssertType(s.Type, "FilterableStream", "setpts")
	return NewFilterNode("setpts", []*Stream{s}, 1, []string{expr}, nil)
}

// Trim cuts the stream (ffmpeg `trim` filter); pass e.g.
// KwArgs{"start_frame": 10, "end_frame": 20}.
func (s *Stream) Trim(kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "trim")
	return NewFilterNode("trim", []*Stream{s}, 1, nil, MergeKwArgs(kwargs)).Stream("", "")
}
// Overlay places overlayParentNode on top of s (ffmpeg `overlay` filter).
// An empty eofAction defaults to "repeat".
func (s *Stream) Overlay(overlayParentNode *Stream, eofAction string, kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "overlay")
	if eofAction == "" {
		eofAction = "repeat"
	}
	args := MergeKwArgs(kwargs)
	args["eof_action"] = eofAction
	return NewFilterNode("overlay", []*Stream{s, overlayParentNode}, 2, nil, args).Stream("", "")
}

// HFlip mirrors the video horizontally (ffmpeg `hflip` filter).
func (s *Stream) HFlip(kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "hflip")
	return NewFilterNode("hflip", []*Stream{s}, 1, nil, MergeKwArgs(kwargs)).Stream("", "")
}

// VFlip mirrors the video vertically (ffmpeg `vflip` filter).
func (s *Stream) VFlip(kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "vflip")
	return NewFilterNode("vflip", []*Stream{s}, 1, nil, MergeKwArgs(kwargs)).Stream("", "")
}
// Crop cuts a w x h window at offset (x, y) (ffmpeg `crop` filter; note the
// filter takes width:height:x:y, hence the reordering below).
func (s *Stream) Crop(x, y, w, h int, kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "crop")
	return NewFilterNode("crop", []*Stream{s}, 1, []string{
		strconv.Itoa(w), strconv.Itoa(h), strconv.Itoa(x), strconv.Itoa(y),
	}, MergeKwArgs(kwargs)).Stream("", "")
}

// DrawBox draws a w x h rectangle at (x, y) in the given color (ffmpeg
// `drawbox` filter); thickness 0 keeps the filter's default.
func (s *Stream) DrawBox(x, y, w, h int, color string, thickness int, kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "drawbox")
	args := MergeKwArgs(kwargs)
	if thickness != 0 {
		args["t"] = thickness
	}
	return NewFilterNode("drawbox", []*Stream{s}, 1, []string{
		strconv.Itoa(x), strconv.Itoa(y), strconv.Itoa(w), strconv.Itoa(h), color,
	}, args).Stream("", "")
}
// Drawtext renders text at (x, y) (ffmpeg `drawtext` filter). A zero x/y
// and an empty text are omitted, falling back to the filter defaults.
//
// NOTE(review): escape=true quotes the text with Go's %q verb, which applies
// Go string escaping, not ffmpeg's quoting rules — verify against inputs
// containing ffmpeg-special characters (':', '\'', '%').
func (s *Stream) Drawtext(text string, x, y int, escape bool, kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "drawtext")
	args := MergeKwArgs(kwargs)
	if escape {
		text = fmt.Sprintf("%q", text)
	}
	if text != "" {
		args["text"] = text
	}
	if x != 0 {
		args["x"] = x
	}
	if y != 0 {
		args["y"] = y
	}
	return NewFilterNode("drawtext", []*Stream{s}, 1, nil, args).Stream("", "")
}
// Concat concatenates streams with the ffmpeg `concat` filter.
//
// The `v` and `a` kwargs give the number of video and audio streams per
// segment (defaults 1 and 0). len(streams) must be a positive multiple of
// v+a; the segment count `n` is derived from it. Panics on invalid counts.
func Concat(streams []*Stream, kwargs ...KwArgs) *Stream {
	args := MergeKwArgs(kwargs)
	vsc := args.GetDefault("v", 1).(int)
	asc := args.GetDefault("a", 0).(int)
	sc := vsc + asc
	// BUG FIX: v=0 together with a=0 previously caused an opaque
	// divide-by-zero runtime panic; fail with an explicit message instead.
	if sc == 0 {
		panic("concat requires at least one video or audio stream per segment")
	}
	if len(streams)%sc != 0 {
		panic("streams count not valid")
	}
	args["n"] = len(streams) / sc
	return NewFilterNode("concat", streams, -1, nil, args).Stream("", "")
}
// Concat concatenates the given streams followed by s — note the receiver
// becomes the LAST segment, not the first.
func (s *Stream) Concat(streams []*Stream, kwargs ...KwArgs) *Stream {
	return Concat(append(streams, s), MergeKwArgs(kwargs))
}
// ZoomPan applies the ffmpeg `zoompan` filter with the given kwargs.
func (s *Stream) ZoomPan(kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "zoompan")
	return NewFilterNode("zoompan", []*Stream{s}, 1, nil, MergeKwArgs(kwargs)).Stream("", "")
}

// Hue applies the ffmpeg `hue` filter with the given kwargs.
func (s *Stream) Hue(kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "hue")
	return NewFilterNode("hue", []*Stream{s}, 1, nil, MergeKwArgs(kwargs)).Stream("", "")
}

// ColorChannelMixer applies the ffmpeg `colorchannelmixer` filter with the
// given kwargs.
// todo fix this
func (s *Stream) ColorChannelMixer(kwargs ...KwArgs) *Stream {
	AssertType(s.Type, "FilterableStream", "colorchannelmixer")
	return NewFilterNode("colorchannelmixer", []*Stream{s}, 1, nil, MergeKwArgs(kwargs)).Stream("", "")
}

11
go.mod Normal file
View File

@@ -0,0 +1,11 @@
module github.com/u2takey/ffmpeg-go
go 1.14
require (
github.com/aws/aws-sdk-go v1.38.20
github.com/disintegration/imaging v1.6.2
github.com/stretchr/testify v1.4.0
github.com/u2takey/go-utils v0.0.0-20200713025200-4704d09fc2c7
gocv.io/x/gocv v0.25.0
)

61
go.sum Normal file
View File

@@ -0,0 +1,61 @@
github.com/aws/aws-sdk-go v1.38.20 h1:QbzNx/tdfATbdKfubBpkt84OM6oBkxQZRw6+bW2GyeA=
github.com/aws/aws-sdk-go v1.38.20/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/selinux v1.5.2/go.mod h1:yTcKuYAh6R95iDpefGLQaPaRwJFwyzAJufJyiTt7s0g=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/u2takey/go-utils v0.0.0-20200713025200-4704d09fc2c7 h1:PT7mE8HJE1mwaSazrOdSeByJ1FoV33/fHUZrBB+zwVU=
github.com/u2takey/go-utils v0.0.0-20200713025200-4704d09fc2c7/go.mod h1:ATqKFpgjUIlhGRs8j59gXmu8Cmpo1QQEHV6vwu1hs28=
gocv.io/x/gocv v0.25.0 h1:vM50jL3v9OEqWSi+urelX5M1ptZeFWA/VhGPvdTqsJU=
gocv.io/x/gocv v0.25.0/go.mod h1:Rar2PS6DV+T4FL+PM535EImD/h13hGVaHhnCu1xarBs=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b h1:uwuIcX0g4Yl1NC5XAz37xsr2lTtcqevgzYNVt49waME=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=

24
graph.go Normal file
View File

@@ -0,0 +1,24 @@
package ffmpeg_go
import "time"
// for json spec

// GraphNode is the JSON representation of a single node in a filter graph.
type GraphNode struct {
	Name          string   `json:"name"`
	InputStreams  []string `json:"input_streams"`
	OutputStreams []string `json:"output_streams"`
	Args          Args     `json:"args"`
	KwArgs        KwArgs   `json:"kw_args"`
}

// GraphOptions carries run options attached to a serialized graph.
// NOTE(review): field semantics (e.g. what a zero Timeout means) are defined
// by the consumer of Graph — confirm before relying on them.
type GraphOptions struct {
	Timeout         time.Duration
	OverWriteOutput bool
}

// Graph is the JSON serialization of a whole ffmpeg command graph.
type Graph struct {
	OutputStream string       `json:"output_stream"`
	GraphOptions GraphOptions `json:"graph_options"`
	Nodes        []GraphNode  `json:"nodes"`
}

BIN
main/rtmp Executable file

Binary file not shown.

210
main/rtmp.go Normal file
View File

@@ -0,0 +1,210 @@
package main
import (
"encoding/json"
"fmt"
"image"
"image/color"
"io"
"log"
"os"
ffmpeg "github.com/u2takey/ffmpeg-go"
"gocv.io/x/gocv"
)
// getVideoSize probes fileName with ffprobe and returns the width and
// height of its first video stream, or (0, 0) when none is found. Panics on
// probe or JSON-decode errors.
// NOTE(review): duplicated in examples/stream.go — consider sharing.
func getVideoSize(fileName string) (int, int) {
	log.Println("Getting video size for", fileName)
	data, err := ffmpeg.Probe(fileName)
	if err != nil {
		panic(err)
	}
	log.Println("got video info", data)
	// decode only the stream dimensions out of the ffprobe JSON
	type VideoInfo struct {
		Streams []struct {
			CodecType string `json:"codec_type"`
			Width     int
			Height    int
		} `json:"streams"`
	}
	vInfo := &VideoInfo{}
	err = json.Unmarshal([]byte(data), vInfo)
	if err != nil {
		panic(err)
	}
	for _, s := range vInfo.Streams {
		if s.CodecType == "video" {
			return s.Width, s.Height
		}
	}
	return 0, 0
}
// startFFmpegProcess1 launches ffmpeg in a goroutine to decode infileName
// into raw bgr24 frames written to writer. The returned channel yields the
// ffmpeg error (or nil) exactly once and is then closed.
func startFFmpegProcess1(infileName string, writer io.WriteCloser) <-chan error {
	log.Println("Starting ffmpeg process1")
	done := make(chan error)
	go func() {
		err := ffmpeg.Input(infileName).
			Output("pipe:",
				ffmpeg.KwArgs{
					"format": "rawvideo", "pix_fmt": "bgr24",
				}).
			WithOutput(writer).
			Run()
		log.Println("ffmpeg process1 done")
		// close writer so the downstream reader observes EOF
		_ = writer.Close()
		done <- err
		close(done)
	}()
	return done
}
func process(reader io.ReadCloser, writer io.WriteCloser, w, h int) {
go func() {
frameSize := w * h * 3
buf := make([]byte, frameSize, frameSize)
for {
n, err := io.ReadFull(reader, buf)
if n == 0 || err == io.EOF {
_ = writer.Close()
return
} else if n != frameSize || err != nil {
panic(fmt.Sprintf("read error: %d, %s", n, err))
}
////if this open, the video picture will be gray
// for i := range buf {
// buf[i] = buf[i] / 3
// }
n, err = writer.Write(buf)
if n != frameSize || err != nil {
panic(fmt.Sprintf("write error: %d, %s", n, err))
}
}
}()
return
}
// startFFmpegProcess2 launches ffmpeg in a goroutine to encode raw bgr24
// frames of size width x height read from buf into outfileName as 25 fps
// x264/flv output. The returned channel yields the ffmpeg error (or nil)
// exactly once and is then closed.
func startFFmpegProcess2(outfileName string, buf io.Reader, width, height int) <-chan error {
	log.Println("Starting ffmpeg process2")
	done := make(chan error)
	go func() {
		err := ffmpeg.Input("pipe:",
			ffmpeg.KwArgs{"format": "rawvideo",
				"pix_fmt": "bgr24", "s": fmt.Sprintf("%dx%d", width, height),
			}).
			Output(outfileName, ffmpeg.KwArgs{
				"pix_fmt": "yuv420p", "c:v": "libx264", "preset": "ultrafast", "f": "flv", "r": "25",
			}).
			OverWriteOutput().
			WithInput(buf).
			Run()
		log.Println("ffmpeg process2 done")
		done <- err
		close(done)
	}()
	return done
}
// runExampleStream decodes inFile to raw bgr24 frames, pipes them through the
// (currently pass-through) frame processor, and re-encodes the result to
// outFile. Any ffmpeg failure panics.
func runExampleStream(inFile, outFile string) {
	width, height := getVideoSize(inFile)
	log.Println(width, height)

	decodedR, decodedW := io.Pipe()
	processedR, processedW := io.Pipe()

	done1 := startFFmpegProcess1(inFile, decodedW)
	process(decodedR, processedW, width, height)
	done2 := startFFmpegProcess2(outFile, processedR, width, height)

	// Wait for both ffmpeg processes to finish; either failing is fatal.
	if err := <-done1; err != nil {
		panic(err)
	}
	if err := <-done2; err != nil {
		panic(err)
	}
	log.Println("Done")
}
// runOpenCvFaceDetectWithCamera reads frames from the input capture device
// (e.g. an RTSP camera), runs Haar-cascade face detection on each frame, and
// is intended to push annotated frames to the output RTMP URL.
//
// NOTE(review): this function is in a debug state ("demo save"): the
// os.Exit(1) at the bottom of the loop terminates the whole program after the
// first frame, the pw1.Write that would feed startFFmpegProcess2 is commented
// out (so nothing is ever streamed), and img1 is never Closed. Confirm intent
// before relying on the streaming path.
func runOpenCvFaceDetectWithCamera(input, output string) {
	xmlFile := "../examples/sample_data/haarcascade_frontalface_default.xml"
	webcam, err := gocv.OpenVideoCapture(input)
	if err != nil {
		fmt.Printf("error opening video capture device: %v\n", input)
		return
	}
	defer webcam.Close()
	// prepare image matrix
	img := gocv.NewMat()
	defer img.Close()
	// Read one frame up front to learn the frame dimensions.
	if ok := webcam.Read(&img); !ok {
		panic(fmt.Sprintf("Cannot read device %v", input))
	}
	fmt.Printf("img: %vX%v\n", img.Cols(), img.Rows())
	pr1, pw1 := io.Pipe()
	// writeProcess("./sample_data/face_detect.mp4", pr1, img.Cols(), img.Rows())
	// writeProcess(output, pr1, img.Cols(), img.Rows())
	// Encoder reads raw frames from pr1; see the commented pw1.Write below.
	startFFmpegProcess2(output, pr1, img.Cols(), img.Rows())
	// color for the rect when faces detected
	blue := color.RGBA{B: 255}
	// load classifier to recognize faces
	classifier := gocv.NewCascadeClassifier()
	defer classifier.Close()
	if !classifier.Load(xmlFile) {
		fmt.Printf("Error reading cascade file: %v\n", xmlFile)
		return
	}
	fmt.Printf("Start reading device: %v\n", input)
	// Bounded loop rather than for{} so a wedged camera cannot spin forever.
	for i := 0; i < 2000; i++ {
		if ok := webcam.Read(&img); !ok {
			fmt.Printf("Device closed: %v\n", input)
			return
		}
		if img.Empty() {
			continue
		}
		// detect faces
		rects := classifier.DetectMultiScale(img)
		fmt.Printf("found %d faces\n", len(rects))
		// draw a rectangle around each face on the original image, along with text identifing as "Human"
		for _, r := range rects {
			gocv.Rectangle(&img, r, blue, 3)
			size := gocv.GetTextSize("Human", gocv.FontHersheyPlain, 1.2, 2)
			pt := image.Pt(r.Min.X+(r.Min.X/2)-(size.X/2), r.Min.Y-2)
			gocv.PutText(&img, "Human", pt, gocv.FontHersheyPlain, 1.2, blue, 2)
		}
		// pw1.Write(img.ToBytes())
		//test buf to image
		// Round-trip the frame through raw bytes to verify the conversion.
		img1, err := gocv.NewMatFromBytes(img.Rows(), img.Cols(), gocv.MatTypeCV8UC3, img.ToBytes())
		if err != nil {
			// NOTE(review): error is only logged and img1 is still used below.
			fmt.Println("change fail")
		}
		gocv.IMWrite("test1.jpg", img)
		gocv.IMWrite("test.jpg", img1)
		// NOTE(review): debug leftover — exits the process on the first frame.
		os.Exit(1)
	}
	pw1.Close()
	log.Println("Done")
}
// main wires the demo: read from an RTSP camera, detect faces, and (when the
// streaming path is enabled) restream to a local RTMP server.
func main() {
	// NOTE(review): credentials are hard-coded in the RTSP URL; move them to
	// configuration or environment variables before sharing this code.
	input := "rtsp://admin:cvdev2018@192.168.1.51"
	output := "rtmp://127.0.0.1:1935/live/stream"
	fmt.Println("test rtmp begin")
	// runExampleStream(input, output)
	runOpenCvFaceDetectWithCamera(input, output)
	fmt.Println("test rtmp end")
}

BIN
main/test.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 262 KiB

BIN
main/test1.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 262 KiB

277
node.go Normal file
View File

@@ -0,0 +1,277 @@
package ffmpeg_go
import (
"context"
"errors"
"fmt"
"strings"
"github.com/u2takey/go-utils/sets"
)
// Stream is one addressable output of a Node in the ffmpeg filter graph,
// optionally narrowed by a Label and a Selector (such as "a" or "v").
// Context carries run-time options (stdin/stdout wiring, overwrite flag).
type Stream struct {
	Node *Node
	Label Label
	Selector Selector
	Type string
	Context context.Context
}

// RunHook is an optional callback bundle attached to a Stream's context:
// f is launched in its own goroutine when Run starts, closer (if non-nil)
// is closed when the command ends, and done is waited on before Run returns.
type RunHook struct {
	f func()
	done <-chan struct{}
	closer interface {
		Close() error
	}
}
// NewStream builds a Stream attached to node with the given type, label and
// selector, backed by a fresh background context.
func NewStream(node *Node, streamType string, label Label, selector Selector) *Stream {
	stream := &Stream{
		Node:     node,
		Label:    label,
		Selector: selector,
		Type:     streamType,
		Context:  context.Background(),
	}
	return stream
}

// Hash combines the owning node's hash with the label's hash.
func (s *Stream) Hash() int {
	return getHash(s.Label) + s.Node.Hash()
}

// Equal reports whether the two streams hash identically.
func (s *Stream) Equal(other Stream) bool {
	return other.Hash() == s.Hash()
}

// String renders the stream for debugging.
func (s *Stream) String() string {
	return fmt.Sprintf("node: %s, label: %s, selector: %s", s.Node.String(), s.Label, s.Selector)
}

// Get narrows the stream with a selector such as "a" (audio) or "v" (video).
// It panics when a selector has already been applied.
func (s *Stream) Get(index string) *Stream {
	if s.Selector != "" {
		panic(errors.New("stream already has a selector"))
	}
	return s.Node.Stream(s.Label, Selector(index))
}

// Audio selects the audio component of this stream.
func (s *Stream) Audio() *Stream {
	return s.Get("a")
}

// Video selects the video component of this stream.
func (s *Stream) Video() *Stream {
	return s.Get("v")
}
// getStreamMap indexes the given streams by their slice position.
func getStreamMap(streamSpec []*Stream) map[int]*Stream {
	out := make(map[int]*Stream, len(streamSpec))
	for i, s := range streamSpec {
		out[i] = s
	}
	return out
}

// getStreamMapNodes collects the owning node of every stream in the map
// (in no particular order).
func getStreamMapNodes(streamMap map[int]*Stream) (ret []*Node) {
	for _, s := range streamMap {
		ret = append(ret, s.Node)
	}
	return ret
}

// getStreamSpecNodes collects the owning node of every stream in the slice.
func getStreamSpecNodes(streamSpec []*Stream) []*Node {
	return getStreamMapNodes(getStreamMap(streamSpec))
}
// Node is a vertex in the ffmpeg command DAG: an input, filter, output,
// merge-outputs or global-args node, together with the streams feeding it
// and the args/kwargs used to render its command-line fragment.
type Node struct {
	streamSpec []*Stream
	name string
	incomingStreamTypes sets.String
	outgoingStreamType string
	minInputs int
	maxInputs int
	args []string
	kwargs KwArgs
	nodeType string
}

// NewNode constructs a Node and validates its inputs: it panics when the
// number of incoming streams falls outside [minInputs, maxInputs] or when an
// incoming stream's type is not in incomingStreamTypes. A negative maxInputs
// means "unbounded".
func NewNode(streamSpec []*Stream,
	name string,
	incomingStreamTypes sets.String,
	outgoingStreamType string,
	minInputs int,
	maxInputs int,
	args []string,
	kwargs KwArgs,
	nodeType string) *Node {
	n := &Node{
		streamSpec: streamSpec,
		name: name,
		incomingStreamTypes: incomingStreamTypes,
		outgoingStreamType: outgoingStreamType,
		minInputs: minInputs,
		maxInputs: maxInputs,
		args: args,
		kwargs: kwargs,
		nodeType: nodeType,
	}
	n.__checkInputLen()
	n.__checkInputTypes()
	return n
}
// NewInputNode creates a source node (e.g. "input"): it takes no incoming
// streams and produces a FilterableStream.
func NewInputNode(name string, args []string, kwargs KwArgs) *Node {
	return NewNode(nil,
		name,
		nil,
		"FilterableStream",
		0,
		0,
		args,
		kwargs,
		"InputNode")
}

// NewFilterNode creates a filter node taking between 1 and maxInput
// FilterableStream inputs and producing a FilterableStream.
func NewFilterNode(name string, streamSpec []*Stream, maxInput int, args []string, kwargs KwArgs) *Node {
	return NewNode(streamSpec,
		name,
		sets.NewString("FilterableStream"),
		"FilterableStream",
		1,
		maxInput,
		args,
		kwargs,
		"FilterNode")
}

// NewOutputNode creates a sink node taking one or more FilterableStream
// inputs (maxInputs -1 = unbounded) and producing an OutputStream.
func NewOutputNode(name string, streamSpec []*Stream, args []string, kwargs KwArgs) *Node {
	return NewNode(streamSpec,
		name,
		sets.NewString("FilterableStream"),
		"OutputStream",
		1,
		-1,
		args,
		kwargs,
		"OutputNode")
}

// NewMergeOutputsNode combines one or more OutputStreams into a single
// OutputStream so several outputs can be run as one command.
func NewMergeOutputsNode(name string, streamSpec []*Stream) *Node {
	return NewNode(streamSpec,
		name,
		sets.NewString("OutputStream"),
		"OutputStream",
		1,
		-1,
		nil,
		nil,
		"MergeOutputsNode")
}

// NewGlobalNode attaches global command-line args to exactly one
// OutputStream.
func NewGlobalNode(name string, streamSpec []*Stream, args []string, kwargs KwArgs) *Node {
	return NewNode(streamSpec,
		name,
		sets.NewString("OutputStream"),
		"OutputStream",
		1,
		1,
		args,
		kwargs,
		"GlobalNode")
}
// __checkInputLen panics unless the number of incoming streams lies within
// [minInputs, maxInputs]; a negative bound disables that side of the check.
func (n *Node) __checkInputLen() {
	streamMap := getStreamMap(n.streamSpec)
	if n.minInputs >= 0 && len(streamMap) < n.minInputs {
		panic(fmt.Sprintf("Expected at least %d input stream(s); got %d", n.minInputs, len(streamMap)))
	}
	if n.maxInputs >= 0 && len(streamMap) > n.maxInputs {
		panic(fmt.Sprintf("Expected at most %d input stream(s); got %d", n.maxInputs, len(streamMap)))
	}
}

// __checkInputTypes panics when any incoming stream's type is not one of the
// node's accepted incomingStreamTypes.
func (n *Node) __checkInputTypes() {
	streamMap := getStreamMap(n.streamSpec)
	for _, s := range streamMap {
		if !n.incomingStreamTypes.Has(s.Type) {
			panic(fmt.Sprintf("Expected incoming stream(s) to be of one of the following types: %s; got %s", n.incomingStreamTypes, s.Type))
		}
	}
}

// __getIncomingEdgeMap builds the incoming-edge map, keying each upstream
// stream by its positional index rendered as a Label ("0", "1", ...).
func (n *Node) __getIncomingEdgeMap() map[Label]NodeInfo {
	incomingEdgeMap := map[Label]NodeInfo{}
	streamMap := getStreamMap(n.streamSpec)
	for i, s := range streamMap {
		incomingEdgeMap[Label(fmt.Sprintf("%v", i))] = NodeInfo{
			Node: s.Node,
			Label: s.Label,
			Selector: s.Selector,
		}
	}
	return incomingEdgeMap
}

// Hash derives the node's identity from its incoming edges plus its args and
// kwargs. Contributions are summed, so the nondeterministic map iteration
// order does not affect the result.
func (n *Node) Hash() int {
	b := 0
	for downStreamLabel, upStreamInfo := range n.IncomingEdgeMap() {
		b += getHash(fmt.Sprintf("%s%d%s%s", downStreamLabel, upStreamInfo.Node.Hash(), upStreamInfo.Label, upStreamInfo.Selector))
	}
	b += getHash(n.args)
	b += getHash(n.kwargs)
	return b
}
// String renders the node with its args and kwargs for debugging.
func (n *Node) String() string {
	return fmt.Sprintf("%s (%s) <%s>", n.name, getString(n.args), getString(n.kwargs))
}

// Equal reports whether the two DAG nodes hash identically.
func (n *Node) Equal(other DagNode) bool {
	return other.Hash() == n.Hash()
}

// ShortRepr returns the node's bare name.
func (n *Node) ShortRepr() string {
	return n.name
}

// IncomingEdgeMap exposes the node's incoming edges keyed by label.
func (n *Node) IncomingEdgeMap() map[Label]NodeInfo {
	return n.__getIncomingEdgeMap()
}

// GetInComingEdges materializes the incoming edge map as a slice of edges.
func (n *Node) GetInComingEdges() []DagEdge {
	return GetInComingEdges(n, n.IncomingEdgeMap())
}

// Stream creates an outgoing stream carrying this node's declared output type.
func (n *Node) Stream(label Label, selector Selector) *Stream {
	return NewStream(n, n.outgoingStreamType, label, selector)
}

// Get parses "label" or "label:selector" and returns the matching stream.
func (n *Node) Get(a string) *Stream {
	if parts := strings.Split(a, ":"); len(parts) == 2 {
		return n.Stream(Label(parts[0]), Selector(parts[1]))
	}
	return n.Stream(Label(a), "")
}
// GetFilter renders this filter node as an ffmpeg filter-graph expression:
// name=arg1:arg2:k=v..., with characters escaped for filter syntax. It
// panics when called on a node that is not a FilterNode.
func (n *Node) GetFilter(outgoingEdges []DagEdge) string {
	if n.nodeType != "FilterNode" {
		panic("call GetFilter on non-FilterNode")
	}
	args, kwargs, ret := n.args, n.kwargs, ""
	// split/asplit take their fan-out count as the sole positional argument.
	if n.name == "split" || n.name == "asplit" {
		args = []string{fmt.Sprintf("%d", len(outgoingEdges))}
	}
	// args = Args(args).EscapeWith("\\'=:")
	// Keyword args are appended after positional args in sorted-key order;
	// a valueless key is emitted bare ("k" rather than "k=").
	for _, k := range kwargs.EscapeWith("\\'=:").SortedKeys() {
		v := getString(kwargs[k])
		if v != "" {
			args = append(args, fmt.Sprintf("%s=%s", k, v))
		} else {
			args = append(args, fmt.Sprintf("%s", k))
		}
	}
	ret = escapeChars(n.name, "\\'=:")
	if len(args) > 0 {
		ret += fmt.Sprintf("=%s", strings.Join(args, ":"))
	}
	// Final pass escapes characters that are special at the graph level.
	return escapeChars(ret, "\\'[],;")
}

42
probe.go Normal file
View File

@@ -0,0 +1,42 @@
package ffmpeg_go
import (
	"bytes"
	"context"
	"fmt"
	"os/exec"
	"time"
)
// Probe Run ffprobe on the specified file and return a JSON representation of the output.
func Probe(fileName string, kwargs ...KwArgs) (string, error) {
return ProbeWithTimeout(fileName, 0, MergeKwArgs(kwargs))
}
func ProbeWithTimeout(fileName string, timeOut time.Duration, kwargs KwArgs) (string, error) {
args := KwArgs{
"show_format": "",
"show_streams": "",
"of": "json",
}
return ProbeWithTimeoutExec(fileName, timeOut, MergeKwArgs([]KwArgs{args, kwargs}))
}
// ProbeWithTimeoutExec executes ffprobe with kwargs rendered as command-line
// flags followed by fileName, returning ffprobe's stdout. A timeOut of 0
// means no deadline. On failure the returned error now includes ffprobe's
// stderr, which previously was discarded and made failures opaque.
func ProbeWithTimeoutExec(fileName string, timeOut time.Duration, kwargs KwArgs) (string, error) {
	args := ConvertKwargsToCmdLineArgs(kwargs)
	args = append(args, fileName)
	ctx := context.Background()
	if timeOut > 0 {
		var cancel func()
		ctx, cancel = context.WithTimeout(context.Background(), timeOut)
		defer cancel()
	}
	cmd := exec.CommandContext(ctx, "ffprobe", args...)
	stdout := bytes.NewBuffer(nil)
	stderr := bytes.NewBuffer(nil)
	cmd.Stdout = stdout
	cmd.Stderr = stderr
	if err := cmd.Run(); err != nil {
		// Surface ffprobe's own diagnostics alongside the exit status.
		return "", fmt.Errorf("ffprobe %q: %w: %s", fileName, err, stderr.String())
	}
	return stdout.String(), nil
}

39
probe_test.go Normal file
View File

@@ -0,0 +1,39 @@
package ffmpeg_go
import (
"encoding/json"
"fmt"
"strconv"
"testing"
"github.com/stretchr/testify/assert"
)
// TestProbe probes the sample input fixture and checks the duration ffprobe
// reports. It requires ffprobe on PATH and the TestInputFile1 fixture.
func TestProbe(t *testing.T) {
	data, err := Probe(TestInputFile1, nil)
	assert.Nil(t, err)
	duration, err := probeOutputDuration(data)
	assert.Nil(t, err)
	assert.Equal(t, fmt.Sprintf("%f", duration), "7.036000")
}
// probeFormat mirrors the "format" object of ffprobe's JSON output; only the
// duration field is needed here.
type probeFormat struct {
	Duration string `json:"duration"`
}

// probeData mirrors the top level of ffprobe's JSON output.
type probeData struct {
	Format probeFormat `json:"format"`
}

// probeOutputDuration extracts format.duration from ffprobe JSON output and
// parses it as seconds.
func probeOutputDuration(a string) (float64, error) {
	var pd probeData
	if err := json.Unmarshal([]byte(a), &pd); err != nil {
		return 0, err
	}
	return strconv.ParseFloat(pd.Format.Duration, 64)
}

268
run.go Normal file
View File

@@ -0,0 +1,268 @@
package ffmpeg_go
import (
"context"
"fmt"
"io"
"log"
"os"
"os/exec"
"sort"
"strings"
"time"
)
// getInputArgs renders an InputNode as ffmpeg arguments:
// [-f format] [-video_size s] <other kwargs> -i filename.
// It panics for any node whose name is not "input". Known kwargs are popped
// from a copy so the node's own kwargs are left untouched.
func getInputArgs(node *Node) []string {
	var args []string
	if node.name == "input" {
		kwargs := node.kwargs.Copy()
		filename := kwargs.PopString("filename")
		format := kwargs.PopString("format")
		videoSize := kwargs.PopString("video_size")
		if format != "" {
			args = append(args, "-f", format)
		}
		if videoSize != "" {
			args = append(args, "-video_size", videoSize)
		}
		args = append(args, ConvertKwargsToCmdLineArgs(kwargs)...)
		// -i must come after all per-input options.
		args = append(args, "-i", filename)
	} else {
		panic("unsupported node input name")
	}
	return args
}
// formatInputStreamName renders the upstream side of an edge using the name
// allocated in streamNameMap, as "[name]" or "[name:selector]". When used as
// a final -map argument (finalArg) that refers directly to an InputNode, the
// brackets are dropped ("0", "0:v", ...), matching ffmpeg's -map syntax.
func formatInputStreamName(streamNameMap map[string]string, edge DagEdge, finalArg bool) string {
	prefix := streamNameMap[fmt.Sprintf("%d%s", edge.UpStreamNode.Hash(), edge.UpStreamLabel)]
	suffix := ""
	format := "[%s%s]"
	if edge.UpStreamSelector != "" {
		suffix = fmt.Sprintf(":%s", edge.UpStreamSelector)
	}
	if finalArg && edge.UpStreamNode.(*Node).nodeType == "InputNode" {
		format = "%s%s"
	}
	return fmt.Sprintf(format, prefix, suffix)
}

// formatOutStreamName renders the allocated name of an edge's upstream
// output, always bracketed (filter-graph output labels).
func formatOutStreamName(streamNameMap map[string]string, edge DagEdge) string {
	return fmt.Sprintf("[%s]", streamNameMap[fmt.Sprintf("%d%s", edge.UpStreamNode.Hash(), edge.UpStreamLabel)])
}

// _getFilterSpec renders one filter node as
// "[in...]<filter>[out...]" for use inside -filter_complex.
func _getFilterSpec(node *Node, outOutingEdgeMap map[Label][]NodeInfo, streamNameMap map[string]string) string {
	var input, output []string
	for _, e := range node.GetInComingEdges() {
		input = append(input, formatInputStreamName(streamNameMap, e, false))
	}
	outEdges := GetOutGoingEdges(node, outOutingEdgeMap)
	for _, e := range outEdges {
		output = append(output, formatOutStreamName(streamNameMap, e))
	}
	return fmt.Sprintf("%s%s%s", strings.Join(input, ""), node.GetFilter(outEdges), strings.Join(output, ""))
}

// _getAllLabelsInSorted returns the map's labels in ascending order for
// deterministic output.
// NOTE(review): near-duplicate of _getAllLabelsSorted below (different map
// value type only).
func _getAllLabelsInSorted(m map[Label]NodeInfo) []Label {
	var r []Label
	for a := range m {
		r = append(r, a)
	}
	sort.Slice(r, func(i, j int) bool {
		return r[i] < r[j]
	})
	return r
}

// _getAllLabelsSorted returns the map's labels in ascending order for
// deterministic output.
func _getAllLabelsSorted(m map[Label][]NodeInfo) []Label {
	var r []Label
	for a := range m {
		r = append(r, a)
	}
	sort.Slice(r, func(i, j int) bool {
		return r[i] < r[j]
	})
	return r
}
// _allocateFilterStreamNames assigns sequential synthetic names (s0, s1, ...)
// to every filter output, keyed by "<nodeHash><label>" in streamNameMap.
// It panics when one output label fans out to multiple edges, which ffmpeg
// only allows via an explicit split/asplit filter.
func _allocateFilterStreamNames(nodes []*Node, outOutingEdgeMaps map[int]map[Label][]NodeInfo, streamNameMap map[string]string) {
	sc := 0
	for _, n := range nodes {
		om := outOutingEdgeMaps[n.Hash()]
		// todo sort
		for _, l := range _getAllLabelsSorted(om) {
			if len(om[l]) > 1 {
				panic(fmt.Sprintf(`encountered %s with multiple outgoing edges
with same upstream label %s; a 'split'' filter is probably required`, n.name, l))
			}
			streamNameMap[fmt.Sprintf("%d%s", n.Hash(), l)] = fmt.Sprintf("s%d", sc)
			sc += 1
		}
	}
}

// _getFilterArg allocates stream names then joins each node's filter spec
// with ';' into the single -filter_complex value.
func _getFilterArg(nodes []*Node, outOutingEdgeMaps map[int]map[Label][]NodeInfo, streamNameMap map[string]string) string {
	_allocateFilterStreamNames(nodes, outOutingEdgeMaps, streamNameMap)
	var filterSpec []string
	for _, n := range nodes {
		filterSpec = append(filterSpec, _getFilterSpec(n, outOutingEdgeMaps[n.Hash()], streamNameMap))
	}
	return strings.Join(filterSpec, ";")
}

// _getGlobalArgs returns a GlobalNode's raw argument list unchanged.
func _getGlobalArgs(node *Node) []string {
	return node.args
}

// _getOutputArgs renders an OutputNode as ffmpeg arguments: -map flags for
// each incoming stream (the trivial single "0" mapping is omitted), known
// kwargs translated to canonical flags, remaining kwargs generically, and the
// output filename last. It panics for non-"output" nodes or when no stream
// is mapped.
func _getOutputArgs(node *Node, streamNameMap map[string]string) []string {
	if node.name != "output" {
		panic("Unsupported output node")
	}
	var args []string
	if len(node.GetInComingEdges()) == 0 {
		panic("Output node has no mapped streams")
	}
	for _, e := range node.GetInComingEdges() {
		streamName := formatInputStreamName(streamNameMap, e, true)
		if streamName != "0" || len(node.GetInComingEdges()) > 1 {
			args = append(args, "-map", streamName)
		}
	}
	kwargs := node.kwargs.Copy()
	filename := kwargs.PopString("filename")
	if kwargs.HasKey("format") {
		args = append(args, "-f", kwargs.PopString("format"))
	}
	if kwargs.HasKey("video_bitrate") {
		args = append(args, "-b:v", kwargs.PopString("video_bitrate"))
	}
	if kwargs.HasKey("audio_bitrate") {
		args = append(args, "-b:a", kwargs.PopString("audio_bitrate"))
	}
	if kwargs.HasKey("video_size") {
		args = append(args, "-video_size", kwargs.PopString("video_size"))
	}
	args = append(args, ConvertKwargsToCmdLineArgs(kwargs)...)
	args = append(args, filename)
	return args
}
// GetArgs compiles the stream's node DAG into the full ffmpeg argument list:
// nodes are topologically sorted, bucketed by type, then emitted as input
// args, a -filter_complex expression, output args, global args, and finally
// -y when OverWriteOutput was requested. It panics if the DAG has a cycle.
func (s *Stream) GetArgs() []string {
	var args []string
	nodes := getStreamSpecNodes([]*Stream{s})
	var dagNodes []DagNode
	streamNameMap := map[string]string{}
	for i := range nodes {
		dagNodes = append(dagNodes, nodes[i])
	}
	sorted, outGoingMap, err := TopSort(dagNodes)
	if err != nil {
		panic(err)
	}
	DebugNodes(sorted)
	DebugOutGoingMap(sorted, outGoingMap)
	var inputNodes, outputNodes, globalNodes, filterNodes []*Node
	for i := range sorted {
		n := sorted[i].(*Node)
		switch n.nodeType {
		case "InputNode":
			// Inputs are named by their position ("0", "1", ...) for -map.
			streamNameMap[fmt.Sprintf("%d", n.Hash())] = fmt.Sprintf("%d", len(inputNodes))
			inputNodes = append(inputNodes, n)
		case "OutputNode":
			outputNodes = append(outputNodes, n)
		case "GlobalNode":
			globalNodes = append(globalNodes, n)
		case "FilterNode":
			filterNodes = append(filterNodes, n)
		}
	}
	// input args from inputNodes
	for _, n := range inputNodes {
		args = append(args, getInputArgs(n)...)
	}
	// filter args from filterNodes
	filterArgs := _getFilterArg(filterNodes, outGoingMap, streamNameMap)
	if filterArgs != "" {
		args = append(args, "-filter_complex", filterArgs)
	}
	// output args from outputNodes
	for _, n := range outputNodes {
		args = append(args, _getOutputArgs(n, streamNameMap)...)
	}
	// global args with outputNodes
	for _, n := range globalNodes {
		args = append(args, _getGlobalArgs(n)...)
	}
	if s.Context.Value("OverWriteOutput") != nil {
		args = append(args, "-y")
	}
	return args
}
// WithTimeout bounds the eventual ffmpeg run with a context deadline; a
// non-positive timeOut is a no-op.
// NOTE(review): the CancelFunc from context.WithTimeout is discarded
// (go vet "lostcancel"); timer resources are only released when the deadline
// fires — confirm whether an explicit cancel path is wanted.
func (s *Stream) WithTimeout(timeOut time.Duration) *Stream {
	if timeOut > 0 {
		s.Context, _ = context.WithTimeout(s.Context, timeOut)
	}
	return s
}

// OverWriteOutput marks the run to pass -y (overwrite existing output files).
// NOTE(review): plain string context keys risk collisions; an unexported key
// type is the usual convention.
func (s *Stream) OverWriteOutput() *Stream {
	s.Context = context.WithValue(s.Context, "OverWriteOutput", struct{}{})
	return s
}

// WithInput wires reader as the command's stdin.
func (s *Stream) WithInput(reader io.Reader) *Stream {
	s.Context = context.WithValue(s.Context, "Stdin", reader)
	return s
}

// WithOutput wires up to two writers: the first as stdout, the second as
// stderr.
func (s *Stream) WithOutput(out ...io.Writer) *Stream {
	if len(out) > 0 {
		s.Context = context.WithValue(s.Context, "Stdout", out[0])
	}
	if len(out) > 1 {
		s.Context = context.WithValue(s.Context, "Stderr", out[1])
	}
	return s
}

// WithErrorOutput wires out as the command's stderr.
func (s *Stream) WithErrorOutput(out io.Writer) *Stream {
	s.Context = context.WithValue(s.Context, "Stderr", out)
	return s
}

// ErrorToStdOut redirects the command's stderr to this process's stdout.
func (s *Stream) ErrorToStdOut() *Stream {
	return s.WithErrorOutput(os.Stdout)
}
// Compile builds (but does not start) the ffmpeg exec.Cmd for this stream,
// wiring stdin/stdout/stderr from any values previously stored on the
// context, and logs the full command line. Exposed for testing.
func (s *Stream) Compile() *exec.Cmd {
	args := s.GetArgs()
	cmd := exec.CommandContext(s.Context, "ffmpeg", args...)
	if a, ok := s.Context.Value("Stdin").(io.Reader); ok {
		cmd.Stdin = a
	}
	if a, ok := s.Context.Value("Stdout").(io.Writer); ok {
		cmd.Stdout = a
	}
	if a, ok := s.Context.Value("Stderr").(io.Writer); ok {
		cmd.Stderr = a
	}
	log.Printf("compiled command: ffmpeg %s\n", strings.Join(args, " "))
	return cmd
}

// Run compiles and executes the ffmpeg command. If a RunHook is attached via
// the "run_hook" context value, its f runs concurrently; once the command
// finishes, the hook's closer (if any) is closed and done is awaited before
// returning.
func (s *Stream) Run() error {
	if s.Context.Value("run_hook") != nil {
		hook := s.Context.Value("run_hook").(*RunHook)
		go hook.f()
		defer func() {
			if hook.closer != nil {
				_ = hook.closer.Close()
			}
			<-hook.done
		}()
	}
	return s.Compile().Run()
}

138
run_linux.go Normal file
View File

@@ -0,0 +1,138 @@
package ffmpeg_go
import (
"context"
"errors"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"github.com/u2takey/go-utils/rand"
)
// cgroup v1 mount points and control-file names used by RunLinux.
const (
	cgroupConfigKey = "cgroupConfig"
	cpuRoot = "/sys/fs/cgroup/cpu,cpuacct"
	cpuSetRoot = "/sys/fs/cgroup/cpuset"
	procsFile = "cgroup.procs"
	cpuSharesFile = "cpu.shares"
	cfsPeriodUsFile = "cpu.cfs_period_us"
	cfsQuotaUsFile = "cpu.cfs_quota_us"
	cpuSetCpusFile = "cpuset.cpus"
	cpuSetMemsFile = "cpuset.mems"
)

// cgroupConfig collects the CPU controls applied to the ffmpeg process:
// cpuRequest maps to cpu.shares (relative weight), cpuLimit to the CFS
// quota (hard cap in cores), and cpuset/memset pin CPUs and memory nodes
// via the cpuset controller.
type cgroupConfig struct {
	cpuRequest float32
	cpuLimit float32
	cpuset string
	memset string
}
// setCGroupConfig fetches (or lazily creates) the cgroupConfig stored on the
// stream's context, lets f mutate it, and stores it back.
func (s *Stream) setCGroupConfig(f func(config *cgroupConfig)) *Stream {
	a := s.Context.Value(cgroupConfigKey)
	if a == nil {
		a = &cgroupConfig{}
	}
	f(a.(*cgroupConfig))
	s.Context = context.WithValue(s.Context, cgroupConfigKey, a)
	return s
}

// WithCpuCoreRequest sets the soft CPU weight in cores (1024 shares/core).
func (s *Stream) WithCpuCoreRequest(n float32) *Stream {
	return s.setCGroupConfig(func(config *cgroupConfig) {
		config.cpuRequest = n
	})
}

// WithCpuCoreLimit sets the hard CPU cap in cores (CFS quota).
func (s *Stream) WithCpuCoreLimit(n float32) *Stream {
	return s.setCGroupConfig(func(config *cgroupConfig) {
		config.cpuLimit = n
	})
}

// WithCpuSet pins the process to the given cpuset.cpus list (e.g. "0-3").
func (s *Stream) WithCpuSet(n string) *Stream {
	return s.setCGroupConfig(func(config *cgroupConfig) {
		config.cpuset = n
	})
}

// WithMemSet pins the process to the given cpuset.mems node list.
func (s *Stream) WithMemSet(n string) *Stream {
	return s.setCGroupConfig(func(config *cgroupConfig) {
		config.memset = n
	})
}
func writeCGroupFile(rootPath, file string, value string) error {
return ioutil.WriteFile(filepath.Join(rootPath, file), []byte(value), 0755)
}
// RunLinux compiles the ffmpeg command and runs it inside freshly created
// cpu and cpuset cgroups (cgroup v1), applying the CPU request/limit and
// cpu/memory-node pinning configured via WithCpuCoreRequest,
// WithCpuCoreLimit, WithCpuSet and WithMemSet. The cgroup directories are
// removed when the command finishes.
func (s *Stream) RunLinux() error {
	// Tolerate an absent config instead of panicking on the type assertion:
	// previously calling RunLinux without any With* option crashed here.
	a, _ := s.Context.Value(cgroupConfigKey).(*cgroupConfig)
	if a == nil {
		a = &cgroupConfig{}
	}
	// NOTE(review): a request with no limit (cpuLimit == 0) is also rejected
	// here — confirm that is intended rather than meaning "unlimited".
	if a.cpuRequest > a.cpuLimit {
		return errors.New("cpuCoreLimit should greater or equal to cpuCoreRequest")
	}
	name := "ffmpeg_go_" + rand.String(6)
	rootCpuPath, rootCpuSetPath := filepath.Join(cpuRoot, name), filepath.Join(cpuSetRoot, name)
	err := os.MkdirAll(rootCpuPath, 0777)
	if err != nil {
		return err
	}
	err = os.MkdirAll(rootCpuSetPath, 0777)
	if err != nil {
		return err
	}
	defer func() { _ = os.Remove(rootCpuPath); _ = os.Remove(rootCpuSetPath) }()
	// cpu.shares is a relative weight (1024 == one core's worth); the CFS
	// quota/period pair enforces the hard cap.
	share := int(1024 * a.cpuRequest)
	period := 100000
	quota := int(a.cpuLimit * 100000)
	if share > 0 {
		err = writeCGroupFile(rootCpuPath, cpuSharesFile, strconv.Itoa(share))
		if err != nil {
			return err
		}
	}
	err = writeCGroupFile(rootCpuPath, cfsPeriodUsFile, strconv.Itoa(period))
	if err != nil {
		return err
	}
	if quota > 0 {
		err = writeCGroupFile(rootCpuPath, cfsQuotaUsFile, strconv.Itoa(quota))
		if err != nil {
			return err
		}
	}
	// The cpuset controller requires both cpus and mems before it is usable.
	if a.cpuset != "" && a.memset != "" {
		err = writeCGroupFile(rootCpuSetPath, cpuSetCpusFile, a.cpuset)
		if err != nil {
			return err
		}
		err = writeCGroupFile(rootCpuSetPath, cpuSetMemsFile, a.memset)
		if err != nil {
			return err
		}
	}
	cmd := s.Compile()
	err = cmd.Start()
	if err != nil {
		return err
	}
	// Attach the freshly started process to the cgroups it should obey.
	if share > 0 || quota > 0 {
		err = writeCGroupFile(rootCpuPath, procsFile, strconv.Itoa(cmd.Process.Pid))
		if err != nil {
			return err
		}
	}
	if a.cpuset != "" && a.memset != "" {
		err = writeCGroupFile(rootCpuSetPath, procsFile, strconv.Itoa(cmd.Process.Pid))
		if err != nil {
			return err
		}
	}
	return cmd.Wait()
}

222
utils.go Normal file
View File

@@ -0,0 +1,222 @@
package ffmpeg_go
import (
"fmt"
"hash/fnv"
"sort"
"strconv"
"strings"
"github.com/u2takey/go-utils/sets"
)
// getString renders an arbitrary value to the canonical string form used for
// hashing and command rendering. A Stringer implementation wins; slices join
// with ", "; maps render with sorted keys so the result is deterministic;
// anything else falls back to fmt's %v.
func getString(item interface{}) string {
	if a, ok := item.(interface{ String() string }); ok {
		return a.String()
	}
	switch a := item.(type) {
	case string:
		return a
	case []string:
		return strings.Join(a, ", ")
	case Args:
		return strings.Join(a, ", ")
	case []interface{}:
		var r []string
		for _, b := range a {
			r = append(r, getString(b))
		}
		return strings.Join(r, ", ")
	case KwArgs:
		var keys, r []string
		for k := range a {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		for _, k := range keys {
			r = append(r, fmt.Sprintf("%s: %s", k, getString(a[k])))
		}
		return fmt.Sprintf("{%s}", strings.Join(r, ", "))
	case map[string]interface{}:
		var keys, r []string
		for k := range a {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		for _, k := range keys {
			r = append(r, fmt.Sprintf("%s: %s", k, getString(a[k])))
		}
		return fmt.Sprintf("{%s}", strings.Join(r, ", "))
	}
	return fmt.Sprintf("%v", item)
}
// getHash computes an FNV-64 based hash of an arbitrary value: a Hash()
// implementation wins; strings and byte slices hash directly; maps sum their
// entries' hashes so iteration order does not matter; everything else hashes
// its getString rendering.
// NOTE(review): the uint64 sum is truncated to int — on 32-bit platforms this
// loses bits; confirm collisions are acceptable (Equal relies on this hash).
func getHash(item interface{}) int {
	h := fnv.New64()
	switch a := item.(type) {
	case interface{ Hash() int }:
		return a.Hash()
	case string:
		_, _ = h.Write([]byte(a))
		return int(h.Sum64())
	case []byte:
		_, _ = h.Write(a)
		return int(h.Sum64())
	case map[string]interface{}:
		b := 0
		for k, v := range a {
			b += getHash(k) + getHash(v)
		}
		return b
	case KwArgs:
		b := 0
		for k, v := range a {
			b += getHash(k) + getHash(v)
		}
		return b
	default:
		_, _ = h.Write([]byte(getString(item)))
		return int(h.Sum64())
	}
}
// escapeChars backslash-escapes every occurrence in text of each character
// listed in chars. When backslash itself is one of the characters it is
// processed first, so the backslashes introduced by later replacements are
// not double-escaped.
func escapeChars(text, chars string) string {
	s := sets.NewString()
	for _, a := range chars {
		s.Insert(string(a))
	}
	sl := s.List()
	if s.Has("\\") {
		s.Delete("\\")
		sl = append([]string{"\\"}, s.List()...)
	}
	for _, ch := range sl {
		text = strings.ReplaceAll(text, ch, "\\"+ch)
	}
	return text
}
// Args is an ordered list of positional ffmpeg arguments.
type Args []string

// Sorted sorts the receiver IN PLACE and returns it for chaining.
func (a Args) Sorted() Args {
	sort.Strings(a)
	return a
}

// EscapeWith returns a copy of a with every element escaped for the
// characters in chars.
func (a Args) EscapeWith(chars string) Args {
	out := Args{}
	for _, b := range a {
		out = append(out, escapeChars(b, chars))
	}
	return out
}
// KwArgs maps named ffmpeg option keys to their values.
type KwArgs map[string]interface{}

// MergeKwArgs flattens args into a single KwArgs; on key collisions the
// later map wins.
func MergeKwArgs(args []KwArgs) KwArgs {
	merged := KwArgs{}
	for _, m := range args {
		for k, v := range m {
			merged[k] = v
		}
	}
	return merged
}
// EscapeWith returns a copy of a in which every key and every stringified
// value has been escaped for the characters in chars.
func (a KwArgs) EscapeWith(chars string) KwArgs {
	escaped := KwArgs{}
	for k, v := range a {
		escaped[escapeChars(k, chars)] = escapeChars(getString(v), chars)
	}
	return escaped
}

// Copy returns a shallow copy of a.
func (a KwArgs) Copy() KwArgs {
	dup := make(KwArgs, len(a))
	for k, v := range a {
		dup[k] = v
	}
	return dup
}

// SortedKeys returns a's keys in ascending order.
func (a KwArgs) SortedKeys() []string {
	var keys []string
	for k := range a {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	return keys
}

// GetString formats the value stored under k with %v, or returns "" when the
// key is absent.
func (a KwArgs) GetString(k string) string {
	v, ok := a[k]
	if !ok {
		return ""
	}
	return fmt.Sprintf("%v", v)
}

// PopString removes and formats the value stored under k, or returns ""
// when the key is absent.
func (a KwArgs) PopString(k string) string {
	v, ok := a[k]
	if !ok {
		return ""
	}
	delete(a, k)
	return fmt.Sprintf("%v", v)
}
// HasKey reports whether k is present in a.
func (a KwArgs) HasKey(k string) bool {
	_, found := a[k]
	return found
}

// GetDefault returns the value stored under k, or defaultV when absent.
func (a KwArgs) GetDefault(k string, defaultV interface{}) interface{} {
	v, found := a[k]
	if !found {
		return defaultV
	}
	return v
}

// PopDefault removes and returns the value stored under k, or defaultV when
// absent.
func (a KwArgs) PopDefault(k string, defaultV interface{}) interface{} {
	v, found := a[k]
	if !found {
		return defaultV
	}
	delete(a, k)
	return v
}
// ConvertKwargsToCmdLineArgs renders kwargs as "-key value" pairs in sorted
// key order (deterministic command lines). An empty string value emits the
// bare flag; slice values repeat the flag once per element.
func ConvertKwargsToCmdLineArgs(kwargs KwArgs) []string {
	var keys, args []string
	for k := range kwargs {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		v := kwargs[k]
		switch a := v.(type) {
		case string:
			args = append(args, fmt.Sprintf("-%s", k))
			if a != "" {
				args = append(args, a)
			}
		case []string:
			for _, r := range a {
				args = append(args, fmt.Sprintf("-%s", k))
				if r != "" {
					args = append(args, r)
				}
			}
		case []int:
			for _, r := range a {
				args = append(args, fmt.Sprintf("-%s", k))
				args = append(args, strconv.Itoa(r))
			}
		case int:
			args = append(args, fmt.Sprintf("-%s", k))
			args = append(args, strconv.Itoa(a))
		default:
			args = append(args, fmt.Sprintf("-%s", k))
			args = append(args, fmt.Sprintf("%v", a))
		}
	}
	return args
}

98
view.go Normal file
View File

@@ -0,0 +1,98 @@
package ffmpeg_go
import (
"bytes"
"fmt"
)
// ViewType selects the Mermaid diagram flavor produced by Stream.View.
type ViewType string
const (
	// ViewTypeFlowChart renders the graph in flowchart style (https://mermaid-js.github.io/mermaid/#/flowchart)
	ViewTypeFlowChart ViewType = "flowChart"
	// ViewTypeStateDiagram renders the graph in stateDiagram style (https://mermaid-js.github.io/mermaid/#/stateDiagram)
	ViewTypeStateDiagram ViewType = "stateDiagram"
)

// View renders the stream's node graph as Mermaid markup in the requested
// flavor, or returns an error for an unknown ViewType.
func (s *Stream) View(viewType ViewType) (string, error) {
	switch viewType {
	case ViewTypeFlowChart:
		return visualizeForMermaidAsFlowChart(s)
	case ViewTypeStateDiagram:
		return visualizeForMermaidAsStateDiagram(s)
	default:
		return "", fmt.Errorf("unknown ViewType: %s", viewType)
	}
}
// visualizeForMermaidAsStateDiagram renders the stream's node DAG as a
// Mermaid stateDiagram: one "A --> B: label" transition per outgoing edge,
// using "<>" as a placeholder for empty labels. Node order follows the
// topological sort of the graph.
func visualizeForMermaidAsStateDiagram(s *Stream) (string, error) {
	var buf bytes.Buffer
	nodes := getStreamSpecNodes([]*Stream{s})
	var dagNodes []DagNode
	for i := range nodes {
		dagNodes = append(dagNodes, nodes[i])
	}
	sorted, outGoingMap, err := TopSort(dagNodes)
	if err != nil {
		return "", err
	}
	buf.WriteString("stateDiagram\n")
	for _, node := range sorted {
		next := outGoingMap[node.Hash()]
		for k, v := range next {
			for _, nextNode := range v {
				label := string(k)
				if label == "" {
					label = "<>"
				}
				buf.WriteString(fmt.Sprintf(`    %s --> %s: %s`, node.ShortRepr(), nextNode.Node.ShortRepr(), label))
				buf.WriteString("\n")
			}
		}
	}
	return buf.String(), nil
}
// visualizeForMermaidAsFlowChart renders the stream's node DAG as a Mermaid
// left-to-right flowchart: first a declaration per node ("hash[name]"), then
// one "A --> |upLabel:edgeLabel| B" arrow per outgoing edge, with "<>" as a
// placeholder for empty labels.
func visualizeForMermaidAsFlowChart(s *Stream) (string, error) {
	var buf bytes.Buffer
	nodes := getStreamSpecNodes([]*Stream{s})
	var dagNodes []DagNode
	for i := range nodes {
		dagNodes = append(dagNodes, nodes[i])
	}
	sorted, outGoingMap, err := TopSort(dagNodes)
	if err != nil {
		return "", err
	}
	buf.WriteString("graph LR\n")
	// Declare every node up front so isolated nodes still appear.
	for _, node := range sorted {
		buf.WriteString(fmt.Sprintf(`    %d[%s]`, node.Hash(), node.ShortRepr()))
		buf.WriteString("\n")
	}
	buf.WriteString("\n")
	for _, node := range sorted {
		next := outGoingMap[node.Hash()]
		for k, v := range next {
			for _, nextNode := range v {
				// todo ignore merged output
				label := string(k)
				if label == "" {
					label = "<>"
				}
				buf.WriteString(fmt.Sprintf(`    %d --> |%s| %d`, node.Hash(), fmt.Sprintf("%s:%s", nextNode.Label, label), nextNode.Node.Hash()))
				buf.WriteString("\n")
			}
		}
	}
	buf.WriteString("\n")
	return buf.String(), nil
}