Merge pull request #85 from AIFARMS/dev
fix: optimize video frame generation, add sanity checking and clean up code
pradeepsen99 authored Mar 17, 2024
2 parents 13bb257 + 63f669f commit 1e57a80
Showing 14 changed files with 686 additions and 576 deletions.
976 changes: 517 additions & 459 deletions package-lock.json

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion package.json
@@ -27,6 +27,7 @@
"predeploy": "npm run build",
"deploy": "NODE_ENV=production node_modules//gh-pages/bin/gh-pages.js -d build",
"start": "react-scripts start",
"dev": "NODE_ENV=dev react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
@@ -52,7 +53,8 @@
"@babel/preset-env": "*",
"@babel/preset-react": "*",
"babel-jest": "^24.9.0",
"gh-pages": "^3.2.0"
"gh-pages": "^3.2.0",
"redux-immutable-state-invariant": "^2.1.0"
},
"babel": {
"presets": [
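
Two of the package.json changes work together: the new dev script starts the app with NODE_ENV=dev, and redux-immutable-state-invariant arrives as a devDependency. The actual wiring is not visible in this diff, so the sketch below is hypothetical: a typical way to gate the invariant middleware on that environment variable.

// Hypothetical wiring (not shown in this diff): install the invariant
// middleware only when the app is started via the new "dev" script.
import {configureStore} from '@reduxjs/toolkit'
import invariant from 'redux-immutable-state-invariant'
import rootReducer from './reducer' // illustrative path

export default configureStore({
    reducer: rootReducer,
    middleware: (getDefaultMiddleware) =>
        process.env.NODE_ENV === 'dev'
            ? getDefaultMiddleware().concat(invariant()) // throws if a reducer mutates state
            : getDefaultMiddleware()
})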
20 changes: 19 additions & 1 deletion src/processing/actions.js
@@ -16,6 +16,7 @@ function updateFrameData(frame_number, data){
})
}
function getFrameData(frame_number){
console.log(store.getState())
return JSON.parse(JSON.stringify(store.getState().frame_data.data[frame_number]))
}

@@ -130,6 +131,20 @@ function getMetaData(){
return JSON.parse(JSON.stringify(store.getState().metadata))
}

function togglePlay(){
store.dispatch({
type: "play_status/togglePlay",
payload: {}
})
}

function initPlay(){
store.dispatch({
type: "play_status/init",
payload: {}
})
}

export {initFrameData,
updateFrameData,
getFrameData,
@@ -149,4 +164,7 @@ export {initFrameData,
setMediaType,
setTotalFrames,
setSkipValue,
getMetaData}
getMetaData,
togglePlay,
initPlay
}
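
For orientation: these helpers wrap store.dispatch and store.getState, and the JSON round-trip in getFrameData returns a deep copy, so callers can edit the result without mutating Redux state. A minimal read-modify-write sketch (frame number and object shape are illustrative, not from the diff):

// Illustrative use of the helpers above; getFrameData's JSON round-trip
// yields a deep copy, so local edits never touch the store directly.
import {getFrameData, updateFrameData} from './processing/actions'

const frame = getFrameData(42)          // deep copy of frame 42's objects
frame.push({type: 'rect', left: 10})    // safe local edit (shape illustrative)
updateFrameData(42, frame)              // dispatch the edited copy back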
Empty file removed src/reducer/box_count.js
Empty file removed src/reducer/duration.js
2 changes: 1 addition & 1 deletion src/reducer/frame_data.js
@@ -28,4 +28,4 @@ const frameDataSlice = createSlice({
})

export const {init, modifyFrame} = frameDataSlice.actions
export default frameDataSlice.reducer
export default frameDataSlice.reducer
Empty file removed src/reducer/media_inputs.js
13 changes: 8 additions & 5 deletions src/reducer/metadata.js
@@ -20,10 +20,13 @@ const metadataSlice = createSlice({
state.vertical_res = payload.payload.vertical_res
},
setFrameRate(state, payload){
var curr = JSON.parse(JSON.stringify(state))['frame_rate']
state.total_frames = state.total_frames / curr
state.total_frames = state.total_frames * payload.payload.frame_rate
state.frame_rate = payload.payload.frame_rate
// When the frame rate changes, rescale total_frames by the ratio of the new frame rate to the old one
if (state.media_type != "in_image") { // only for videos
var curr_framerate = JSON.parse(JSON.stringify(state))['frame_rate']
state.total_frames = state.total_frames / curr_framerate
state.total_frames = state.total_frames * payload.payload.frame_rate
state.frame_rate = payload.payload.frame_rate
}
},
setMediaType(state, payload){
state.media_type = payload.payload.media_type
Expand All @@ -41,4 +44,4 @@ const metadataSlice = createSlice({
})

export const {init, setRes, setFrameRate, setMediaType, setTotalFrames, setSkipValue} = metadataSlice.actions
export default metadataSlice.reducer
export default metadataSlice.reducer
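
A worked example of the setFrameRate arithmetic: for video media, total_frames is divided by the old rate and multiplied by the new one; still images are skipped. Starting values below are assumed, not taken from the diff.

// Assumed starting state: a video at 1 fps with total_frames = 30.
// Dispatching a new rate of 10 rescales 30 / 1 * 10 = 300 frames,
// and frame_rate becomes 10. Images (media_type == "in_image") are untouched.
import store from './store'
import {setFrameRate} from './reducer/metadata'

store.dispatch(setFrameRate({frame_rate: 10}))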
22 changes: 22 additions & 0 deletions src/reducer/play_status.js
@@ -0,0 +1,22 @@
import {createSlice} from '@reduxjs/toolkit'

const initialState = {play: false}

const playStatusSlice = createSlice({
name: 'play_status',
initialState,
reducers:{
init(state, payload){
state.play = false;
},
togglePlay(state, payload){
state.play = !state.play
},
middleware: (getDefaultMiddleware) => getDefaultMiddleware({
serializableCheck: false
})
}
})

export const {init, togglePlay} = playStatusSlice.actions
export default playStatusSlice.reducer
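
The new slice holds a single boolean play flag. One caveat: middleware is not a createSlice option in Redux Toolkit — serializableCheck belongs in configureStore — so the middleware key nested inside reducers here installs nothing. Usage through the dispatch helpers added in src/processing/actions.js is a one-liner:

// Minimal usage sketch via the helpers in src/processing/actions.js
// (store wiring is in src/store.js below).
import {initPlay, togglePlay} from './processing/actions'

initPlay()      // dispatches "play_status/init"       -> {play: false}
togglePlay()    // dispatches "play_status/togglePlay" -> {play: true}
togglePlay()    //                                     -> {play: false}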
2 changes: 1 addition & 1 deletion src/setupTests.js
@@ -2,4 +2,4 @@
// allows you to do things like:
// expect(element).toHaveTextContent(/react/i)
// learn more: https://github.com/testing-library/jest-dom
import '@testing-library/jest-dom/extend-expect';
import "@testing-library/jest-dom/extend-expect";
7 changes: 5 additions & 2 deletions src/store.js
@@ -5,6 +5,7 @@ import columnDataSlice from './reducer/column_annot'
import currFrameDataSlice from './reducer/current_frame'
import mediaDataSlice from './reducer/media_data'
import metadataSlice from './reducer/metadata'
import playStatusSlice from './reducer/play_status'

export default configureStore({
reducer: {
@@ -13,6 +14,8 @@ export default configureStore({
column_annot: columnDataSlice,
current_frame: currFrameDataSlice,
media_data: mediaDataSlice,
metadata: metadataSlice
metadata: metadataSlice,
play_status: playStatusSlice,
},
})
devTools: process.env.NODE_ENV !== 'production',
})
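
With play_status registered, any component can subscribe to the flag — fabric_canvas.js below does so via useSelector(state => state.play_status.play). A hypothetical play/pause button in the same style (not part of this commit):

// Hypothetical component (not in this commit) driving the new slice.
import React from 'react'
import {useSelector} from 'react-redux'
import {togglePlay} from './processing/actions'

export default function PlayButton(){
    const playing = useSelector(state => state.play_status.play)
    return <button onClick={togglePlay}>{playing ? 'Pause' : 'Play'}</button>
}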
153 changes: 71 additions & 82 deletions src/ui_elements/Components/fabric_canvas.js
@@ -12,10 +12,7 @@ import {INPUT_IMAGE, INPUT_VIDEO} from '../../static_data/const'
const fabric = require("fabric").fabric;


var temp_color;

const canvasBackgroundUpdate = (currFrameData, inputType, image_url, scaling_factor_width, scaling_factor_height, fabricCanvas, video) => {

const canvasBackgroundUpdate = (currFrameData, inputType, image_url, scaling_factor_width, scaling_factor_height, fabricCanvas, video, playing=false) => {
if(inputType == INPUT_IMAGE){ //This is for when images are uploaded
var img = new Image()
img.onload = function() {
@@ -40,40 +37,16 @@
img.src = URL.createObjectURL(image_url)
return;
}else{ //This is for videos

let canvas = document.createElement('canvas');
let context = canvas.getContext('2d');
let [w, h] = [scaling_factor_width, scaling_factor_height]
canvas.width = scaling_factor_width;
canvas.height = scaling_factor_height;

context.drawImage(video, 0, 0, w, h);
let base64ImageData = canvas.toDataURL();

var img = new Image()
img.onload = function() {
fabricCanvas.clear()
if(currFrameData != undefined){
fabric.util.enlivenObjects(currFrameData, function (enlivenedObjects){
enlivenedObjects.forEach(function (obj, index) {
fabricCanvas.add(obj);
});
fabricCanvas.renderAll();
})
}
var f_img = new fabric.Image(img, {
objectCaching: false,
scaleX: scaling_factor_width / img.width,
scaleY: scaling_factor_height / img.height
});
fabricCanvas.setBackgroundImage(f_img);

fabricCanvas.renderAll();
canvas.remove()
};

img.src = base64ImageData
return;
fabricCanvas.remove(...fabricCanvas.getObjects());
if(currFrameData != undefined){
fabric.util.enlivenObjects(currFrameData, function (enlivenedObjects){
enlivenedObjects.forEach(function (obj, index) {
fabricCanvas.add(obj);
});
fabricCanvas.renderAll();
})
}
fabricCanvas.renderAll();
}
}

Expand All @@ -83,8 +56,11 @@ export default function FabricRender(props){
const [upload, setUpload] = useState(false)
const metadata_redux = useSelector(state => state.metadata)
const frame_redux = useSelector(state => state.frame_data)
var image_data = useSelector(state => state.media_data)
var currframe_redux = useSelector(state => state.current_frame)['data']
const image_data_store = useSelector(state => state.media_data)
const currFrame = useSelector(state => state.current_frame)
const currframe_redux = currFrame['data']
const play_redux = useSelector(state => state.play_status.play)
const image_data = image_data_store['data'][props.stream_num]

var save_data = () => {
console.log(fabricCanvas)
@@ -155,9 +131,6 @@
this.selection = true;
});




var el = ReactDOM.findDOMNode(this);
var canvas_elem = document.getElementsByTagName('canvas')[props.stream_num*2]
temp_fabricCanvas.initialize(canvas_elem, {
@@ -169,62 +142,78 @@
setFabricCanvas(temp_fabricCanvas)
}, []);

image_data = image_data['data'][props.stream_num]


useEffect(() => {
if(fabricCanvas){

var video = document.getElementsByTagName('video')[props.stream_num]
if(upload == true){
video.currentTime = (video.duration * ((currframe_redux+1)/metadata_redux['total_frames']))
}
if(metadata_redux['media_type'] == INPUT_VIDEO){
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_VIDEO, image_data[0], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas,video)
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_VIDEO, image_data[0], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas,video,play_redux)
}else if (metadata_redux['media_type'] == INPUT_IMAGE){
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_IMAGE, image_data[currframe_redux], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas)
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_IMAGE, image_data[currframe_redux], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas,play_redux)
}
}
}, [frame_redux])
}, [currFrame])

useEffect(() => {
if(fabricCanvas){
if(fabricCanvas.getObjects().length != 0){
updateFrameData(currindex, fabricCanvas.getObjects())
}
setCurrindex(currframe_redux)
var video = document.getElementsByTagName('video')[props.stream_num]
if(upload == true){
video.currentTime = (video.duration * ((currframe_redux+1)/metadata_redux['total_frames']))
}
if (upload==false){
return
}
}, [currframe_redux])
var video = document.getElementsByTagName('video')[props.stream_num]
if(play_redux){
video.play()
fabricCanvas.remove(...fabricCanvas.getObjects());
fabric.util.requestAnimFrame(function renderLoop() {
fabricCanvas.renderAll();
fabric.util.requestAnimFrame(renderLoop);
});
}else{
video.pause()
let frame_number = Math.ceil((video.currentTime / video.duration) * metadata_redux['total_frames'])
console.log("FRAME NUMBER", frame_number)
setCurrentFrame(frame_number)
//canvasBackgroundUpdate(getFrameData(frame_number), INPUT_VIDEO, image_data[0], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas, video,play_redux)
}
}, [play_redux])

if(fabricCanvas != null && image_data != undefined){

if(fabricCanvas != null && image_data != undefined && upload===false && play_redux===false){
if(image_data.length > 0){
if(metadata_redux['media_type'] == INPUT_VIDEO){
var video = document.getElementsByTagName('video')[props.stream_num]
if(upload === false){
var source = document.createElement('source');
source.src = URL.createObjectURL(image_data[0])
source.type = "video/mp4"
video.appendChild(source)
video.onloadedmetadata = function(){
initAnnotationData(parseInt(video.duration))
initFrameData(parseInt(video.duration))
setTotalFrames(parseInt(video.duration))
video.currentTime=0

}
video.oncanplaythrough = function(){
if(upload === false){
canvasBackgroundUpdate(getFrameData(currindex), INPUT_VIDEO, image_data[0], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas,video)
}
setUpload(true)
var source = document.createElement('source');
source.src = URL.createObjectURL(image_data[0])
source.type = "video/mp4"
video.appendChild(source)
video.onloadedmetadata = function(){
initAnnotationData(parseInt(video.duration))
initFrameData(parseInt(video.duration))
setTotalFrames(parseInt(video.duration))
video.currentTime=0
}
video.oncanplaythrough = function(){
if(upload === false){
var new_vid = new fabric.Image(video, {
objectCaching: false,
scaleX: props.scaling_factor_width / video.videoWidth,
scaleY: props.scaling_factor_height / video.videoHeight
})
video.width = video.videoWidth
video.height = video.videoHeight
fabricCanvas.setBackgroundImage(new_vid);
console.log(new_vid)
console.log(props.scaling_factor_width / video.videoWidth, props.scaling_factor_height / video.videoHeight)
fabricCanvas.renderAll();
}

}else if(upload === true){
//video.currentTime = (video.duration * ((currframe_redux+1)/metadata_redux['total_frames']))
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_VIDEO, image_data[0], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas, video)
setUpload(true)
}
}else if(metadata_redux['media_type'] == INPUT_IMAGE){
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_IMAGE, image_data[currframe_redux], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas)
canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_IMAGE, image_data[currframe_redux], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas,play_redux)
}
//canvasBackgroundUpdate(getFrameData(currframe_redux), INPUT_VIDEO, image_data[0], props.scaling_factor_width, props.scaling_factor_height, fabricCanvas, currframe_redux)
}
}

@@ -238,4 +227,4 @@
</div>
</div>
)
}
}
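
The core of the optimization, condensed: the removed code snapshotted every video frame to a base64 data URL and rebuilt the background image, while the new code sets the <video> element itself as the fabric background once and re-renders on an animation-frame loop while playing. The calls below are the ones used in the diff; the wrapper function is illustrative.

// Condensed pattern from this commit: use the <video> element directly as
// the canvas background and repaint each animation frame while playing.
const fabric = require('fabric').fabric

function attachVideoBackground(fabricCanvas, video, width, height){ // illustrative wrapper
    const bg = new fabric.Image(video, {
        objectCaching: false,                  // re-read the video on every render
        scaleX: width / video.videoWidth,
        scaleY: height / video.videoHeight
    })
    video.width = video.videoWidth             // fabric reads the element's size
    video.height = video.videoHeight
    fabricCanvas.setBackgroundImage(bg)
    fabric.util.requestAnimFrame(function renderLoop(){
        fabricCanvas.renderAll()               // paints the current video frame
        fabric.util.requestAnimFrame(renderLoop)
    })
}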