Particle Filter Code

The document describes MATLAB functions for tracking moving objects in a video file. It initializes System objects for reading video, detecting foreground, analyzing blobs, and displaying tracking results. Functions are defined for detecting objects in each frame, predicting track locations with a particle filter, assigning detections to tracks, updating assigned and unassigned tracks, deleting lost tracks, and creating new tracks from unassigned detections. Tracking results are displayed by drawing bounding boxes and IDs for reliable tracks on the processed video frames.
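
The listing below defines the helper functions but not the main loop that drives them. A minimal driver sketch consistent with those functions follows; the script name, the video file name 'atrium.mp4', the initial id values, and the initializeTracks helper (sketched after setupSystemObjects) are assumptions, not part of the original code.

% Hypothetical driver loop; assumes the functions below are available on the path.
function trackMovingObjects()
global obj;
global tracks;
obj = setupSystemObjects('atrium.mp4');  % video file name is an assumption
tracks = initializeTracks();             % hypothetical helper, sketched below
nextId = 1;                              % id of the next new track
showId = 1;                              % id of the next displayed track
while ~isDone(obj.reader)
    frame = obj.reader.step();
    [centroids, bboxes, mask] = detectObjects(frame);
    predictNewLocationsOfTracks(mask, centroids);
    [assignments, unassignedTracks, unassignedDetections] = ...
        detectionToTrackAssignment(centroids);
    updateAssignedTracks(assignments, centroids, bboxes);
    updateUnassignedTracks(unassignedTracks);
    deleteLostTracks();
    nextId = createNewTracks(centroids, unassignedDetections, bboxes, nextId);
    showId = displayTrackingResults(frame, mask, showId);
end
end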


% function obj = setupSystemObjects(file_name)
% Initialize video I/O.
% Create objects for reading a video from a file, drawing the tracked
% objects in each frame, and playing the video.
% Inputs:
% file_name: string
% Outputs:
% obj: struct
function obj = setupSystemObjects(file_name)
global obj;
global tracks;

% Create a video file reader.
obj.reader = vision.VideoFileReader(file_name);

% Create two video players, one to display the video,
% and one to display the foreground mask.
obj.maskPlayer = vision.VideoPlayer('Position', [740, 400, 700, 400]);
obj.videoPlayer = vision.VideoPlayer('Position', [20, 400, 700, 400]);

% Create System objects for foreground detection and blob analysis.
% The foreground detector is used to segment moving objects from
% the background. It outputs a binary mask, where the pixel value
% of 1 corresponds to the foreground and the value of 0 corresponds
% to the background.
obj.detector = vision.ForegroundDetector('NumGaussians', 3, ...
    'NumTrainingFrames', 40, 'MinimumBackgroundRatio', 0.7);

% Connected groups of foreground pixels are likely to correspond to moving
% objects. The blob analysis System object is used to find such groups
% (called 'blobs' or 'connected components'), and compute their
% characteristics, such as area, centroid, and the bounding box.
obj.blobAnalyser = vision.BlobAnalysis('BoundingBoxOutputPort', true, ...
    'AreaOutputPort', true, 'CentroidOutputPort', true, ...
    'MinimumBlobArea', 600);
end
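
The functions in this listing read and write a global tracks struct array that is never initialized here. A minimal initializer consistent with the fields used in createNewTracks might look like the following; the name initializeTracks is an assumption.

% function tracks = initializeTracks()
% Creates an empty struct array with the fields used by createNewTracks
% (hypothetical helper; not part of the original listing).
function tracks = initializeTracks()
tracks = struct(...
    'id', {}, ...
    'bbox', {}, ...
    'particles', {}, ...
    'age', {}, ...
    'totalVisibleCount', {}, ...
    'consecutiveInvisibleCount', {}, ...
    'showId', {});
end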

% function [centroids, bboxes, mask] = detectObjects(frame)
% Detects moving objects in the current frame.
% Inputs:
% frame: video frame (image array)
% Outputs:
% centroids: array
% bboxes: array
% mask: array
function [centroids, bboxes, mask] = detectObjects(frame)
global obj;
global tracks;
% Detect foreground.
mask = obj.detector.step(frame);

% Apply morphological operations to remove noise and fill in holes.
mask = imopen(mask, strel('rectangle', [3,3]));
mask = imclose(mask, strel('rectangle', [15, 15]));
mask = imfill(mask, 'holes');

% Perform blob analysis to find connected components.
[~, centroids, bboxes] = obj.blobAnalyser.step(mask);
end

% function predictNewLocationsOfTracks(mask,centroids)
% Use the Particle filter to predict the centroid of each
% track in the current frame, and update its bounding box accordingly.
% Inputs:
% mask: array
% centroids: array
% Outputs:
%
function predictNewLocationsOfTracks(mask,centroids)
global obj;
global tracks;
for i = 1:length(tracks)
bbox = tracks(i).bbox;
% Predict the current location of the track.
tracks(i).particles = pfDiffusion(tracks(i).particles,mask);
tracks(i).particles = pfResample(tracks(i).particles,mask,centroids);
predictedCentroid = mean(tracks(i).particles);
% Shift the bounding box so that its center is at
% the predicted location.
predictedCentroid = int32(predictedCentroid(1:2)) - bbox(3:4) / 2;
tracks(i).bbox = [predictedCentroid, bbox(3:4)];
end
end

% function [assignments, unassignedTracks, unassignedDetections] = ...
%     detectionToTrackAssignment(centroids)
% Computes an assignment of detections to tracks that minimizes the total cost.
% Inputs:
% centroids: array
% Outputs:
% assignments: array
% unassignedTracks: array
% unassignedDetections: array
function [assignments, unassignedTracks, unassignedDetections] = ...
detectionToTrackAssignment(centroids)
global obj;
global tracks;
nTracks = length(tracks);
nDetections = size(centroids, 1);
% Compute the cost of assigning each detection to each track.
cost = zeros(nTracks, nDetections);
for i = 1:nTracks
cost(i, :) = distance(mean(tracks(i).particles), centroids);
end

% Solve the assignment problem.
costOfNonAssignment = 20;
[assignments, unassignedTracks, unassignedDetections] = ...
assignDetectionsToTracks(cost, costOfNonAssignment);
end
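
The cost computation above calls a distance helper that is not defined anywhere in this listing. Assuming it returns the Euclidean distance from a track's mean particle position to every detection centroid, a sketch would be:

% function d = distance(point, centroids)
% Euclidean distance from one [x, y] point to each row of centroids
% (assumed implementation; the original helper is not included here).
function d = distance(point, centroids)
diffs = centroids - repmat(point, size(centroids, 1), 1);
d = sqrt(sum(diffs.^2, 2))';   % 1-by-nDetections row vector
end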

% function updateAssignedTracks(assignments, centroids, bboxes)
% Updates each assigned track with the corresponding detection.
% Inputs:
% assignments: array
% centroids: array
% bboxes: array
% Outputs:
%
function updateAssignedTracks(assignments,centroids, bboxes)
global obj;
global tracks;
numAssignedTracks = size(assignments, 1);
for i = 1:numAssignedTracks
trackIdx = assignments(i, 1);
detectionIdx = assignments(i, 2);
centroid = centroids(detectionIdx, :);
bbox = bboxes(detectionIdx, :);

% Correct the estimate of the object's location
% using the new detection.
tracks(trackIdx).particles = pfCorrect(tracks(trackIdx).particles, centroid);
% Replace the predicted bounding box with the detected
% bounding box.
tracks(trackIdx).bbox = bbox;

% Update track's age.
tracks(trackIdx).age = tracks(trackIdx).age + 1;

% Update visibility.
tracks(trackIdx).totalVisibleCount = ...
tracks(trackIdx).totalVisibleCount + 1;
tracks(trackIdx).consecutiveInvisibleCount = 0;
end
end

% function tracks=updateUnassignedTracks(unassignedTracks)
% Mark each unassigned track as invisible, and increase its age by 1.
% Inputs:
% unassignedTracks: array
% Outputs:
% tracks: struct
function tracks=updateUnassignedTracks(unassignedTracks)
global obj;
global tracks;
for i = 1:length(unassignedTracks)
ind = unassignedTracks(i);
tracks(ind).age = tracks(ind).age + 1;
tracks(ind).consecutiveInvisibleCount = ...
tracks(ind).consecutiveInvisibleCount + 1;
end
end

% function deleteLostTracks()
% deletes tracks that have been invisible for too many consecutive frames
% Inputs:
%
% Outputs:
%
function deleteLostTracks()
global obj;
global tracks;
if isempty(tracks)
return;
end

invisibleForTooLong = 30;
ageThreshold = 8;

% Compute the fraction of the track's age for which it was visible.
ages = [tracks(:).age];
totalVisibleCounts = [tracks(:).totalVisibleCount];
visibility = totalVisibleCounts ./ ages;

% Find the indices of 'lost' tracks.
lostInds = (ages < ageThreshold & visibility < 0.4) | ...
[tracks(:).consecutiveInvisibleCount] >= invisibleForTooLong;

% Delete lost tracks.
tracks = tracks(~lostInds);
end

% function nextId = createNewTracks(centroids, unassignedDetections, bboxes, nextId)
% Creates new tracks from unassigned detections.
% Inputs:
% centroids: array
% unassignedDetections: array
% bboxes: array
% nextId: integer
% Outputs:
% nextId: integer
function nextId = createNewTracks(centroids, unassignedDetections, ...
    bboxes, nextId)
global obj;
global tracks;
centroids = centroids(unassignedDetections, :);
bboxes = bboxes(unassignedDetections, :);

for i = 1:size(centroids, 1)
centroid = centroids(i,:);
bbox = bboxes(i, :);

% Create the particle set: 100 particles, all placed on the detection
% centroid (0.5*ones(100,2) times [centroid; centroid] reproduces the
% centroid in every row).
particles = ones(100,2)*0.5*[centroid; centroid];
% Create a new track.
newTrack = struct(...
'id', nextId, ...
'bbox', bbox, ...
'particles', particles, ...
'age', 1, ...
'totalVisibleCount', 1, ...
'consecutiveInvisibleCount', 0, ...
'showId', 0);
% Add it to the array of tracks.
tracks(end + 1) = newTrack;

% Increment the next id.
nextId = nextId + 1;
end
end
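
The initialization above places all 100 particles exactly on the detection centroid, so particle diversity comes only from the diffusion step. An alternative, not part of the original code, would be to add a small Gaussian spread at creation time:

% Hypothetical alternative initialization: centroid plus a small Gaussian
% spread (e.g. 5 px) so the particle set starts with some diversity.
nParticles = 100;
particles = repmat(centroid, nParticles, 1) + randn(nParticles, 2)*5;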

% function showId = displayTrackingResults(frame, mask, showId)
% Draws a bounding box and label ID for each reliable track on the video
% frame and on the foreground mask, then displays the frame and the mask
% in their respective video players.
% Inputs:
% frame: video frame (image array)
% mask: array
% showId: integer
% Outputs:
% showId: integer
function showId = displayTrackingResults( frame,mask,showId)
global obj;
global tracks;
% Convert the frame and the mask to uint8 RGB.
frame = im2uint8(frame);
mask = uint8(repmat(mask, [1, 1, 3])) .* 255;

minVisibleCount = 8;
if ~isempty(tracks)

% Noisy detections tend to result in short-lived tracks.
% Only display tracks that have been visible for more than
% a minimum number of frames.
reliableTrackInds = ...
[tracks(:).totalVisibleCount] > minVisibleCount;
reliableTracks = tracks(reliableTrackInds);

for i=1:length(tracks)
if reliableTrackInds(i) == 1 && tracks(i).showId == 0
tracks(i).showId = showId;
showId = showId + 1;
end
end

% Display the objects. If an object has not been detected
% in this frame, display its predicted bounding box.
if ~isempty(reliableTracks)
% Get bounding boxes.
bboxes = cat(1, reliableTracks.bbox);

% Get ids.
ids = int32([reliableTracks(:).showId]);

% Create labels for objects indicating the ones for
% which we display the predicted rather than the actual
% location.
labels = cellstr(int2str(ids'));
predictedTrackInds = ...
[reliableTracks(:).consecutiveInvisibleCount] > 0;
isPredicted = cell(size(labels));
isPredicted(predictedTrackInds) = {' predicted'};
labels = strcat(labels, isPredicted);

% Draw the objects on the frame: each reliable track's particles as
% unit circles, followed by its bounding box and label.
for i = 1:length(labels)
frame = insertObjectAnnotation(frame, 'circle', ...
    [reliableTracks(i).particles(:,1), reliableTracks(i).particles(:,2), ...
    ones(size(reliableTracks(i).particles,1),1)], labels(i));
end
frame = insertObjectAnnotation(frame, 'rectangle', ...
bboxes, labels);
% Draw the objects on the mask.
mask = insertObjectAnnotation(mask, 'rectangle', ...
bboxes, labels);
end
end

% Display the mask and the frame.
obj.maskPlayer.step(mask);
obj.videoPlayer.step(frame);
end

% function Particles = pfDiffusion(Particles, mask)
% Performs particle diffusion: adds Gaussian noise to each particle and
% clamps the result to the image bounds.
% Inputs:
% Particles: N-by-2 array of particle positions [x, y]
% mask: array
% Outputs:
% Particles: N-by-2 array
function Particles = pfDiffusion(Particles,mask)
% Random-walk motion model: add zero-mean Gaussian noise (sigma = 4 pixels).
Particles = Particles + randn(size(Particles,1), 2)*4;
% Clamp each particle to the image bounds.
for i = 1: size(Particles,1)
if(Particles(i,1)>size(mask,2)-1)
Particles(i,1) = size(mask,2);
end

if(Particles(i,2)>size(mask,1)-1)
Particles(i,2) = size(mask,1);
end

if(Particles(i,1)<1)
Particles(i,1) = 1;
end

if(Particles(i,2)<1)
Particles(i,2) = 1;
end
end

end
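
The per-particle clamping loop above could also be written in vectorized form with identical behavior; a sketch:

% Vectorized equivalent of the clamping loop in pfDiffusion.
Particles(Particles(:,1) > size(mask,2)-1, 1) = size(mask,2);
Particles(Particles(:,2) > size(mask,1)-1, 2) = size(mask,1);
Particles(Particles(:,1) < 1, 1) = 1;
Particles(Particles(:,2) < 1, 2) = 1;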

% function Particles = pfResample(Particles, mask, centroids)
% Performs systematic re-sampling: particles are weighted by the foreground
% mask value at their position and re-drawn in proportion to those weights.
% (The centroids argument is accepted but not used.)
% Inputs:
% Particles: N-by-2 array of particle positions [x, y]
% mask: array
% centroids: array
% Outputs:
% Particles: N-by-2 array
function Particles = pfResample(Particles,mask,centroids)
% Allocate one weight per particle; weights are read from the mask below.
W = zeros(size(Particles,1), 1);
for i=1:size(Particles,1)
W(i) = mask(round(Particles(i,2)),round(Particles(i,1)));
end

if ~sum(W)
return;
end

W=W/sum(W);
cdf = cumsum(W);
M = size(Particles,1);
r_0 = rand / M;
for m = 1 : M
i = find(cdf >= r_0,1,'first');
Particles(m,1) = Particles(i,1);
Particles(m,2) = Particles(i,2);
r_0 = r_0 + 1/M;
end
end
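
A small, self-contained check of the re-sampling step (all values here are hypothetical): particles weighted by a toy mask with a single foreground block should concentrate in that block after re-sampling.

% Toy usage example for pfResample (hypothetical values).
mask = zeros(50, 50);
mask(20:30, 20:30) = 1;             % single foreground block
P = rand(100, 2)*49 + 1;            % random [x, y] positions in [1, 50]
P = pfResample(P, mask, []);        % centroids argument is unused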

% function Particles = pfCorrect(Particles, centroid)
% Correction step: moves all particles onto the detected centroid.
% Inputs:
% Particles: N-by-2 array of particle positions [x, y]
% centroid: 1-by-2 array [x, y]
% Outputs:
% Particles: N-by-2 array
function Particles = pfCorrect(Particles, centroid)
Particles=repmat(centroid,size(Particles,1),1);
end
