C# - A generic error occurred in GDI+

  • Thread starter: Btb4198 (Guest)
I keep getting the error "A generic error occurred in GDI+" at this line:


public Bitmap GetCurrentVideoFrame()
{
    lock (sync)
    {
        try
        {
            return (currentFrame == null) ? null : AForge.Imaging.Image.Clone(currentFrame);
        }
        catch (Exception err)
        {
            MessageBox.Show(err.Message);
            return null;
        }
    }
}

and I do not know why.
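While searching, the closest thing I found is the note in the Bitmap(Stream) documentation that the stream has to stay open for the lifetime of the bitmap. My ByteToImage below disposes the MemoryStream right after constructing the Bitmap, so a standalone sketch like this one (purely an illustration, not my real code) is the kind of thing that is supposed to produce the same "A generic error occurred in GDI+" message, although I am not sure it is really what is happening in my case:

using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;

class GdiPlusStreamSketch
{
    static void Main()
    {
        // Build a tiny bitmap and serialize it to a byte array, standing in
        // for the BMP buffer that comes back from the camera.
        byte[] blob;
        using (Bitmap original = new Bitmap(64, 64))
        using (MemoryStream ms = new MemoryStream())
        {
            original.Save(ms, ImageFormat.Bmp);
            blob = ms.ToArray();
        }

        // Same pattern as my ByteToImage: build the Bitmap from a
        // MemoryStream and dispose the stream straight away.
        MemoryStream source = new MemoryStream(blob);
        Bitmap bmp = new Bitmap(source, false);
        source.Dispose();

        try
        {
            // Touching the bitmap after its backing stream is gone is where
            // GDI+ is reported to throw the generic error.
            bmp.Save("copy.bmp", ImageFormat.Bmp);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
        finally
        {
            bmp.Dispose();
        }
    }
}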

Here is my source code:


using AForge.Video;
using System;
using System.Threading;
using PixeLINK;
using System.Windows.Forms;
using System.Drawing;
using System.IO;

namespace Pixelation_Tool
{
    class PixeLinkDataSource : IVideoSource
    {
        public string Source { get; set; }

        public PixeLinkDataSource()
        {
        }

        TransferBits transfer = new TransferBits();
        public bool IsConnected = false;
        public int m_hCamera = 0;
        // received frames count
        private int framesReceived = 0;
        // received byte count
        private long bytesReceived;
        private bool isRunning = false;
        // frame interval in milliseconds
        private int frameInterval = 0;
        private Thread thread = null;
        private ManualResetEvent stopEvent = null;

        private PixeLINK.PixelFormat format;
        private bool PixeLinkCam;

        public event NewFrameEventHandler NewFrame;
        public event VideoSourceErrorEventHandler VideoSourceError;
        public event PlayingFinishedEventHandler PlayingFinished;

        public bool IsRunning
        {
            get { return isRunning; }
            set { isRunning = value; }
        }

        public int FramesReceived
        {
            get { return framesReceived; }
        }

        public int FrameInterval
        {
            get { return frameInterval; }
            set { frameInterval = value; }
        }

        public long BytesReceived
        {
            get { return bytesReceived; }
        }
        public void SignalToStop()
        {
            // stop thread
            if (thread != null)
            {
                // signal to stop
                stopEvent.Set();
            }
        }

        public void Start()
        {
            bool connected = false;
            if (IsRunning)
                return;

            if (IsConnected == false)
            {
                connected = Cam_Connect();
            }

            if (connected == false)
                return;

            if (thread == null)
            {
                framesReceived = 0;
                bytesReceived = 0;

                // create events
                stopEvent = new ManualResetEvent(false);

                // create and start new thread
                thread = new Thread(new ThreadStart(WorkerThread));
                thread.Name = Source; // mainly for debugging
                thread.Start();
            }
        }

        void Free()
        {
            if (this.IsRunning)
            {
                thread.Abort();
                WaitForStop();
            }
        }
        void WorkerThread()
        {
            DateTime start;
            while (true)
            {
                try
                {
                    // start time
                    start = DateTime.Now;
                    Bitmap temp = Framerate2();

                    if (NewFrame != null)
                        NewFrame(this, new NewFrameEventArgs(temp));

                    // free image
                    temp.Dispose();
                }
                catch (Exception e)
                {
                    // MessageBox.Show(e.Message);
                }
            }
        }

        public Bitmap Framerate2()
        {
            Bitmap temp = null;
            if (0 == m_hCamera)
            {
                return temp;
            }

            IsRunning = true;
            float[] parameters = new float[1];
            parameters[0] = 1.5F;
            transfer.bits = new byte[getBufferSize()];
            FrameDescriptor frameDesc = new FrameDescriptor();
            int destBufferSize = 0;

            // gets frame from camera
            ReturnCode rc = Api.GetNextFrame(m_hCamera, transfer.bits.Length, transfer.bits, ref frameDesc);

            // gets the buffer size of the frame streamed from the camera and formats the frame to BMP format
            Api.FormatImage(transfer.bits, ref frameDesc, PixeLINK.ImageFormat.Bmp, null, ref destBufferSize);
            transfer.FormattedBuf = new byte[destBufferSize];
            rc = Api.FormatImage(transfer.bits, ref frameDesc, PixeLINK.ImageFormat.Bmp, transfer.FormattedBuf, ref destBufferSize);

            temp = ByteToImage(transfer.FormattedBuf);
            framesReceived++;
            bytesReceived += destBufferSize;
            return temp;
        }

        public int getBufferSize()
        {
            try
            {
                FeatureFlags flags = 0;
                int numParms = 1;
                float[] parms = new float[numParms];
                Api.GetFeature(m_hCamera, Feature.PixelFormat, ref flags, ref numParms, parms);
                format = (PixeLINK.PixelFormat)parms[0];
                float bytesPerPixel = Api.BytesPerPixel((PixeLINK.PixelFormat)parms[0]);

                return (int)(bytesPerPixel * (float)GetNumPixels());
            }
            catch (AccessViolationException et)
            {
                return -1;
            }
            catch (Exception err)
            {
                return -1;
            }
        }

        public int GetNumPixels()
        {
            int numParms = 4;
            FeatureFlags flags = 0;
            float[] parms = new float[numParms];
            ReturnCode rc = Api.GetFeature(m_hCamera, Feature.Roi, ref flags, ref numParms, parms);

            int width = System.Convert.ToInt32(parms[(int)FeatureParameterIndex.RoiWidth]);
            int height = System.Convert.ToInt32(parms[(int)FeatureParameterIndex.RoiHeight]);

            // records height and width of the streamed image
            // imageHeight = height;
            // imageWidth = width;

            return (width * height);
        }

        public static Bitmap ByteToImage(byte[] blob)
        {
            MemoryStream mStream = new MemoryStream();
            byte[] pData = blob;
            mStream.Write(pData, 0, Convert.ToInt32(pData.Length));
            Bitmap bm = new Bitmap(mStream, false);
            mStream.Dispose();
            return bm;
        }

        public void Stop()
        {
            if (this.IsRunning)
            {
                Api.Uninitialize(m_hCamera);
                IsRunning = false;
                thread.Abort();
            }
        }

        public void WaitForStop()
        {
            if (thread != null)
            {
                // wait for thread stop
                // thread.Join();

                Free();
            }
        }

        public bool Cam_Connect()
        {
            bool temp = false;
            try
            {
                ReturnCode rc = Api.Initialize(0, ref m_hCamera);
                if (!Api.IsSuccess(rc))
                {
                    MessageBox.Show("ERROR: Unable to initialize a camera");
                    m_hCamera = 0;
                    IsConnected = false;
                    return temp;
                }

                // start camera stream
                rc = Api.SetStreamState(m_hCamera, StreamState.Start);
                PixeLinkCam = true;
                temp = true;
                IsConnected = true;
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                temp = false;
            }

            return temp;
        }
    }
}
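One thing I notice now that it is all pasted in one place: WorkerThread disposes temp immediately after raising NewFrame, so whatever is subscribed to the event has to take its own copy of the frame before the handler returns. I assume the player side is meant to do something along these lines (just a sketch of the idea, not the actual AForge code, and source_NewFrame / sync / lastFrame are only illustrative names):

// Hypothetical subscriber that copies each frame before the source disposes it.
private readonly object sync = new object();
private Bitmap lastFrame;

private void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Clone inside the handler; eventArgs.Frame is disposed by the source
    // as soon as this call returns.
    Bitmap copy = AForge.Imaging.Image.Clone(eventArgs.Frame);

    lock (sync)
    {
        if (lastFrame != null)
            lastFrame.Dispose();
        lastFrame = copy;
    }
}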

So when I connect to my PixeLINK camera using my source code I get that error,
but when I connect to my webcam using your code I never get that error.

private Bitmap Andrew()
{
    Bitmap tempBitmap = new Bitmap(videoSourcePlayer.Width, videoSourcePlayer.Height);
    Graphics.FromImage(tempBitmap);
    Graphics g = Graphics.FromImage(tempBitmap);
    Bitmap originalBmp = videoSourcePlayer.GetCurrentVideoFrame();
    while (originalBmp == null)
    {
        originalBmp = videoSourcePlayer.GetCurrentVideoFrame();
    }
    g.DrawImage(originalBmp, videoSourcePlayer.imagex, videoSourcePlayer.imagey, videoSourcePlayer.imgWidth, videoSourcePlayer.imgHeight);

    return tempBitmap;
}





So it must be something I am doing wrong.
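If the stream lifetime really is the problem, I guess ByteToImage would have to copy the decoded frame into a fresh Bitmap while the MemoryStream is still open, roughly like this (just a sketch, I have not verified that it changes anything, and new Bitmap(Image) gives back a 32bpp ARGB copy, so the pixel format would no longer be the camera's native one):

public static Bitmap ByteToImage(byte[] blob)
{
    // Keep the stream open while GDI+ still needs it, and hand back a copy
    // that owns its own pixel memory instead of the stream-backed bitmap.
    using (MemoryStream mStream = new MemoryStream(blob))
    using (Bitmap decoded = new Bitmap(mStream, false))
    {
        return new Bitmap(decoded);
    }
}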



Can someone please help me?
