C
CMCTS
Guest
All,
I have a device that I need to send a string plus 2bytes of data.
the data is a Int16 value.
when i convert the value to binary i get:
bits = 0000000101000110 which is 326.
the device has a header of 11 characters and only accepts the string if formatted with the header plus the 2 byte value of the initial Int16 for a total of 13 bytes.
However the header plus the bits is 14 bytes.
How can i get this to 13 bytes.
The device will also then send me back a message with a 8 byte header plus the new value of the sent Int16 in a 2 byte format.
initial string
string Finalmessage = header + (Int16)State;
byte[] cmd = Encoding.ASCII.GetBytes(Finalmessage);
ParseRecievedData(WallServer.Sendmessaage(cmd, cmd.Length),cmd.Length);
server emulator method :
static public byte[] Sendmessaage(byte[] msg, int numberOfBytes)
{
Console.PrintLine("Wall Server Initial legnth = " + msg.Length);
byte[] r = msg.Take(13).ToArray();
foreach (char t in msg)
{
Console.PrintLine("data recieved == {0}", t);
}
string rx = new string(msg.Take(numberOfBytes).Select(b => (char)b).ToArray());
Console.PrintLine("Wall Server Message : {0}", rx);
Console.PrintLine(rx);
Console.PrintLine("Wall Server Incoming legnth = " + rx.Length);
byte[] bytesToUse = msg.Skip(11).Take(4).ToArray();
byte[] header = Encoding.ASCII.GetBytes("COMPLETE");
byte[] reply = Combine(header,bytesToUse);
return (reply);
}
what my parser for the return data looks like:
private static void ParseRecievedData(byte[] Rx, int numberOfBytes) //For testing
{
string rx = new string(Rx.Take(numberOfBytes).Select(b => (char)b).ToArray());
byte[] bytes = Encoding.UTF8.GetBytes(rx);
byte[] bytesToUse = bytes.Skip(8).Take(4).ToArray();
Console.PrintLine("New Byte Array Legnth = " + bytesToUse.Length);
foreach (char b in bytesToUse)
{
Console.PrintLine("Byte recieved == {0}", b);
}
Console.PrintLine("Incoming Message = " + rx);
Console.PrintLine("Incoming legnth = " + Rx.Length);
string s = System.Text.Encoding.UTF8.GetString(bytesToUse, 0, bytesToUse.Length);
State = Int16.Parse(s);
}
i cant seem to get this to work.
So initial string is 13 bytes(not including checksum and carriage return).
server emulation sees 14 bytes.
Need the value of initial Int16 (State) to always be sent as only 2 bytes.
Hope this makes sense.
Thank you in advance.
Continue reading...
I have a device that I need to send a string plus 2bytes of data.
the data is a Int16 value.
when i convert the value to binary i get:
bits = 0000000101000110 which is 326.
the device has a header of 11 characters and only accepts the string if formatted with the header plus the 2 byte value of the initial Int16 for a total of 13 bytes.
However the header plus the bits is 14 bytes.
How can i get this to 13 bytes.
The device will also then send me back a message with a 8 byte header plus the new value of the sent Int16 in a 2 byte format.
initial string
string Finalmessage = header + (Int16)State;
byte[] cmd = Encoding.ASCII.GetBytes(Finalmessage);
ParseRecievedData(WallServer.Sendmessaage(cmd, cmd.Length),cmd.Length);
server emulator method :
static public byte[] Sendmessaage(byte[] msg, int numberOfBytes)
{
Console.PrintLine("Wall Server Initial legnth = " + msg.Length);
byte[] r = msg.Take(13).ToArray();
foreach (char t in msg)
{
Console.PrintLine("data recieved == {0}", t);
}
string rx = new string(msg.Take(numberOfBytes).Select(b => (char)b).ToArray());
Console.PrintLine("Wall Server Message : {0}", rx);
Console.PrintLine(rx);
Console.PrintLine("Wall Server Incoming legnth = " + rx.Length);
byte[] bytesToUse = msg.Skip(11).Take(4).ToArray();
byte[] header = Encoding.ASCII.GetBytes("COMPLETE");
byte[] reply = Combine(header,bytesToUse);
return (reply);
}
what my parser for the return data looks like:
private static void ParseRecievedData(byte[] Rx, int numberOfBytes) //For testing
{
string rx = new string(Rx.Take(numberOfBytes).Select(b => (char)b).ToArray());
byte[] bytes = Encoding.UTF8.GetBytes(rx);
byte[] bytesToUse = bytes.Skip(8).Take(4).ToArray();
Console.PrintLine("New Byte Array Legnth = " + bytesToUse.Length);
foreach (char b in bytesToUse)
{
Console.PrintLine("Byte recieved == {0}", b);
}
Console.PrintLine("Incoming Message = " + rx);
Console.PrintLine("Incoming legnth = " + Rx.Length);
string s = System.Text.Encoding.UTF8.GetString(bytesToUse, 0, bytesToUse.Length);
State = Int16.Parse(s);
}
i cant seem to get this to work.
So initial string is 13 bytes(not including checksum and carriage return).
server emulation sees 14 bytes.
Need the value of initial Int16 (State) to always be sent as only 2 bytes.
Hope this makes sense.
Thank you in advance.
Continue reading...