[DllImport("user32.dll")]
static extern int ToUnicodeEx(uint wVirtKey, uint wScanCode, byte []
lpKeyState, [Out, MarshalAs(UnmanagedType.LPWStr)] StringBuilder pwszBuff,
int cchBuff, uint wFlags, IntPtr dwhkl);
Public Declare Function ToUnicodeEx Lib "user32" (
    wVirtKey As UInteger,
    wScanCode As UInteger,
    lpKeyState As Byte(),
    <Out()>
    <MarshalAs(UnmanagedType.LPWStr, SizeConst:=64)>
    ByVal lpChar As System.Text.StringBuilder,
    cchBuff As Integer,
    wFlags As UInteger,
    dwhkl As IntPtr) As Integer
Converts a virtual-key code into the corresponding Unicode character using the keyboard layout.
I've experienced managed heap corruption when calling this function, even when the appropriate StringBuilder.Capacity is passed as the cchBuff parameter, unless SizeConst is also specified on the MarshalAs attribute. If someone more knowledgeable can provide additional information, please edit this.
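For reference, a minimal sketch of a C# declaration with SizeConst applied, as the note above suggests. The value 64 is taken from the VB signature above and is an assumption; it only needs to be at least as large as the cchBuff you pass.

using System;
using System.Runtime.InteropServices;
using System.Text;

static class NativeMethodsWithSizeConst   // hypothetical wrapper class for illustration
{
    // Same ToUnicodeEx signature, but with SizeConst set on the MarshalAs
    // attribute per the note above (64 is an assumed upper bound, not a
    // documented requirement).
    [DllImport("user32.dll")]
    public static extern int ToUnicodeEx(uint wVirtKey, uint wScanCode, byte[] lpKeyState,
        [Out, MarshalAs(UnmanagedType.LPWStr, SizeConst = 64)] StringBuilder pwszBuff,
        int cchBuff, uint wFlags, IntPtr dwhkl);
}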
[DllImport("user32.dll")]
static extern int ToUnicodeEx(uint wVirtKey, uint wScanCode, byte [] lpKeyState, [Out, MarshalAs(UnmanagedType.LPWStr)] System.Text.StringBuilder pwszBuff, int cchBuff, uint wFlags, IntPtr dwhkl);
[DllImport("user32.dll")]
static extern bool GetKeyboardState(byte [] lpKeyState);
[DllImport("user32.dll")]
static extern uint MapVirtualKey(uint uCode, uint uMapType);
[DllImport("user32.dll")]
static extern IntPtr GetKeyboardLayout(uint idThread);
public static string VKCodeToUnicode(uint VKCode)
{
    // Buffer for the translated character(s); capacity matches cchBuff below.
    System.Text.StringBuilder sbString = new System.Text.StringBuilder(5);

    // The keyboard state array must hold all 256 virtual-key states.
    byte[] bKeyState = new byte[256];
    bool bKeyStateStatus = GetKeyboardState(bKeyState);
    if (!bKeyStateStatus)
        return "";

    // Translate the virtual-key code to a scan code, get the current layout,
    // and let ToUnicodeEx produce the character(s) for the current key state.
    uint lScanCode = MapVirtualKey(VKCode, 0);
    IntPtr HKL = GetKeyboardLayout(0);

    ToUnicodeEx(VKCode, lScanCode, bKeyState, sbString, sbString.Capacity, 0, HKL);
    return sbString.ToString();
}
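A minimal usage sketch, assuming the declarations above are compiled into the same class. The virtual-key constant 0x41 ('A' key) is just an illustrative value.

// Hypothetical usage: translate the 'A' key (virtual-key code 0x41) under the
// current keyboard state and layout. On a US layout this returns "a", or "A"
// if Shift is held down at the time of the call.
string ch = VKCodeToUnicode(0x41);
Console.WriteLine(ch);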