crypt32: Fix output buffer handling for CRYPT_STRING_BINARY case.
Signed-off-by: Nikolay Sivov <nsivov@codeweavers.com>
Signed-off-by: Alexandre Julliard <julliard@winehq.org>
commit 5652a19110
parent d18d38bc98
@@ -77,23 +77,20 @@ static BOOL EncodeBinaryToBinaryA(const BYTE *pbBinary,
 {
     BOOL ret = TRUE;
 
+    if (pszString)
+    {
     if (*pcchString < cbBinary)
     {
-        if (!pszString)
-            *pcchString = cbBinary;
-        else
-        {
             SetLastError(ERROR_INSUFFICIENT_BUFFER);
             *pcchString = cbBinary;
             ret = FALSE;
         }
+        else if (cbBinary)
+            memcpy(pszString, pbBinary, cbBinary);
     }
     else
     {
-        if (cbBinary)
-            memcpy(pszString, pbBinary, cbBinary);
-
         *pcchString = cbBinary;
     }
 
     return ret;
 }
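With this change EncodeBinaryToBinaryA checks pszString before it touches the output buffer: a NULL buffer only reports the required length, an undersized buffer fails with ERROR_INSUFFICIENT_BUFFER, and otherwise the raw bytes are copied. On the old path, a NULL pszString with a sufficiently large *pcchString could reach the memcpy. As a rough illustration only (not part of this commit), a minimal caller-side sketch of that CRYPT_STRING_BINARY contract through the public CryptBinaryToStringA entry point might look like the following; the sample data and variable names are made up for the example.

/* Illustrative only: exercises the CRYPT_STRING_BINARY buffer contract
 * implemented by the fixed EncodeBinaryToBinaryA. Link with crypt32. */
#include <windows.h>
#include <wincrypt.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    const BYTE data[] = { 0xde, 0xad, 0xbe, 0xef };
    DWORD len = 0, small_len = 1;
    char small[1], *buf;

    /* NULL output buffer: the call succeeds and only reports the required length. */
    if (!CryptBinaryToStringA(data, sizeof(data), CRYPT_STRING_BINARY, NULL, &len))
        return 1;
    printf("required length: %lu bytes\n", len);

    /* Undersized buffer: the call fails with ERROR_INSUFFICIENT_BUFFER and
     * the length is updated to the required value. */
    if (!CryptBinaryToStringA(data, sizeof(data), CRYPT_STRING_BINARY, small, &small_len))
        printf("expected failure: error %lu, length now %lu\n", GetLastError(), small_len);

    /* Large enough buffer: the raw bytes are copied verbatim. */
    buf = malloc(len);
    if (buf && CryptBinaryToStringA(data, sizeof(data), CRYPT_STRING_BINARY, buf, &len))
        printf("copied %lu bytes\n", len);
    free(buf);
    return 0;
}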