Has anybody managed to get SetCurrentConsoleFontEx to work properly? The Stack Overflow question "SetCurrentConsoleFontEx doesn't set the font from GetCurrentConsoleFontEx" (https://stackoverflow.com/questions/35112414/setcurrentconsolefontex-doesnt-set-the-font-from-getcurrentconsolefontex) says its behaviour is weird, and I can confirm that. If somebody has working code, I'd like to see it.
A small test:
#define _WIN32_WINNT 0x0600
#include <Windows.h>
#include <WinCon.h>
#include <stdio.h>
#include <string.h>
/*
typedef struct _CONSOLE_FONT_INFOEX {
    ULONG cbSize;
    DWORD nFont;
    COORD dwFontSize;
    UINT  FontFamily;
    UINT  FontWeight;
    WCHAR FaceName[LF_FACESIZE];
} CONSOLE_FONT_INFOEX, *PCONSOLE_FONT_INFOEX;
BOOL WINAPI GetCurrentConsoleFontEx(HANDLE hConsoleOutput, BOOL bMaximumWindow, PCONSOLE_FONT_INFOEX lpConsoleCurrentFontEx);
BOOL WINAPI SetCurrentConsoleFontEx(HANDLE hConsoleOutput, BOOL bMaximumWindow, PCONSOLE_FONT_INFOEX lpConsoleCurrentFontEx);
*/
int main(void)
{
    HANDLE out = GetStdHandle(STD_OUTPUT_HANDLE);
    CONSOLE_FONT_INFOEX cfi1, cfi2;
    cfi1.cbSize = sizeof(cfi1);
    cfi2.cbSize = sizeof(cfi2);
    /* read the current font and print its size and face name */
    if (0 == GetCurrentConsoleFontEx(out, FALSE, &cfi1))
        return -1;
    printf("%d,%d %ls\n", cfi1.dwFontSize.X, cfi1.dwFontSize.Y, cfi1.FaceName);
    /* request an 18-pixel-high Lucida Console */
    cfi1.nFont = 0;
    cfi1.dwFontSize.Y = 18;
    cfi1.FontFamily = 0;
    memcpy(&cfi1.FaceName, L"Lucida Console", sizeof(L"Lucida Console"));
    if (0 == SetCurrentConsoleFontEx(out, FALSE, &cfi1))
        return -1;
    /* read back what the console actually selected */
    if (0 == GetCurrentConsoleFontEx(out, FALSE, &cfi2))
        return -1;
    printf("%d,%d %ls\n", cfi2.dwFontSize.X, cfi2.dwFontSize.Y, cfi2.FaceName);
    return 0;
}
Output:
8,8 Terminal
11,18 Lucida Console
I didn't test it, but according to http://www.cplusplus.com/forum/general/118967 and https://stackoverflow.com/questions/36590430/in-windows-does-setcurrentconsolefontex-change-consoles-font-size, SetCurrentConsoleFontEx only honours the X and Y values if they already exist for that font, and the API changes the font of the whole console, not of a particular character (no mixed text).
C Version
void setFontSize(int FontSize)
{
    CONSOLE_FONT_INFOEX info = {0};
    info.cbSize = sizeof(info);
    info.dwFontSize.Y = FontSize;  // leave X as zero: the console picks a matching width
    info.FontWeight = FW_NORMAL;
    wcscpy(info.FaceName, L"Lucida Console");
    SetCurrentConsoleFontEx(GetStdHandle(STD_OUTPUT_HANDLE), FALSE, &info);
}
RosAsm version
[CONSOLE_FONT_INFOEX:
 CONSOLE_FONT_INFOEX.cbSize: len
 CONSOLE_FONT_INFOEX.nFont: D$ 0
 CONSOLE_FONT_INFOEX.FontSize.X: W$ 0
 CONSOLE_FONT_INFOEX.FontSize.Y: W$ 0
 CONSOLE_FONT_INFOEX.FontFamily: D$ 0
 CONSOLE_FONT_INFOEX.FontWeight: D$ &FW_NORMAL
 CONSOLE_FONT_INFOEX.FaceName: W$ 0 #&LF_FACESIZE]

Proc SetFontSize:
    Arguments @FontSize

    C_Call 'msvcrt.wcscpy' CONSOLE_FONT_INFOEX.FaceName, {U$ "Lucida Console", 0}
    mov eax D@FontSize
    mov W$CONSOLE_FONT_INFOEX.FontSize.Y ax
    call 'KERNEL32.GetStdHandle' &STD_OUTPUT_HANDLE
    call 'KERNEL32.SetCurrentConsoleFontEx' eax, &NULL, CONSOLE_FONT_INFOEX
EndP
Yes, that's true, Guga. Timo's example works better than mine, though. Will investigate...
I came up with a workaround, using another function of the WinAPI, with which the background color can be set.
http://www.cplusplus.com/forum/beginner/1640/
.386
.model flat,stdcall
.stack 4096
include c:\masm32\include\masm32rt.inc
extrn system :proc

.data
color  BYTE "color 70",0
color2 BYTE "color 60",0
color3 BYTE "color 01",0

.code
main proc
    push offset color      ; white background, black text
    call system
    push 1000
    call Sleep
    push offset color2     ; yellow background, black text
    call system
    push 1000
    call Sleep
    push offset color3     ; black background, blue text
    call system
    push 0
    call ExitProcess
main endp
end main
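For reference, the console function the linked cplusplus.com thread uses is SetConsoleTextAttribute. A minimal C sketch (my own illustration, using the standard wincon.h attribute flags) of the same kind of color change:
#include <Windows.h>

int main(void)
{
    HANDLE out = GetStdHandle(STD_OUTPUT_HANDLE);

    /* white background, black text (roughly "color 70") */
    SetConsoleTextAttribute(out, BACKGROUND_RED | BACKGROUND_GREEN | BACKGROUND_BLUE);
    Sleep(1000);

    /* black background, blue text (roughly "color 01") */
    SetConsoleTextAttribute(out, FOREGROUND_BLUE);
    Sleep(1000);
    return 0;
}
Note the difference: SetConsoleTextAttribute only affects characters written after the call, while the cmd "color" command repaints the whole screen.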
MSD,
When you include the masm32rt.inc file you already have the processor model set. The ".stack" setting is not used in Win32; a 32-bit PE file sets the stack reserve and stack commit in the linker options.
CHANGE THIS
.386
.model flat,stdcall
.stack 4096
include c:\masm32\include\masm32rt.inc
TO
include c:\masm32\include\masm32rt.inc
.686p
.mmx
.xmm
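If you actually need a different stack size, it goes into the link step instead, e.g. (standard Microsoft link.exe syntax, reserve and commit in bytes; the file name is just a placeholder):
\masm32\bin\link /SUBSYSTEM:CONSOLE /STACK:1048576,4096 test.obj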