There is the function `Sys_Milliseconds` in the Quake 2 source:

Code:
int curtime;

int Sys_Milliseconds (void)
{
    static int      base;
    static qboolean initialized = false;

    if (!initialized)
    {   // let base retain 16 bits of effectively random data
        base = timeGetTime() & 0xffff0000;
        initialized = true;
    }
    curtime = timeGetTime() - base;

    return curtime;
}
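
To see what it actually returns, here is a minimal standalone sketch of the same logic with a stubbed `timeGetTime` (the tick value is made up purely for illustration, it is not a real Windows reading):

Code:
#include <stdio.h>

typedef int qboolean;
#define false 0
#define true  1

/* Hypothetical stand-in for the Win32 timeGetTime(); the starting
   tick count is arbitrary and just simulates some system uptime. */
static unsigned int fake_ticks = 0x1234ABCD;
static unsigned int timeGetTime(void) { return fake_ticks; }

int curtime;

int Sys_Milliseconds (void)
{
    static int      base;
    static qboolean initialized = false;

    if (!initialized)
    {   // let base retain 16 bits of effectively random data
        base = timeGetTime() & 0xffff0000;
        initialized = true;
    }
    curtime = timeGetTime() - base;

    return curtime;
}

int main(void)
{
    printf("first call:  %d\n", Sys_Milliseconds()); // 43981 (0xABCD, the low 16 bits)
    fake_ticks += 16;                                // pretend 16 ms have elapsed
    printf("after 16 ms: %d\n", Sys_Milliseconds()); // 43997
    return 0;
}

If I read it right, the returned values start at whatever the low 16 bits of the tick count happen to be (somewhere in 0-65535) and count up from there, rather than starting at the raw uptime in milliseconds.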
It is used for the frame-time calculation in the Quake 2 main game loop, which pumps the Windows message queue and then spins until at least one millisecond has elapsed:

Code:
//...
oldtime = Sys_Milliseconds();
//...
while (1) {
    //...

    while (PeekMessage(&msg, NULL, 0, 0, PM_NOREMOVE)) {
        if (!GetMessage(&msg, NULL, 0, 0))
            Com_Quit();
        sys_msg_time = msg.time;
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }

    do {
        newtime = Sys_Milliseconds();
        time = newtime - oldtime;
    } while (time < 1);
    //...
}


I'm wondering about the purpose of `Sys_Milliseconds` at all. What's the point of the `base = timeGetTime() & 0xffff0000;` line? Why do they apply the `0xffff0000` mask to the retrieved time? Why not just use `timeGetTime` directly, like this (a small numeric experiment with the mask follows the snippet):

Code:
//...
oldtime = timeGetTime();
//...
while (1) {
    //...

    while (PeekMessage(&msg, NULL, 0, 0, PM_NOREMOVE)) {
        if (!GetMessage(&msg, NULL, 0, 0))
            Com_Quit();
        sys_msg_time = msg.time;
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }

    do {
        newtime = timeGetTime();
        time = newtime - oldtime;
    } while (time < 1);
    //...
}
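
To make the mask arithmetic concrete, here is the same made-up tick value run through it (again, the number is purely hypothetical, simulating roughly 3.5 days of uptime):

Code:
#include <stdio.h>

int main(void)
{
    // Hypothetical timeGetTime() reading: ~3.5 days of uptime in ms
    unsigned int t    = 0x1234ABCD;     // 305441741
    unsigned int base = t & 0xffff0000; // keep the high 16 bits: 0x12340000

    printf("timeGetTime(): %u\n", t);        // 305441741
    printf("base:          %u\n", base);     // 305397760
    printf("t - base:      %u\n", t - base); // 43981 -- only the low 16 bits survive
    return 0;
}

With raw `timeGetTime()` the loop would be working with values anywhere in the 32-bit range, depending on how long the machine has been up, so I suspect the mask has something to do with keeping the numbers small -- but I don't see why that would matter for a simple `newtime - oldtime` delta.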