1
Due to server problems the website is temporarily offline! Visit http://www.smorgasbordet.com/pellesc/ to download Pelles C.
#include <stdio.h>
#include <Windows.h>
#include "pbk.h"
#include "pbm.h"
int main()
{
    P_BINT big;               /* arbitrary-precision integer result */
    P_BNUM decimal;           /* base-10 representation for printing */
    LARGE_INTEGER freq, t_start, t_stop;

    big = pbkCreateBint(0);
    decimal = pbkCreateBnum(10);

    /* Time the factorial computation with the high-resolution counter. */
    QueryPerformanceFrequency(&freq);
    QueryPerformanceCounter(&t_start);
    pbmUbFactorial(big, 2000);  /* compute 2000! */
    QueryPerformanceCounter(&t_stop);

    /* Elapsed seconds = tick delta / ticks-per-second. */
    printf("%lf\n", (double)(t_stop.QuadPart - t_start.QuadPart) / (double)freq.QuadPart);

    /* Convert the binary result to decimal and print it. */
    pbkBintToDecimalBnum(decimal, big);
    pbkPrintBnum(decimal);
    return 0;
}
#include <windows.h>
#include <stdio.h>
int main(void)
{
HMODULE hMod;
PIMAGE_NT_HEADERS pNThdr;
PIMAGE_EXPORT_DIRECTORY pDesc;
DWORD NumbOfNames,i;
DWORD* AddrOfNames;
hMod=LoadLibrary("kernel32.dll");
pNThdr=(PIMAGE_NT_HEADERS)((LPBYTE)hMod + ((PIMAGE_DOS_HEADER)hMod)->e_lfanew);
pDesc=(PIMAGE_EXPORT_DIRECTORY)((LPBYTE)hMod + pNThdr->OptionalHeader.DataDirectory[IMAGE_DIRECTORY_ENTRY_EXPORT].VirtualAddress);
NumbOfNames = pDesc -> NumberOfNames;
AddrOfNames = (DWORD *)((LPBYTE)hMod + pDesc -> AddressOfNames);
for(i=0;i<NumbOfNames;i++)
{
printf("%s\n",(*AddrOfNames+(LPBYTE)hMod));
AddrOfNames=AddrOfNames+1;
}
FreeLibrary(hMod);
return 0;
}
#include <windows.h>
#include <stdio.h>
/* KAST macro by MrBcx */
#define KAST(to_type,old_obj) ((to_type)(old_obj))
int main(void)
{
HMODULE hMod;
PIMAGE_NT_HEADERS pNThdr;
PIMAGE_EXPORT_DIRECTORY pDesc;
DWORD NumbOfNames,i;
DWORD* AddrOfNames;
hMod=LoadLibrary("kernel32.dll");
pNThdr=KAST(PIMAGE_NT_HEADERS,KAST(LPBYTE,hMod) + KAST(PIMAGE_DOS_HEADER,hMod)->e_lfanew);
pDesc=KAST(PIMAGE_EXPORT_DIRECTORY,KAST(LPBYTE,hMod) + pNThdr->OptionalHeader.DataDirectory[IMAGE_DIRECTORY_ENTRY_EXPORT].VirtualAddress);
NumbOfNames = pDesc -> NumberOfNames;
AddrOfNames = KAST(DWORD *,KAST(LPBYTE,hMod) + pDesc -> AddressOfNames);
for(i=0;i<NumbOfNames;i++)
{
printf("%s\n",*AddrOfNames+KAST(LPBYTE,hMod));
AddrOfNames=AddrOfNames+1;
}
FreeLibrary(hMod);
return 0;
}
For OpenAI, a token is a group of three or four characters. The solution I have made is to divide the length of each word by three and add 1 if the word length is greater than three.
OpenAI tokenizer https://platform.openai.com/tokenizer
zlib 1.2.8 source converted for __stdcall.