Compare commits: 3 commits, 3e9d6980df ... d01cf53ff8

Commits in this range (SHA1):
d01cf53ff8
5817060a8b
bc2da4df4c
@@ -322,9 +322,15 @@ pushd %part2_build_dir%
cl %part2_dir%\haversine_generator.c /W4 /WX /Z7 /nologo /Fe:haversine_generator_debug || exit /b 1
cl %part2_dir%\haversine_generator.c /W4 /WX /Z7 /nologo /O2 /Fe:haversine_generator_release || exit /b 1

cl %part2_dir%\haversine_generator.c /DHAV_PROFILER /W4 /WX /Z7 /nologo /Fe:haversine_generator_profiled_debug || exit /b 1
cl %part2_dir%\haversine_generator.c /DHAV_PROFILER /W4 /WX /Z7 /nologo /O2 /Fe:haversine_generator_profiled_release || exit /b 1

cl %part2_dir%\haversine.c /W4 /WX /Z7 /nologo /Fe:haversine_debug || exit /b 1
cl %part2_dir%\haversine.c /W4 /WX /Z7 /nologo /O2 /Fe:haversine_release || exit /b 1

cl %part2_dir%\haversine.c /DHAV_PROFILER /W4 /WX /Z7 /nologo /Fe:haversine_profiled_debug || exit /b 1
cl %part2_dir%\haversine.c /DHAV_PROFILER /W4 /WX /Z7 /nologo /O2 /Fe:haversine_profiled_release || exit /b 1

cl %part2_dir%\listing_0071_os_timer_main.cpp /W4 /WX /Z7 /O2 /nologo /Fe:listing_0071_os_timer_main_release || exit /b 1
cl %part2_dir%\listing_0072_cpu_timer_main.cpp /W4 /WX /Z7 /O2 /nologo /Fe:listing_0072_cpu_timer_main_release || exit /b 1
cl %part2_dir%\listing_0073_cpu_timer_guessfreq_main.cpp /W4 /WX /Z7 /O2 /nologo /Fe:listing_0073_cpu_timer_guessfreq_release || exit /b 1
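The new build lines compile each Part 2 program two extra times with /DHAV_PROFILER, the preprocessor switch that activates the profiler zones; without it the Begin/End zone calls stay in place but their bodies compile out (see the #if defined(HAV_PROFILER) guards in haversine_stdlib.c further down). A minimal sketch of that pattern, with a placeholder comment standing in for the real bookkeeping:

// Sketch only, mirroring the guard style used later in haversine_stdlib.c.
void HAV_Profiler_EndZone(HAV_ProfilerZone zone)
{
#if defined(HAV_PROFILER)
    // ... record elapsed TSC into g_profiler.anchors[zone.index] ...
#else
    (void)zone; // non-profiled builds keep the call but do no work
#endif
}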
@@ -3,83 +3,12 @@
|
||||
#include <stdint.h>
|
||||
#include <stdio.h>
|
||||
#include <Windows.h>
|
||||
#include "haversine_stdlib.h"
|
||||
#include "haversine_stdlib.c"
|
||||
#include <math.h>
|
||||
|
||||
#include "haversine_stdlib.h"
|
||||
#include "listing_0065_haversine_formula.cpp"
|
||||
#include "listing_0074_platform_metrics.cpp"
|
||||
|
||||
typedef struct ProfilerRecord {
|
||||
HAV_Str8 label;
|
||||
u64 elapsed_tsc;
|
||||
u64 elapsed_tsc_child;
|
||||
u64 hits;
|
||||
} ProfilerRecord;
|
||||
|
||||
typedef struct ProfilerAnchor {
|
||||
u64 parent_index;
|
||||
uint32_t index;
|
||||
HAV_Str8 label;
|
||||
u64 tsc;
|
||||
} ProfilerAnchor;
|
||||
|
||||
typedef struct Profiler {
|
||||
ProfilerRecord records[4096];
|
||||
u64 begin_tsc;
|
||||
u64 end_tsc;
|
||||
u64 parent_index;
|
||||
} Profiler;
|
||||
|
||||
static Profiler g_profiler;
|
||||
|
||||
#define Profiler_BeginAnchor(label) Profiler_BeginAnchor_(HAV_STR8(label), __COUNTER__ + 1)
|
||||
static ProfilerAnchor Profiler_BeginAnchor_(HAV_Str8 label, uint32_t index)
|
||||
{
|
||||
ProfilerAnchor result = {0};
|
||||
result.index = index;
|
||||
result.label = label;
|
||||
result.tsc = ReadCPUTimer();
|
||||
result.parent_index = g_profiler.parent_index;
|
||||
g_profiler.parent_index = index;
|
||||
return result;
|
||||
}
|
||||
|
||||
static void Profiler_EndAnchor(ProfilerAnchor anchor)
|
||||
{
|
||||
u64 elapsed_tsc = ReadCPUTimer() - anchor.tsc;
|
||||
ProfilerRecord* record = g_profiler.records + anchor.index;
|
||||
record->elapsed_tsc += elapsed_tsc;
|
||||
record->label = anchor.label;
|
||||
record->hits++;
|
||||
|
||||
ProfilerRecord* parent = g_profiler.records + anchor.parent_index;
|
||||
parent->elapsed_tsc_child += elapsed_tsc;
|
||||
g_profiler.parent_index = anchor.parent_index;
|
||||
}
|
||||
|
||||
static void Profiler_Dump()
|
||||
{
|
||||
u64 total_elapsed_tsc = g_profiler.end_tsc - g_profiler.begin_tsc;
|
||||
u64 cpu_frequency = EstimateCPUTimerFreq();
|
||||
if (cpu_frequency)
|
||||
printf("\nTotal time: %0.4fms (CPU freq %llu)\n", 1000.0 * (f64)total_elapsed_tsc / (f64)cpu_frequency, cpu_frequency);
|
||||
|
||||
for (uint32_t index = 1; index < HAV_ARRAY_UCOUNT(g_profiler.records); index++) {
|
||||
ProfilerRecord const *record = g_profiler.records + index;
|
||||
if (!record->elapsed_tsc)
|
||||
break;
|
||||
|
||||
u64 record_exclusive_tsc = record->elapsed_tsc - record->elapsed_tsc_child;
|
||||
f64 percent = total_elapsed_tsc ? (f64)record_exclusive_tsc / (f64)total_elapsed_tsc * 100.0 : 100.0;
|
||||
printf(" %.*s[%zu]: %llu (%.2f%%", HAV_STR8_FMT(record->label), record->hits, record_exclusive_tsc, percent);
|
||||
if (record->elapsed_tsc_child) {
|
||||
f64 percent_w_children = total_elapsed_tsc ? ((f64)record->elapsed_tsc / (f64)total_elapsed_tsc * 100.0) : 100.0;
|
||||
printf(", %.2f%% w/children", percent_w_children);
|
||||
}
|
||||
printf(")\n");
|
||||
}
|
||||
}
|
||||
#include "haversine_stdlib.c"
|
||||
|
||||
typedef struct Str8FindResult {
|
||||
bool found;
|
||||
@@ -163,20 +92,17 @@ int main(int argc, char **argv)
|
||||
if (argc == 3)
|
||||
arg_answers = (HAV_Str8){.data = argv[2], .size = strlen(argv[2])};
|
||||
|
||||
ProfilerAnchor prof_file_read_anchor = Profiler_BeginAnchor("File Read");
|
||||
HAV_Buffer json_buffer = HAV_FileRead(arg_json.data);
|
||||
Profiler_EndAnchor(prof_file_read_anchor);
|
||||
|
||||
if (!HAV_BufferIsValid(json_buffer))
|
||||
return 0;
|
||||
|
||||
ProfilerAnchor prof_parse_and_sum_anchor = Profiler_BeginAnchor("Parse&Hav Sum");
|
||||
HAV_ProfilerZone prof_parse_and_sum_zone = HAV_Profiler_BeginZone("Parse&Hav Sum");
|
||||
f64 haversine_sum = 0;
|
||||
size_t pair_count = 0;
|
||||
HAV_Str8 json_it = (HAV_Str8){.data = json_buffer.data, .size = json_buffer.size};
|
||||
for (;; pair_count++) {
|
||||
ProfilerAnchor prof_json_parse_anchor = Profiler_BeginAnchor("Parse");
|
||||
f64 x0 = 0.f, y0 = 0.f, x1 = 0.f, y1 = 0.f;
|
||||
HAV_ProfilerZone prof_json_parse_zone = HAV_Profiler_BeginZoneBandwidth("Parse", json_it.size);
|
||||
HAV_Str8BinarySplitResult x0_key = HAV_Str8_BinarySplit(json_it, HAV_STR8("x0"));
|
||||
if (x0_key.rhs.size) {
|
||||
Str8FindResult x0_find_value = FindFirstCharThatLooksLikeANumber(x0_key.rhs);
|
||||
@@ -209,16 +135,16 @@ int main(int argc, char **argv)
|
||||
HAV_STR8_FMT(y1_value.lhs), y1);
|
||||
#endif
|
||||
|
||||
Profiler_EndAnchor(prof_json_parse_anchor);
|
||||
HAV_Profiler_EndZone(prof_json_parse_zone);
|
||||
if (!x0_key.rhs.size)
|
||||
break;
|
||||
|
||||
ProfilerAnchor prof_haversine_sum_anchor = Profiler_BeginAnchor("Hav Sum");
|
||||
HAV_ProfilerZone prof_haversine_sum_zone = HAV_Profiler_BeginZoneBandwidth("Hav Sum", sizeof(x0) + sizeof(y0) + sizeof(x1) + sizeof(y1));
|
||||
f64 haversine_dist = ReferenceHaversine(x0, y0, x1, y1, /*EarthRadius*/ 6372.8);
|
||||
haversine_sum += haversine_dist;
|
||||
Profiler_EndAnchor(prof_haversine_sum_anchor);
|
||||
HAV_Profiler_EndZone(prof_haversine_sum_zone);
|
||||
}
|
||||
Profiler_EndAnchor(prof_parse_and_sum_anchor);
|
||||
HAV_Profiler_EndZone(prof_parse_and_sum_zone);
|
||||
|
||||
haversine_sum /= pair_count;
|
||||
size_t input_size = json_buffer.size;
|
||||
@@ -242,6 +168,6 @@ int main(int argc, char **argv)
|
||||
}
|
||||
|
||||
g_profiler.end_tsc = ReadCPUTimer();
|
||||
Profiler_Dump();
|
||||
HAV_Profiler_Dump();
|
||||
return 0;
|
||||
}
|
||||
|
@@ -3,10 +3,12 @@
|
||||
#include <stdint.h>
|
||||
#include <stdio.h>
|
||||
#include <Windows.h>
|
||||
#include "haversine_stdlib.h"
|
||||
#include "haversine_stdlib.c"
|
||||
#include <math.h>
|
||||
|
||||
#include "haversine_stdlib.h"
|
||||
#include "listing_0074_platform_metrics.cpp"
|
||||
#include "listing_0065_haversine_formula.cpp"
|
||||
#include "haversine_stdlib.c"
|
||||
|
||||
#define PRINT_USAGE HAV_PrintLnFmt("Usage: %s [uniform/cluster] [random seed] [number of coordinate pairs to generate]", argv[0])
|
||||
int main(int argc, char **argv)
|
||||
|
@@ -55,6 +55,73 @@ bool HAV_CharIsDigit(char ch)
|
||||
return result;
|
||||
}
|
||||
|
||||
void HAV_Profiler_Dump()
|
||||
{
|
||||
u64 total_elapsed_tsc = g_profiler.end_tsc - g_profiler.begin_tsc;
|
||||
u64 cpu_frequency = EstimateCPUTimerFreq();
|
||||
if (cpu_frequency)
|
||||
printf("\nTotal time: %0.4fms (CPU freq %llu)\n", 1000.0 * (f64)total_elapsed_tsc / (f64)cpu_frequency, cpu_frequency);
|
||||
|
||||
for (uint32_t index = 1; index < HAV_ARRAY_UCOUNT(g_profiler.anchors); index++) {
|
||||
HAV_ProfilerAnchor const *anchor = g_profiler.anchors + index;
|
||||
if (!anchor->elapsed_tsc_inclusive)
|
||||
break;
|
||||
|
||||
f64 percent = total_elapsed_tsc ? (f64)anchor->elapsed_tsc_exclusive / (f64)total_elapsed_tsc * 100.0 : 100.0;
|
||||
printf(" %.*s[%zu]: %llu (%.2f%%", HAV_STR8_FMT(anchor->label), anchor->hits, anchor->elapsed_tsc_exclusive, percent);
|
||||
if (anchor->elapsed_tsc_inclusive != anchor->elapsed_tsc_exclusive) {
|
||||
f64 percent_w_children = total_elapsed_tsc ? ((f64)anchor->elapsed_tsc_inclusive / (f64)total_elapsed_tsc * 100.0) : 100.0;
|
||||
printf(", %.2f%% w/children", percent_w_children);
|
||||
}
|
||||
printf(")");
|
||||
|
||||
if (anchor->byte_count) {
|
||||
f64 megabytes_processed = anchor->byte_count / (1024.f * 1024.f);
|
||||
f64 elapsed_s = anchor->elapsed_tsc_inclusive / HAV_CAST(f64)cpu_frequency;
|
||||
f64 bytes_per_s = anchor->byte_count / elapsed_s;
|
||||
f64 gigabytes_bandwidth = bytes_per_s / (1024.f * 1024.f * 1024.f);
|
||||
printf(" %.3fmb at %.2fgb/s", megabytes_processed, gigabytes_bandwidth);
|
||||
}
|
||||
printf("\n");
|
||||
}
|
||||
}
|
||||
|
||||
HAV_ProfilerZone HAV_Profiler_BeginZone_(HAV_Str8 label, uint32_t index, u64 byte_count)
|
||||
{
|
||||
HAV_ProfilerZone result = {0};
|
||||
#if defined(HAV_PROFILER)
|
||||
result.index = index;
|
||||
result.label = label;
|
||||
result.tsc = ReadCPUTimer();
|
||||
result.elapsed_tsc_inclusive = g_profiler.anchors[index].elapsed_tsc_inclusive;
|
||||
result.byte_count = byte_count;
|
||||
result.parent_index = g_profiler.parent_index;
|
||||
g_profiler.parent_index = index;
|
||||
#else
|
||||
(void)label; (void)index; (void)byte_count;
|
||||
#endif
|
||||
return result;
|
||||
}
|
||||
|
||||
void HAV_Profiler_EndZone(HAV_ProfilerZone zone)
|
||||
{
|
||||
#if defined(HAV_PROFILER)
|
||||
u64 elapsed_tsc = ReadCPUTimer() - zone.tsc;
|
||||
HAV_ProfilerAnchor* anchor = g_profiler.anchors + zone.index;
|
||||
HAV_ProfilerAnchor* parent = g_profiler.anchors + zone.parent_index;
|
||||
|
||||
anchor->elapsed_tsc_exclusive += elapsed_tsc;
|
||||
anchor->elapsed_tsc_inclusive = zone.elapsed_tsc_inclusive + elapsed_tsc;
|
||||
anchor->label = zone.label;
|
||||
anchor->byte_count += zone.byte_count;
|
||||
anchor->hits++;
|
||||
parent->elapsed_tsc_exclusive -= elapsed_tsc;
|
||||
g_profiler.parent_index = zone.parent_index;
|
||||
#else
|
||||
(void)zone;
|
||||
#endif
|
||||
}
|
||||
|
||||
#pragma warning(push)
|
||||
#pragma warning(disable: 4146) // warning C4146: unary minus operator applied to unsigned type, result still unsigned
|
||||
uint32_t HAV_PCG32_Pie (uint64_t *state)
|
||||
@@ -152,6 +219,7 @@ HAV_Buffer HAV_FileRead(char const *file_path)
|
||||
// NOTE: Read file to buffer
|
||||
// =========================================================================
|
||||
DWORD bytes_read = 0;
|
||||
HAV_ProfilerZone prof_file_read_zone = HAV_Profiler_BeginZoneBandwidth("File Read", file_size);
|
||||
BOOL read_file_result = ReadFile(
|
||||
/*HANDLE hFile*/ file_handle,
|
||||
/*LPVOID lpBuffer*/ buffer,
|
||||
@@ -159,6 +227,7 @@ HAV_Buffer HAV_FileRead(char const *file_path)
|
||||
/*LPDWORD lpNumberOfBytesRead*/ &bytes_read,
|
||||
/*LPOVERLAPPED lpOverlapped*/ NULL
|
||||
);
|
||||
HAV_Profiler_EndZone(prof_file_read_zone);
|
||||
|
||||
// NOTE: Handle read result
|
||||
// =========================================================================
|
||||
|
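Two details of HAV_Profiler_EndZone above are worth calling out: the parent's elapsed_tsc_exclusive has the child's elapsed time subtracted out, so exclusive times never double count nested work, and the inclusive time is written as the value captured at BeginZone plus the just-measured elapsed time (rather than accumulated with +=), so a zone that re-enters itself under the same anchor does not inflate its own inclusive total. A hypothetical nesting, with made-up function names, shows where each cycle ends up:

// Hypothetical usage; DoSetup() and DoInnerWork() are illustrative names only.
HAV_ProfilerZone outer = HAV_Profiler_BeginZone("Outer");
DoSetup();                                             // counted as "Outer" exclusive time
HAV_ProfilerZone inner = HAV_Profiler_BeginZone("Inner");
DoInnerWork();                                         // counted as "Inner"; subtracted from "Outer" exclusive
HAV_Profiler_EndZone(inner);
HAV_Profiler_EndZone(outer);
// In HAV_Profiler_Dump, "Outer" reports its exclusive cycles plus a
// "% w/children" figure whose inclusive total also covers "Inner".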
@@ -60,6 +60,41 @@ HAV_Str8BinarySplitResult HAV_Str8_BinarySplit(HAV_Str8 buffer, HAV_Str8 find);
|
||||
bool HAV_CharIsWhiteSpace(char ch);
|
||||
bool HAV_CharIsDigit(char ch);
|
||||
|
||||
// NOTE: Profiler
|
||||
// ============================================================================
|
||||
typedef struct HAV_ProfilerAnchor {
|
||||
HAV_Str8 label;
|
||||
u64 elapsed_tsc_exclusive; // Does not include children
|
||||
u64 elapsed_tsc_inclusive; // Includes children
|
||||
u64 byte_count;
|
||||
u64 hits;
|
||||
} HAV_ProfilerAnchor;
|
||||
|
||||
typedef struct HAV_Profiler {
|
||||
HAV_ProfilerAnchor anchors[4096];
|
||||
u64 begin_tsc;
|
||||
u64 end_tsc;
|
||||
u64 parent_index;
|
||||
} HAV_Profiler;
|
||||
|
||||
typedef struct HAV_ProfilerZone {
|
||||
u64 parent_index;
|
||||
uint32_t index;
|
||||
HAV_Str8 label;
|
||||
u64 elapsed_tsc_inclusive;
|
||||
u64 tsc;
|
||||
u64 byte_count;
|
||||
} HAV_ProfilerZone;
|
||||
|
||||
static HAV_Profiler g_profiler;
|
||||
|
||||
#define HAV_Profiler_BeginZone(label) HAV_Profiler_BeginZone_(HAV_STR8(label), __COUNTER__ + 1, 0)
|
||||
#define HAV_Profiler_BeginZoneBandwidth(label, byte_count) HAV_Profiler_BeginZone_(HAV_STR8(label), __COUNTER__ + 1, byte_count)
|
||||
|
||||
static void HAV_Profiler_Dump();
|
||||
static HAV_ProfilerZone HAV_Profiler_BeginZone_(HAV_Str8 label, uint32_t index, u64 byte_count);
|
||||
static void HAV_Profiler_EndZone(HAV_ProfilerZone zone);
|
||||
|
||||
// NOTE: PCG32
|
||||
// ============================================================================
|
||||
// NOTE: PCG RNG from Demetri Spanos: https://github.com/demetri/scribbles
|
||||
|
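The zone macros above hand each call site a unique anchor slot via __COUNTER__ + 1, leaving slot 0 free to act as the implicit parent of top-level zones, and the Bandwidth variant additionally records how many bytes the zone processed. A usage sketch, assuming the program is built with /DHAV_PROFILER; ParseChunk() and this main() are illustrative, not part of the change set:

// Sketch only; ParseChunk() is a made-up call site.
static void ParseChunk(HAV_Str8 chunk)
{
    HAV_ProfilerZone zone = HAV_Profiler_BeginZoneBandwidth("ParseChunk", chunk.size);
    // ... work being measured ...
    HAV_Profiler_EndZone(zone);
}

int main(void)
{
    g_profiler.begin_tsc = ReadCPUTimer();
    // ... call ParseChunk() and anything else worth timing ...
    g_profiler.end_tsc = ReadCPUTimer();
    HAV_Profiler_Dump(); // per-anchor exclusive/inclusive time, hit counts, and throughput
    return 0;
}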
part2/listing_0076_simple_profiler.cpp (new file, 101 lines)
@@ -0,0 +1,101 @@
|
||||
/* ========================================================================
|
||||
|
||||
(C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Please see https://computerenhance.com for more information
|
||||
|
||||
======================================================================== */
|
||||
|
||||
/* ========================================================================
|
||||
LISTING 76
|
||||
======================================================================== */
|
||||
|
||||
#include "listing_0074_platform_metrics.cpp"
|
||||
|
||||
struct profile_anchor
|
||||
{
|
||||
u64 TSCElapsed;
|
||||
u64 HitCount;
|
||||
char const *Label;
|
||||
};
|
||||
|
||||
struct profiler
|
||||
{
|
||||
profile_anchor Anchors[4096];
|
||||
|
||||
u64 StartTSC;
|
||||
u64 EndTSC;
|
||||
};
|
||||
static profiler GlobalProfiler;
|
||||
|
||||
struct profile_block
|
||||
{
|
||||
profile_block(char const *Label_, u32 AnchorIndex_)
|
||||
{
|
||||
AnchorIndex = AnchorIndex_;
|
||||
Label = Label_;
|
||||
StartTSC = ReadCPUTimer();
|
||||
}
|
||||
|
||||
~profile_block(void)
|
||||
{
|
||||
u64 Elapsed = ReadCPUTimer() - StartTSC;
|
||||
|
||||
profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
|
||||
Anchor->TSCElapsed += Elapsed;
|
||||
++Anchor->HitCount;
|
||||
|
||||
/* NOTE(casey): This write happens every time solely because there is no
|
||||
straightforward way in C++ to have the same ease-of-use. In a better programming
|
||||
language, it would be simple to have the anchor points gathered and labeled at compile
|
||||
time, and this repetitive write would be eliminated. */
|
||||
Anchor->Label = Label;
|
||||
}
|
||||
|
||||
char const *Label;
|
||||
u64 StartTSC;
|
||||
u32 AnchorIndex;
|
||||
};
|
||||
|
||||
#define NameConcat2(A, B) A##B
|
||||
#define NameConcat(A, B) NameConcat2(A, B)
|
||||
#define TimeBlock(Name) profile_block NameConcat(Block, __LINE__)(Name, __COUNTER__ + 1);
|
||||
#define TimeFunction TimeBlock(__func__)
|
||||
|
||||
static void PrintTimeElapsed(u64 TotalTSCElapsed, profile_anchor *Anchor)
|
||||
{
|
||||
u64 Elapsed = Anchor->TSCElapsed;
|
||||
f64 Percent = 100.0 * ((f64)Elapsed / (f64)TotalTSCElapsed);
|
||||
printf(" %s[%llu]: %llu (%.2f%%)\n", Anchor->Label, Anchor->HitCount, Elapsed, Percent);
|
||||
}
|
||||
|
||||
static void BeginProfile(void)
|
||||
{
|
||||
GlobalProfiler.StartTSC = ReadCPUTimer();
|
||||
}
|
||||
|
||||
static void EndAndPrintProfile()
|
||||
{
|
||||
GlobalProfiler.EndTSC = ReadCPUTimer();
|
||||
u64 CPUFreq = EstimateCPUTimerFreq();
|
||||
|
||||
u64 TotalCPUElapsed = GlobalProfiler.EndTSC - GlobalProfiler.StartTSC;
|
||||
|
||||
if(CPUFreq)
|
||||
{
|
||||
printf("\nTotal time: %0.4fms (CPU freq %llu)\n", 1000.0 * (f64)TotalCPUElapsed / (f64)CPUFreq, CPUFreq);
|
||||
}
|
||||
|
||||
for(u32 AnchorIndex = 0; AnchorIndex < ArrayCount(GlobalProfiler.Anchors); ++AnchorIndex)
|
||||
{
|
||||
profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
|
||||
if(Anchor->TSCElapsed)
|
||||
{
|
||||
PrintTimeElapsed(TotalCPUElapsed, Anchor);
|
||||
}
|
||||
}
|
||||
}
|
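Listing 76 is the RAII flavor of the same idea: a profile_block records the TSC in its constructor and accumulates into its anchor in the destructor, with TimeBlock/TimeFunction hiding the __COUNTER__ and __LINE__ plumbing. A usage sketch follows; DoWork() and this main() are illustrative, not part of the listing:

#include "listing_0076_simple_profiler.cpp"

static void DoWork(void)
{
    TimeFunction; // scoped profile_block labeled with __func__
    // ... work to be measured ...
}

int main(void)
{
    BeginProfile();
    DoWork();
    {
        TimeBlock("Extra section"); // explicit label for an inner scope
        // ... more work ...
    }
    EndAndPrintProfile(); // prints total time plus per-anchor hits and % of total TSC
    return 0;
}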
part2/listing_0077_profiled_lookup_json_parser.cpp (new file, 509 lines)
@@ -0,0 +1,509 @@
|
||||
/* ========================================================================
|
||||
|
||||
(C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Please see https://computerenhance.com for more information
|
||||
|
||||
======================================================================== */
|
||||
|
||||
/* ========================================================================
|
||||
LISTING 77
|
||||
======================================================================== */
|
||||
|
||||
enum json_token_type
|
||||
{
|
||||
Token_end_of_stream,
|
||||
Token_error,
|
||||
|
||||
Token_open_brace,
|
||||
Token_open_bracket,
|
||||
Token_close_brace,
|
||||
Token_close_bracket,
|
||||
Token_comma,
|
||||
Token_colon,
|
||||
Token_string_literal,
|
||||
Token_number,
|
||||
Token_true,
|
||||
Token_false,
|
||||
Token_null,
|
||||
|
||||
Token_count,
|
||||
};
|
||||
|
||||
struct json_token
|
||||
{
|
||||
json_token_type Type;
|
||||
buffer Value;
|
||||
};
|
||||
|
||||
struct json_element
|
||||
{
|
||||
buffer Label;
|
||||
buffer Value;
|
||||
json_element *FirstSubElement;
|
||||
|
||||
json_element *NextSibling;
|
||||
};
|
||||
|
||||
struct json_parser
|
||||
{
|
||||
buffer Source;
|
||||
u64 At;
|
||||
b32 HadError;
|
||||
};
|
||||
|
||||
static b32 IsJSONDigit(buffer Source, u64 At)
|
||||
{
|
||||
b32 Result = false;
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Val = Source.Data[At];
|
||||
Result = ((Val >= '0') && (Val <= '9'));
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static b32 IsJSONWhitespace(buffer Source, u64 At)
|
||||
{
|
||||
b32 Result = false;
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Val = Source.Data[At];
|
||||
Result = ((Val == ' ') || (Val == '\t') || (Val == '\n') || (Val == '\r'));
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static b32 IsParsing(json_parser *Parser)
|
||||
{
|
||||
b32 Result = !Parser->HadError && IsInBounds(Parser->Source, Parser->At);
|
||||
return Result;
|
||||
}
|
||||
|
||||
static void Error(json_parser *Parser, json_token Token, char const *Message)
|
||||
{
|
||||
Parser->HadError = true;
|
||||
fprintf(stderr, "ERROR: \"%.*s\" - %s\n", (u32)Token.Value.Count, (char *)Token.Value.Data, Message);
|
||||
}
|
||||
|
||||
static void ParseKeyword(buffer Source, u64 *At, buffer KeywordRemaining, json_token_type Type, json_token *Result)
|
||||
{
|
||||
if((Source.Count - *At) >= KeywordRemaining.Count)
|
||||
{
|
||||
buffer Check = Source;
|
||||
Check.Data += *At;
|
||||
Check.Count = KeywordRemaining.Count;
|
||||
if(AreEqual(Check, KeywordRemaining))
|
||||
{
|
||||
Result->Type = Type;
|
||||
Result->Value.Count += KeywordRemaining.Count;
|
||||
*At += KeywordRemaining.Count;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static json_token GetJSONToken(json_parser *Parser)
|
||||
{
|
||||
json_token Result = {};
|
||||
|
||||
buffer Source = Parser->Source;
|
||||
u64 At = Parser->At;
|
||||
|
||||
while(IsJSONWhitespace(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
Result.Type = Token_error;
|
||||
Result.Value.Count = 1;
|
||||
Result.Value.Data = Source.Data + At;
|
||||
u8 Val = Source.Data[At++];
|
||||
switch(Val)
|
||||
{
|
||||
case '{': {Result.Type = Token_open_brace;} break;
|
||||
case '[': {Result.Type = Token_open_bracket;} break;
|
||||
case '}': {Result.Type = Token_close_brace;} break;
|
||||
case ']': {Result.Type = Token_close_bracket;} break;
|
||||
case ',': {Result.Type = Token_comma;} break;
|
||||
case ':': {Result.Type = Token_colon;} break;
|
||||
|
||||
case 'f':
|
||||
{
|
||||
ParseKeyword(Source, &At, CONSTANT_STRING("alse"), Token_false, &Result);
|
||||
} break;
|
||||
|
||||
case 'n':
|
||||
{
|
||||
ParseKeyword(Source, &At, CONSTANT_STRING("ull"), Token_null, &Result);
|
||||
} break;
|
||||
|
||||
case 't':
|
||||
{
|
||||
ParseKeyword(Source, &At, CONSTANT_STRING("rue"), Token_true, &Result);
|
||||
} break;
|
||||
|
||||
case '"':
|
||||
{
|
||||
Result.Type = Token_string_literal;
|
||||
|
||||
u64 StringStart = At;
|
||||
|
||||
while(IsInBounds(Source, At) && (Source.Data[At] != '"'))
|
||||
{
|
||||
if(IsInBounds(Source, (At + 1)) &&
|
||||
(Source.Data[At] == '\\') &&
|
||||
(Source.Data[At + 1] == '"'))
|
||||
{
|
||||
// NOTE(casey): Skip escaped quotation marks
|
||||
++At;
|
||||
}
|
||||
|
||||
++At;
|
||||
}
|
||||
|
||||
Result.Value.Data = Source.Data + StringStart;
|
||||
Result.Value.Count = At - StringStart;
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
} break;
|
||||
|
||||
case '-':
|
||||
case '0':
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
{
|
||||
u64 Start = At - 1;
|
||||
Result.Type = Token_number;
|
||||
|
||||
// NOTE(casey): Move past a leading negative sign if one exists
|
||||
if((Val == '-') && IsInBounds(Source, At))
|
||||
{
|
||||
Val = Source.Data[At++];
|
||||
}
|
||||
|
||||
// NOTE(casey): If the leading digit wasn't 0, parse any digits before the decimal point
|
||||
if(Val != '0')
|
||||
{
|
||||
while(IsJSONDigit(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(casey): If there is a decimal point, parse any digits after the decimal point
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '.'))
|
||||
{
|
||||
++At;
|
||||
while(IsJSONDigit(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(casey): If it's in scientific notation, parse any digits after the "e"
|
||||
if(IsInBounds(Source, At) && ((Source.Data[At] == 'e') || (Source.Data[At] == 'E')))
|
||||
{
|
||||
++At;
|
||||
|
||||
if(IsInBounds(Source, At) && ((Source.Data[At] == '+') || (Source.Data[At] == '-')))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
|
||||
while(IsJSONDigit(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
}
|
||||
|
||||
Result.Value.Count = At - Start;
|
||||
} break;
|
||||
|
||||
default:
|
||||
{
|
||||
} break;
|
||||
}
|
||||
}
|
||||
|
||||
Parser->At = At;
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static json_element *ParseJSONList(json_parser *Parser, json_token_type EndType, b32 HasLabels);
|
||||
static json_element *ParseJSONElement(json_parser *Parser, buffer Label, json_token Value)
|
||||
{
|
||||
b32 Valid = true;
|
||||
|
||||
json_element *SubElement = 0;
|
||||
if(Value.Type == Token_open_bracket)
|
||||
{
|
||||
SubElement = ParseJSONList(Parser, Token_close_bracket, false);
|
||||
}
|
||||
else if(Value.Type == Token_open_brace)
|
||||
{
|
||||
SubElement = ParseJSONList(Parser, Token_close_brace, true);
|
||||
}
|
||||
else if((Value.Type == Token_string_literal) ||
|
||||
(Value.Type == Token_true) ||
|
||||
(Value.Type == Token_false) ||
|
||||
(Value.Type == Token_null) ||
|
||||
(Value.Type == Token_number))
|
||||
{
|
||||
// NOTE(casey): Nothing to do here, since there is no additional data
|
||||
}
|
||||
else
|
||||
{
|
||||
Valid = false;
|
||||
}
|
||||
|
||||
json_element *Result = 0;
|
||||
|
||||
if(Valid)
|
||||
{
|
||||
Result = (json_element *)malloc(sizeof(json_element));
|
||||
Result->Label = Label;
|
||||
Result->Value = Value.Value;
|
||||
Result->FirstSubElement = SubElement;
|
||||
Result->NextSibling = 0;
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static json_element *ParseJSONList(json_parser *Parser, json_token_type EndType, b32 HasLabels)
|
||||
{
|
||||
json_element *FirstElement = {};
|
||||
json_element *LastElement = {};
|
||||
|
||||
while(IsParsing(Parser))
|
||||
{
|
||||
buffer Label = {};
|
||||
json_token Value = GetJSONToken(Parser);
|
||||
if(HasLabels)
|
||||
{
|
||||
if(Value.Type == Token_string_literal)
|
||||
{
|
||||
Label = Value.Value;
|
||||
|
||||
json_token Colon = GetJSONToken(Parser);
|
||||
if(Colon.Type == Token_colon)
|
||||
{
|
||||
Value = GetJSONToken(Parser);
|
||||
}
|
||||
else
|
||||
{
|
||||
Error(Parser, Colon, "Expected colon after field name");
|
||||
}
|
||||
}
|
||||
else if(Value.Type != EndType)
|
||||
{
|
||||
Error(Parser, Value, "Unexpected token in JSON");
|
||||
}
|
||||
}
|
||||
|
||||
json_element *Element = ParseJSONElement(Parser, Label, Value);
|
||||
if(Element)
|
||||
{
|
||||
LastElement = (LastElement ? LastElement->NextSibling : FirstElement) = Element;
|
||||
}
|
||||
else if(Value.Type == EndType)
|
||||
{
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
Error(Parser, Value, "Unexpected token in JSON");
|
||||
}
|
||||
|
||||
json_token Comma = GetJSONToken(Parser);
|
||||
if(Comma.Type == EndType)
|
||||
{
|
||||
break;
|
||||
}
|
||||
else if(Comma.Type != Token_comma)
|
||||
{
|
||||
Error(Parser, Comma, "Unexpected token in JSON");
|
||||
}
|
||||
}
|
||||
|
||||
return FirstElement;
|
||||
}
|
||||
|
||||
static json_element *ParseJSON(buffer InputJSON)
|
||||
{
|
||||
json_parser Parser = {};
|
||||
Parser.Source = InputJSON;
|
||||
|
||||
json_element *Result = ParseJSONElement(&Parser, {}, GetJSONToken(&Parser));
|
||||
return Result;
|
||||
}
|
||||
|
||||
static void FreeJSON(json_element *Element)
|
||||
{
|
||||
while(Element)
|
||||
{
|
||||
json_element *FreeElement = Element;
|
||||
Element = Element->NextSibling;
|
||||
|
||||
FreeJSON(FreeElement->FirstSubElement);
|
||||
free(FreeElement);
|
||||
}
|
||||
}
|
||||
|
||||
static json_element *LookupElement(json_element *Object, buffer ElementName)
|
||||
{
|
||||
json_element *Result = 0;
|
||||
|
||||
if(Object)
|
||||
{
|
||||
for(json_element *Search = Object->FirstSubElement; Search; Search = Search->NextSibling)
|
||||
{
|
||||
if(AreEqual(Search->Label, ElementName))
|
||||
{
|
||||
Result = Search;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 ConvertJSONSign(buffer Source, u64 *AtResult)
|
||||
{
|
||||
u64 At = *AtResult;
|
||||
|
||||
f64 Result = 1.0;
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '-'))
|
||||
{
|
||||
Result = -1.0;
|
||||
++At;
|
||||
}
|
||||
|
||||
*AtResult = At;
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 ConvertJSONNumber(buffer Source, u64 *AtResult)
|
||||
{
|
||||
u64 At = *AtResult;
|
||||
|
||||
f64 Result = 0.0;
|
||||
while(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Char = Source.Data[At] - (u8)'0';
|
||||
if(Char < 10)
|
||||
{
|
||||
Result = 10.0*Result + (f64)Char;
|
||||
++At;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
*AtResult = At;
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 ConvertElementToF64(json_element *Object, buffer ElementName)
|
||||
{
|
||||
f64 Result = 0.0;
|
||||
|
||||
json_element *Element = LookupElement(Object, ElementName);
|
||||
if(Element)
|
||||
{
|
||||
buffer Source = Element->Value;
|
||||
u64 At = 0;
|
||||
|
||||
f64 Sign = ConvertJSONSign(Source, &At);
|
||||
f64 Number = ConvertJSONNumber(Source, &At);
|
||||
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '.'))
|
||||
{
|
||||
++At;
|
||||
f64 C = 1.0 / 10.0;
|
||||
while(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Char = Source.Data[At] - (u8)'0';
|
||||
if(Char < 10)
|
||||
{
|
||||
Number = Number + C*(f64)Char;
|
||||
C *= 1.0 / 10.0;
|
||||
++At;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(IsInBounds(Source, At) && ((Source.Data[At] == 'e') || (Source.Data[At] == 'E')))
|
||||
{
|
||||
++At;
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '+'))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
|
||||
f64 ExponentSign = ConvertJSONSign(Source, &At);
|
||||
f64 Exponent = ExponentSign*ConvertJSONNumber(Source, &At);
|
||||
Number *= pow(10.0, Exponent);
|
||||
}
|
||||
|
||||
Result = Sign*Number;
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static u64 ParseHaversinePairs(buffer InputJSON, u64 MaxPairCount, haversine_pair *Pairs)
|
||||
{
|
||||
TimeFunction;
|
||||
|
||||
u64 PairCount = 0;
|
||||
|
||||
json_element *JSON = ParseJSON(InputJSON);
|
||||
|
||||
json_element *PairsArray = LookupElement(JSON, CONSTANT_STRING("pairs"));
|
||||
if(PairsArray)
|
||||
{
|
||||
for(json_element *Element = PairsArray->FirstSubElement;
|
||||
Element && (PairCount < MaxPairCount);
|
||||
Element = Element->NextSibling)
|
||||
{
|
||||
haversine_pair *Pair = Pairs + PairCount++;
|
||||
|
||||
Pair->X0 = ConvertElementToF64(Element, CONSTANT_STRING("x0"));
|
||||
Pair->Y0 = ConvertElementToF64(Element, CONSTANT_STRING("y0"));
|
||||
Pair->X1 = ConvertElementToF64(Element, CONSTANT_STRING("x1"));
|
||||
Pair->Y1 = ConvertElementToF64(Element, CONSTANT_STRING("y1"));
|
||||
}
|
||||
}
|
||||
|
||||
FreeJSON(JSON);
|
||||
|
||||
return PairCount;
|
||||
}
|
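One line in ParseJSONList above deserves a gloss: LastElement = (LastElement ? LastElement->NextSibling : FirstElement) = Element; appends Element to a singly linked list by assigning it either to the current tail's NextSibling or, when the list is still empty, to FirstElement, and then making it the new tail. A behaviorally equivalent expansion, shown only for readability:

// Equivalent to the chained assignment in ParseJSONList (sketch only).
if(LastElement)
{
    LastElement->NextSibling = Element; // append after the current tail
}
else
{
    FirstElement = Element;             // the first element becomes the head
}
LastElement = Element;                  // either way, Element is the new tail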
part2/listing_0078_profiled_haversine_main.cpp (new file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
/* ========================================================================
|
||||
|
||||
(C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Please see https://computerenhance.com for more information
|
||||
|
||||
======================================================================== */
|
||||
|
||||
/* ========================================================================
|
||||
LISTING 78
|
||||
======================================================================== */
|
||||
|
||||
/* NOTE(casey): _CRT_SECURE_NO_WARNINGS is here because otherwise we cannot
|
||||
call fopen(). If we replace fopen() with fopen_s() to avoid the warning,
|
||||
then the code doesn't compile on Linux anymore, since fopen_s() does not
|
||||
exist there.
|
||||
|
||||
What exactly the CRT maintainers were thinking when they made this choice,
|
||||
I have no idea. */
|
||||
#define _CRT_SECURE_NO_WARNINGS
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdint.h>
|
||||
#include <math.h>
|
||||
#include <sys/stat.h>
|
||||
|
||||
typedef uint8_t u8;
|
||||
typedef uint32_t u32;
|
||||
typedef uint64_t u64;
|
||||
|
||||
typedef int32_t b32;
|
||||
|
||||
typedef float f32;
|
||||
typedef double f64;
|
||||
|
||||
#define ArrayCount(Array) (sizeof(Array)/sizeof((Array)[0]))
|
||||
|
||||
struct haversine_pair
|
||||
{
|
||||
f64 X0, Y0;
|
||||
f64 X1, Y1;
|
||||
};
|
||||
|
||||
#include "listing_0076_simple_profiler.cpp"
|
||||
#include "listing_0065_haversine_formula.cpp"
|
||||
#include "listing_0068_buffer.cpp"
|
||||
#include "listing_0077_profiled_lookup_json_parser.cpp"
|
||||
|
||||
static buffer ReadEntireFile(char *FileName)
|
||||
{
|
||||
TimeFunction;
|
||||
|
||||
buffer Result = {};
|
||||
|
||||
FILE *File = fopen(FileName, "rb");
|
||||
if(File)
|
||||
{
|
||||
#if _WIN32
|
||||
struct __stat64 Stat;
|
||||
_stat64(FileName, &Stat);
|
||||
#else
|
||||
struct stat Stat;
|
||||
stat(FileName, &Stat);
|
||||
#endif
|
||||
|
||||
Result = AllocateBuffer(Stat.st_size);
|
||||
if(Result.Data)
|
||||
{
|
||||
if(fread(Result.Data, Result.Count, 1, File) != 1)
|
||||
{
|
||||
fprintf(stderr, "ERROR: Unable to read \"%s\".\n", FileName);
|
||||
FreeBuffer(&Result);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "ERROR: Unable to open \"%s\".\n", FileName);
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 SumHaversineDistances(u64 PairCount, haversine_pair *Pairs)
|
||||
{
|
||||
TimeFunction;
|
||||
|
||||
f64 Sum = 0;
|
||||
|
||||
f64 SumCoef = 1 / (f64)PairCount;
|
||||
for(u64 PairIndex = 0; PairIndex < PairCount; ++PairIndex)
|
||||
{
|
||||
haversine_pair Pair = Pairs[PairIndex];
|
||||
f64 EarthRadius = 6372.8;
|
||||
f64 Dist = ReferenceHaversine(Pair.X0, Pair.Y0, Pair.X1, Pair.Y1, EarthRadius);
|
||||
Sum += SumCoef*Dist;
|
||||
}
|
||||
|
||||
return Sum;
|
||||
}
|
||||
|
||||
int main(int ArgCount, char **Args)
|
||||
{
|
||||
BeginProfile();
|
||||
|
||||
int Result = 1;
|
||||
|
||||
if((ArgCount == 2) || (ArgCount == 3))
|
||||
{
|
||||
buffer InputJSON = ReadEntireFile(Args[1]);
|
||||
|
||||
u32 MinimumJSONPairEncoding = 6*4;
|
||||
u64 MaxPairCount = InputJSON.Count / MinimumJSONPairEncoding;
|
||||
if(MaxPairCount)
|
||||
{
|
||||
buffer ParsedValues = AllocateBuffer(MaxPairCount * sizeof(haversine_pair));
|
||||
if(ParsedValues.Count)
|
||||
{
|
||||
haversine_pair *Pairs = (haversine_pair *)ParsedValues.Data;
|
||||
|
||||
u64 PairCount = ParseHaversinePairs(InputJSON, MaxPairCount, Pairs);
|
||||
f64 Sum = SumHaversineDistances(PairCount, Pairs);
|
||||
|
||||
Result = 0;
|
||||
|
||||
fprintf(stdout, "Input size: %llu\n", InputJSON.Count);
|
||||
fprintf(stdout, "Pair count: %llu\n", PairCount);
|
||||
fprintf(stdout, "Haversine sum: %.16f\n", Sum);
|
||||
|
||||
if(ArgCount == 3)
|
||||
{
|
||||
buffer AnswersF64 = ReadEntireFile(Args[2]);
|
||||
if(AnswersF64.Count >= sizeof(f64))
|
||||
{
|
||||
f64 *AnswerValues = (f64 *)AnswersF64.Data;
|
||||
|
||||
fprintf(stdout, "\nValidation:\n");
|
||||
|
||||
u64 RefAnswerCount = (AnswersF64.Count - sizeof(f64)) / sizeof(f64);
|
||||
if(PairCount != RefAnswerCount)
|
||||
{
|
||||
fprintf(stdout, "FAILED - pair count doesn't match %llu.\n", RefAnswerCount);
|
||||
}
|
||||
|
||||
f64 RefSum = AnswerValues[RefAnswerCount];
|
||||
fprintf(stdout, "Reference sum: %.16f\n", RefSum);
|
||||
fprintf(stdout, "Difference: %.16f\n", Sum - RefSum);
|
||||
|
||||
fprintf(stdout, "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
FreeBuffer(&ParsedValues);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "ERROR: Malformed input JSON\n");
|
||||
}
|
||||
|
||||
FreeBuffer(&InputJSON);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Usage: %s [haversine_input.json]\n", Args[0]);
|
||||
fprintf(stderr, " %s [haversine_input.json] [answers.f64]\n", Args[0]);
|
||||
}
|
||||
|
||||
if(Result == 0)
|
||||
{
|
||||
EndAndPrintProfile();
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");
|
part2/listing_0079_timedblock_lookup_json_parser.cpp (new file, 510 lines)
@@ -0,0 +1,510 @@
|
||||
/* ========================================================================
|
||||
|
||||
(C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Please see https://computerenhance.com for more information
|
||||
|
||||
======================================================================== */
|
||||
|
||||
/* ========================================================================
|
||||
LISTING 79
|
||||
======================================================================== */
|
||||
|
||||
enum json_token_type
|
||||
{
|
||||
Token_end_of_stream,
|
||||
Token_error,
|
||||
|
||||
Token_open_brace,
|
||||
Token_open_bracket,
|
||||
Token_close_brace,
|
||||
Token_close_bracket,
|
||||
Token_comma,
|
||||
Token_colon,
|
||||
Token_string_literal,
|
||||
Token_number,
|
||||
Token_true,
|
||||
Token_false,
|
||||
Token_null,
|
||||
|
||||
Token_count,
|
||||
};
|
||||
|
||||
struct json_token
|
||||
{
|
||||
json_token_type Type;
|
||||
buffer Value;
|
||||
};
|
||||
|
||||
struct json_element
|
||||
{
|
||||
buffer Label;
|
||||
buffer Value;
|
||||
json_element *FirstSubElement;
|
||||
|
||||
json_element *NextSibling;
|
||||
};
|
||||
|
||||
struct json_parser
|
||||
{
|
||||
buffer Source;
|
||||
u64 At;
|
||||
b32 HadError;
|
||||
};
|
||||
|
||||
static b32 IsJSONDigit(buffer Source, u64 At)
|
||||
{
|
||||
b32 Result = false;
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Val = Source.Data[At];
|
||||
Result = ((Val >= '0') && (Val <= '9'));
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static b32 IsJSONWhitespace(buffer Source, u64 At)
|
||||
{
|
||||
b32 Result = false;
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Val = Source.Data[At];
|
||||
Result = ((Val == ' ') || (Val == '\t') || (Val == '\n') || (Val == '\r'));
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static b32 IsParsing(json_parser *Parser)
|
||||
{
|
||||
b32 Result = !Parser->HadError && IsInBounds(Parser->Source, Parser->At);
|
||||
return Result;
|
||||
}
|
||||
|
||||
static void Error(json_parser *Parser, json_token Token, char const *Message)
|
||||
{
|
||||
Parser->HadError = true;
|
||||
fprintf(stderr, "ERROR: \"%.*s\" - %s\n", (u32)Token.Value.Count, (char *)Token.Value.Data, Message);
|
||||
}
|
||||
|
||||
static void ParseKeyword(buffer Source, u64 *At, buffer KeywordRemaining, json_token_type Type, json_token *Result)
|
||||
{
|
||||
if((Source.Count - *At) >= KeywordRemaining.Count)
|
||||
{
|
||||
buffer Check = Source;
|
||||
Check.Data += *At;
|
||||
Check.Count = KeywordRemaining.Count;
|
||||
if(AreEqual(Check, KeywordRemaining))
|
||||
{
|
||||
Result->Type = Type;
|
||||
Result->Value.Count += KeywordRemaining.Count;
|
||||
*At += KeywordRemaining.Count;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static json_token GetJSONToken(json_parser *Parser)
|
||||
{
|
||||
json_token Result = {};
|
||||
|
||||
buffer Source = Parser->Source;
|
||||
u64 At = Parser->At;
|
||||
|
||||
while(IsJSONWhitespace(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
Result.Type = Token_error;
|
||||
Result.Value.Count = 1;
|
||||
Result.Value.Data = Source.Data + At;
|
||||
u8 Val = Source.Data[At++];
|
||||
switch(Val)
|
||||
{
|
||||
case '{': {Result.Type = Token_open_brace;} break;
|
||||
case '[': {Result.Type = Token_open_bracket;} break;
|
||||
case '}': {Result.Type = Token_close_brace;} break;
|
||||
case ']': {Result.Type = Token_close_bracket;} break;
|
||||
case ',': {Result.Type = Token_comma;} break;
|
||||
case ':': {Result.Type = Token_colon;} break;
|
||||
|
||||
case 'f':
|
||||
{
|
||||
ParseKeyword(Source, &At, CONSTANT_STRING("alse"), Token_false, &Result);
|
||||
} break;
|
||||
|
||||
case 'n':
|
||||
{
|
||||
ParseKeyword(Source, &At, CONSTANT_STRING("ull"), Token_null, &Result);
|
||||
} break;
|
||||
|
||||
case 't':
|
||||
{
|
||||
ParseKeyword(Source, &At, CONSTANT_STRING("rue"), Token_true, &Result);
|
||||
} break;
|
||||
|
||||
case '"':
|
||||
{
|
||||
Result.Type = Token_string_literal;
|
||||
|
||||
u64 StringStart = At;
|
||||
|
||||
while(IsInBounds(Source, At) && (Source.Data[At] != '"'))
|
||||
{
|
||||
if(IsInBounds(Source, (At + 1)) &&
|
||||
(Source.Data[At] == '\\') &&
|
||||
(Source.Data[At + 1] == '"'))
|
||||
{
|
||||
// NOTE(casey): Skip escaped quotation marks
|
||||
++At;
|
||||
}
|
||||
|
||||
++At;
|
||||
}
|
||||
|
||||
Result.Value.Data = Source.Data + StringStart;
|
||||
Result.Value.Count = At - StringStart;
|
||||
if(IsInBounds(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
} break;
|
||||
|
||||
case '-':
|
||||
case '0':
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
{
|
||||
u64 Start = At - 1;
|
||||
Result.Type = Token_number;
|
||||
|
||||
// NOTE(casey): Move past a leading negative sign if one exists
|
||||
if((Val == '-') && IsInBounds(Source, At))
|
||||
{
|
||||
Val = Source.Data[At++];
|
||||
}
|
||||
|
||||
// NOTE(casey): If the leading digit wasn't 0, parse any digits before the decimal point
|
||||
if(Val != '0')
|
||||
{
|
||||
while(IsJSONDigit(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(casey): If there is a decimal point, parse any digits after the decimal point
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '.'))
|
||||
{
|
||||
++At;
|
||||
while(IsJSONDigit(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(casey): If it's in scientific notation, parse any digits after the "e"
|
||||
if(IsInBounds(Source, At) && ((Source.Data[At] == 'e') || (Source.Data[At] == 'E')))
|
||||
{
|
||||
++At;
|
||||
|
||||
if(IsInBounds(Source, At) && ((Source.Data[At] == '+') || (Source.Data[At] == '-')))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
|
||||
while(IsJSONDigit(Source, At))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
}
|
||||
|
||||
Result.Value.Count = At - Start;
|
||||
} break;
|
||||
|
||||
default:
|
||||
{
|
||||
} break;
|
||||
}
|
||||
}
|
||||
|
||||
Parser->At = At;
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static json_element *ParseJSONList(json_parser *Parser, json_token_type EndType, b32 HasLabels);
|
||||
static json_element *ParseJSONElement(json_parser *Parser, buffer Label, json_token Value)
|
||||
{
|
||||
b32 Valid = true;
|
||||
|
||||
json_element *SubElement = 0;
|
||||
if(Value.Type == Token_open_bracket)
|
||||
{
|
||||
SubElement = ParseJSONList(Parser, Token_close_bracket, false);
|
||||
}
|
||||
else if(Value.Type == Token_open_brace)
|
||||
{
|
||||
SubElement = ParseJSONList(Parser, Token_close_brace, true);
|
||||
}
|
||||
else if((Value.Type == Token_string_literal) ||
|
||||
(Value.Type == Token_true) ||
|
||||
(Value.Type == Token_false) ||
|
||||
(Value.Type == Token_null) ||
|
||||
(Value.Type == Token_number))
|
||||
{
|
||||
// NOTE(casey): Nothing to do here, since there is no additional data
|
||||
}
|
||||
else
|
||||
{
|
||||
Valid = false;
|
||||
}
|
||||
|
||||
json_element *Result = 0;
|
||||
|
||||
if(Valid)
|
||||
{
|
||||
Result = (json_element *)malloc(sizeof(json_element));
|
||||
Result->Label = Label;
|
||||
Result->Value = Value.Value;
|
||||
Result->FirstSubElement = SubElement;
|
||||
Result->NextSibling = 0;
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static json_element *ParseJSONList(json_parser *Parser, json_token_type EndType, b32 HasLabels)
|
||||
{
|
||||
json_element *FirstElement = {};
|
||||
json_element *LastElement = {};
|
||||
|
||||
while(IsParsing(Parser))
|
||||
{
|
||||
buffer Label = {};
|
||||
json_token Value = GetJSONToken(Parser);
|
||||
if(HasLabels)
|
||||
{
|
||||
if(Value.Type == Token_string_literal)
|
||||
{
|
||||
Label = Value.Value;
|
||||
|
||||
json_token Colon = GetJSONToken(Parser);
|
||||
if(Colon.Type == Token_colon)
|
||||
{
|
||||
Value = GetJSONToken(Parser);
|
||||
}
|
||||
else
|
||||
{
|
||||
Error(Parser, Colon, "Expected colon after field name");
|
||||
}
|
||||
}
|
||||
else if(Value.Type != EndType)
|
||||
{
|
||||
Error(Parser, Value, "Unexpected token in JSON");
|
||||
}
|
||||
}
|
||||
|
||||
json_element *Element = ParseJSONElement(Parser, Label, Value);
|
||||
if(Element)
|
||||
{
|
||||
LastElement = (LastElement ? LastElement->NextSibling : FirstElement) = Element;
|
||||
}
|
||||
else if(Value.Type == EndType)
|
||||
{
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
Error(Parser, Value, "Unexpected token in JSON");
|
||||
}
|
||||
|
||||
json_token Comma = GetJSONToken(Parser);
|
||||
if(Comma.Type == EndType)
|
||||
{
|
||||
break;
|
||||
}
|
||||
else if(Comma.Type != Token_comma)
|
||||
{
|
||||
Error(Parser, Comma, "Unexpected token in JSON");
|
||||
}
|
||||
}
|
||||
|
||||
return FirstElement;
|
||||
}
|
||||
|
||||
static json_element *ParseJSON(buffer InputJSON)
|
||||
{
|
||||
json_parser Parser = {};
|
||||
Parser.Source = InputJSON;
|
||||
|
||||
json_element *Result = ParseJSONElement(&Parser, {}, GetJSONToken(&Parser));
|
||||
return Result;
|
||||
}
|
||||
|
||||
static void FreeJSON(json_element *Element)
|
||||
{
|
||||
while(Element)
|
||||
{
|
||||
json_element *FreeElement = Element;
|
||||
Element = Element->NextSibling;
|
||||
|
||||
FreeJSON(FreeElement->FirstSubElement);
|
||||
free(FreeElement);
|
||||
}
|
||||
}
|
||||
|
||||
static json_element *LookupElement(json_element *Object, buffer ElementName)
|
||||
{
|
||||
json_element *Result = 0;
|
||||
|
||||
if(Object)
|
||||
{
|
||||
for(json_element *Search = Object->FirstSubElement; Search; Search = Search->NextSibling)
|
||||
{
|
||||
if(AreEqual(Search->Label, ElementName))
|
||||
{
|
||||
Result = Search;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 ConvertJSONSign(buffer Source, u64 *AtResult)
|
||||
{
|
||||
u64 At = *AtResult;
|
||||
|
||||
f64 Result = 1.0;
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '-'))
|
||||
{
|
||||
Result = -1.0;
|
||||
++At;
|
||||
}
|
||||
|
||||
*AtResult = At;
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 ConvertJSONNumber(buffer Source, u64 *AtResult)
|
||||
{
|
||||
u64 At = *AtResult;
|
||||
|
||||
f64 Result = 0.0;
|
||||
while(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Char = Source.Data[At] - (u8)'0';
|
||||
if(Char < 10)
|
||||
{
|
||||
Result = 10.0*Result + (f64)Char;
|
||||
++At;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
*AtResult = At;
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 ConvertElementToF64(json_element *Object, buffer ElementName)
|
||||
{
|
||||
f64 Result = 0.0;
|
||||
|
||||
json_element *Element = LookupElement(Object, ElementName);
|
||||
if(Element)
|
||||
{
|
||||
buffer Source = Element->Value;
|
||||
u64 At = 0;
|
||||
|
||||
f64 Sign = ConvertJSONSign(Source, &At);
|
||||
f64 Number = ConvertJSONNumber(Source, &At);
|
||||
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '.'))
|
||||
{
|
||||
++At;
|
||||
f64 C = 1.0 / 10.0;
|
||||
while(IsInBounds(Source, At))
|
||||
{
|
||||
u8 Char = Source.Data[At] - (u8)'0';
|
||||
if(Char < 10)
|
||||
{
|
||||
Number = Number + C*(f64)Char;
|
||||
C *= 1.0 / 10.0;
|
||||
++At;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(IsInBounds(Source, At) && ((Source.Data[At] == 'e') || (Source.Data[At] == 'E')))
|
||||
{
|
||||
++At;
|
||||
if(IsInBounds(Source, At) && (Source.Data[At] == '+'))
|
||||
{
|
||||
++At;
|
||||
}
|
||||
|
||||
f64 ExponentSign = ConvertJSONSign(Source, &At);
|
||||
f64 Exponent = ExponentSign*ConvertJSONNumber(Source, &At);
|
||||
Number *= pow(10.0, Exponent);
|
||||
}
|
||||
|
||||
Result = Sign*Number;
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static u64 ParseHaversinePairs(buffer InputJSON, u64 MaxPairCount, haversine_pair *Pairs)
|
||||
{
|
||||
TimeFunction;
|
||||
|
||||
u64 PairCount = 0;
|
||||
|
||||
json_element *JSON = ParseJSON(InputJSON);
|
||||
|
||||
json_element *PairsArray = LookupElement(JSON, CONSTANT_STRING("pairs"));
|
||||
if(PairsArray)
|
||||
{
|
||||
TimeBlock("Lookup and Convert");
|
||||
for(json_element *Element = PairsArray->FirstSubElement;
|
||||
Element && (PairCount < MaxPairCount);
|
||||
Element = Element->NextSibling)
|
||||
{
|
||||
haversine_pair *Pair = Pairs + PairCount++;
|
||||
|
||||
Pair->X0 = ConvertElementToF64(Element, CONSTANT_STRING("x0"));
|
||||
Pair->Y0 = ConvertElementToF64(Element, CONSTANT_STRING("y0"));
|
||||
Pair->X1 = ConvertElementToF64(Element, CONSTANT_STRING("x1"));
|
||||
Pair->Y1 = ConvertElementToF64(Element, CONSTANT_STRING("y1"));
|
||||
}
|
||||
}
|
||||
|
||||
FreeJSON(JSON);
|
||||
|
||||
return PairCount;
|
||||
}
|
part2/listing_0080_double_counted_haversine_main.cpp (new file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
/* ========================================================================
|
||||
|
||||
(C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Please see https://computerenhance.com for more information
|
||||
|
||||
======================================================================== */
|
||||
|
||||
/* ========================================================================
|
||||
LISTING 80
|
||||
======================================================================== */
|
||||
|
||||
/* NOTE(casey): _CRT_SECURE_NO_WARNINGS is here because otherwise we cannot
|
||||
call fopen(). If we replace fopen() with fopen_s() to avoid the warning,
|
||||
then the code doesn't compile on Linux anymore, since fopen_s() does not
|
||||
exist there.
|
||||
|
||||
What exactly the CRT maintainers were thinking when they made this choice,
|
||||
I have no idea. */
|
||||
#define _CRT_SECURE_NO_WARNINGS
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdint.h>
|
||||
#include <math.h>
|
||||
#include <sys/stat.h>
|
||||
|
||||
typedef uint8_t u8;
|
||||
typedef uint32_t u32;
|
||||
typedef uint64_t u64;
|
||||
|
||||
typedef int32_t b32;
|
||||
|
||||
typedef float f32;
|
||||
typedef double f64;
|
||||
|
||||
#define ArrayCount(Array) (sizeof(Array)/sizeof((Array)[0]))
|
||||
|
||||
struct haversine_pair
|
||||
{
|
||||
f64 X0, Y0;
|
||||
f64 X1, Y1;
|
||||
};
|
||||
|
||||
#include "listing_0076_simple_profiler.cpp"
|
||||
#include "listing_0065_haversine_formula.cpp"
|
||||
#include "listing_0068_buffer.cpp"
|
||||
#include "listing_0079_timedblock_lookup_json_parser.cpp"
|
||||
|
||||
static buffer ReadEntireFile(char *FileName)
|
||||
{
|
||||
TimeFunction;
|
||||
|
||||
buffer Result = {};
|
||||
|
||||
FILE *File = fopen(FileName, "rb");
|
||||
if(File)
|
||||
{
|
||||
#if _WIN32
|
||||
struct __stat64 Stat;
|
||||
_stat64(FileName, &Stat);
|
||||
#else
|
||||
struct stat Stat;
|
||||
stat(FileName, &Stat);
|
||||
#endif
|
||||
|
||||
Result = AllocateBuffer(Stat.st_size);
|
||||
if(Result.Data)
|
||||
{
|
||||
if(fread(Result.Data, Result.Count, 1, File) != 1)
|
||||
{
|
||||
fprintf(stderr, "ERROR: Unable to read \"%s\".\n", FileName);
|
||||
FreeBuffer(&Result);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "ERROR: Unable to open \"%s\".\n", FileName);
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static f64 SumHaversineDistances(u64 PairCount, haversine_pair *Pairs)
|
||||
{
|
||||
TimeFunction;
|
||||
|
||||
f64 Sum = 0;
|
||||
|
||||
f64 SumCoef = 1 / (f64)PairCount;
|
||||
for(u64 PairIndex = 0; PairIndex < PairCount; ++PairIndex)
|
||||
{
|
||||
haversine_pair Pair = Pairs[PairIndex];
|
||||
f64 EarthRadius = 6372.8;
|
||||
f64 Dist = ReferenceHaversine(Pair.X0, Pair.Y0, Pair.X1, Pair.Y1, EarthRadius);
|
||||
Sum += SumCoef*Dist;
|
||||
}
|
||||
|
||||
return Sum;
|
||||
}
|
||||
|
||||
int main(int ArgCount, char **Args)
|
||||
{
|
||||
BeginProfile();
|
||||
|
||||
int Result = 1;
|
||||
|
||||
if((ArgCount == 2) || (ArgCount == 3))
|
||||
{
|
||||
buffer InputJSON = ReadEntireFile(Args[1]);
|
||||
|
||||
u32 MinimumJSONPairEncoding = 6*4;
|
||||
u64 MaxPairCount = InputJSON.Count / MinimumJSONPairEncoding;
|
||||
if(MaxPairCount)
|
||||
{
|
||||
buffer ParsedValues = AllocateBuffer(MaxPairCount * sizeof(haversine_pair));
|
||||
if(ParsedValues.Count)
|
||||
{
|
||||
haversine_pair *Pairs = (haversine_pair *)ParsedValues.Data;
|
||||
|
||||
u64 PairCount = ParseHaversinePairs(InputJSON, MaxPairCount, Pairs);
|
||||
f64 Sum = SumHaversineDistances(PairCount, Pairs);
|
||||
|
||||
Result = 0;
|
||||
|
||||
fprintf(stdout, "Input size: %llu\n", InputJSON.Count);
|
||||
fprintf(stdout, "Pair count: %llu\n", PairCount);
|
||||
fprintf(stdout, "Haversine sum: %.16f\n", Sum);
|
||||
|
||||
if(ArgCount == 3)
|
||||
{
|
||||
buffer AnswersF64 = ReadEntireFile(Args[2]);
|
||||
if(AnswersF64.Count >= sizeof(f64))
|
||||
{
|
||||
f64 *AnswerValues = (f64 *)AnswersF64.Data;
|
||||
|
||||
fprintf(stdout, "\nValidation:\n");
|
||||
|
||||
u64 RefAnswerCount = (AnswersF64.Count - sizeof(f64)) / sizeof(f64);
|
||||
if(PairCount != RefAnswerCount)
|
||||
{
|
||||
fprintf(stdout, "FAILED - pair count doesn't match %llu.\n", RefAnswerCount);
|
||||
}
|
||||
|
||||
f64 RefSum = AnswerValues[RefAnswerCount];
|
||||
fprintf(stdout, "Reference sum: %.16f\n", RefSum);
|
||||
fprintf(stdout, "Difference: %.16f\n", Sum - RefSum);
|
||||
|
||||
fprintf(stdout, "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
FreeBuffer(&ParsedValues);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "ERROR: Malformed input JSON\n");
|
||||
}
|
||||
|
||||
FreeBuffer(&InputJSON);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Usage: %s [haversine_input.json]\n", Args[0]);
|
||||
fprintf(stderr, " %s [haversine_input.json] [answers.f64]\n", Args[0]);
|
||||
}
|
||||
|
||||
if(Result == 0)
|
||||
{
|
||||
EndAndPrintProfile();
|
||||
}
|
||||
|
||||
return Result;
|
||||
}
|
||||
|
||||
static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");
|
part2/listing_0081_nesting_profiler.cpp (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
/* ========================================================================
|
||||
|
||||
(C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Please see https://computerenhance.com for more information
|
||||
|
||||
======================================================================== */
|
||||
|
||||
/* ========================================================================
|
||||
LISTING 81
|
||||
======================================================================== */
|
||||
|
||||
#include "listing_0074_platform_metrics.cpp"
|
||||
|
||||
struct profile_anchor
|
||||
{
|
||||
u64 TSCElapsed;
|
||||
u64 TSCElapsedChildren;
|
||||
u64 HitCount;
|
||||
char const *Label;
|
||||
};
|
||||
|
||||
struct profiler
|
||||
{
|
||||
profile_anchor Anchors[4096];
|
||||
|
||||
u64 StartTSC;
|
||||
u64 EndTSC;
|
||||
};
|
||||
static profiler GlobalProfiler;
|
||||
static u32 GlobalProfilerParent;
|
||||
|
||||
struct profile_block
|
||||
{
|
||||
profile_block(char const *Label_, u32 AnchorIndex_)
|
||||
{
|
||||
ParentIndex = GlobalProfilerParent;
|
||||
|
||||
AnchorIndex = AnchorIndex_;
|
||||
Label = Label_;
|
||||
|
||||
GlobalProfilerParent = AnchorIndex;
|
||||
StartTSC = ReadCPUTimer();
|
||||
}
|
||||
|
||||
~profile_block(void)
|
||||
{
|
||||
u64 Elapsed = ReadCPUTimer() - StartTSC;
|
||||
GlobalProfilerParent = ParentIndex;
|
||||
|
||||
profile_anchor *Parent = GlobalProfiler.Anchors + ParentIndex;
|
||||
profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
|
||||
|
||||
Parent->TSCElapsedChildren += Elapsed;
|
||||
Anchor->TSCElapsed += Elapsed;
|
||||
++Anchor->HitCount;
|
||||
|
||||
/* NOTE(casey): This write happens every time solely because there is no
|
||||
straightforward way in C++ to have the same ease-of-use. In a better programming
|
||||
language, it would be simple to have the anchor points gathered and labeled at compile
|
||||
time, and this repetitive write would be eliminated. */
|
||||
Anchor->Label = Label;
|
||||
}
|
||||
|
||||
char const *Label;
|
||||
u64 StartTSC;
|
||||
u32 ParentIndex;
|
||||
u32 AnchorIndex;
|
||||
};
|
||||
|
||||
#define NameConcat2(A, B) A##B
|
||||
#define NameConcat(A, B) NameConcat2(A, B)
|
||||
#define TimeBlock(Name) profile_block NameConcat(Block, __LINE__)(Name, __COUNTER__ + 1);
|
||||
#define TimeFunction TimeBlock(__func__)
|
||||
|
||||
static void PrintTimeElapsed(u64 TotalTSCElapsed, profile_anchor *Anchor)
|
||||
{
|
||||
u64 Elapsed = Anchor->TSCElapsed - Anchor->TSCElapsedChildren;
|
||||
f64 Percent = 100.0 * ((f64)Elapsed / (f64)TotalTSCElapsed);
|
||||
printf(" %s[%llu]: %llu (%.2f%%", Anchor->Label, Anchor->HitCount, Elapsed, Percent);
|
||||
if(Anchor->TSCElapsedChildren)
|
||||
{
|
||||
f64 PercentWithChildren = 100.0 * ((f64)Anchor->TSCElapsed / (f64)TotalTSCElapsed);
|
||||
printf(", %.2f%% w/children", PercentWithChildren);
|
||||
}
|
||||
printf(")\n");
|
||||
}
|
||||
|
||||
static void BeginProfile(void)
|
||||
{
|
||||
GlobalProfiler.StartTSC = ReadCPUTimer();
|
||||
}
|
||||
|
||||
static void EndAndPrintProfile()
|
||||
{
|
||||
GlobalProfiler.EndTSC = ReadCPUTimer();
|
||||
u64 CPUFreq = EstimateCPUTimerFreq();
|
||||
|
||||
u64 TotalCPUElapsed = GlobalProfiler.EndTSC - GlobalProfiler.StartTSC;
|
||||
|
||||
if(CPUFreq)
|
||||
{
|
||||
printf("\nTotal time: %0.4fms (CPU freq %llu)\n", 1000.0 * (f64)TotalCPUElapsed / (f64)CPUFreq, CPUFreq);
|
||||
}
|
||||
|
||||
for(u32 AnchorIndex = 0; AnchorIndex < ArrayCount(GlobalProfiler.Anchors); ++AnchorIndex)
|
||||
{
|
||||
profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
|
||||
if(Anchor->TSCElapsed)
|
||||
{
|
||||
PrintTimeElapsed(TotalCPUElapsed, Anchor);
|
||||
}
|
||||
}
|
||||
}
|
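For reference, a minimal usage sketch (not part of this commit) of the macros the listing above defines. It assumes the same typedef/ArrayCount preamble that the main listings below set up before including the profiler; the "Load loop" label and the function names here are hypothetical. Each TimeBlock/TimeFunction site gets its own anchor via __COUNTER__ + 1, so anchor 0 stays reserved as the implicit root.

// Hypothetical example, not one of the course listings.
// Assumes u32/u64/f64, ArrayCount, and <stdio.h> are already defined/included,
// as in the main listings below.
#include "listing_0081_nesting_profiler.cpp"

static void LoadData(void)
{
    TimeFunction;               // anchor labeled "LoadData", child of whichever block is open
    // ... work being measured ...
}

static void Run(void)
{
    TimeFunction;               // anchor labeled "Run"
    for(int I = 0; I < 4; ++I)
    {
        TimeBlock("Load loop"); // hypothetical label; one anchor per TimeBlock site
        LoadData();
    }
}

int main(void)
{
    BeginProfile();
    Run();
    EndAndPrintProfile();       // per-anchor exclusive time, plus "w/children" when a block had children
    return 0;
}

static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");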
part2/listing_0082_nesting_haversine_main.cpp (new file, 182 lines)
@@ -0,0 +1,182 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 82
   ======================================================================== */

/* NOTE(casey): _CRT_SECURE_NO_WARNINGS is here because otherwise we cannot
   call fopen(). If we replace fopen() with fopen_s() to avoid the warning,
   then the code doesn't compile on Linux anymore, since fopen_s() does not
   exist there.

   What exactly the CRT maintainers were thinking when they made this choice,
   I have no idea. */
#define _CRT_SECURE_NO_WARNINGS

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <math.h>
#include <sys/stat.h>

typedef uint8_t u8;
typedef uint32_t u32;
typedef uint64_t u64;

typedef int32_t b32;

typedef float f32;
typedef double f64;

#define ArrayCount(Array) (sizeof(Array)/sizeof((Array)[0]))

struct haversine_pair
{
    f64 X0, Y0;
    f64 X1, Y1;
};

#include "listing_0081_nesting_profiler.cpp"
#include "listing_0065_haversine_formula.cpp"
#include "listing_0068_buffer.cpp"
#include "listing_0079_timedblock_lookup_json_parser.cpp"

static buffer ReadEntireFile(char *FileName)
{
    TimeFunction;

    buffer Result = {};

    FILE *File = fopen(FileName, "rb");
    if(File)
    {
#if _WIN32
        struct __stat64 Stat;
        _stat64(FileName, &Stat);
#else
        struct stat Stat;
        stat(FileName, &Stat);
#endif

        Result = AllocateBuffer(Stat.st_size);
        if(Result.Data)
        {
            if(fread(Result.Data, Result.Count, 1, File) != 1)
            {
                fprintf(stderr, "ERROR: Unable to read \"%s\".\n", FileName);
                FreeBuffer(&Result);
            }
        }
    }
    else
    {
        fprintf(stderr, "ERROR: Unable to open \"%s\".\n", FileName);
    }

    return Result;
}

static f64 SumHaversineDistances(u64 PairCount, haversine_pair *Pairs)
{
    TimeFunction;

    f64 Sum = 0;

    f64 SumCoef = 1 / (f64)PairCount;
    for(u64 PairIndex = 0; PairIndex < PairCount; ++PairIndex)
    {
        haversine_pair Pair = Pairs[PairIndex];
        f64 EarthRadius = 6372.8;
        f64 Dist = ReferenceHaversine(Pair.X0, Pair.Y0, Pair.X1, Pair.Y1, EarthRadius);
        Sum += SumCoef*Dist;
    }

    return Sum;
}

int main(int ArgCount, char **Args)
{
    BeginProfile();

    int Result = 1;

    if((ArgCount == 2) || (ArgCount == 3))
    {
        buffer InputJSON = ReadEntireFile(Args[1]);

        u32 MinimumJSONPairEncoding = 6*4;
        u64 MaxPairCount = InputJSON.Count / MinimumJSONPairEncoding;
        if(MaxPairCount)
        {
            buffer ParsedValues = AllocateBuffer(MaxPairCount * sizeof(haversine_pair));
            if(ParsedValues.Count)
            {
                haversine_pair *Pairs = (haversine_pair *)ParsedValues.Data;

                u64 PairCount = ParseHaversinePairs(InputJSON, MaxPairCount, Pairs);
                f64 Sum = SumHaversineDistances(PairCount, Pairs);

                Result = 0;

                fprintf(stdout, "Input size: %llu\n", InputJSON.Count);
                fprintf(stdout, "Pair count: %llu\n", PairCount);
                fprintf(stdout, "Haversine sum: %.16f\n", Sum);

                if(ArgCount == 3)
                {
                    buffer AnswersF64 = ReadEntireFile(Args[2]);
                    if(AnswersF64.Count >= sizeof(f64))
                    {
                        f64 *AnswerValues = (f64 *)AnswersF64.Data;

                        fprintf(stdout, "\nValidation:\n");

                        u64 RefAnswerCount = (AnswersF64.Count - sizeof(f64)) / sizeof(f64);
                        if(PairCount != RefAnswerCount)
                        {
                            fprintf(stdout, "FAILED - pair count doesn't match %llu.\n", RefAnswerCount);
                        }

                        f64 RefSum = AnswerValues[RefAnswerCount];
                        fprintf(stdout, "Reference sum: %.16f\n", RefSum);
                        fprintf(stdout, "Difference: %.16f\n", Sum - RefSum);

                        fprintf(stdout, "\n");
                    }
                }
            }

            FreeBuffer(&ParsedValues);
        }
        else
        {
            fprintf(stderr, "ERROR: Malformed input JSON\n");
        }

        FreeBuffer(&InputJSON);
    }
    else
    {
        fprintf(stderr, "Usage: %s [haversine_input.json]\n", Args[0]);
        fprintf(stderr, "       %s [haversine_input.json] [answers.f64]\n", Args[0]);
    }

    if(Result == 0)
    {
        EndAndPrintProfile();
    }

    return Result;
}

static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");
part2/listing_0083_recursive_timed_lookup_json_parser.cpp (new file, 512 lines)
@@ -0,0 +1,512 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 83
   ======================================================================== */

enum json_token_type
{
    Token_end_of_stream,
    Token_error,

    Token_open_brace,
    Token_open_bracket,
    Token_close_brace,
    Token_close_bracket,
    Token_comma,
    Token_colon,
    Token_string_literal,
    Token_number,
    Token_true,
    Token_false,
    Token_null,

    Token_count,
};

struct json_token
{
    json_token_type Type;
    buffer Value;
};

struct json_element
{
    buffer Label;
    buffer Value;
    json_element *FirstSubElement;

    json_element *NextSibling;
};

struct json_parser
{
    buffer Source;
    u64 At;
    b32 HadError;
};

static b32 IsJSONDigit(buffer Source, u64 At)
{
    b32 Result = false;
    if(IsInBounds(Source, At))
    {
        u8 Val = Source.Data[At];
        Result = ((Val >= '0') && (Val <= '9'));
    }

    return Result;
}

static b32 IsJSONWhitespace(buffer Source, u64 At)
{
    b32 Result = false;
    if(IsInBounds(Source, At))
    {
        u8 Val = Source.Data[At];
        Result = ((Val == ' ') || (Val == '\t') || (Val == '\n') || (Val == '\r'));
    }

    return Result;
}

static b32 IsParsing(json_parser *Parser)
{
    b32 Result = !Parser->HadError && IsInBounds(Parser->Source, Parser->At);
    return Result;
}

static void Error(json_parser *Parser, json_token Token, char const *Message)
{
    Parser->HadError = true;
    fprintf(stderr, "ERROR: \"%.*s\" - %s\n", (u32)Token.Value.Count, (char *)Token.Value.Data, Message);
}

static void ParseKeyword(buffer Source, u64 *At, buffer KeywordRemaining, json_token_type Type, json_token *Result)
{
    if((Source.Count - *At) >= KeywordRemaining.Count)
    {
        buffer Check = Source;
        Check.Data += *At;
        Check.Count = KeywordRemaining.Count;
        if(AreEqual(Check, KeywordRemaining))
        {
            Result->Type = Type;
            Result->Value.Count += KeywordRemaining.Count;
            *At += KeywordRemaining.Count;
        }
    }
}

static json_token GetJSONToken(json_parser *Parser)
{
    json_token Result = {};

    buffer Source = Parser->Source;
    u64 At = Parser->At;

    while(IsJSONWhitespace(Source, At))
    {
        ++At;
    }

    if(IsInBounds(Source, At))
    {
        Result.Type = Token_error;
        Result.Value.Count = 1;
        Result.Value.Data = Source.Data + At;
        u8 Val = Source.Data[At++];
        switch(Val)
        {
            case '{': {Result.Type = Token_open_brace;} break;
            case '[': {Result.Type = Token_open_bracket;} break;
            case '}': {Result.Type = Token_close_brace;} break;
            case ']': {Result.Type = Token_close_bracket;} break;
            case ',': {Result.Type = Token_comma;} break;
            case ':': {Result.Type = Token_colon;} break;

            case 'f':
            {
                ParseKeyword(Source, &At, CONSTANT_STRING("alse"), Token_false, &Result);
            } break;

            case 'n':
            {
                ParseKeyword(Source, &At, CONSTANT_STRING("ull"), Token_null, &Result);
            } break;

            case 't':
            {
                ParseKeyword(Source, &At, CONSTANT_STRING("rue"), Token_true, &Result);
            } break;

            case '"':
            {
                Result.Type = Token_string_literal;

                u64 StringStart = At;

                while(IsInBounds(Source, At) && (Source.Data[At] != '"'))
                {
                    if(IsInBounds(Source, (At + 1)) &&
                       (Source.Data[At] == '\\') &&
                       (Source.Data[At + 1] == '"'))
                    {
                        // NOTE(casey): Skip escaped quotation marks
                        ++At;
                    }

                    ++At;
                }

                Result.Value.Data = Source.Data + StringStart;
                Result.Value.Count = At - StringStart;
                if(IsInBounds(Source, At))
                {
                    ++At;
                }
            } break;

            case '-':
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            {
                u64 Start = At - 1;
                Result.Type = Token_number;

                // NOTE(casey): Move past a leading negative sign if one exists
                if((Val == '-') && IsInBounds(Source, At))
                {
                    Val = Source.Data[At++];
                }

                // NOTE(casey): If the leading digit wasn't 0, parse any digits before the decimal point
                if(Val != '0')
                {
                    while(IsJSONDigit(Source, At))
                    {
                        ++At;
                    }
                }

                // NOTE(casey): If there is a decimal point, parse any digits after the decimal point
                if(IsInBounds(Source, At) && (Source.Data[At] == '.'))
                {
                    ++At;
                    while(IsJSONDigit(Source, At))
                    {
                        ++At;
                    }
                }

                // NOTE(casey): If it's in scientific notation, parse any digits after the "e"
                if(IsInBounds(Source, At) && ((Source.Data[At] == 'e') || (Source.Data[At] == 'E')))
                {
                    ++At;

                    if(IsInBounds(Source, At) && ((Source.Data[At] == '+') || (Source.Data[At] == '-')))
                    {
                        ++At;
                    }

                    while(IsJSONDigit(Source, At))
                    {
                        ++At;
                    }
                }

                Result.Value.Count = At - Start;
            } break;

            default:
            {
            } break;
        }
    }

    Parser->At = At;

    return Result;
}

static json_element *ParseJSONList(json_parser *Parser, json_token_type EndType, b32 HasLabels);
static json_element *ParseJSONElement(json_parser *Parser, buffer Label, json_token Value)
{
    TimeFunction;

    b32 Valid = true;

    json_element *SubElement = 0;
    if(Value.Type == Token_open_bracket)
    {
        SubElement = ParseJSONList(Parser, Token_close_bracket, false);
    }
    else if(Value.Type == Token_open_brace)
    {
        SubElement = ParseJSONList(Parser, Token_close_brace, true);
    }
    else if((Value.Type == Token_string_literal) ||
            (Value.Type == Token_true) ||
            (Value.Type == Token_false) ||
            (Value.Type == Token_null) ||
            (Value.Type == Token_number))
    {
        // NOTE(casey): Nothing to do here, since there is no additional data
    }
    else
    {
        Valid = false;
    }

    json_element *Result = 0;

    if(Valid)
    {
        Result = (json_element *)malloc(sizeof(json_element));
        Result->Label = Label;
        Result->Value = Value.Value;
        Result->FirstSubElement = SubElement;
        Result->NextSibling = 0;
    }

    return Result;
}

static json_element *ParseJSONList(json_parser *Parser, json_token_type EndType, b32 HasLabels)
{
    json_element *FirstElement = {};
    json_element *LastElement = {};

    while(IsParsing(Parser))
    {
        buffer Label = {};
        json_token Value = GetJSONToken(Parser);
        if(HasLabels)
        {
            if(Value.Type == Token_string_literal)
            {
                Label = Value.Value;

                json_token Colon = GetJSONToken(Parser);
                if(Colon.Type == Token_colon)
                {
                    Value = GetJSONToken(Parser);
                }
                else
                {
                    Error(Parser, Colon, "Expected colon after field name");
                }
            }
            else if(Value.Type != EndType)
            {
                Error(Parser, Value, "Unexpected token in JSON");
            }
        }

        json_element *Element = ParseJSONElement(Parser, Label, Value);
        if(Element)
        {
            LastElement = (LastElement ? LastElement->NextSibling : FirstElement) = Element;
        }
        else if(Value.Type == EndType)
        {
            break;
        }
        else
        {
            Error(Parser, Value, "Unexpected token in JSON");
        }

        json_token Comma = GetJSONToken(Parser);
        if(Comma.Type == EndType)
        {
            break;
        }
        else if(Comma.Type != Token_comma)
        {
            Error(Parser, Comma, "Unexpected token in JSON");
        }
    }

    return FirstElement;
}

static json_element *ParseJSON(buffer InputJSON)
{
    json_parser Parser = {};
    Parser.Source = InputJSON;

    json_element *Result = ParseJSONElement(&Parser, {}, GetJSONToken(&Parser));
    return Result;
}

static void FreeJSON(json_element *Element)
{
    while(Element)
    {
        json_element *FreeElement = Element;
        Element = Element->NextSibling;

        FreeJSON(FreeElement->FirstSubElement);
        free(FreeElement);
    }
}

static json_element *LookupElement(json_element *Object, buffer ElementName)
{
    json_element *Result = 0;

    if(Object)
    {
        for(json_element *Search = Object->FirstSubElement; Search; Search = Search->NextSibling)
        {
            if(AreEqual(Search->Label, ElementName))
            {
                Result = Search;
                break;
            }
        }
    }

    return Result;
}

static f64 ConvertJSONSign(buffer Source, u64 *AtResult)
{
    u64 At = *AtResult;

    f64 Result = 1.0;
    if(IsInBounds(Source, At) && (Source.Data[At] == '-'))
    {
        Result = -1.0;
        ++At;
    }

    *AtResult = At;

    return Result;
}

static f64 ConvertJSONNumber(buffer Source, u64 *AtResult)
{
    u64 At = *AtResult;

    f64 Result = 0.0;
    while(IsInBounds(Source, At))
    {
        u8 Char = Source.Data[At] - (u8)'0';
        if(Char < 10)
        {
            Result = 10.0*Result + (f64)Char;
            ++At;
        }
        else
        {
            break;
        }
    }

    *AtResult = At;

    return Result;
}

static f64 ConvertElementToF64(json_element *Object, buffer ElementName)
{
    f64 Result = 0.0;

    json_element *Element = LookupElement(Object, ElementName);
    if(Element)
    {
        buffer Source = Element->Value;
        u64 At = 0;

        f64 Sign = ConvertJSONSign(Source, &At);
        f64 Number = ConvertJSONNumber(Source, &At);

        if(IsInBounds(Source, At) && (Source.Data[At] == '.'))
        {
            ++At;
            f64 C = 1.0 / 10.0;
            while(IsInBounds(Source, At))
            {
                u8 Char = Source.Data[At] - (u8)'0';
                if(Char < 10)
                {
                    Number = Number + C*(f64)Char;
                    C *= 1.0 / 10.0;
                    ++At;
                }
                else
                {
                    break;
                }
            }
        }

        if(IsInBounds(Source, At) && ((Source.Data[At] == 'e') || (Source.Data[At] == 'E')))
        {
            ++At;
            if(IsInBounds(Source, At) && (Source.Data[At] == '+'))
            {
                ++At;
            }

            f64 ExponentSign = ConvertJSONSign(Source, &At);
            f64 Exponent = ExponentSign*ConvertJSONNumber(Source, &At);
            Number *= pow(10.0, Exponent);
        }

        Result = Sign*Number;
    }

    return Result;
}

static u64 ParseHaversinePairs(buffer InputJSON, u64 MaxPairCount, haversine_pair *Pairs)
{
    TimeFunction;

    u64 PairCount = 0;

    json_element *JSON = ParseJSON(InputJSON);

    json_element *PairsArray = LookupElement(JSON, CONSTANT_STRING("pairs"));
    if(PairsArray)
    {
        TimeBlock("Lookup and Convert");
        for(json_element *Element = PairsArray->FirstSubElement;
            Element && (PairCount < MaxPairCount);
            Element = Element->NextSibling)
        {
            haversine_pair *Pair = Pairs + PairCount++;

            Pair->X0 = ConvertElementToF64(Element, CONSTANT_STRING("x0"));
            Pair->Y0 = ConvertElementToF64(Element, CONSTANT_STRING("y0"));
            Pair->X1 = ConvertElementToF64(Element, CONSTANT_STRING("x1"));
            Pair->Y1 = ConvertElementToF64(Element, CONSTANT_STRING("y1"));
        }
    }

    FreeJSON(JSON);

    return PairCount;
}
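A small usage sketch (not part of the commit) of the parse/lookup/convert API above, assuming the same include and typedef context as the main listings. The buffer fields (Data, Count) and CONSTANT_STRING come from listing_0068_buffer.cpp; the JSON text and function name here are hypothetical test input.

// Hypothetical example, not one of the course listings.
static void ParseOneHardcodedPair(void)
{
    char JSONText[] = "{\"pairs\":[{\"x0\":1.5,\"y0\":-2.25,\"x1\":3.0,\"y1\":4.125}]}";

    buffer Input = {};
    Input.Data = (u8 *)JSONText;
    Input.Count = sizeof(JSONText) - 1;   // exclude the terminating zero

    haversine_pair Pair = {};
    u64 PairCount = ParseHaversinePairs(Input, 1, &Pair);   // expected: 1 pair, X0 == 1.5, Y0 == -2.25
    printf("Parsed %llu pair(s): x0=%f y0=%f x1=%f y1=%f\n",
           PairCount, Pair.X0, Pair.Y0, Pair.X1, Pair.Y1);
}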
part2/listing_0084_recursive_timed_haversine_main.cpp (new file, 182 lines)
@@ -0,0 +1,182 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 84
   ======================================================================== */

/* NOTE(casey): _CRT_SECURE_NO_WARNINGS is here because otherwise we cannot
   call fopen(). If we replace fopen() with fopen_s() to avoid the warning,
   then the code doesn't compile on Linux anymore, since fopen_s() does not
   exist there.

   What exactly the CRT maintainers were thinking when they made this choice,
   I have no idea. */
#define _CRT_SECURE_NO_WARNINGS

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <math.h>
#include <sys/stat.h>

typedef uint8_t u8;
typedef uint32_t u32;
typedef uint64_t u64;

typedef int32_t b32;

typedef float f32;
typedef double f64;

#define ArrayCount(Array) (sizeof(Array)/sizeof((Array)[0]))

struct haversine_pair
{
    f64 X0, Y0;
    f64 X1, Y1;
};

#include "listing_0081_nesting_profiler.cpp"
#include "listing_0065_haversine_formula.cpp"
#include "listing_0068_buffer.cpp"
#include "listing_0083_recursive_timed_lookup_json_parser.cpp"

static buffer ReadEntireFile(char *FileName)
{
    TimeFunction;

    buffer Result = {};

    FILE *File = fopen(FileName, "rb");
    if(File)
    {
#if _WIN32
        struct __stat64 Stat;
        _stat64(FileName, &Stat);
#else
        struct stat Stat;
        stat(FileName, &Stat);
#endif

        Result = AllocateBuffer(Stat.st_size);
        if(Result.Data)
        {
            if(fread(Result.Data, Result.Count, 1, File) != 1)
            {
                fprintf(stderr, "ERROR: Unable to read \"%s\".\n", FileName);
                FreeBuffer(&Result);
            }
        }
    }
    else
    {
        fprintf(stderr, "ERROR: Unable to open \"%s\".\n", FileName);
    }

    return Result;
}

static f64 SumHaversineDistances(u64 PairCount, haversine_pair *Pairs)
{
    TimeFunction;

    f64 Sum = 0;

    f64 SumCoef = 1 / (f64)PairCount;
    for(u64 PairIndex = 0; PairIndex < PairCount; ++PairIndex)
    {
        haversine_pair Pair = Pairs[PairIndex];
        f64 EarthRadius = 6372.8;
        f64 Dist = ReferenceHaversine(Pair.X0, Pair.Y0, Pair.X1, Pair.Y1, EarthRadius);
        Sum += SumCoef*Dist;
    }

    return Sum;
}

int main(int ArgCount, char **Args)
{
    BeginProfile();

    int Result = 1;

    if((ArgCount == 2) || (ArgCount == 3))
    {
        buffer InputJSON = ReadEntireFile(Args[1]);

        u32 MinimumJSONPairEncoding = 6*4;
        u64 MaxPairCount = InputJSON.Count / MinimumJSONPairEncoding;
        if(MaxPairCount)
        {
            buffer ParsedValues = AllocateBuffer(MaxPairCount * sizeof(haversine_pair));
            if(ParsedValues.Count)
            {
                haversine_pair *Pairs = (haversine_pair *)ParsedValues.Data;

                u64 PairCount = ParseHaversinePairs(InputJSON, MaxPairCount, Pairs);
                f64 Sum = SumHaversineDistances(PairCount, Pairs);

                Result = 0;

                fprintf(stdout, "Input size: %llu\n", InputJSON.Count);
                fprintf(stdout, "Pair count: %llu\n", PairCount);
                fprintf(stdout, "Haversine sum: %.16f\n", Sum);

                if(ArgCount == 3)
                {
                    buffer AnswersF64 = ReadEntireFile(Args[2]);
                    if(AnswersF64.Count >= sizeof(f64))
                    {
                        f64 *AnswerValues = (f64 *)AnswersF64.Data;

                        fprintf(stdout, "\nValidation:\n");

                        u64 RefAnswerCount = (AnswersF64.Count - sizeof(f64)) / sizeof(f64);
                        if(PairCount != RefAnswerCount)
                        {
                            fprintf(stdout, "FAILED - pair count doesn't match %llu.\n", RefAnswerCount);
                        }

                        f64 RefSum = AnswerValues[RefAnswerCount];
                        fprintf(stdout, "Reference sum: %.16f\n", RefSum);
                        fprintf(stdout, "Difference: %.16f\n", Sum - RefSum);

                        fprintf(stdout, "\n");
                    }
                }
            }

            FreeBuffer(&ParsedValues);
        }
        else
        {
            fprintf(stderr, "ERROR: Malformed input JSON\n");
        }

        FreeBuffer(&InputJSON);
    }
    else
    {
        fprintf(stderr, "Usage: %s [haversine_input.json]\n", Args[0]);
        fprintf(stderr, "       %s [haversine_input.json] [answers.f64]\n", Args[0]);
    }

    if(Result == 0)
    {
        EndAndPrintProfile();
    }

    return Result;
}

static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");
part2/listing_0085_recursive_profiler.cpp (new file, 125 lines)
@@ -0,0 +1,125 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 85
   ======================================================================== */

#include "listing_0074_platform_metrics.cpp"

struct profile_anchor
{
    u64 TSCElapsed;
    u64 TSCElapsedChildren;
    u64 TSCElapsedAtRoot;
    u64 HitCount;
    char const *Label;
};

struct profiler
{
    profile_anchor Anchors[4096];

    u64 StartTSC;
    u64 EndTSC;
};
static profiler GlobalProfiler;
static u32 GlobalProfilerParent;

struct profile_block
{
    profile_block(char const *Label_, u32 AnchorIndex_)
    {
        ParentIndex = GlobalProfilerParent;

        AnchorIndex = AnchorIndex_;
        Label = Label_;

        profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
        OldTSCElapsedAtRoot = Anchor->TSCElapsedAtRoot;

        GlobalProfilerParent = AnchorIndex;
        StartTSC = ReadCPUTimer();
    }

    ~profile_block(void)
    {
        u64 Elapsed = ReadCPUTimer() - StartTSC;
        GlobalProfilerParent = ParentIndex;

        profile_anchor *Parent = GlobalProfiler.Anchors + ParentIndex;
        profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;

        Parent->TSCElapsedChildren += Elapsed;
        Anchor->TSCElapsedAtRoot = OldTSCElapsedAtRoot + Elapsed;
        Anchor->TSCElapsed += Elapsed;
        ++Anchor->HitCount;

        /* NOTE(casey): This write happens every time solely because there is no
           straightforward way in C++ to have the same ease-of-use. In a better programming
           language, it would be simple to have the anchor points gathered and labeled at compile
           time, and this repetitive write would be eliminated. */
        Anchor->Label = Label;
    }

    char const *Label;
    u64 OldTSCElapsedAtRoot;
    u64 StartTSC;
    u32 ParentIndex;
    u32 AnchorIndex;
};

#define NameConcat2(A, B) A##B
#define NameConcat(A, B) NameConcat2(A, B)
#define TimeBlock(Name) profile_block NameConcat(Block, __LINE__)(Name, __COUNTER__ + 1);
#define TimeFunction TimeBlock(__func__)

static void PrintTimeElapsed(u64 TotalTSCElapsed, profile_anchor *Anchor)
{
    u64 TSCElapsedSelf = Anchor->TSCElapsed - Anchor->TSCElapsedChildren;
    f64 Percent = 100.0 * ((f64)TSCElapsedSelf / (f64)TotalTSCElapsed);
    printf("  %s[%llu]: %llu (%.2f%%", Anchor->Label, Anchor->HitCount, TSCElapsedSelf, Percent);
    if(Anchor->TSCElapsedAtRoot != TSCElapsedSelf)
    {
        f64 PercentWithChildren = 100.0 * ((f64)Anchor->TSCElapsedAtRoot / (f64)TotalTSCElapsed);
        printf(", %.2f%% w/children", PercentWithChildren);
    }
    printf(")\n");
}

static void BeginProfile(void)
{
    GlobalProfiler.StartTSC = ReadCPUTimer();
}

static void EndAndPrintProfile()
{
    GlobalProfiler.EndTSC = ReadCPUTimer();
    u64 CPUFreq = EstimateCPUTimerFreq();

    u64 TotalCPUElapsed = GlobalProfiler.EndTSC - GlobalProfiler.StartTSC;

    if(CPUFreq)
    {
        printf("\nTotal time: %0.4fms (CPU freq %llu)\n", 1000.0 * (f64)TotalCPUElapsed / (f64)CPUFreq, CPUFreq);
    }

    for(u32 AnchorIndex = 0; AnchorIndex < ArrayCount(GlobalProfiler.Anchors); ++AnchorIndex)
    {
        profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
        if(Anchor->TSCElapsed)
        {
            PrintTimeElapsed(TotalCPUElapsed, Anchor);
        }
    }
}
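The point of TSCElapsedAtRoot above: when a block re-enters itself, every invocation still adds its elapsed time to TSCElapsed, but each destructor restores TSCElapsedAtRoot from the value saved at its own open before adding, so only the outermost live invocation's inclusive time survives in the "w/children" figure. A hedged sketch (not part of the commit, names hypothetical) of the recursive case this is meant to handle, assuming the same preamble and BeginProfile/EndAndPrintProfile wrapping as the main listings:

// Hypothetical example, not one of the course listings.
// With the listing_0081 profiler, this anchor's child/inclusive accounting
// would count every nested level; with this listing, only the outermost
// invocation of CountDown contributes to the w/children percentage.
static void CountDown(int Depth)
{
    TimeFunction;               // the same anchor for every recursion level
    if(Depth > 0)
    {
        CountDown(Depth - 1);   // re-enters the anchor while it is still open
    }
}

int main(void)
{
    BeginProfile();
    CountDown(16);
    EndAndPrintProfile();
    return 0;
}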
part2/listing_0086_recursive_haversine_main.cpp (new file, 182 lines)
@@ -0,0 +1,182 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 86
   ======================================================================== */

/* NOTE(casey): _CRT_SECURE_NO_WARNINGS is here because otherwise we cannot
   call fopen(). If we replace fopen() with fopen_s() to avoid the warning,
   then the code doesn't compile on Linux anymore, since fopen_s() does not
   exist there.

   What exactly the CRT maintainers were thinking when they made this choice,
   I have no idea. */
#define _CRT_SECURE_NO_WARNINGS

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <math.h>
#include <sys/stat.h>

typedef uint8_t u8;
typedef uint32_t u32;
typedef uint64_t u64;

typedef int32_t b32;

typedef float f32;
typedef double f64;

#define ArrayCount(Array) (sizeof(Array)/sizeof((Array)[0]))

struct haversine_pair
{
    f64 X0, Y0;
    f64 X1, Y1;
};

#include "listing_0085_recursive_profiler.cpp"
#include "listing_0065_haversine_formula.cpp"
#include "listing_0068_buffer.cpp"
#include "listing_0083_recursive_timed_lookup_json_parser.cpp"

static buffer ReadEntireFile(char *FileName)
{
    TimeFunction;

    buffer Result = {};

    FILE *File = fopen(FileName, "rb");
    if(File)
    {
#if _WIN32
        struct __stat64 Stat;
        _stat64(FileName, &Stat);
#else
        struct stat Stat;
        stat(FileName, &Stat);
#endif

        Result = AllocateBuffer(Stat.st_size);
        if(Result.Data)
        {
            if(fread(Result.Data, Result.Count, 1, File) != 1)
            {
                fprintf(stderr, "ERROR: Unable to read \"%s\".\n", FileName);
                FreeBuffer(&Result);
            }
        }
    }
    else
    {
        fprintf(stderr, "ERROR: Unable to open \"%s\".\n", FileName);
    }

    return Result;
}

static f64 SumHaversineDistances(u64 PairCount, haversine_pair *Pairs)
{
    TimeFunction;

    f64 Sum = 0;

    f64 SumCoef = 1 / (f64)PairCount;
    for(u64 PairIndex = 0; PairIndex < PairCount; ++PairIndex)
    {
        haversine_pair Pair = Pairs[PairIndex];
        f64 EarthRadius = 6372.8;
        f64 Dist = ReferenceHaversine(Pair.X0, Pair.Y0, Pair.X1, Pair.Y1, EarthRadius);
        Sum += SumCoef*Dist;
    }

    return Sum;
}

int main(int ArgCount, char **Args)
{
    BeginProfile();

    int Result = 1;

    if((ArgCount == 2) || (ArgCount == 3))
    {
        buffer InputJSON = ReadEntireFile(Args[1]);

        u32 MinimumJSONPairEncoding = 6*4;
        u64 MaxPairCount = InputJSON.Count / MinimumJSONPairEncoding;
        if(MaxPairCount)
        {
            buffer ParsedValues = AllocateBuffer(MaxPairCount * sizeof(haversine_pair));
            if(ParsedValues.Count)
            {
                haversine_pair *Pairs = (haversine_pair *)ParsedValues.Data;

                u64 PairCount = ParseHaversinePairs(InputJSON, MaxPairCount, Pairs);
                f64 Sum = SumHaversineDistances(PairCount, Pairs);

                Result = 0;

                fprintf(stdout, "Input size: %llu\n", InputJSON.Count);
                fprintf(stdout, "Pair count: %llu\n", PairCount);
                fprintf(stdout, "Haversine sum: %.16f\n", Sum);

                if(ArgCount == 3)
                {
                    buffer AnswersF64 = ReadEntireFile(Args[2]);
                    if(AnswersF64.Count >= sizeof(f64))
                    {
                        f64 *AnswerValues = (f64 *)AnswersF64.Data;

                        fprintf(stdout, "\nValidation:\n");

                        u64 RefAnswerCount = (AnswersF64.Count - sizeof(f64)) / sizeof(f64);
                        if(PairCount != RefAnswerCount)
                        {
                            fprintf(stdout, "FAILED - pair count doesn't match %llu.\n", RefAnswerCount);
                        }

                        f64 RefSum = AnswerValues[RefAnswerCount];
                        fprintf(stdout, "Reference sum: %.16f\n", RefSum);
                        fprintf(stdout, "Difference: %.16f\n", Sum - RefSum);

                        fprintf(stdout, "\n");
                    }
                }
            }

            FreeBuffer(&ParsedValues);
        }
        else
        {
            fprintf(stderr, "ERROR: Malformed input JSON\n");
        }

        FreeBuffer(&InputJSON);
    }
    else
    {
        fprintf(stderr, "Usage: %s [haversine_input.json]\n", Args[0]);
        fprintf(stderr, "       %s [haversine_input.json] [answers.f64]\n", Args[0]);
    }

    if(Result == 0)
    {
        EndAndPrintProfile();
    }

    return Result;
}

static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");
part2/listing_0087_simplified_profiler.cpp (new file, 123 lines)
@@ -0,0 +1,123 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 87
   ======================================================================== */

#include "listing_0074_platform_metrics.cpp"

struct profile_anchor
{
    u64 TSCElapsedExclusive; // NOTE(casey): Does NOT include children
    u64 TSCElapsedInclusive; // NOTE(casey): DOES include children
    u64 HitCount;
    char const *Label;
};

struct profiler
{
    profile_anchor Anchors[4096];

    u64 StartTSC;
    u64 EndTSC;
};
static profiler GlobalProfiler;
static u32 GlobalProfilerParent;

struct profile_block
{
    profile_block(char const *Label_, u32 AnchorIndex_)
    {
        ParentIndex = GlobalProfilerParent;

        AnchorIndex = AnchorIndex_;
        Label = Label_;

        profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
        OldTSCElapsedInclusive = Anchor->TSCElapsedInclusive;

        GlobalProfilerParent = AnchorIndex;
        StartTSC = ReadCPUTimer();
    }

    ~profile_block(void)
    {
        u64 Elapsed = ReadCPUTimer() - StartTSC;
        GlobalProfilerParent = ParentIndex;

        profile_anchor *Parent = GlobalProfiler.Anchors + ParentIndex;
        profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;

        Parent->TSCElapsedExclusive -= Elapsed;
        Anchor->TSCElapsedExclusive += Elapsed;
        Anchor->TSCElapsedInclusive = OldTSCElapsedInclusive + Elapsed;
        ++Anchor->HitCount;

        /* NOTE(casey): This write happens every time solely because there is no
           straightforward way in C++ to have the same ease-of-use. In a better programming
           language, it would be simple to have the anchor points gathered and labeled at compile
           time, and this repetitive write would be eliminated. */
        Anchor->Label = Label;
    }

    char const *Label;
    u64 OldTSCElapsedInclusive;
    u64 StartTSC;
    u32 ParentIndex;
    u32 AnchorIndex;
};

#define NameConcat2(A, B) A##B
#define NameConcat(A, B) NameConcat2(A, B)
#define TimeBlock(Name) profile_block NameConcat(Block, __LINE__)(Name, __COUNTER__ + 1);
#define TimeFunction TimeBlock(__func__)

static void PrintTimeElapsed(u64 TotalTSCElapsed, profile_anchor *Anchor)
{
    f64 Percent = 100.0 * ((f64)Anchor->TSCElapsedExclusive / (f64)TotalTSCElapsed);
    printf("  %s[%llu]: %llu (%.2f%%", Anchor->Label, Anchor->HitCount, Anchor->TSCElapsedExclusive, Percent);
    if(Anchor->TSCElapsedInclusive != Anchor->TSCElapsedExclusive)
    {
        f64 PercentWithChildren = 100.0 * ((f64)Anchor->TSCElapsedInclusive / (f64)TotalTSCElapsed);
        printf(", %.2f%% w/children", PercentWithChildren);
    }
    printf(")\n");
}

static void BeginProfile(void)
{
    GlobalProfiler.StartTSC = ReadCPUTimer();
}

static void EndAndPrintProfile()
{
    GlobalProfiler.EndTSC = ReadCPUTimer();
    u64 CPUFreq = EstimateCPUTimerFreq();

    u64 TotalCPUElapsed = GlobalProfiler.EndTSC - GlobalProfiler.StartTSC;

    if(CPUFreq)
    {
        printf("\nTotal time: %0.4fms (CPU freq %llu)\n", 1000.0 * (f64)TotalCPUElapsed / (f64)CPUFreq, CPUFreq);
    }

    for(u32 AnchorIndex = 0; AnchorIndex < ArrayCount(GlobalProfiler.Anchors); ++AnchorIndex)
    {
        profile_anchor *Anchor = GlobalProfiler.Anchors + AnchorIndex;
        if(Anchor->TSCElapsedInclusive)
        {
            PrintTimeElapsed(TotalCPUElapsed, Anchor);
        }
    }
}
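The bookkeeping above can be read as a small invariant: each close adds Elapsed to its own TSCElapsedExclusive and subtracts the same Elapsed from its parent's, so once every block has closed, a parent's exclusive total is its own elapsed time minus whatever its children spent, while TSCElapsedInclusive keeps the root invocation's full span. A hedged sketch (not part of the commit, names and percentages hypothetical) of what the report then distinguishes, assuming the usual preamble:

// Hypothetical example, not one of the course listings.
static void Child(void)
{
    TimeFunction;   // suppose this block accounts for ~30% of the run
}

static void Parent(void)
{
    TimeFunction;   // suppose the whole block, including Child, is ~50% of the run
    Child();
}

int main(void)
{
    BeginProfile();
    Parent();
    EndAndPrintProfile();
    // Expected report shape (illustrative numbers only):
    //   Parent[1]: ... (20.00%, 50.00% w/children)   exclusive vs. inclusive
    //   Child[1]:  ... (30.00%)                      no children, so one figure
    return 0;
}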
part2/listing_0088_simplified_haversine_main.cpp (new file, 182 lines)
@@ -0,0 +1,182 @@

/* ========================================================================

   (C) Copyright 2023 by Molly Rocket, Inc., All Rights Reserved.

   This software is provided 'as-is', without any express or implied
   warranty. In no event will the authors be held liable for any damages
   arising from the use of this software.

   Please see https://computerenhance.com for more information

   ======================================================================== */

/* ========================================================================
   LISTING 88
   ======================================================================== */

/* NOTE(casey): _CRT_SECURE_NO_WARNINGS is here because otherwise we cannot
   call fopen(). If we replace fopen() with fopen_s() to avoid the warning,
   then the code doesn't compile on Linux anymore, since fopen_s() does not
   exist there.

   What exactly the CRT maintainers were thinking when they made this choice,
   I have no idea. */
#define _CRT_SECURE_NO_WARNINGS

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <math.h>
#include <sys/stat.h>

typedef uint8_t u8;
typedef uint32_t u32;
typedef uint64_t u64;

typedef int32_t b32;

typedef float f32;
typedef double f64;

#define ArrayCount(Array) (sizeof(Array)/sizeof((Array)[0]))

struct haversine_pair
{
    f64 X0, Y0;
    f64 X1, Y1;
};

#include "listing_0087_simplified_profiler.cpp"
#include "listing_0065_haversine_formula.cpp"
#include "listing_0068_buffer.cpp"
#include "listing_0083_recursive_timed_lookup_json_parser.cpp"

static buffer ReadEntireFile(char *FileName)
{
    TimeFunction;

    buffer Result = {};

    FILE *File = fopen(FileName, "rb");
    if(File)
    {
#if _WIN32
        struct __stat64 Stat;
        _stat64(FileName, &Stat);
#else
        struct stat Stat;
        stat(FileName, &Stat);
#endif

        Result = AllocateBuffer(Stat.st_size);
        if(Result.Data)
        {
            if(fread(Result.Data, Result.Count, 1, File) != 1)
            {
                fprintf(stderr, "ERROR: Unable to read \"%s\".\n", FileName);
                FreeBuffer(&Result);
            }
        }
    }
    else
    {
        fprintf(stderr, "ERROR: Unable to open \"%s\".\n", FileName);
    }

    return Result;
}

static f64 SumHaversineDistances(u64 PairCount, haversine_pair *Pairs)
{
    TimeFunction;

    f64 Sum = 0;

    f64 SumCoef = 1 / (f64)PairCount;
    for(u64 PairIndex = 0; PairIndex < PairCount; ++PairIndex)
    {
        haversine_pair Pair = Pairs[PairIndex];
        f64 EarthRadius = 6372.8;
        f64 Dist = ReferenceHaversine(Pair.X0, Pair.Y0, Pair.X1, Pair.Y1, EarthRadius);
        Sum += SumCoef*Dist;
    }

    return Sum;
}

int main(int ArgCount, char **Args)
{
    BeginProfile();

    int Result = 1;

    if((ArgCount == 2) || (ArgCount == 3))
    {
        buffer InputJSON = ReadEntireFile(Args[1]);

        u32 MinimumJSONPairEncoding = 6*4;
        u64 MaxPairCount = InputJSON.Count / MinimumJSONPairEncoding;
        if(MaxPairCount)
        {
            buffer ParsedValues = AllocateBuffer(MaxPairCount * sizeof(haversine_pair));
            if(ParsedValues.Count)
            {
                haversine_pair *Pairs = (haversine_pair *)ParsedValues.Data;

                u64 PairCount = ParseHaversinePairs(InputJSON, MaxPairCount, Pairs);
                f64 Sum = SumHaversineDistances(PairCount, Pairs);

                Result = 0;

                fprintf(stdout, "Input size: %llu\n", InputJSON.Count);
                fprintf(stdout, "Pair count: %llu\n", PairCount);
                fprintf(stdout, "Haversine sum: %.16f\n", Sum);

                if(ArgCount == 3)
                {
                    buffer AnswersF64 = ReadEntireFile(Args[2]);
                    if(AnswersF64.Count >= sizeof(f64))
                    {
                        f64 *AnswerValues = (f64 *)AnswersF64.Data;

                        fprintf(stdout, "\nValidation:\n");

                        u64 RefAnswerCount = (AnswersF64.Count - sizeof(f64)) / sizeof(f64);
                        if(PairCount != RefAnswerCount)
                        {
                            fprintf(stdout, "FAILED - pair count doesn't match %llu.\n", RefAnswerCount);
                        }

                        f64 RefSum = AnswerValues[RefAnswerCount];
                        fprintf(stdout, "Reference sum: %.16f\n", RefSum);
                        fprintf(stdout, "Difference: %.16f\n", Sum - RefSum);

                        fprintf(stdout, "\n");
                    }
                }
            }

            FreeBuffer(&ParsedValues);
        }
        else
        {
            fprintf(stderr, "ERROR: Malformed input JSON\n");
        }

        FreeBuffer(&InputJSON);
    }
    else
    {
        fprintf(stderr, "Usage: %s [haversine_input.json]\n", Args[0]);
        fprintf(stderr, "       %s [haversine_input.json] [answers.f64]\n", Args[0]);
    }

    if(Result == 0)
    {
        EndAndPrintProfile();
    }

    return Result;
}

static_assert(__COUNTER__ < ArrayCount(profiler::Anchors), "Number of profile points exceeds size of profiler::Anchors array");