summary refs log tree commit diff
path: root/src/Http_Web.c
diff options
context:
space:
mode:
author    WlodekM <[email protected]>  2024-06-16 10:35:45 +0300
committer WlodekM <[email protected]>  2024-06-16 10:35:45 +0300
commitabef6da56913f1c55528103e60a50451a39628b1 (patch)
treeb3c8092471ecbb73e568cd0d336efa0e7871ee8d /src/Http_Web.c
initial commit
Diffstat (limited to 'src/Http_Web.c')
-rw-r--r--  src/Http_Web.c  148
1 file changed, 148 insertions, 0 deletions
diff --git a/src/Http_Web.c b/src/Http_Web.c
new file mode 100644
index 0000000..17da0a1
--- /dev/null
+++ b/src/Http_Web.c
@@ -0,0 +1,148 @@
+#include "Core.h"
+#ifdef CC_BUILD_WEB
+#include "_HttpBase.h"
+#include <emscripten/emscripten.h>
+#include "Errors.h"
+extern int interop_DownloadAsync(const char* url, int method, int reqID);
+extern int interop_IsHttpsOnly(void);
+static struct RequestList workingReqs, queuedReqs;
+static cc_uint64 startTime;
+
+
+/*########################################################################################################################*
+*----------------------------------------------------Http public api------------------------------------------------------*
+*#########################################################################################################################*/
+/* Retrieves the finished request with the given ID, copying it into item */
+/*  and removing it from the processed list. Returns false when not found. */
+cc_bool Http_GetResult(int reqID, struct HttpRequest* item) {
+	int idx = RequestList_Find(&processedReqs, reqID);
+	if (idx < 0) return false;
+
+	*item = processedReqs.entries[idx];
+	RequestList_RemoveAt(&processedReqs, idx);
+	return true;
+}
+
+/* Outputs the ID and progress of the currently executing request. */
+/* Stubbed on the web backend - always reports that none is active. */
+cc_bool Http_GetCurrent(int* reqID, int* progress) {
+	/* TODO: Stubbed as this isn't required at the moment */
+	/* Zero BOTH out parameters - previously *reqID was left unwritten, */
+	/*  so a caller ignoring the return value would read garbage */
+	*reqID    = 0;
+	*progress = 0;
+	return false;
+}
+
+/* Returns the download progress of the request with the given ID, */
+/*  or HTTP_PROGRESS_NOT_WORKING_ON when it is not currently in-flight. */
+int Http_CheckProgress(int reqID) {
+	int i = RequestList_Find(&workingReqs, reqID);
+	return i == -1 ? HTTP_PROGRESS_NOT_WORKING_ON : workingReqs.entries[i].progress;
+}
+
+/* Discards all queued and currently in-flight requests. */
+void Http_ClearPending(void) {
+	RequestList_Free(&queuedReqs);
+	RequestList_Free(&workingReqs);
+}
+
+/* Cancels the request with the given ID, removing it from whichever */
+/*  list (queued, in-flight, or processed) it currently resides in. */
+void Http_TryCancel(int reqID) {
+	RequestList_TryFree(&queuedReqs,    reqID);
+	RequestList_TryFree(&workingReqs,   reqID);
+	RequestList_TryFree(&processedReqs, reqID);
+}
+
+
+/*########################################################################################################################*
+*----------------------------------------------------Emscripten backend---------------------------------------------------*
+*#########################################################################################################################*/
+static cc_bool HttpBackend_DescribeError(cc_result res, cc_string* dst) { 
+	return false; 
+}
+
+#define HTTP_MAX_CONCURRENCY 6
+/* Starts fetching the oldest queued request via the browser (async). */
+/* Does nothing when the queue is empty or when HTTP_MAX_CONCURRENCY */
+/*  fetches are already in-flight; recurses to skip over invalid URLs. */
+static void Http_StartNextDownload(void) {
+	char urlBuffer[URL_MAX_SIZE]; cc_string url;
+	char urlStr[NATIVE_STR_LEN];
+	struct HttpRequest* req;
+	cc_result res;
+
+	/* Avoid making too many requests at once */
+	if (workingReqs.count >= HTTP_MAX_CONCURRENCY) return;
+	if (!queuedReqs.count) return;
+	String_InitArray(url, urlBuffer);
+
+	req = &queuedReqs.entries[0];
+	Http_GetUrl(req, &url);
+	Platform_Log1("Fetching %s", &url);
+
+	/* interop_DownloadAsync takes a C string, so UTF8 encode the URL first */
+	String_EncodeUtf8(urlStr, &url);
+	res = interop_DownloadAsync(urlStr, req->requestType, req->id);
+
+	if (res) {
+		/* interop error code -> ClassiCube error code */
+		if (res == 1) res = ERR_INVALID_DATA_URL;
+		req->result = res;
+		
+		/* Invalid URL so move onto next request */
+		/* NOTE: req points into queuedReqs.entries, so it must be finished */
+		/*  BEFORE RemoveAt shifts the array - keep this statement order */
+		Http_FinishRequest(req);
+		RequestList_RemoveAt(&queuedReqs, 0);
+		Http_StartNextDownload();
+	} else {
+		/* Fetch started - move the request from the queued to working list */
+		RequestList_Append(&workingReqs, req, false);
+		RequestList_RemoveAt(&queuedReqs, 0);
+	}
+}
+
+/* Invoked from the Javascript side as a download progresses. */
+/* Stores progress as a 0-100 percentage on the in-flight request; */
+/*  ignored when total is unknown (0) or the request is not in-flight. */
+EMSCRIPTEN_KEEPALIVE void Http_OnUpdateProgress(int reqID, int read, int total) {
+	int i = RequestList_Find(&workingReqs, reqID);
+	if (i == -1) return;
+	if (!total)  return;
+
+	workingReqs.entries[i].progress = (int)(100.0f * read / total);
+}
+
+/* Invoked from the Javascript side when a fetch completes (success or failure). */
+/* Takes ownership of data (presumably allocated by the JS interop layer - it is */
+/*  freed here when unclaimed, otherwise handed to the request). */
+EMSCRIPTEN_KEEPALIVE void Http_OnFinishedAsync(int reqID, void* data, int len, int status) {
+	struct HttpRequest* req;
+	int idx = RequestList_Find(&workingReqs, reqID);
+
+	if (idx == -1) {
+		/* Shouldn't ever happen, but log a warning anyways */
+		/* No request will consume the buffer, so free it to avoid a leak */
+		Mem_Free(data); 
+		Platform_Log1("Ignoring invalid request (%i)", &reqID);
+	} else {
+		req = &workingReqs.entries[idx];
+		req->data          = data;
+		req->size          = len;
+		req->statusCode    = status;
+		req->contentLength = len;
+
+		/* Usually this happens when denied by CORS */
+		if (!status && !data) req->result = ERR_DOWNLOAD_INVALID;
+
+		if (req->data) Platform_Log1("HTTP returned data: %i bytes", &req->size);
+		/* req points into workingReqs.entries - finish it BEFORE RemoveAt */
+		/*  shifts the array; keep this statement order */
+		Http_FinishRequest(req);
+		RequestList_RemoveAt(&workingReqs, idx);
+	}
+	/* A concurrency slot just freed up, so try to start the next download */
+	Http_StartNextDownload();
+}
+
+/* Adds a req to the list of pending requests, waking up worker thread if needed */
+static void HttpBackend_Add(struct HttpRequest* req, cc_uint8 flags) {
+	/* Add time based query string parameter to bypass browser cache */
+	if (flags & HTTP_FLAG_NOCACHE) {
+		/* url aliases the raw array inside req, so formatting mutates req->url */
+		cc_string url = String_FromRawArray(req->url);
+		/* Split the 64 bit timestamp in two, as String_Format2 takes int*  */
+		int lo = (int)(startTime), hi = (int)(startTime >> 32);
+		/* NOTE(review): assumes the URL has no existing query string */
+		/*  (a second '?' would be appended otherwise) - confirm with callers */
+		String_Format2(&url, "?t=%i%i", &hi, &lo);
+	}
+
+	RequestList_Append(&queuedReqs, req, flags);
+	/* Start fetching immediately if under the concurrency limit */
+	Http_StartNextDownload();
+}
+
+
+/*########################################################################################################################*
+*-----------------------------------------------------Http component------------------------------------------------------*
+*#########################################################################################################################*/
+/* One-time initialisation of the web HTTP backend. */
+static void Http_Init(void) {
+	Http_InitCommon();
+
+	RequestList_Init(&queuedReqs);
+	RequestList_Init(&workingReqs);
+	RequestList_Init(&processedReqs);
+
+	/* If this webpage is https://, browsers deny any http:// downloading */
+	httpsOnly = interop_IsHttpsOnly();
+	/* Remember startup time, used for cache-busting query parameters */
+	startTime = DateTime_CurrentUTC();
+}
+#endif