defer log compression to backend and significantly improve compression (#358)

parent 7abd8426b7
commit 7ed1fbf0aa
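In short: the log parser page previously compressed logs in the browser with LZString before uploading them to Pastebin. With this change the client sends and receives plain text, and the web backend compresses instead: on save it gzips the log, prefixes the original length, and base64-encodes the result before posting to Pastebin; on fetch it reverses the process, passing any content that isn't base64 or doesn't start with the gzip magic bytes through unchanged. A standalone sketch of the paste format follows the controller diff below.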
@@ -1,3 +1,7 @@
+using System;
+using System.IO;
+using System.IO.Compression;
+using System.Text;
 using System.Threading.Tasks;
 using Microsoft.AspNetCore.Mvc;
 using Microsoft.Extensions.Options;
@@ -20,6 +24,10 @@ namespace StardewModdingAPI.Web.Controllers
         /// <summary>The underlying Pastebin client.</summary>
         private readonly PastebinClient PastebinClient;
 
+        /// <summary>The first bytes in a valid gzip file.</summary>
+        /// <remarks>See <a href="https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers"/>.</remarks>
+        private const uint GzipLeadBytes = 0x8b1f;
+
 
         /*********
         ** Public methods
@@ -60,7 +68,9 @@ namespace StardewModdingAPI.Web.Controllers
         [Route("log/fetch/{id}")]
         public async Task<GetPasteResponse> GetAsync(string id)
         {
-            return await this.PastebinClient.GetAsync(id);
+            GetPasteResponse response = await this.PastebinClient.GetAsync(id);
+            response.Content = this.DecompressString(response.Content);
+            return response;
         }
 
         /// <summary>Save raw log data.</summary>
@@ -69,7 +79,79 @@ namespace StardewModdingAPI.Web.Controllers
         [Route("log/save")]
         public async Task<SavePasteResponse> PostAsync([FromBody] string content)
         {
+            content = this.CompressString(content);
             return await this.PastebinClient.PostAsync(content);
         }
+
+
+        /*********
+        ** Private methods
+        *********/
+        /// <summary>Compress a string.</summary>
+        /// <param name="text">The text to compress.</param>
+        /// <remarks>Derived from <a href="https://stackoverflow.com/a/17993002/262123"/>.</remarks>
+        private string CompressString(string text)
+        {
+            // get raw bytes
+            byte[] buffer = Encoding.UTF8.GetBytes(text);
+
+            // compress
+            byte[] compressedData;
+            using (MemoryStream stream = new MemoryStream())
+            {
+                using (GZipStream zipStream = new GZipStream(stream, CompressionLevel.Optimal, leaveOpen: true))
+                    zipStream.Write(buffer, 0, buffer.Length);
+
+                stream.Position = 0;
+                compressedData = new byte[stream.Length];
+                stream.Read(compressedData, 0, compressedData.Length);
+            }
+
+            // prefix length
+            var zipBuffer = new byte[compressedData.Length + 4];
+            Buffer.BlockCopy(compressedData, 0, zipBuffer, 4, compressedData.Length);
+            Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, zipBuffer, 0, 4);
+
+            // return string representation
+            return Convert.ToBase64String(zipBuffer);
+        }
+
+        /// <summary>Decompress a string.</summary>
+        /// <param name="rawText">The compressed text.</param>
+        /// <remarks>Derived from <a href="https://stackoverflow.com/a/17993002/262123"/>.</remarks>
+        private string DecompressString(string rawText)
+        {
+            // get raw bytes
+            byte[] zipBuffer;
+            try
+            {
+                zipBuffer = Convert.FromBase64String(rawText);
+            }
+            catch
+            {
+                return rawText; // not valid base64, wasn't compressed by the log parser
+            }
+
+            // skip if not gzip
+            if (BitConverter.ToUInt16(zipBuffer, 4) != LogParserController.GzipLeadBytes)
+                return rawText;
+
+            // decompress
+            using (MemoryStream memoryStream = new MemoryStream())
+            {
+                // read length prefix
+                int dataLength = BitConverter.ToInt32(zipBuffer, 0);
+                memoryStream.Write(zipBuffer, 4, zipBuffer.Length - 4);
+
+                // read data
+                var buffer = new byte[dataLength];
+                memoryStream.Position = 0;
+                using (GZipStream gZipStream = new GZipStream(memoryStream, CompressionMode.Decompress))
+                    gZipStream.Read(buffer, 0, buffer.Length);
+
+                // return original string
+                return Encoding.UTF8.GetString(buffer);
+            }
+        }
     }
 }
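For reference, the resulting paste format is base64( [4-byte little-endian length of the UTF-8 text] + [gzip stream] ). Below is a minimal standalone sketch of the round trip; the CompressionRoundTrip class and helper names are hypothetical, not part of the commit, and like the committed code it assumes a little-endian platform.

using System;
using System.IO;
using System.IO.Compression;
using System.Text;

// Hypothetical standalone round-trip of the format used above.
static class CompressionRoundTrip
{
    // gzip magic bytes 1F 8B, read as a little-endian ushort
    const ushort GzipLeadBytes = 0x8b1f;

    public static string Compress(string text)
    {
        byte[] raw = Encoding.UTF8.GetBytes(text);

        using (var stream = new MemoryStream())
        {
            using (var zip = new GZipStream(stream, CompressionLevel.Optimal, leaveOpen: true))
                zip.Write(raw, 0, raw.Length);

            byte[] compressed = stream.ToArray();

            // prepend the original byte length so the reader can size its buffer
            byte[] result = new byte[compressed.Length + 4];
            Buffer.BlockCopy(BitConverter.GetBytes(raw.Length), 0, result, 0, 4);
            Buffer.BlockCopy(compressed, 0, result, 4, compressed.Length);
            return Convert.ToBase64String(result);
        }
    }

    public static string Decompress(string rawText)
    {
        byte[] buffer;
        try
        {
            buffer = Convert.FromBase64String(rawText);
        }
        catch (FormatException)
        {
            return rawText; // not base64, so it wasn't compressed by this scheme
        }

        // bytes 4-5 are the start of the gzip stream; check its magic number
        if (buffer.Length < 6 || BitConverter.ToUInt16(buffer, 4) != GzipLeadBytes)
            return rawText;

        int dataLength = BitConverter.ToInt32(buffer, 0);
        using (var stream = new MemoryStream(buffer, 4, buffer.Length - 4))
        using (var zip = new GZipStream(stream, CompressionMode.Decompress))
        {
            byte[] output = new byte[dataLength];
            int read = 0;
            while (read < dataLength) // Read may return fewer bytes than requested
            {
                int n = zip.Read(output, read, dataLength - read);
                if (n == 0)
                    break;
                read += n;
            }
            return Encoding.UTF8.GetString(output, 0, read);
        }
    }
}

One deliberate difference from the committed code: Stream.Read isn't guaranteed to fill the buffer in a single call, so the sketch loops until the expected byte count has been read.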
@@ -5,7 +5,6 @@
 @section Head {
     <link rel="stylesheet" href="~/Content/css/log-parser.css" />
     <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js" crossorigin="anonymous"></script>
-    <script src="https://cdnjs.cloudflare.com/ajax/libs/lz-string/1.4.4/lz-string.min.js" crossorigin="anonymous"></script>
     <script src="~/Content/js/log-parser.js"></script>
     <style type="text/css" id="modflags"></style>
     <script>
@@ -1,4 +1,4 @@
-/* globals $, LZString */
+/* globals $ */
 
 var smapi = smapi || {};
 smapi.logParser = function(sectionUrl, pasteID) {
@@ -63,14 +63,9 @@ smapi.logParser = function(sectionUrl, pasteID) {
 
     $("#submit").on("click", function() {
         $("#popup-upload").fadeOut();
-        var raw = $("#input").val();
-        if (raw) {
+        var paste = $("#input").val();
+        if (paste) {
             memory = "";
-            var paste = LZString.compressToUTF16(raw);
-            if (paste.length * 2 > 524288) {
-                $("#output").html('<div id="log" class="color-red"><h1>Unable to save!</h1>This log cannot be saved due to its size.<hr />' + $("#input").val() + "</div>");
-                return;
-            }
             $("#uploader").attr("data-text", "Saving...");
             $("#uploader").fadeIn();
             $
@@ -271,7 +266,7 @@ smapi.logParser = function(sectionUrl, pasteID) {
     $("#uploader").fadeIn();
     $.get(sectionUrl + "/fetch/" + pasteID, function(data) {
         if (data.success) {
-            $("#input").val(LZString.decompressFromUTF16(data.content) || data.content);
+            $("#input").val(data.content);
             loadData();
         } else {
             $("#output").html('<div id="log" class="color-red"><h1>Fetching the log failed!</h1><p>' + data.error + '</p><pre id="rawlog"></pre></div>');