Skip to content

Commit

Permalink
A number of features and fixes
Browse files Browse the repository at this point in the history
 - Welcome and license messages, when referencing files whose contents should be used, were not displayed properly if the file path was given relative to the location of the sfxpp file itself. This is now fixed.
 - files can now be given http[s] source paths and they will be downloaded when the installer runs.
 - Related to that, there was a bug in the downloader: it could corrupt files by potentially writing more data than was received. This is now fixed.
 - added GetFileNameFromPath js function, which finds the file name portion of a full path and returns a string containing only that.
 - the TextFileReadLn js function could potentially return bad data, in addition to leaving the trailing \n. this is now fixed.
  • Loading branch information
keelanstuart committed Jun 12, 2020
1 parent 17b8e9c commit 8c17ce6
Show file tree
Hide file tree
Showing 10 changed files with 357 additions and 155 deletions.
2 changes: 2 additions & 0 deletions Archiver/Include/Archiver.h
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,8 @@ class IExtractor

ER_DONE,

ER_MUSTDOWNLOAD,

ER_UNKNOWN_ERROR
};

Expand Down
201 changes: 123 additions & 78 deletions Archiver/Source/FastLZArchiver.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -65,101 +65,130 @@ size_t CFastLZArchiver::GetFileCount(IArchiver::INFO_MODE mode)
// Adds a file to the archive
CFastLZArchiver::ADD_RESULT CFastLZArchiver::AddFile(const TCHAR *src_filename, const TCHAR *dst_filename, uint64_t *sz_uncomp, uint64_t *sz_comp, const TCHAR *scriptsnippet)
{
CFastLZArchiver::ADD_RESULT ret = AR_OK;
CFastLZArchiver::ADD_RESULT ret = AR_UNKNOWN_ERROR;

HANDLE hin = CreateFile(src_filename, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL);
if (hin != INVALID_HANDLE_VALUE)
SFileTableEntry fte;

TCHAR dst_path[MAX_PATH];
_tcscpy_s(dst_path, MAX_PATH, dst_filename);

// for download references (filenames that contain the "http[s]:\\" marker), we don't care about disk spanning, etc...
// there's no actual file, so just add it to the file table and mark it as downloadable
const TCHAR *pss = src_filename;
if (!_tcsnicmp(pss, _T("http"), 4))
{
TCHAR dst_path[MAX_PATH];
_tcscpy_s(dst_path, MAX_PATH, dst_filename);
PathRemoveFileSpec(dst_path);
pss += 4;
if (!_tcsnicmp(pss, _T("s"), 1))
pss++;

TCHAR *fn = PathFindFileName(dst_filename);
if (!_tcsnicmp(pss, _T("://"), 3))
{
fte.m_Flags |= SFileTableEntry::FTEFLAG_DOWNLOAD;
}
}

SFileTableEntry fte;
if (!(fte.m_Flags & SFileTableEntry::FTEFLAG_DOWNLOAD))
PathRemoveFileSpec(dst_path);

fte.m_Filename = fn;
fte.m_Path = dst_path;
const TCHAR *fn = (fte.m_Flags & SFileTableEntry::FTEFLAG_DOWNLOAD) ? src_filename : PathFindFileName(dst_filename);

// store the size of the uncompressed file
LARGE_INTEGER fsz;
GetFileSizeEx(hin, &fsz);
fte.m_UncompressedSize = fsz.QuadPart;
if (sz_uncomp)
*sz_uncomp = fte.m_UncompressedSize;
fte.m_Filename = fn;
fte.m_Path = dst_path;

// store the file time
GetFileTime(hin, &(fte.m_FTCreated), NULL, &(fte.m_FTModified));
fte.m_ScriptSnippet = scriptsnippet;

SFileBlock b;
if (fte.m_Flags & SFileTableEntry::FTEFLAG_DOWNLOAD)
{
ret = AR_OK_UNCOMPRESSED;
}
else
{
HANDLE hin = CreateFile(src_filename, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL);
if (hin != INVALID_HANDLE_VALUE)
{
// store the size of the uncompressed file
LARGE_INTEGER fsz;
GetFileSizeEx(hin, &fsz);
fte.m_UncompressedSize = fsz.QuadPart;
if (sz_uncomp)
*sz_uncomp = fte.m_UncompressedSize;

fte.m_Offset = m_pah->GetOffset();
// store the file time
GetFileTime(hin, &(fte.m_FTCreated), NULL, &(fte.m_FTModified));

fte.m_ScriptSnippet = scriptsnippet;
SFileBlock b;

// read uncompressed blocks of data from the file
while (b.ReadUncompressedData(hin))
{
fte.m_BlockCount++;
fte.m_Offset = m_pah->GetOffset();

// compress and write the data to the archive
if (b.CompressData())
{
// update the compressed size of the file
fte.m_CompressedSize += b.m_Header.m_SizeC;
}
else
// read uncompressed blocks of data from the file
while (b.ReadUncompressedData(hin))
{
fte.m_CompressedSize += b.m_Header.m_SizeU;
}
fte.m_BlockCount++;

// compress and write the data to the archive
if (b.CompressData())
{
// update the compressed size of the file
fte.m_CompressedSize += b.m_Header.m_SizeC;
}
else
{
fte.m_CompressedSize += b.m_Header.m_SizeU;
}

b.WriteCompressedData(m_pah->GetHandle());
b.WriteCompressedData(m_pah->GetHandle());

// spanning logic
if ((m_MaxSize != UINT64_MAX) && ((m_pah->GetLength() + (uint64_t)ComputeFileTableSize()) >= m_MaxSize))
{
// if we're spanning, then we need to add this entry to the file table now and do some cleanup
m_FileTable.push_back( fte );
// spanning logic
if ((m_MaxSize != UINT64_MAX) && ((m_pah->GetLength() + (uint64_t)ComputeFileTableSize()) >= m_MaxSize))
{
// if we're spanning, then we need to add this entry to the file table now and do some cleanup
m_FileTable.push_back(fte);

// reset the block count and compressed size (because this should technically be a new data stream)
fte.m_BlockCount = 0;
fte.m_CompressedSize = 0;
// reset the block count and compressed size (because this should technically be a new data stream)
fte.m_BlockCount = 0;
fte.m_CompressedSize = 0;

// have the stream handle spanning behind the scenes
m_pah->Span();
// have the stream handle spanning behind the scenes
m_pah->Span();

DWORD bw;
uint32_t magic = IArchiver::MAGIC;
WriteFile(m_pah->GetHandle(), &magic, sizeof(uint32_t), &bw, NULL);
DWORD bw;
uint32_t magic = IArchiver::MAGIC;
WriteFile(m_pah->GetHandle(), &magic, sizeof(uint32_t), &bw, NULL);

uint32_t comp_magic = CFastLZArchiver::MAGIC_FASTLZ;
WriteFile(m_pah->GetHandle(), &comp_magic, sizeof(uint32_t), &bw, NULL);
uint32_t comp_magic = CFastLZArchiver::MAGIC_FASTLZ;
WriteFile(m_pah->GetHandle(), &comp_magic, sizeof(uint32_t), &bw, NULL);

uint64_t flags = 0;
WriteFile(m_pah->GetHandle(), &flags, sizeof(uint64_t), &bw, NULL);
uint64_t flags = 0;
WriteFile(m_pah->GetHandle(), &flags, sizeof(uint64_t), &bw, NULL);

// after the span, we should expect that offset will be different
m_InitialOffset = m_pah->GetOffset();
// after the span, we should expect that offset will be different
m_InitialOffset = m_pah->GetOffset();

fte.m_Offset = m_pah->GetOffset();
fte.m_Offset = m_pah->GetOffset();

// mark it as spanned so that the next archive can append to, rather than create, the file
fte.m_Flags |= SFileTableEntry::FTEFLAG_SPANNED;
// mark it as spanned so that the next archive can append to, rather than create, the file
fte.m_Flags |= SFileTableEntry::FTEFLAG_SPANNED;
}
}
}

if (fte.m_CompressedSize == fte.m_UncompressedSize)
ret = AR_OK_UNCOMPRESSED;
if (fte.m_CompressedSize >= fte.m_UncompressedSize)
ret = AR_OK_UNCOMPRESSED;
else
ret = AR_OK;

if (sz_comp)
*sz_comp = (fte.m_CompressedSize != (uint64_t)-1) ? fte.m_CompressedSize : fte.m_UncompressedSize;

if (sz_comp)
*sz_comp = (fte.m_CompressedSize != (uint64_t)-1) ? fte.m_CompressedSize : fte.m_UncompressedSize;
CloseHandle(hin);
}
}

if (ret <= AR_OK_UNCOMPRESSED)
{
// add the file to the file table
m_FileTable.push_back( fte );

m_OverallFileCount++;

CloseHandle(hin);
}

return ret;
Expand Down Expand Up @@ -711,23 +740,39 @@ IExtractor::EXTRACT_RESULT CFastLZExtractor::ExtractFile(size_t file_idx, tstrin

SFileTableEntry &fte = m_FileTable.at(file_idx);

// if we're not where we're supposed to be for the file indicated, then we need to move the file pointer
if ((m_CachedFilePosition != m_pah->GetOffset()) || (m_CachedFilePosition != fte.m_Offset))
{
LARGE_INTEGER p;
p.QuadPart = fte.m_Offset;
SetFilePointerEx(m_pah->GetHandle(), p, NULL, FILE_BEGIN);
}

TCHAR path[MAX_PATH];

tstring _cvtpath, cvtpath;
tstring _cvtfile, cvtfile;

if (!(fte.m_Flags & SFileTableEntry::FTEFLAG_DOWNLOAD))
{
// if we're not where we're supposed to be for the file indicated, then we need to move the file pointer
if ((m_CachedFilePosition != m_pah->GetOffset()) || (m_CachedFilePosition != fte.m_Offset))
{
LARGE_INTEGER p;
p.QuadPart = fte.m_Offset;
SetFilePointerEx(m_pah->GetHandle(), p, NULL, FILE_BEGIN);
}

ReplaceEnvironmentVariables(fte.m_Filename, _cvtfile);
ReplaceRegistryKeys(_cvtfile, cvtfile);
}
else
{
cvtfile = fte.m_Filename;
}

ReplaceEnvironmentVariables(fte.m_Path, _cvtpath);
ReplaceRegistryKeys(_cvtpath, cvtpath);

tstring _cvtfile, cvtfile;
ReplaceEnvironmentVariables(fte.m_Filename, _cvtfile);
ReplaceRegistryKeys(_cvtfile, cvtfile);
if (fte.m_Flags & SFileTableEntry::FTEFLAG_DOWNLOAD)
{
if (output_filename)
*output_filename = cvtpath;

return IExtractor::ER_MUSTDOWNLOAD;
}

if (!override_filename)
{
Expand All @@ -753,6 +798,9 @@ IExtractor::EXTRACT_RESULT CFastLZExtractor::ExtractFile(size_t file_idx, tstrin
_tcscat_s(path, MAX_PATH, override_filename);
}

if (output_filename)
*output_filename = path;

bool append = false;
// if we're spanned and the file index we want is 0, then we need to append to the file rather than creating a new one
if ((fte.m_Flags & SFileTableEntry::FTEFLAG_SPANNED) && (file_idx == 0))
Expand All @@ -764,9 +812,6 @@ IExtractor::EXTRACT_RESULT CFastLZExtractor::ExtractFile(size_t file_idx, tstrin

if ((hf != INVALID_HANDLE_VALUE) || test_only)
{
if (output_filename)
*output_filename = path;

if (append && !test_only)
SetFilePointer(hf, 0, NULL, FILE_END);

Expand Down
3 changes: 2 additions & 1 deletion Archiver/Source/FastLZArchiver.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ struct sFileTableEntry
{
enum
{
FTEFLAG_SPANNED = 0x0000000000000001, // a spanned file will be partially in multiple files
FTEFLAG_SPANNED = 0x0000000000000001, // a spanned file will be partially in multiple files
FTEFLAG_DOWNLOAD = 0x0000000000000002, // an empty file that is just a download reference
};

uint64_t m_Flags;
Expand Down
34 changes: 21 additions & 13 deletions sfx/sfx/HttpDownload.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -108,11 +108,17 @@ extern bool FLZACreateDirectories(const TCHAR *dir);

BOOL CHttpDownloader::DownloadHttpFile(const TCHAR *szUrl, const TCHAR *szDestFile, const TCHAR *szDestDir, float *ppct, BOOL *pabortque, UINT expected_size)
{
if (!szUrl)
return false;

if (sCommFailures > 10)
return false;

BOOL retval = false;

tstring url = szUrl;
std::replace(url.begin(), url.end(), _T('\\'), _T('/'));

FLZACreateDirectories(szDestDir);

#if defined(DOWNLOADER_USES_WININET)
Expand All @@ -133,11 +139,11 @@ BOOL CHttpDownloader::DownloadHttpFile(const TCHAR *szUrl, const TCHAR *szDestFi
PathAppend(filepath, szDestFile);
}

if (!PathIsURL(szUrl))
if (!PathIsURL(url.c_str()))
{
if (PathFileExists(szUrl))
if (PathFileExists(url.c_str()))
{
return CopyFile(szUrl, filepath, false);
return CopyFile(url.c_str(), filepath, false);
}
}

Expand All @@ -152,11 +158,11 @@ BOOL CHttpDownloader::DownloadHttpFile(const TCHAR *szUrl, const TCHAR *szDestFi

DWORD flags = INTERNET_FLAG_RESYNCHRONIZE | INTERNET_FLAG_NO_COOKIES | INTERNET_FLAG_NO_UI;

if (_tcsnicmp(szUrl, _T("https"), 5) == 0)
if (_tcsnicmp(url.c_str(), _T("https"), 5) == 0)
flags |= INTERNET_FLAG_IGNORE_CERT_DATE_INVALID | INTERNET_FLAG_IGNORE_CERT_CN_INVALID | INTERNET_FLAG_SECURE;

// Open the url specified
m_hUrl = InternetOpenUrl(m_hInet, szUrl, NULL, 0, flags, (DWORD_PTR)this);
m_hUrl = InternetOpenUrl(m_hInet, url.c_str(), NULL, 0, flags, (DWORD_PTR)this);

// if we didn't get the hurl back immediately (which we won't, because it's an async op)
// then wait until the semaphore clears
Expand Down Expand Up @@ -219,13 +225,14 @@ BOOL CHttpDownloader::DownloadHttpFile(const TCHAR *szUrl, const TCHAR *szDestFi
hfile = 0;
}

DWORD amount_to_download = chunked ? BUFFER_SIZE : _ttoi(buf_query);
uint64_t amount_to_download = chunked ? BUFFER_SIZE : _ttoi64(buf_query);

if (amount_to_download && (errcode < 400))
{
empty_file = false;

DWORD amount_downloaded = 0;
uint64_t amount_downloaded = 0;
uint64_t amount_remaining = amount_to_download;
INTERNET_BUFFERS inbuf[2];

int bufidx;
Expand Down Expand Up @@ -269,16 +276,17 @@ BOOL CHttpDownloader::DownloadHttpFile(const TCHAR *szUrl, const TCHAR *szDestFi
{
void *data_start = inbuf[bufidx].lpvBuffer;

DWORD data_size = inbuf[bufidx].dwBufferLength;
uint64_t data_size = std::min<uint64_t>(amount_remaining, uint64_t(inbuf[bufidx].dwBufferLength));

// and if we have a valid file handle, write data
if (hfile)
{
DWORD bwritten;
WriteFile(hfile, data_start, data_size, &bwritten, NULL);
WriteFile(hfile, data_start, (DWORD)data_size, &bwritten, NULL);
}

amount_downloaded += inbuf[bufidx].dwBufferLength;
amount_downloaded += data_size;
amount_remaining -= data_size;

if (ppct)
*ppct = chunked ? 0 : (float)amount_downloaded / (float)amount_to_download;
Expand Down Expand Up @@ -365,9 +373,9 @@ BOOL CHttpDownloader::DownloadHttpFile(const TCHAR *szUrl, const TCHAR *szDestFi
{
CURLcode curlret;

TCHAR *canonUrl = (TCHAR *)_alloca(((_tcslen(szUrl) * 3) + 1) * sizeof(TCHAR));;
DWORD cch = (UINT)_tcslen(szUrl) * 3;
HRESULT hr = UrlCanonicalize(szUrl, canonUrl, &cch, URL_ESCAPE_UNSAFE);
TCHAR *canonUrl = (TCHAR *)_alloca(((_tcslen(url.c_str()) * 3) + 1) * sizeof(TCHAR));;
DWORD cch = (UINT)_tcslen(url.c_str()) * 3;
HRESULT hr = UrlCanonicalize(url.c_str(), canonUrl, &cch, URL_ESCAPE_UNSAFE);
canonUrl[cch] = '\0';

int len = WideCharToMultiByte(CP_UTF8, 0, canonUrl, -1, NULL, 0, NULL, NULL);
Expand Down
Loading

0 comments on commit 8c17ce6

Please sign in to comment.