This source file includes the following definitions.
- scale_factor_
- LoadFromPath
- LoadFromFile
- LoadImpl
- HasResource
- GetStringPiece
- GetStaticMemory
- GetTextEncodingType
- GetScaleFactor
- WritePack
#include "ui/base/resource/data_pack.h"
#include <errno.h>
#include "base/file_util.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/memory/ref_counted_memory.h"
#include "base/metrics/histogram.h"
#include "base/strings/string_piece.h"
namespace {

// Wire-format version this reader/writer understands.
static const uint32 kFileFormatVersion = 4;
// Header layout: version (uint32), entry count (uint32), encoding (uint8).
static const size_t kHeaderLength = 2 * sizeof(uint32) + sizeof(uint8);

#pragma pack(push,2)
// One index record in a pack file: a resource id plus the byte offset of
// that resource's data.  Packed so the on-disk record is exactly 6 bytes.
struct DataPackEntry {
  uint16 resource_id;
  uint32 file_offset;

  // bsearch() comparator: orders a uint16 key against an entry's id.
  static int CompareById(const void* void_key, const void* void_entry) {
    uint16 key = *reinterpret_cast<const uint16*>(void_key);
    const DataPackEntry* entry =
        reinterpret_cast<const DataPackEntry*>(void_entry);
    if (key == entry->resource_id)
      return 0;
    return (key < entry->resource_id) ? -1 : 1;
  }
};
#pragma pack(pop)

COMPILE_ASSERT(sizeof(DataPackEntry) == 6, size_of_entry_must_be_six);

// Failure modes reported to the "DataPack.Load" histogram.  These values are
// persisted to logs, so existing entries must not be renumbered; add new
// values before LOAD_ERRORS_COUNT only.
enum LoadErrors {
  INIT_FAILED = 1,
  BAD_VERSION,
  INDEX_TRUNCATED,
  ENTRY_NOT_FOUND,
  HEADER_TRUNCATED,
  WRONG_ENCODING,
  INIT_FAILED_FROM_FILE,

  LOAD_ERRORS_COUNT,
};

}  // namespace
namespace ui {
// Constructs an unloaded DataPack for |scale_factor|.  No resources are
// available until LoadFromPath() or LoadFromFile() succeeds.
DataPack::DataPack(ui::ScaleFactor scale_factor)
    : resource_count_(0),
      text_encoding_type_(BINARY),
      scale_factor_(scale_factor) {
}
// The memory mapping (if any) is released by the mmap_ scoper's destructor.
DataPack::~DataPack() {
}
// Memory-maps the pack file at |path| and validates its contents.  On any
// failure the mapping is dropped, an error is recorded in the
// "DataPack.Load" histogram, and false is returned.
bool DataPack::LoadFromPath(const base::FilePath& path) {
  mmap_.reset(new base::MemoryMappedFile);
  if (mmap_->Initialize(path))
    return LoadImpl();

  DLOG(ERROR) << "Failed to mmap datapack";
  UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED,
                            LOAD_ERRORS_COUNT);
  mmap_.reset();
  return false;
}
// Memory-maps an already-opened pack |file| (taking ownership of it) and
// validates its contents.  Mirrors LoadFromPath() but reports
// INIT_FAILED_FROM_FILE so the two entry points are distinguishable in the
// histogram.
bool DataPack::LoadFromFile(base::File file) {
  mmap_.reset(new base::MemoryMappedFile);
  if (mmap_->Initialize(file.Pass()))
    return LoadImpl();

  DLOG(ERROR) << "Failed to mmap datapack";
  UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED_FROM_FILE,
                            LOAD_ERRORS_COUNT);
  mmap_.reset();
  return false;
}
bool DataPack::LoadImpl() {
if (kHeaderLength > mmap_->length()) {
DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", HEADER_TRUNCATED,
LOAD_ERRORS_COUNT);
mmap_.reset();
return false;
}
const uint32* ptr = reinterpret_cast<const uint32*>(mmap_->data());
uint32 version = ptr[0];
if (version != kFileFormatVersion) {
LOG(ERROR) << "Bad data pack version: got " << version << ", expected "
<< kFileFormatVersion;
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", BAD_VERSION,
LOAD_ERRORS_COUNT);
mmap_.reset();
return false;
}
resource_count_ = ptr[1];
const uint8* ptr_encoding = reinterpret_cast<const uint8*>(ptr + 2);
text_encoding_type_ = static_cast<TextEncodingType>(*ptr_encoding);
if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 &&
text_encoding_type_ != BINARY) {
LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_
<< ", expected between " << BINARY << " and " << UTF16;
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", WRONG_ENCODING,
LOAD_ERRORS_COUNT);
mmap_.reset();
return false;
}
if (kHeaderLength + resource_count_ * sizeof(DataPackEntry) >
mmap_->length()) {
LOG(ERROR) << "Data pack file corruption: too short for number of "
"entries specified.";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INDEX_TRUNCATED,
LOAD_ERRORS_COUNT);
mmap_.reset();
return false;
}
for (size_t i = 0; i < resource_count_ + 1; ++i) {
const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
if (entry->file_offset > mmap_->length()) {
LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. "
<< "Was the file corrupted?";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", ENTRY_NOT_FOUND,
LOAD_ERRORS_COUNT);
mmap_.reset();
return false;
}
}
return true;
}
// Returns true if |resource_id| appears in the sorted index that immediately
// follows the file header.
bool DataPack::HasResource(uint16 resource_id) const {
  const void* match =
      bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
              sizeof(DataPackEntry), DataPackEntry::CompareById);
  return match != NULL;
}
// Looks up |resource_id| and, on success, points |data| at the resource's
// bytes inside the memory mapping (no copy; the view is valid only while
// this DataPack stays loaded).  Returns false if the id is not present.
bool DataPack::GetStringPiece(uint16 resource_id,
                              base::StringPiece* data) const {
  // The pack format stores all integers little-endian; refuse to build on
  // big-endian targets where that is detectable.
#if defined(__BYTE_ORDER)
  COMPILE_ASSERT(__BYTE_ORDER == __LITTLE_ENDIAN,
                 datapack_assumes_little_endian);
#elif defined(__BIG_ENDIAN__)
#error DataPack assumes little endian
#endif

  const void* found =
      bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
              sizeof(DataPackEntry), DataPackEntry::CompareById);
  if (!found)
    return false;

  // The following index record's offset (possibly the sentinel's) delimits
  // this resource, so its length is the difference of adjacent offsets.
  const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(found);
  size_t length = (target + 1)->file_offset - target->file_offset;
  data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset),
            length);
  return true;
}
// Wraps the resource's mmapped bytes in a RefCountedStaticMemory without
// copying.  Returns NULL when |resource_id| is not in this pack.
base::RefCountedStaticMemory* DataPack::GetStaticMemory(
    uint16 resource_id) const {
  base::StringPiece piece;
  if (GetStringPiece(resource_id, &piece))
    return new base::RefCountedStaticMemory(piece.data(), piece.length());
  return NULL;
}
// Returns the text encoding read from the pack header (BINARY until a pack
// is successfully loaded).
ResourceHandle::TextEncodingType DataPack::GetTextEncodingType() const {
  return text_encoding_type_;
}
// Returns the scale factor this pack was constructed for.
ui::ScaleFactor DataPack::GetScaleFactor() const {
  return scale_factor_;
}
// Serializes |resources| to |path| in the version-4 pack format: header,
// (N + 1)-entry index (the extra sentinel entry delimits the last
// resource's data), then the concatenated resource bytes.  Returns false
// (after closing the file) on invalid |textEncodingType| or any I/O error.
bool DataPack::WritePack(const base::FilePath& path,
                         const std::map<uint16, base::StringPiece>& resources,
                         TextEncodingType textEncodingType) {
  FILE* file = base::OpenFile(path, "wb");
  if (!file)
    return false;

  if (fwrite(&kFileFormatVersion, sizeof(kFileFormatVersion), 1, file) != 1) {
    LOG(ERROR) << "Failed to write file version";
    base::CloseFile(file);
    return false;
  }

  // Note: the entry count is truncated to uint32 on disk; resource ids are
  // uint16 so a map can never hold enough entries for this to matter.
  uint32 entry_count = resources.size();
  if (fwrite(&entry_count, sizeof(entry_count), 1, file) != 1) {
    LOG(ERROR) << "Failed to write entry count";
    base::CloseFile(file);
    return false;
  }

  if (textEncodingType != UTF8 && textEncodingType != UTF16 &&
      textEncodingType != BINARY) {
    LOG(ERROR) << "Invalid text encoding type, got " << textEncodingType
               << ", expected between " << BINARY << " and " << UTF16;
    base::CloseFile(file);
    return false;
  }

  uint8 write_buffer = textEncodingType;
  if (fwrite(&write_buffer, sizeof(uint8), 1, file) != 1) {
    LOG(ERROR) << "Failed to write file text resources encoding";
    base::CloseFile(file);
    return false;
  }

  // Index pass: each entry records where its resource's data will start.
  // The data region begins immediately after the index.
  uint32 index_length = (entry_count + 1) * sizeof(DataPackEntry);
  uint32 data_offset = kHeaderLength + index_length;
  for (std::map<uint16, base::StringPiece>::const_iterator it =
           resources.begin();
       it != resources.end(); ++it) {
    uint16 resource_id = it->first;
    if (fwrite(&resource_id, sizeof(resource_id), 1, file) != 1) {
      LOG(ERROR) << "Failed to write id for " << resource_id;
      base::CloseFile(file);
      return false;
    }

    if (fwrite(&data_offset, sizeof(data_offset), 1, file) != 1) {
      LOG(ERROR) << "Failed to write offset for " << resource_id;
      base::CloseFile(file);
      return false;
    }

    data_offset += it->second.length();
  }

  // Sentinel entry: its offset is the end of the last resource, which lets
  // readers compute the final resource's length by subtraction.
  uint16 resource_id = 0;
  if (fwrite(&resource_id, sizeof(resource_id), 1, file) != 1) {
    LOG(ERROR) << "Failed to write extra resource id.";
    base::CloseFile(file);
    return false;
  }

  if (fwrite(&data_offset, sizeof(data_offset), 1, file) != 1) {
    LOG(ERROR) << "Failed to write extra offset.";
    base::CloseFile(file);
    return false;
  }

  // Data pass.  fwrite() returns 0 (not 1) when the item size is zero, so
  // the plain "!= 1" check used to spuriously reject packs containing
  // zero-length resources; skip the write entirely when there is nothing
  // to emit.
  for (std::map<uint16, base::StringPiece>::const_iterator it =
           resources.begin();
       it != resources.end(); ++it) {
    if (it->second.length() > 0 &&
        fwrite(it->second.data(), it->second.length(), 1, file) != 1) {
      LOG(ERROR) << "Failed to write data for " << it->first;
      base::CloseFile(file);
      return false;
    }
  }

  base::CloseFile(file);
  return true;
}
}