summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEd Tanous <ed.tanous@intel.com>2018-08-29 21:10:47 +0300
committerEd Tanous <ed.tanous@intel.com>2018-08-29 21:14:43 +0300
commita38b0b206300c792979b900f506b85e535f5708a (patch)
tree969429b98ef207b959dd041013d0515f64f285c5
parent9b243a4ee4e58406df4fecc4f98f7b701cc26f18 (diff)
downloadbmcweb-a38b0b206300c792979b900f506b85e535f5708a.tar.xz
Resolve issue with duplicated static files
Resolves: https://github.com/openbmc/bmcweb/issues/5 In certain contexts when using nginx, there are cheats required to add duplicated files into the filesystem, making nginx believe it has both compressed and uncompressed resources. This messes with bmcweb, as it previously treated this as a fatal error, given that it doesn't have a filesystem limitation. This patchset changes the behavior so that bmcweb now treats this as an ok situation, and only creates the route for the gzipped version, under the assumption that they are the same. Change-Id: I5744d651e9764242c5e52eeafb8c4df72b8a81a2 Signed-off-by: Ed Tanous <ed.tanous@intel.com>
-rw-r--r--include/webassets.hpp24
1 files changed, 18 insertions, 6 deletions
diff --git a/include/webassets.hpp b/include/webassets.hpp
index 816d5430c0..5eabffedb9 100644
--- a/include/webassets.hpp
+++ b/include/webassets.hpp
@@ -48,14 +48,19 @@ void requestRoutes(Crow<Middlewares...>& app) {
{".map", "application/json"}}};
filesystem::path rootpath{"/usr/share/www/"};
filesystem::recursive_directory_iterator dirIter(rootpath);
-
- for (const filesystem::directory_entry& dir : dirIter) {
+ // In certain cases, we might have both a gzipped version of the file AND a
+ // non-gzipped version. To avoid duplicated routes, we need to make sure we
+ // get the gzipped version first. Because the gzipped path should be longer
+ // than the non gzipped path, if we sort in Ascending order, we should be
+ // guaranteed to get the gzip version first.
+ std::vector<filesystem::directory_entry> paths(filesystem::begin(dirIter),
+ filesystem::end(dirIter));
+ std::sort(paths.rbegin(), paths.rend());
+
+ for (const filesystem::directory_entry& dir : paths) {
filesystem::path absolutePath = dir.path();
filesystem::path relativePath{
absolutePath.string().substr(rootpath.string().size() - 1)};
- // make sure we don't recurse into certain directories
- // note: maybe check for is_directory() here as well...
-
if (filesystem::is_directory(dir)) {
// don't recurse into hidden directories or symlinks
if (boost::starts_with(dir.path().filename().string(), ".") ||
@@ -83,7 +88,14 @@ void requestRoutes(Crow<Middlewares...>& app) {
}
}
- routes.insert(webpath);
+ std::pair<boost::container::flat_set<std::string>::iterator, bool>
+ inserted = routes.insert(webpath);
+
+ if (!inserted.second) {
+ // Got a duplicated path. This is expected in certain situations
+ BMCWEB_LOG_DEBUG << "Got duplicated path " << webpath;
+ continue;
+ }
const char* contentType = nullptr;
auto contentTypeIt = contentTypes.find(extension.c_str());