Skip to content

Commit

Permalink
Merge pull request #202 from perry-mitchell/bugfix/encoded_paths
Browse files Browse the repository at this point in the history
Use 'he'/browser-utils to handle html entities - fixes #198
  • Loading branch information
perry-mitchell authored Apr 19, 2020
2 parents 4180ecb + 8d6537a commit b9a0a3f
Show file tree
Hide file tree
Showing 10 changed files with 36 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .babelrc
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"presets": [
["@babel/preset-env", {
"targets": {
"node": 6
"node": 10
},
"debug": true
}]
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ Please read the [contribution guide](CONTRIBUTING.md) if you plan on making an i

### Node support

This library is compatible with NodeJS version 10 and above (For version 6/8 support, use versions in the range of `2.*`. For version 4 support, use versions in the range of `1.*`). Version 2.x is now in maintenance mode and will receive no further feature additions. It will receive the odd bug fix when necessary. Version 1.x is no longer supported.
This library is compatible with **NodeJS version 10** and above (For version 6/8 support, use versions in the range of `2.*`. For version 4 support, use versions in the range of `1.*`). Version 2.x is now in maintenance mode and will receive no further feature additions. It will receive the odd bug fix when necessary. Version 1.x is no longer supported.

### Usage in the Browser

Expand Down
3 changes: 1 addition & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@
"axios": "^0.19.2",
"base-64": "^0.1.0",
"fast-xml-parser": "^3.16.0",
"he": "^1.2.0",
"hot-patcher": "^0.5.0",
"minimatch": "^3.0.4",
"nested-property": "^1.0.4",
Expand Down
14 changes: 14 additions & 0 deletions source/encode.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,18 @@
const { decode, encode } = require("base-64");

/**
 * Decode HTML entities in a string (eg. "&amp;amp;" -> "&amp;").
 * Uses the "he" library when running under Node, and falls back to a
 * detached textarea element when bundled for the browser (the WEB
 * global is defined at build time for browser bundles).
 * @param {String} str The encoded input string
 * @returns {String} The decoded output
 */
function decodeHTMLEntities(str) {
    if (typeof WEB !== "undefined") {
        // Nasty browser way
        const txt = document.createElement("textarea");
        txt.innerHTML = str;
        return txt.value;
    }
    // Node
    const he = require("he");
    return he.decode(str);
}

/**
 * Decode a base64-encoded string back to its original text.
 * @param {String} str The base64 input
 * @returns {String} The decoded string
 */
function fromBase64(str) {
    const decoded = decode(str);
    return decoded;
}
Expand All @@ -9,6 +22,7 @@ function toBase64(str) {
}

// Public API of this module: HTML-entity decoding plus base64 helpers.
module.exports = {
    decodeHTMLEntities,
    fromBase64,
    toBase64
};
10 changes: 8 additions & 2 deletions source/interface/dav.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
const xmlParser = require("fast-xml-parser");
const nestedProp = require("nested-property");
const { decodeHTMLEntities } = require("../encode.js");

function getPropertyOfType(obj, prop, type) {
const val = nestedProp.get(obj, prop);
Expand Down Expand Up @@ -39,12 +40,16 @@ function parseXML(xml) {
const result = xmlParser.parse(xml, {
arrayMode: false,
ignoreNameSpace: true
// // We don't use the processors here as decoding is done manually
// // later on - decoding early would break some path checks.
// attrValueProcessor: val => decodeHTMLEntities(decodeURIComponent(val)),
// tagValueProcessor: val => decodeHTMLEntities(decodeURIComponent(val))
});
resolve(normaliseResult(result));
});
}

function propsToStat(props, filename, isDetailed = false) {
function prepareFileFromProps(props, rawFilename, isDetailed = false) {
const path = require("path-posix");
// Last modified time, raw size, item type and mime
const {
Expand All @@ -58,6 +63,7 @@ function propsToStat(props, filename, isDetailed = false) {
resourceType && typeof resourceType === "object" && typeof resourceType.collection !== "undefined"
? "directory"
: "file";
const filename = decodeHTMLEntities(rawFilename);
const stat = {
filename: filename,
basename: path.basename(filename),
Expand Down Expand Up @@ -91,6 +97,6 @@ function translateDiskSpace(value) {

module.exports = {
parseXML,
propsToStat,
prepareFileFromProps,
translateDiskSpace
};
8 changes: 3 additions & 5 deletions source/interface/directoryContents.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ const pathPosix = require("path-posix");
const { merge } = require("../merge.js");
const { handleResponseCode, processGlobFilter, processResponsePayload } = require("../response.js");
const { normaliseHREF, normalisePath } = require("../url.js");
const { parseXML, propsToStat } = require("./dav.js");
const { parseXML, prepareFileFromProps } = require("./dav.js");
const { encodePath, joinURL, prepareRequestOptions, request } = require("../request.js");

function getDirectoryContents(remotePath, options) {
Expand Down Expand Up @@ -40,24 +40,22 @@ function getDirectoryFiles(result, serverBasePath, requestPath, isDetailed = fal
responseItems
// Filter out the item pointing to the current directory (not needed)
.filter(item => {
// let href = getSingleValue(getValueForKey("href", item));
let href = item.href;
href = pathPosix.join(normalisePath(normaliseHREF(href)), "/");
return href !== serverBase && href !== remoteTargetPath;
})
// Map all items to a consistent output structure (results)
.map(item => {
// HREF is the file path (in full)
let href = item.href;
href = normaliseHREF(href);
const href = normaliseHREF(item.href);
// Each item should contain a stat object
const {
propstat: { prop: props }
} = item;
// Process the true full filename (minus the base server path)
const filename =
serverBase === "/" ? normalisePath(href) : normalisePath(pathPosix.relative(serverBase, href));
return propsToStat(props, filename, isDetailed);
return prepareFileFromProps(props, filename, isDetailed);
})
);
}
Expand Down
4 changes: 2 additions & 2 deletions source/interface/stat.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
const { merge } = require("../merge.js");
const { handleResponseCode, processResponsePayload } = require("../response.js");
const { parseXML, propsToStat } = require("./dav.js");
const { parseXML, prepareFileFromProps } = require("./dav.js");
const urlTools = require("../url.js");
const { encodePath, joinURL, prepareRequestOptions, request } = require("../request.js");

Expand Down Expand Up @@ -41,7 +41,7 @@ function parseStat(result, filename, isDetailed = false) {
propstat: { prop: props }
} = responseItem;
const filePath = urlTools.normalisePath(filename);
return propsToStat(props, filePath, isDetailed);
return prepareFileFromProps(props, filePath, isDetailed);
}

module.exports = {
Expand Down
1 change: 1 addition & 0 deletions test/serverContents/file&name.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
test
8 changes: 4 additions & 4 deletions test/specs/getDirectoryContents.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -56,12 +56,12 @@ describe("getDirectoryContents", function() {

it("returns correct file results", function() {
return this.client.getDirectoryContents("/").then(function(contents) {
const sub1 = contents.find(function(item) {
return item.basename === "alrighty.jpg";
});
const sub1 = contents.find(item => item.basename === "alrighty.jpg");
const sub2 = contents.find(item => item.basename === "file&name.txt");
expect(sub1.filename).to.equal("/alrighty.jpg");
expect(sub1.size).to.equal(52130);
expect(sub1.type).to.equal("file");
expect(sub2.filename).to.equal("/file&name.txt");
});
});

Expand Down Expand Up @@ -101,7 +101,7 @@ describe("getDirectoryContents", function() {
});

it("returns correct directory contents when path contains encoded sequences (issue #93)", function() {
return this.client.getDirectoryContents("/two%20words").then(function(contents) {
return this.client.getDirectoryContents("/two%20words").then(contents => {
expect(contents).to.have.lengthOf(1);
expect(contents[0].basename).to.equal("file2.txt");
});
Expand Down

0 comments on commit b9a0a3f

Please sign in to comment.