Commit 4f85a32

Sylvestre authored

MSSQL to handle duplicated column names like other DB clients (#42)

* only keep the last returned value per duplicated column
* comments and more tests
1 parent a441e7e commit 4f85a32

File tree

2 files changed (+89, -10 lines)


lib/mssql.js

Lines changed: 28 additions & 10 deletions
@@ -3,6 +3,7 @@ import JSONStream from "JSONStream";
 import {json} from "micro";
 import mssql from "mssql";
 import {failedCheck, badRequest, notImplemented} from "./errors.js";
+import {Transform} from "stream";

 const TYPES = mssql.TYPES;

@@ -64,36 +65,53 @@ export async function queryStream(req, res, pool) {
   try {
     await new Promise((resolve, reject) => {
       const request = new mssql.Request(db);
+      // We are using the arrayRowMode to handle potential duplicated column name.
+      // See: https://github.com/tediousjs/node-mssql#handling-duplicate-column-names
+      request.arrayRowMode = true;
       const stream = request.toReadableStream();

       params.forEach((param, idx) => {
         request.input(`${idx + 1}`, param);
       });

+      const columnNameMap = new Map();
       request.query(sql);
       request.once("recordset", () => clearInterval(keepAlive));
       request.on("recordset", (columns) => {
         const schema = {
           type: "array",
           items: {
             type: "object",
-            properties: Object.entries(columns).reduce(
-              (schema, [name, props]) => {
-                return {
-                  ...schema,
-                  ...{[name]: dataTypeSchema({type: props.type.name})},
-                };
-              },
-              {}
-            ),
+            properties: columns.reduce((schema, col, idx) => {
+              columnNameMap.set(idx, col.name);
+              return {
+                ...schema,
+                ...{[col.name]: dataTypeSchema({type: col.type.name})},
+              };
+            }, {}),
           },
         };

         res.write(`${JSON.stringify(schema)}`);
         res.write("\n");
       });

-      stream.pipe(JSONStream.stringify("", "\n", "\n")).pipe(res);
+      stream
+        .pipe(
+          new Transform({
+            objectMode: true,
+            transform(chunk, encoding, cb) {
+              const row = chunk.reduce((acc, r, idx) => {
+                const key = columnNameMap.get(idx);
+                return {...acc, [key]: r};
+              }, {});
+
+              cb(null, row);
+            },
+          })
+        )
+        .pipe(JSONStream.stringify("", "\n", "\n"))
+        .pipe(res);
       stream.on("done", () => {
         resolve();
       });
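
With arrayRowMode enabled, node-mssql reports the recordset metadata as an array of column descriptors and each row as an array of values, so duplicated names survive until the proxy folds them back into an object. Below is a standalone sketch of that folding step; the column metadata and row values are hypothetical, and only the columnNameMap plus the reduce mirror the diff. Because the spread writes each index's value under its column name, the last duplicated column wins.

// Hypothetical recordset metadata and row, shaped like node-mssql's arrayRowMode output.
const columns = [{name: "_a1"}, {name: "_a1"}];
const values = [1, 2];

// Mirror the diff: remember each column's name by position...
const columnNameMap = new Map();
columns.forEach((col, idx) => columnNameMap.set(idx, col.name));

// ...then fold the array row into an object. Later indexes overwrite earlier ones
// that share a name, so only the last returned value per duplicated column is kept.
const row = values.reduce((acc, value, idx) => {
  const key = columnNameMap.get(idx);
  return {...acc, [key]: value};
}, {});

console.log(row); // {_a1: 2}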

test/mssql.test.js

Lines changed: 61 additions & 0 deletions
@@ -119,6 +119,67 @@ describe("mssql", () => {
         );
         expect(row).to.equal(JSON.stringify({CustomerID: testCustomerId}));

+        resolve();
+      }
+    });
+  });
+  it("should handle duplicated column names", () => {
+    return new Promise(async (resolve, reject) => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "SELECT 1 as _a1, 2 as _a1 FROM test.SalesLT.SalesOrderDetail",
+        params: [],
+      });
+
+      const res = new MockRes(onEnd);
+
+      const index = mssql(credentials);
+      await index(req, res);
+
+      function onEnd() {
+        const [schema, row] = this._getString().split("\n");
+
+        expect(row).to.equal(
+          JSON.stringify({
+            _a1: 2,
+          })
+        );
+
+        resolve();
+      }
+    });
+  });
+  it("should select the last value of any detected duplicated columns", () => {
+    return new Promise(async (resolve, reject) => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail",
+        params: [],
+      });
+
+      const res = new MockRes(onEnd);
+
+      const index = mssql(credentials);
+      await index(req, res);
+
+      function onEnd() {
+        const [schema, row] = this._getString().split("\n");
+
+        expect(schema).to.equal(
+          JSON.stringify({
+            type: "array",
+            items: {
+              type: "object",
+              properties: {
+                ModifiedDate: {type: ["null", "string"], date: true},
+              },
+            },
+          })
+        );
+        expect(row).to.equal(
+          JSON.stringify({
+            ModifiedDate: "2008-06-01T00:00:00.000Z",
+          })
+        );
+
         resolve();
       }
     });
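
For context, the /query-stream response these tests read is newline-delimited JSON: the first line is the schema written in the recordset handler, and each following line is one row emitted through the Transform added above. The sketch below is a hypothetical client-side illustration, not part of the commit; the two JSON strings are exactly what the second test asserts.

// Hypothetical parsing of the response body asserted by the ModifiedDate test.
const body =
  '{"type":"array","items":{"type":"object","properties":{"ModifiedDate":{"type":["null","string"],"date":true}}}}\n' +
  '{"ModifiedDate":"2008-06-01T00:00:00.000Z"}\n';

const [schemaLine, ...rowLines] = body.trim().split("\n");
const schema = JSON.parse(schemaLine);
const rows = rowLines.map((line) => JSON.parse(line));

console.log(Object.keys(schema.items.properties)); // ["ModifiedDate"] (the duplicate is collapsed)
console.log(rows[0].ModifiedDate); // "2008-06-01T00:00:00.000Z" (the last returned value)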
