Make cabana function properly on only rlogs (#17)

* Make cabana function properly on only rlogs

* Remove dead code that called old t endpoints
main
Chris Vickery 2019-08-28 11:25:35 -07:00 committed by GitHub
parent d5a532a60a
commit 48948d8dd0
5 changed files with 6 additions and 199 deletions
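Note: the "old t endpoints" in the summary are the legacy per-part numpy CAN endpoints that the removed workers fetched; the URL shape is visible in the api/can hunk below. For context only (base and part are placeholders, not values from this commit):

// legacy per-part CAN times endpoint hit by the removed offset finder
const url = base + "/Log/" + part + "/can/t";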

View File

@@ -31,7 +31,6 @@ import { hash } from "./utils/string";
const RLogDownloader = require("./workers/rlog-downloader.worker.js");
const LogCSVDownloader = require("./workers/dbc-csv-downloader.worker.js");
const MessageParser = require("./workers/message-parser.worker.js");
const CanOffsetFinder = require("./workers/can-offset-finder.worker.js");
const CanStreamerWorker = require("./workers/CanStreamerWorker.worker.js");
export default class CanExplorer extends Component {
@@ -52,7 +51,7 @@ export default class CanExplorer extends Component {
messages: {},
selectedMessages: [],
route: null,
canFrameOffset: -1,
canFrameOffset: 0,
firstCanTime: null,
lastBusTime: null,
selectedMessage: null,
@@ -181,23 +180,7 @@ export default class CanExplorer extends Component {
initCanData() {
const { route } = this.state;
const offsetFinder = new CanOffsetFinder();
offsetFinder.postMessage({
partCount: route.proclog,
base: route.url
});
offsetFinder.onmessage = e => {
if ("error" in e.data) {
this.spawnWorker(this.state.currentParts);
} else {
const { canFrameOffset, firstCanTime } = e.data;
this.setState({ canFrameOffset, firstCanTime }, () => {
this.spawnWorker(this.state.currentParts);
});
}
};
this.spawnWorker(this.state.currentParts);
}
onDbcSelected(dbcFilename, dbc) {

View File

@@ -8,6 +8,7 @@ export async function fetchCanTimes(base, part) {
return canData.data;
}
// only used for logs we can't read: missing ones and .7z ones (no js support)
export async function fetchCanPart(base, part) {
var urls = [
base + "/Log/" + part + "/can/t",

View File

@@ -1,106 +0,0 @@
/* eslint-disable no-restricted-globals */
import Sentry from "../logging/Sentry";
import NumpyLoader from "../utils/loadnpy";
import DBC from "../models/can/dbc";
import DbcUtils from "../utils/dbc";
import * as CanApi from "../api/can";
const Int64LE = require("int64-buffer").Int64LE;
export async function loadCanPart(
dbc,
base,
num,
canStartTime,
prevMsgEntries,
maxByteStateChangeCount
) {
var messages = {};
const { times, sources, addresses, datas } = await CanApi.fetchCanPart(
base,
num
);
for (var i = 0; i < times.length; i++) {
var t = times[i];
var src = Int64LE(sources, i * 8).toString(10);
var address = Int64LE(addresses, i * 8);
var addressHexStr = address.toString(16);
var id = src + ":" + addressHexStr;
var addressNum = address.toNumber();
var data = datas.slice(i * 8, (i + 1) * 8);
if (messages[id] === undefined)
messages[id] = DbcUtils.createMessageSpec(
dbc,
address.toNumber(),
id,
src
);
const prevMsgEntry =
messages[id].entries.length > 0
? messages[id].entries[messages[id].entries.length - 1]
: prevMsgEntries[id] || null;
const { msgEntry, byteStateChangeCounts } = DbcUtils.parseMessage(
dbc,
t,
address.toNumber(),
data,
canStartTime,
prevMsgEntry
);
messages[id].byteStateChangeCounts = byteStateChangeCounts.map(
(count, idx) => messages[id].byteStateChangeCounts[idx] + count
);
messages[id].entries.push(msgEntry);
}
const newMaxByteStateChangeCount = DbcUtils.findMaxByteStateChangeCount(
messages
);
if (newMaxByteStateChangeCount > maxByteStateChangeCount) {
maxByteStateChangeCount = newMaxByteStateChangeCount;
}
Object.keys(messages).forEach(key => {
messages[key] = DbcUtils.setMessageByteColors(
messages[key],
maxByteStateChangeCount
);
});
return {
newMessages: messages,
maxByteStateChangeCount
};
// self.postMessage({
// newMessages: messages,
// maxByteStateChangeCount
// });
// self.close();
}
// self.onmessage = function(e) {
// const {
// dbcText,
// base,
// num,
// canStartTime,
// prevMsgEntries,
// maxByteStateChangeCount
// } = e.data;
// const dbc = new DBC(dbcText);
// loadCanPart(
// dbc,
// base,
// num,
// canStartTime,
// prevMsgEntries,
// maxByteStateChangeCount
// );
// };
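Note: for reference, the deleted fetcher above assumed four parallel arrays (times, sources, addresses, datas) from the legacy endpoints, with 64-bit little-endian source/address words and 8 payload bytes per frame. A minimal sketch of decoding one record under that layout, mirroring the loop above (index and buffers are hypothetical):

const { Int64LE } = require("int64-buffer");
// decode record i from the parallel arrays the legacy endpoints returned
function decodeCanRecord(times, sources, addresses, datas, i) {
  const t = times[i]; // timestamp in seconds
  const src = Int64LE(sources, i * 8).toString(10); // bus number as a decimal string
  const address = Int64LE(addresses, i * 8); // CAN arbitration id
  const id = src + ":" + address.toString(16); // message key, e.g. "0:2e4"
  const data = datas.slice(i * 8, (i + 1) * 8); // 8 payload bytes
  return { t, src, id, data };
}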

View File

@@ -1,61 +0,0 @@
/* eslint-disable no-restricted-globals */
import Sentry from "../logging/Sentry";
import * as CanApi from "../api/can";
var window = self;
function calcCanFrameOffset(firstCanPart, partCanTimes) {
const firstCanTime = partCanTimes[0];
const firstPartLastCanTime = partCanTimes[partCanTimes.length - 1];
return 60 * firstCanPart + (60 - (firstPartLastCanTime - firstCanTime));
}
async function fetchCanTimes(base, part) {
const times = await CanApi.fetchCanTimes(base, part);
return times.length > 0 ? times : null;
}
async function onMessage(e) {
const { base, partCount } = e.data;
var canTimes = null;
// intentional off-by-one: we never want to check the very last segment,
// because this code doesn't actually work on last segments
// (we don't have enough info in memory to do it).
// if CAN messages only start in the final segment of a route, you don't get any CAN messages.
try {
for (let part = 0; part < partCount; part++) {
canTimes = await fetchCanTimes(base, part);
if (canTimes !== null) {
const canFrameOffset = calcCanFrameOffset(part, canTimes);
self.postMessage({ canFrameOffset, firstCanTime: canTimes[0] });
canTimes = null;
self.close();
break;
}
}
if (!canTimes || !canTimes.length) {
// get the last segment but don't do any of the fancy stuff
// we're faking it
canTimes = await fetchCanTimes(base, partCount);
}
if (canTimes && canTimes.length) {
// if we didn't find anything, return the first can message and fake the offset
self.postMessage({
canFrameOffset: 0,
firstCanTime: canTimes[0]
});
}
} catch (err) {
self.postMessage({ error: "Could not fetch numpy can times" });
}
self.close();
}
self.onmessage = onMessage;
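Note: the deleted heuristic above assumed roughly 60-second route segments: if CAN times first appear in part N and span s seconds within that part, the stream is assumed to start (60 - s) seconds into that segment. A worked example with made-up numbers (not taken from this commit):

// hypothetical: CAN first appears in part 2, with times spanning 1500.0s to 1545.0s
const firstCanPart = 2;
const firstCanTime = 1500.0;
const firstPartLastCanTime = 1545.0;
// 60 * 2 + (60 - (1545.0 - 1500.0)) = 120 + 15 = 135 seconds into the route
const canFrameOffset =
  60 * firstCanPart + (60 - (firstPartLastCanTime - firstCanTime));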

View File

@@ -6,7 +6,6 @@ import { getLogPart, getLogURLList } from "../api/rlog";
import DbcUtils from "../utils/dbc";
import DBC from "../models/can/dbc";
import { addressForName } from "../models/can/logSignals";
import { loadCanPart } from "./can-fetcher";
const DEBOUNCE_DELAY = 100;
@@ -74,18 +73,9 @@ async function loadData(entry) {
url = (await getLogURLList(entry.route))[entry.part];
}
if (!url || url.indexOf(".7z") !== -1) {
// this is a shit log we can't read...
var data = await loadCanPart(
entry.dbc,
entry.options.base,
entry.options.num,
entry.options.canStartTime,
entry.options.prevMsgEntries,
entry.options.maxByteStateChangeCount
);
data.isFinished = true;
return self.postMessage(data);
return self.postMessage({
error: "Invalid or missing log files"
});
}
var res = await getLogPart(entry.route, entry.part);
var logReader = new LogStream(res);
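Note: with this change, unreadable or missing parts surface as a plain error message instead of falling back to the deleted can-fetcher. A minimal sketch of handling that message on the consuming side (the handler shape is an assumption, not part of this commit):

// hypothetical handler for messages posted by the rlog downloader worker
rlogWorker.onmessage = e => {
  if ("error" in e.data) {
    console.warn("rlog part unavailable:", e.data.error); // "Invalid or missing log files"
    return;
  }
  // otherwise handle newMessages / isFinished as before
};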