diff --git a/src/CanExplorer.js b/src/CanExplorer.js
index 1e83a99..e2282f6 100644
--- a/src/CanExplorer.js
+++ b/src/CanExplorer.js
@@ -31,6 +31,7 @@ import { hash } from "./utils/string";
 const RLogDownloader = require("./workers/rlog-downloader.worker.js");
 const LogCSVDownloader = require("./workers/dbc-csv-downloader.worker.js");
 const MessageParser = require("./workers/message-parser.worker.js");
+const CanOffsetFinder = require("./workers/can-offset-finder.worker.js");
 const CanStreamerWorker = require("./workers/CanStreamerWorker.worker.js");
 
 export default class CanExplorer extends Component {
@@ -180,7 +181,23 @@ export default class CanExplorer extends Component {
   initCanData() {
     const { route } = this.state;
 
-    this.spawnWorker(this.state.currentParts);
+    const offsetFinder = new CanOffsetFinder();
+    offsetFinder.postMessage({
+      partCount: route.proclog,
+      base: route.url
+    });
+
+    offsetFinder.onmessage = e => {
+      if ("error" in e.data) {
+        this.spawnWorker(this.state.currentParts);
+      } else {
+        const { canFrameOffset, firstCanTime } = e.data;
+
+        this.setState({ canFrameOffset, firstCanTime }, () => {
+          this.spawnWorker(this.state.currentParts);
+        });
+      }
+    };
   }
 
   onDbcSelected(dbcFilename, dbc) {
diff --git a/src/index.js b/src/index.js
index d9f3034..1a1cec8 100644
--- a/src/index.js
+++ b/src/index.js
@@ -31,7 +31,7 @@ if (routeFullName) {
   let max = getUrlParameter("max"),
     url = getUrlParameter("url");
   if (max) {
-    props.max = max;
+    props.max = Number(max);
   }
   if (url) {
     props.url = url;
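For orientation, the round trip that initCanData now performs looks roughly like this; the field names come from the hunk above and from can-offset-finder.worker.js further down, and nothing beyond those is assumed:

// CanExplorer.initCanData -> worker
offsetFinder.postMessage({ partCount: route.proclog, base: route.url });

// worker -> CanExplorer, when a part containing CAN data is found
// (or faked from the last part):
//   { canFrameOffset, firstCanTime }
// worker -> CanExplorer, when fetching the numpy CAN times fails:
//   { error: "Could not fetch numpy can times" }
// On error, the component just calls spawnWorker without storing an offset.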
diff --git a/src/workers/can-fetcher.js b/src/workers/can-fetcher.js
new file mode 100644
index 0000000..540e763
--- /dev/null
+++ b/src/workers/can-fetcher.js
@@ -0,0 +1,106 @@
+/* eslint-disable no-restricted-globals */
+import Sentry from "../logging/Sentry";
+import NumpyLoader from "../utils/loadnpy";
+import DBC from "../models/can/dbc";
+import DbcUtils from "../utils/dbc";
+import * as CanApi from "../api/can";
+
+const Int64LE = require("int64-buffer").Int64LE;
+
+export async function loadCanPart(
+  dbc,
+  base,
+  num,
+  canStartTime,
+  prevMsgEntries,
+  maxByteStateChangeCount
+) {
+  var messages = {};
+  const { times, sources, addresses, datas } = await CanApi.fetchCanPart(
+    base,
+    num
+  );
+
+  for (var i = 0; i < times.length; i++) {
+    var t = times[i];
+    var src = Int64LE(sources, i * 8).toString(10);
+    var address = Int64LE(addresses, i * 8);
+    var addressHexStr = address.toString(16);
+    var id = src + ":" + addressHexStr;
+
+    var addressNum = address.toNumber();
+    var data = datas.slice(i * 8, (i + 1) * 8);
+    if (messages[id] === undefined)
+      messages[id] = DbcUtils.createMessageSpec(
+        dbc,
+        address.toNumber(),
+        id,
+        src
+      );
+
+    const prevMsgEntry =
+      messages[id].entries.length > 0
+        ? messages[id].entries[messages[id].entries.length - 1]
+        : prevMsgEntries[id] || null;
+
+    const { msgEntry, byteStateChangeCounts } = DbcUtils.parseMessage(
+      dbc,
+      t,
+      address.toNumber(),
+      data,
+      canStartTime,
+      prevMsgEntry
+    );
+    messages[id].byteStateChangeCounts = byteStateChangeCounts.map(
+      (count, idx) => messages[id].byteStateChangeCounts[idx] + count
+    );
+
+    messages[id].entries.push(msgEntry);
+  }
+
+  const newMaxByteStateChangeCount = DbcUtils.findMaxByteStateChangeCount(
+    messages
+  );
+  if (newMaxByteStateChangeCount > maxByteStateChangeCount) {
+    maxByteStateChangeCount = newMaxByteStateChangeCount;
+  }
+
+  Object.keys(messages).forEach(key => {
+    messages[key] = DbcUtils.setMessageByteColors(
+      messages[key],
+      maxByteStateChangeCount
+    );
+  });
+
+  return {
+    newMessages: messages,
+    maxByteStateChangeCount
+  };
+
+  // self.postMessage({
+  //   newMessages: messages,
+  //   maxByteStateChangeCount
+  // });
+  // self.close();
+}
+
+// self.onmessage = function(e) {
+//   const {
+//     dbcText,
+//     base,
+//     num,
+//     canStartTime,
+//     prevMsgEntries,
+//     maxByteStateChangeCount
+//   } = e.data;
+
+//   const dbc = new DBC(dbcText);
+//   loadCanPart(
+//     dbc,
+//     base,
+//     num,
+//     canStartTime,
+//     prevMsgEntries,
+//     maxByteStateChangeCount
+//   );
+// };
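loadCanPart is written to be called once per log part, with the caller threading state between calls. Here is a minimal sketch of that loop, assuming the caller already has a parsed dbc, the route base URL, partCount, and canStartTime; only the function signature and return shape come from the diff above, the loop itself is illustrative:

let prevMsgEntries = {};
let maxByteStateChangeCount = 0;

for (let part = 0; part < partCount; part++) {
  const result = await loadCanPart(
    dbc,
    base,
    part,
    canStartTime,
    prevMsgEntries,
    maxByteStateChangeCount
  );
  maxByteStateChangeCount = result.maxByteStateChangeCount;

  // Carry the last entry of each message into the next part so that
  // DbcUtils.parseMessage can diff against it across part boundaries.
  prevMsgEntries = {};
  Object.keys(result.newMessages).forEach(id => {
    const { entries } = result.newMessages[id];
    prevMsgEntries[id] = entries[entries.length - 1];
  });
}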
diff --git a/src/workers/can-offset-finder.worker.js b/src/workers/can-offset-finder.worker.js
new file mode 100644
index 0000000..b22cf05
--- /dev/null
+++ b/src/workers/can-offset-finder.worker.js
@@ -0,0 +1,61 @@
+/* eslint-disable no-restricted-globals */
+import Sentry from "../logging/Sentry";
+
+import * as CanApi from "../api/can";
+
+var window = self;
+
+function calcCanFrameOffset(firstCanPart, partCanTimes) {
+  const firstCanTime = partCanTimes[0];
+  const firstPartLastCanTime = partCanTimes[partCanTimes.length - 1];
+
+  return 60 * firstCanPart + (60 - (firstPartLastCanTime - firstCanTime));
+}
+
+async function fetchCanTimes(base, part) {
+  const times = await CanApi.fetchCanTimes(base, part);
+  return times.length > 0 ? times : null;
+}
+
+async function onMessage(e) {
+  const { base, partCount } = e.data;
+  var canTimes = null;
+
+  // Intentional off-by-one: we never check the very last segment, because the
+  // code doesn't actually work on last segments (we don't have enough info in
+  // memory to do this). If CAN messages start in the final segment of a route,
+  // then you don't get any CAN messages.
+  try {
+    for (let part = 0; part < partCount; part++) {
+      canTimes = await fetchCanTimes(base, part);
+
+      if (canTimes !== null) {
+        const canFrameOffset = calcCanFrameOffset(part, canTimes);
+        self.postMessage({ canFrameOffset, firstCanTime: canTimes[0] });
+        canTimes = null;
+        self.close();
+        break;
+      }
+    }
+
+    if (!canTimes || !canTimes.length) {
+      // get the last segment, but don't do any of the fancy offset math;
+      // we're faking it
+      canTimes = await fetchCanTimes(base, partCount);
+    }
+
+    if (canTimes && canTimes.length) {
+      // if we didn't find anything, return the first CAN message and fake the offset
+      self.postMessage({
+        canFrameOffset: 0,
+        firstCanTime: canTimes[0]
+      });
+    }
+  } catch (err) {
+    self.postMessage({ error: "Could not fetch numpy can times" });
+  }
+
+  self.close();
+}
+
+self.onmessage = onMessage;
diff --git a/src/workers/rlog-downloader.worker.js b/src/workers/rlog-downloader.worker.js
index 2575841..8c7d678 100644
--- a/src/workers/rlog-downloader.worker.js
+++ b/src/workers/rlog-downloader.worker.js
@@ -6,6 +6,7 @@ import { getLogPart, getLogURLList } from "../api/rlog";
 import DbcUtils from "../utils/dbc";
 import DBC from "../models/can/dbc";
 import { addressForName } from "../models/can/logSignals";
+import { loadCanPart } from "./can-fetcher";
 
 const DEBOUNCE_DELAY = 100;
 
@@ -73,9 +74,18 @@ async function loadData(entry) {
     url = (await getLogURLList(entry.route))[entry.part];
   }
   if (!url || url.indexOf(".7z") !== -1) {
-    return self.postMessage({
-      error: "Invalid or missing log files"
-    });
+    // this log is unreadable; fall back to fetching the raw CAN data for this part
+    var data = await loadCanPart(
+      entry.dbc,
+      entry.options.base,
+      entry.options.num,
+      entry.options.canStartTime,
+      entry.options.prevMsgEntries,
+      entry.options.maxByteStateChangeCount
+    );
+    data.isFinished = true;
+
+    return self.postMessage(data);
   }
   var res = await getLogPart(entry.route, entry.part);
   var logReader = new LogStream(res);
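A quick worked example of the offset calculation in can-offset-finder.worker.js, which assumes each log part covers roughly 60 seconds:

// Suppose CAN data first shows up in part 2, and that part's CAN times span
// 42 seconds (firstPartLastCanTime - firstCanTime === 42). Then:
//   calcCanFrameOffset(2, partCanTimes)
//     === 60 * 2 + (60 - 42)
//     === 138
// i.e. the first CAN frame is estimated to sit about 138 seconds into the
// route, assuming that part's CAN data runs right up to the end of the part.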