diff --git a/package.json b/package.json
index ac39590..4021e6d 100644
--- a/package.json
+++ b/package.json
@@ -33,6 +33,7 @@
     "react-visibility-sensor": "^3.10.1",
     "simple-statistics": "^4.1.0",
     "socket.io-client": "^2.0.3",
+    "streamsaver": "^1.0.1",
     "vega": "git+ssh://git@github.com/commaai/vega.git#HEAD",
     "vega-tooltip": "^0.4.0"
   },
diff --git a/src/CanExplorer.js b/src/CanExplorer.js
index 59cc1b2..3db749f 100644
--- a/src/CanExplorer.js
+++ b/src/CanExplorer.js
@@ -5,6 +5,8 @@
 import PropTypes from 'prop-types';
 import {USE_UNLOGGER, PART_SEGMENT_LENGTH, STREAMING_WINDOW} from './config';
 import * as GithubAuth from './api/github-auth';
 import cx from 'classnames';
+import { createWriteStream, supported, version } from 'streamsaver'
+
 import auth from './api/comma-auth';
 import DBC from './models/can/dbc';
@@ -15,6 +17,7 @@ import OnboardingModal from './components/Modals/OnboardingModal';
 import SaveDbcModal from './components/SaveDbcModal';
 import LoadDbcModal from './components/LoadDbcModal';
 const CanFetcher = require('./workers/can-fetcher.worker.js');
+const LogCSVDownloader = require('./workers/dbc-csv-downloader.worker.js');
 const MessageParser = require("./workers/message-parser.worker.js");
 const CanOffsetFinder = require('./workers/can-offset-finder.worker.js');
 const CanStreamerWorker = require('./workers/CanStreamerWorker.worker.js');
@@ -191,6 +194,38 @@ export default class CanExplorer extends Component {
     this.hideSaveDbc();
   }
 
+  // async downloadDbcFile() {
+  //   const blob = new Blob([this.props.dbc.text()], {type: "text/plain;charset=utf-8"});
+  //   const filename = this.state.dbcFilename.replace(/\.dbc/g, '') + '.dbc';
+  //   FileSaver.saveAs(blob, filename, true);
+  // }
+
+  downloadRawLogAsCSV = () => {
+    console.log('downloadRawLogAsCSV:start');
+    // Trigger file processing and download in the worker
+    const { firstCanTime, canFrameOffset, route, currentParts, dbcFilename } = this.state;
+    const worker = new LogCSVDownloader();
+    const fileStream = createWriteStream(`${dbcFilename.replace(/\.dbc/g, '-')}${+new Date()}.csv`)
+    const writer = fileStream.getWriter()
+    const encoder = new TextEncoder()
+
+    worker.onmessage = e => {
+      const { progress, logData, shouldClose } = e.data;
+      if(shouldClose) {
+        console.log('downloadRawLogAsCSV:close');
+        writer.close()
+        return;
+      }
+      const uint8array = encoder.encode(logData + "\n")
+      writer.write(uint8array)
+    }
+    worker.postMessage({
+      base: route.url,
+      parts: [0, this.props.max],
+      canStartTime: firstCanTime - canFrameOffset,
+    });
+  }
+
   addAndRehydrateMessages(newMessages, options) {
     // Adds new message entries to messages state
     // and "rehydrates" ES6 classes (message frame)
@@ -645,6 +680,7 @@ export default class CanExplorer extends Component {
             maxByteStateChangeCount={this.state.maxByteStateChangeCount}
             isDemo={this.props.isDemo}
             live={this.state.live}
+            saveLog={debounce(this.downloadRawLogAsCSV, 500)}
           />
           {this.state.route || this.state.live ?
+
+ +
{this.props.route ? -
ref ? new Clipboard(ref) : null}>
diff --git a/src/components/Meta/meta.scss b/src/components/Meta/meta.scss
index dd59cea..38d9960 100644
--- a/src/components/Meta/meta.scss
+++ b/src/components/Meta/meta.scss
@@ -35,7 +35,7 @@
   &-action {
     float: left;
     padding: 0 1%;
-    width: (100%/3);
+    width: 22%; /* 22% by default, 33% for the copy action via .special-wide */
     &:first-child {
       padding-left: 0;
     }
@@ -46,6 +46,9 @@
       width: 100%;
     }
   }
+  & .special-wide {
+    width: 33%;
+  }
 }
 &-messages {
   display: flex;
diff --git a/src/index.js b/src/index.js
index 268945a..9ca7b1b 100644
--- a/src/index.js
+++ b/src/index.js
@@ -38,6 +38,22 @@ if (routeFullName) {
   props.dongleId = 'cb38263377b873ee';
   props.dbc = AcuraDbc;
   props.dbcFilename = 'acura_ilx_2016_can.dbc';
+
+  // lots of 404s on this one
+  // props.max = 752;
+  // props.url = 'https://chffrprivate.blob.core.windows.net/chffrprivate3/v2/07e243287e48432a/d97fcc321a58e660a14de72b749269ba_2017-09-09--22-00-00';
+  // props.name = '2017-09-09--22-00-00';
+  // props.dongleId = '07e243287e48432a';
+  // props.dbc = AcuraDbc;
+  // props.dbcFilename = 'acura_ilx_2016_can.dbc';
+
+  // really long one with real content
+  // props.max = 597;
+  // props.url = 'https://chffrprivate.blob.core.windows.net/chffrprivate3/v2/0c249898b339e978/957935e6a75bc2bf6f626fcbe6db93ba_2017-08-11--04-47-54';
+  // props.name = '2017-08-11--04-47-54';
+  // props.dongleId = '0c249898b339e978';
+  // props.dbc = AcuraDbc;
+  // props.dbcFilename = 'acura_ilx_2016_can.dbc';
 }
 
 if (persistedDbc) {
diff --git a/src/utils/loadnpy.js b/src/utils/loadnpy.js
index edad196..c427df2 100644
--- a/src/utils/loadnpy.js
+++ b/src/utils/loadnpy.js
@@ -99,6 +99,9 @@ const NumpyLoader = (function NumpyLoader() {
       var buf = req.response; // not responseText
       var ndarray = fromArrayBuffer(buf);
       resolve(ndarray);
+    } else if (req.status == 404) {
+      console.log('loadnpy: request returned 404')
+      reject({is404: true})
     } else {
       // Otherwise reject with the status text
       // which will hopefully be a meaningful error
diff --git a/src/workers/dbc-csv-downloader.worker.js b/src/workers/dbc-csv-downloader.worker.js
new file mode 100644
index 0000000..5498b73
--- /dev/null
+++ b/src/workers/dbc-csv-downloader.worker.js
@@ -0,0 +1,75 @@
+// import Sentry from '../logging/Sentry';
+
+var window = self;
+require('core-js/fn/object/values');
+import NumpyLoader from '../utils/loadnpy';
+import * as CanApi from '../api/can';
+
+
+const Int64LE = require('int64-buffer').Int64LE
+
+function transformData(data) {}
+async function fetchAndPostData(base, currentPart, [minPart, maxPart], canStartTime) {
+  console.log('\n\nfetchAndPostData', `${currentPart} of ${maxPart}`);
+
+  // if we've exhausted the parts, close up shop
+  if(currentPart > maxPart) {
+    self.postMessage({
+      progress: 100,
+      shouldClose: true
+    })
+    self.close();
+    return;
+  }
+
+  let awaitedData = null;
+  try {
+    awaitedData = await CanApi.fetchCanPart(base, currentPart);
+  } catch(e) {
+    console.log('fetchCanPart missing part', e)
+    return fetchAndPostData(base, currentPart+1, [minPart, maxPart], canStartTime)
+  }
+  const {
+    times,
+    sources,
+    addresses,
+    datas,
+  } = awaitedData;
+
+  // times is a float64array, which we want to be a normal array for now
+  const logData = [].slice.call(times).map((t, i) => {
+    var src = Int64LE(sources, i*8).toString(10);
+    var address = Int64LE(addresses, i*8);
+    var addressHexStr = address.toString(16);
+    var id = src + ":" + addressHexStr;
+
+    var addressNum = address.toNumber();
+    var data = datas.slice(i*8, (i+1)*8);
+
+    return `${t-canStartTime},${addressNum},${src},${Buffer.from(data).toString('hex')}\n`
+  }).join('')
+
+  console.log('posting message')
+  self.postMessage({
+    progress: 10,
+    logData,
+    shouldClose: false
+  })
+
+  fetchAndPostData(base, currentPart+1, [minPart, maxPart], canStartTime)
+}
+
+self.onmessage = function(e) {
+  console.log('onmessage worker')
+  self.postMessage({
+    progress: 0,
+    logData: 'time,addr,bus,data',
+    shouldClose: false
+  })
+  const {base, parts, canStartTime, prevMsgEntries, maxByteStateChangeCount} = e.data;
+
+  // const dbc = new DBC(dbcText);
+  // saveDBC(dbc, base, num, canStartTime);
+  fetchAndPostData(base, parts[0], parts, canStartTime)
+}
+
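
For reference, a minimal sketch of the StreamSaver pattern this change relies on, with the worker plumbing stripped out. The streamsaver calls (createWriteStream, getWriter, write, close) are the ones used in CanExplorer.js above; the downloadCsv helper, the filename, and the sample row are made up for illustration.

// sketch (hypothetical): stream CSV rows to a file download as they are produced
import { createWriteStream } from 'streamsaver';

function downloadCsv(rows, filename) {
  const fileStream = createWriteStream(filename); // WritableStream backed by a browser download
  const writer = fileStream.getWriter();
  const encoder = new TextEncoder();

  // Header chunk first, then one encoded chunk per row, mirroring how the
  // worker posts a header message followed by one logData chunk per route part.
  writer.write(encoder.encode('time,addr,bus,data\n'));
  rows.forEach(row => writer.write(encoder.encode(row + '\n')));
  return writer.close();
}

// usage (hypothetical values): downloadCsv(['0.01,384,0,0011223344556677'], 'example.csv');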